From 3d13c09363013e23726c2ac5fa299a8e5344fd8c Mon Sep 17 00:00:00 2001
From: Ikenna Omekam
Date: Thu, 29 Feb 2024 16:16:09 -0500
Subject: [PATCH 01/47] feat!: make Network and IBC vats durable (#8721)

* feat(whenable): first cut
* test(whenable): find bugs and improve the implementation
* ci(test-all-packages): add `packages/whenable`
* fix(whenable): enhance to pass basic tests
* feat(whenable): use features from more recent Endo
* fixup! feat(whenable): first cut
* refactor(whenable): putting on some polish for reviewers
* chore(internal): add `whenable.js` to preserve layering
* fixup! refactor(whenable): putting on some polish for reviewers
* fixup! refactor(whenable): putting on some polish for reviewers
* chore(whenable): remove hard dependency on `@agoric/internal`
* fixup! chore(whenable): remove hard dependency on `@agoric/internal`
* fix(whenable): properly chain `watch`ed whenables
* fixup! fix(whenable): properly chain `watch`ed whenables
  This reverts commit cc7626d08153956ce42f5766d38425309add1e63.
* fixup! refactor(whenable): putting on some polish for reviewers
* fixup! fix(whenable): properly chain `watch`ed whenables
* build(whenable): update Endo dependencies
* fix(whenable): better fidelity of shimmed `watchPromise`
* fixup! fix(whenable): properly chain `watch`ed whenables
* chore(whenable): `whenable0` -> `whenableV0`
* docs(whenable): add some
* chore(whenable): copy `E.js` from `@endo/eventual-send`
* feat(whenable): working version of `E`
* fixup! feat(whenable): working version of `E`
* fixup! feat(whenable): working version of `E`
* fixup! chore(whenable): remove hard dependency on `@agoric/internal`
* fixup! feat(whenable): working version of `E`
* fixup! feat(whenable): working version of `E`
* fixup! chore(whenable): remove hard dependency on `@agoric/internal`
* fixup! chore(whenable): remove hard dependency on `@agoric/internal`
* fixup! feat(whenable): working version of `E`
* chore(whenable): `prepareWhenableModule` -> `prepareWhenableToolrs`
* chore(vat-data): adopt `@agoric/internal/whenable.js`
* amend! chore(whenable): `prepareWhenableModule` -> `prepareWhenableToolrs`
  chore(whenable): `prepareWhenableModule` -> `prepareWhenableTools`
* fixup! chore(whenable): `prepareWhenableModule` -> `prepareWhenableTools`
* fixup! chore(vat-data): adopt `@agoric/internal/whenable.js`
* chore(vow): rename `Whenable` -> `Vow`
* test(pegasus): cleanups
* feat(agd): support `NO_BUILD=true`
* test(swingset-liveslots): make fake stuff support `watchPromise`
* feat(network): allow protocols to use Whenables
* feat(vats): begin durable network/IBC vats
* feat(pegasus): compatibility with whenables
* test(network): add some missing `Far` annotations
* fixup! feat(vats): begin durable network/IBC vats
* fixup! fix(whenable): enhance to pass basic tests
* fixup! fix(whenable): enhance to pass basic tests
* current work. Will rebase this commit message
* get rest of test working
* remove Far from network vat
* add zone to peg test
* fix(ibc+network) making durable
* fix: access when and makeWhenableKit correctly
* fix: tests part 1
* fix: linting
* fix: more lint and removing unneeded files
* go back to returning durable
* fix: return protocol
* Revert "fix: more lint and removing unneeded files"
  This reverts commit 953e35226935903938aa24b506d7d55163cc5ab9.
* fix: linting and types
* fix: more linting
* fix: more linting
* fix: linting
* fix: more linting fix and far fix
* fix: more linting
* fix: decouple makeBinder and fix pegasus start
* fix: linting after rebase
* fix: linting
* make network proposal durable
* docs: types (WIP)
* prune dead code (WIP)
* WIP: factor out this.state
* WIP: consistent Kit naming
* WIP: clarify echo tag
* feat(vats): vatUpgradeInfo store for vat restart info
  WIP
  - TODO: add static type to BootstrapSpace
  - TODO: fix (or at least: document) lack of attenuation: anyone with access to it can scribble all over the whole thing
* chore(vats): restart network, ibc vats
* test(boot): durability of protocol registrations in network vat
* WIP(test-network-misc): hoist prepare
* docs(whenable): export When type
* chore(network): thread when thru all relevant awaits
* WIP: docs: @agoric/network entrypoints
* WIP: move prepareCallbacks
* WIP: skip nonceMaker (cross-vat weirdness)
* WIP echo stuff -> Kit
* WIP: prepareCallbacks in vat-ibc
* WIP: prepare all the things in vat-network
* WIP: don't prepare in the bootstrap vat
* WIP: use registerNetworkProtocols in unit test
* WIP: test: log basic features from network README
* remove unused import
* fix test by passing in handler
* move around types
* address IOU
* lint fix
* fix boot tests
* finish whenable -> vow changes
* fix pegasus test
* remove debugger
* lint fixes
* more linting
* linting...
* only create once to fix vat upgrades
* 'this' idiom
* add missing bind
* linting
* more linting
* add check for ibc protocol handler
* build(network): @endo/patterns for interface guards
* chore(network): interface guards for 6 exoClass(Kit)s
  - check types.js: optional args
  - Vow$(S) is S or Vow
  - names for remotables plus Endpoint, Data, Bytes
* chore(ibc): interface guards for 4 exoClass(Kit)s
* chore(router): interface guards for 3 exoClass(Kit)s
* fix interface guards for test
* fix some lniting
* fix lint on test
* linting
* more linting
* more resolver fixes
* test fixes
* remove heap zone call
* don't store error
* fix incorrect merge
* fix lint
* more linting
* linting (format)

---------

Co-authored-by: Michael FIG
Co-authored-by: Dan Connolly
---
 bin/agd | 2 +-
 .../test/bootstrapTests/test-vats-restart.ts | 62 +
 .../builders/scripts/pegasus/init-core.js | 2 +-
 packages/network/package.json | 4 +
 packages/network/src/index.js | 2 +-
 packages/network/src/network.js | 1293 +++++++++++------
 packages/network/src/router.js | 224 ++-
 packages/network/src/types.js | 65 +-
 packages/network/test/test-network-misc.js | 471 ++++--
 packages/network/tsconfig.json | 1 +
 packages/network/typedoc.json | 9 +
 packages/notifier/src/index.js | 1 +
 packages/pegasus/package.json | 2 +
 packages/pegasus/src/contract.js | 34 +
 packages/pegasus/src/courier.js | 5 +-
 packages/pegasus/src/pegasus.js | 28 +-
 .../pegasus/src/proposals/core-proposal.js | 10 +-
 packages/pegasus/test/test-peg.js | 125 +-
 packages/vats/src/core/client-behaviors.js | 1 +
 packages/vats/src/core/types-ambient.d.ts | 1 +
 packages/vats/src/ibc.js | 1085 ++++++++------
 .../vats/src/proposals/network-proposal.js | 78 +-
 .../src/proposals/restart-vats-proposal.js | 14 +-
 packages/vats/src/proposals/zcf-proposal.js | 1 +
 packages/vats/src/vat-ibc.js | 29 +-
 packages/vats/src/vat-network.js | 42 +-
 packages/vats/test/test-network.js | 185 ++-
 packages/vats/tools/boot-test-utils.js | 3 +-
 yarn.lock | 2 +-
 29 files changed, 2453 insertions(+), 1328 deletions(-)
 create mode 100644 packages/network/typedoc.json
 create mode 100644 
packages/pegasus/src/contract.js diff --git a/bin/agd b/bin/agd index 38b09622d4f..e30888281ca 100755 --- a/bin/agd +++ b/bin/agd @@ -125,7 +125,7 @@ if $need_nodejs; then esac fi -( +${NO_BUILD:-false} || ( # Send the build output to stderr unless we're only building. This prevents # the daemon's stdout from being polluted with build output. $only_build || exec 1>&2 diff --git a/packages/boot/test/bootstrapTests/test-vats-restart.ts b/packages/boot/test/bootstrapTests/test-vats-restart.ts index c80a0eda69f..66ad605ff61 100644 --- a/packages/boot/test/bootstrapTests/test-vats-restart.ts +++ b/packages/boot/test/bootstrapTests/test-vats-restart.ts @@ -97,6 +97,49 @@ test.serial('open vault', async t => { }); }); +test.serial('run network vat proposal', async t => { + const { controller, buildProposal } = t.context; + + t.log('building network proposal'); + const proposal = await buildProposal( + '@agoric/builders/scripts/vats/init-network.js', + ); + + for await (const bundle of proposal.bundles) { + await controller.validateAndInstallBundle(bundle); + } + t.log('installed', proposal.bundles.length, 'bundles'); + + t.log('executing proposal'); + const bridgeMessage = { + type: 'CORE_EVAL', + evals: proposal.evals, + }; + t.log({ bridgeMessage }); + const { EV } = t.context.runUtils; + const coreEvalBridgeHandler: BridgeHandler = await EV.vat( + 'bootstrap', + ).consumeItem('coreEvalBridgeHandler'); + await EV(coreEvalBridgeHandler).fromBridge(bridgeMessage); + + t.log('network proposal executed'); + t.pass(); // reached here without throws +}); + +test.serial('register network protocol before upgrade', async t => { + const { EV } = t.context.runUtils; + const net = await EV.vat('bootstrap').consumeItem('networkVat'); + const h1 = await EV(net).makeLoopbackProtocolHandler(); + + t.log('register P1'); + await EV(net).registerProtocolHandler(['P1'], h1); + + t.log('register P1 again? No.'); + await t.throwsAsync(EV(net).registerProtocolHandler(['P1'], h1), { + message: /key "P1" already registered/, + }); +}); + test.serial('run restart-vats proposal', async t => { const { controller, buildProposal } = t.context; @@ -126,6 +169,25 @@ test.serial('run restart-vats proposal', async t => { t.pass(); // reached here without throws }); +test.serial('networkVat registrations are durable', async t => { + const { EV } = t.context.runUtils; + const net = await EV.vat('bootstrap').consumeItem('networkVat'); + + const h2 = await EV(net).makeLoopbackProtocolHandler(); + t.log('register P1 again? 
No.'); + await t.throwsAsync(EV(net).registerProtocolHandler(['P1'], h2), { + message: /key "P1" already registered/, + }); + + t.log('IBC protocol handler already registered?'); + await t.throwsAsync( + EV(net).registerProtocolHandler(['/ibc-port', '/ibc-hop'], h2), + { + message: /key "\/ibc-port" already registered in collection "prefix"/, + }, + ); +}); + test.serial('read metrics', async t => { const { EV } = t.context.runUtils; diff --git a/packages/builders/scripts/pegasus/init-core.js b/packages/builders/scripts/pegasus/init-core.js index 652f4df2d42..7fdadf984cf 100644 --- a/packages/builders/scripts/pegasus/init-core.js +++ b/packages/builders/scripts/pegasus/init-core.js @@ -9,7 +9,7 @@ export const defaultProposalBuilder = async ({ publishRef, install }) => { pegasusRef: publishRef( install( - '@agoric/pegasus/src/pegasus.js', + '@agoric/pegasus/src/contract.js', '../bundles/bundle-pegasus.js', ), ), diff --git a/packages/network/package.json b/packages/network/package.json index 677d9931e8a..3811268b801 100644 --- a/packages/network/package.json +++ b/packages/network/package.json @@ -25,11 +25,15 @@ "@agoric/internal": "^0.3.2", "@agoric/store": "^0.9.2", "@endo/base64": "^1.0.2", + "@agoric/vat-data": "^0.5.2", "@endo/far": "^1.0.4", + "@endo/patterns": "^1.1.0", "@endo/promise-kit": "^1.0.4" }, "devDependencies": { + "@agoric/swingset-liveslots": "^0.10.2", "@agoric/swingset-vat": "^0.32.2", + "@agoric/zone": "^0.2.2", "@endo/bundle-source": "^3.1.0", "ava": "^5.3.0", "c8": "^7.13.0" diff --git a/packages/network/src/index.js b/packages/network/src/index.js index b35f7581eec..7616bb35f12 100644 --- a/packages/network/src/index.js +++ b/packages/network/src/index.js @@ -1,4 +1,4 @@ export * from './network.js'; -export { default as makeRouter, makeRouterProtocol } from './router.js'; +export { prepareRouter, prepareRouterProtocol } from './router.js'; export * from './multiaddr.js'; export * from './bytes.js'; diff --git a/packages/network/src/network.js b/packages/network/src/network.js index 32e51d22740..9061aa235be 100644 --- a/packages/network/src/network.js +++ b/packages/network/src/network.js @@ -1,6 +1,7 @@ -import { makeScalarMapStore, makeLegacyMap } from '@agoric/store'; -import { Far, E } from '@endo/far'; -import { makePromiseKit } from '@endo/promise-kit'; +// @ts-check + +import { E } from '@endo/far'; +import { M } from '@endo/patterns'; import { Fail } from '@agoric/assert'; import { whileTrue } from '@agoric/internal'; import { toBytes } from './bytes.js'; @@ -14,6 +15,130 @@ import '@agoric/store/exported.js'; */ export const ENDPOINT_SEPARATOR = '/'; +const Shape1 = /** @type {const} */ ({ + /** + * Data is string | Buffer | ArrayBuffer + * but only string is passable + */ + Data: M.string(), + Bytes: M.string(), + Endpoint: M.string(), + // TODO: match on "Vow" tag + // @endo/patterns supports it as of + // https://github.com/endojs/endo/pull/2091 + // but that's not in agoric-sdk yet. 
+ // For now, use M.any() to avoid: + // cannot check unrecognized tag "Vow": "[Vow]" + Vow: M.any(), + + ConnectionHandler: M.remotable('ConnectionHandler'), + Connection: M.remotable('Connection'), + InboundAttempt: M.remotable('InboundAttempt'), + Listener: M.remotable('Listener'), + ListenHandler: M.remotable('ListenHandler'), + Port: M.remotable('Port'), + ProtocolHandler: M.remotable('ProtocolHandler'), + ProtocolImpl: M.remotable('ProtocolImpl'), +}); + +const Shape2 = /** @type {const} */ ({ + ...Shape1, + Vow$: shape => M.or(shape, Shape1.Vow), + AttemptDescription: M.splitRecord( + { handler: Shape1.ConnectionHandler }, + { remoteAddress: Shape1.Endpoint, localAddress: Shape1.Endpoint }, + ), + Opts: M.recordOf(M.string(), M.any()), +}); + +export const Shape = /** @type {const} */ harden({ + ...Shape2, + ConnectionI: M.interface('Connection', { + send: M.callWhen(Shape2.Data) + .optional(Shape2.Opts) + .returns(Shape2.Vow$(Shape2.Bytes)), + close: M.callWhen().returns(Shape2.Vow$(M.undefined())), + getLocalAddress: M.call().returns(Shape2.Endpoint), + getRemoteAddress: M.call().returns(Shape2.Endpoint), + }), + InboundAttemptI: M.interface('InboundAttempt', { + accept: M.callWhen(Shape2.AttemptDescription).returns( + Shape2.Vow$(Shape2.Connection), + ), + getLocalAddress: M.call().returns(Shape2.Endpoint), + getRemoteAddress: M.call().returns(Shape2.Endpoint), + close: M.callWhen().returns(Shape2.Vow$(M.undefined())), + }), + PortI: M.interface('Port', { + getLocalAddress: M.call().returns(Shape2.Endpoint), + addListener: M.callWhen(Shape2.Listener).returns( + Shape2.Vow$(M.undefined()), + ), + connect: M.callWhen(Shape2.Endpoint) + .optional(Shape2.ConnectionHandler) + .returns(Shape2.Vow$(Shape2.Connection)), + removeListener: M.callWhen(Shape2.Listener).returns( + Shape2.Vow$(M.undefined()), + ), + revoke: M.callWhen().returns(M.undefined()), + }), + ProtocolHandlerI: M.interface('ProtocolHandler', { + onCreate: M.callWhen(M.remotable(), Shape2.ProtocolHandler).returns( + Shape2.Vow$(M.undefined()), + ), + generatePortID: M.callWhen(Shape2.Endpoint, Shape2.ProtocolHandler).returns( + Shape2.Vow$(M.string()), + ), + onBind: M.callWhen( + Shape2.Port, + Shape2.Endpoint, + Shape2.ProtocolHandler, + ).returns(Shape2.Vow$(M.undefined())), + onListen: M.callWhen( + Shape2.Port, + Shape2.Endpoint, + Shape2.ListenHandler, + Shape2.ProtocolHandler, + ).returns(Shape2.Vow$(M.undefined())), + onListenRemove: M.callWhen( + Shape2.Port, + Shape2.Endpoint, + Shape2.ListenHandler, + Shape2.ProtocolHandler, + ).returns(Shape2.Vow$(M.undefined())), + onInstantiate: M.callWhen( + Shape2.Port, + Shape2.Endpoint, + Shape2.Endpoint, + Shape2.ProtocolHandler, + ).returns(Shape2.Vow$(Shape2.Endpoint)), + onConnect: M.callWhen( + Shape2.Port, + Shape2.Endpoint, + Shape2.Endpoint, + Shape2.ConnectionHandler, + Shape2.ProtocolHandler, + ).returns(Shape2.Vow$(Shape2.AttemptDescription)), + onRevoke: M.callWhen( + Shape2.Port, + Shape2.Endpoint, + Shape2.ProtocolHandler, + ).returns(Shape2.Vow$(M.undefined())), + }), + ProtocolImplI: M.interface('ProtocolImpl', { + bind: M.callWhen(Shape2.Endpoint).returns(Shape2.Vow$(Shape2.Port)), + inbound: M.callWhen(Shape2.Endpoint, Shape2.Endpoint).returns( + Shape2.Vow$(Shape2.InboundAttempt), + ), + outbound: M.callWhen( + Shape2.Port, + Shape2.Endpoint, + Shape2.ConnectionHandler, + ).returns(Shape2.Vow$(Shape2.Connection)), + }), +}); + +/** @param {unknown} err */ export const rethrowUnlessMissing = err => { // Ugly hack rather than being able to determine if 
the function // exists. @@ -27,279 +152,389 @@ export const rethrowUnlessMissing = err => { }; /** - * Create a handled Connection. + * Get the list of prefixes from longest to shortest. * - * @param {ConnectionHandler} handler - * @param {Endpoint} localAddr - * @param {Endpoint} remoteAddr - * @param {Set} [current] - * @returns {Connection} + * @param {string} addr */ -export const makeConnection = ( - handler, - localAddr, - remoteAddr, - current = new Set(), -) => { - let closed; - /** @type {Set>} */ - const pendingAcks = new Set(); - /** @type {Connection} */ - const connection = Far('Connection', { - getLocalAddress() { - return localAddr; - }, - getRemoteAddress() { - return remoteAddr; - }, - async close() { - if (closed) { - throw closed; - } - current.delete(connection); - closed = Error('Connection closed'); - for (const ackDeferred of [...pendingAcks.values()]) { - pendingAcks.delete(ackDeferred); - ackDeferred.reject(closed); - } - await E(handler) - .onClose(connection, undefined, handler) - .catch(rethrowUnlessMissing); - }, - async send(data, opts) { - // console.log('send', data, local === srcHandler); - if (closed) { - throw closed; - } - const bytes = toBytes(data); - const ackDeferred = makePromiseKit(); - pendingAcks.add(ackDeferred); - E(handler) - .onReceive(connection, bytes, handler, opts) - .catch(err => { - rethrowUnlessMissing(err); - return ''; - }) - .then( - ack => { - pendingAcks.delete(ackDeferred); - ackDeferred.resolve(toBytes(ack)); - }, - err => { - pendingAcks.delete(ackDeferred); - ackDeferred.reject(err); - }, - ); - return ackDeferred.promise; - }, - }); +export function getPrefixes(addr) { + const parts = addr.split(ENDPOINT_SEPARATOR); - current.add(connection); - E(handler) - .onOpen(connection, localAddr, remoteAddr, handler) - .catch(rethrowUnlessMissing); - return connection; -}; + /** @type {string[]} */ + const ret = []; + for (let i = parts.length; i > 0; i -= 1) { + // Try most specific match. 
+ const prefix = parts.slice(0, i).join(ENDPOINT_SEPARATOR); + ret.push(prefix); + } + return ret; +} /** - * @param {ConnectionHandler} handler0 - * @param {Endpoint} addr0 - * @param {ConnectionHandler} handler1 - * @param {Endpoint} addr1 - * @param {WeakSet} [current] - * @returns {[Connection, Connection]} + * @typedef {object} ConnectionOpts + * @property {Endpoint[]} addrs + * @property {ConnectionHandler[]} handlers + * @property {MapStore} conns + * @property {WeakSetStore} current + * @property {0|1} l + * @property {0|1} r */ -export function crossoverConnection( - handler0, - addr0, - handler1, - addr1, - current = new WeakSet(), -) { - /** @type {Connection[]} */ - const conns = []; - /** @type {ConnectionHandler[]} */ - const handlers = [handler0, handler1]; - /** @type {Endpoint[]} */ - const addrs = [addr0, addr1]; - function makeHalfConnection(l, r) { - let closed; - conns[l] = Far('Connection', { +/** + * @param {import('@agoric/base-zone').Zone} zone + * @param {ReturnType} powers + */ +const prepareHalfConnection = (zone, { when }) => { + const makeHalfConnection = zone.exoClass( + 'Connection', + Shape.ConnectionI, + /** @param {ConnectionOpts} opts */ + ({ addrs, handlers, conns, current, l, r }) => { + /** @type {string | undefined} */ + let closed; + + return { + addrs, + handlers, + conns, + current, + l, + r, + closed, + }; + }, + { getLocalAddress() { + const { addrs, l } = this.state; return addrs[l]; }, getRemoteAddress() { + const { addrs, r } = this.state; return addrs[r]; }, + /** @param {Data} packetBytes */ async send(packetBytes) { + const { closed, handlers, r, conns } = this.state; if (closed) { throw closed; } - const ack = await E(handlers[r]) - .onReceive(conns[r], toBytes(packetBytes), handlers[r]) - .catch(rethrowUnlessMissing); + + const ack = await when( + E(handlers[r]) + .onReceive(conns.get(r), toBytes(packetBytes), handlers[r]) + .catch(rethrowUnlessMissing), + ); + return toBytes(ack || ''); }, async close() { + const { closed, current, conns, l, handlers } = this.state; if (closed) { - throw closed; + throw Error(closed); } - closed = Error('Connection closed'); - current.delete(conns[l]); - await E(handlers[l]) - .onClose(conns[l], undefined, handlers[l]) - .catch(rethrowUnlessMissing); + this.state.closed = 'Connection closed'; + current.delete(conns.get(l)); + await when( + E(this.state.handlers[l]).onClose( + conns.get(l), + undefined, + handlers[l], + ), + ).catch(rethrowUnlessMissing); }, - }); - } + }, + ); - makeHalfConnection(0, 1); - makeHalfConnection(1, 0); + return makeHalfConnection; +}; + +/** + * @param {import('@agoric/zone').Zone} zone + * @param {ConnectionHandler} handler0 + * @param {Endpoint} addr0 + * @param {ConnectionHandler} handler1 + * @param {Endpoint} addr1 + * @param {(opts: ConnectionOpts) => Connection} makeConnection + * @param {WeakSetStore} current + */ +export const crossoverConnection = ( + zone, + handler0, + addr0, + handler1, + addr1, + makeConnection, + current = zone.detached().weakSetStore('crossoverCurrentConnections'), +) => { + const detached = zone.detached(); + + /** @type {MapStore} */ + const conns = detached.mapStore('addrToConnections'); + + /** @type {ConnectionHandler[]} */ + const handlers = harden([handler0, handler1]); + /** @type {Endpoint[]} */ + const addrs = harden([addr0, addr1]); + + /** + * @param {0|1} l + * @param {0|1} r + */ + const makeHalfConnection = (l, r) => { + conns.init(l, makeConnection({ addrs, handlers, conns, current, l, r })); + }; /** * @param {number} l 
local side of the connection * @param {number} r remote side of the connection */ - function openHalfConnection(l, r) { - current.add(conns[l]); + const openHalfConnection = (l, r) => { + current.add(conns.get(l)); E(handlers[l]) - .onOpen(conns[l], addrs[l], addrs[r], handlers[l]) + .onOpen(conns.get(l), addrs[l], addrs[r], handlers[l]) .catch(rethrowUnlessMissing); - } + }; + + makeHalfConnection(0, 1); + makeHalfConnection(1, 0); openHalfConnection(0, 1); openHalfConnection(1, 0); - const [conn0, conn1] = conns; - return [conn0, conn1]; -} + return [conns.get(0), conns.get(1)]; +}; /** - * Get the list of prefixes from longest to shortest. - * - * @param {string} addr + * @param {import('@agoric/zone').Zone} zone + * @param {(opts: ConnectionOpts) => Connection} makeConnection + * @param {ReturnType} powers */ -export function getPrefixes(addr) { - const parts = addr.split(ENDPOINT_SEPARATOR); +const prepareInboundAttempt = (zone, makeConnection, { when }) => { + const makeInboundAttempt = zone.exoClass( + 'InboundAttempt', + Shape.InboundAttemptI, + /** + * @param {object} opts + * @param {string} opts.localAddr + * @param {string} opts.remoteAddr + * @param { MapStore> } opts.currentConnections + * @param {string} opts.listenPrefix + * @param {MapStore} opts.listening + */ + ({ + localAddr, + remoteAddr, + currentConnections, + listenPrefix, + listening, + }) => { + /** @type {String | undefined} */ + let consummated; - /** @type {string[]} */ - const ret = []; - for (let i = parts.length; i > 0; i -= 1) { - // Try most specific match. - const prefix = parts.slice(0, i).join(ENDPOINT_SEPARATOR); - ret.push(prefix); - } - return ret; -} + return { + localAddr, + remoteAddr, + consummated, + currentConnections, + listenPrefix, + listening, + }; + }, + { + getLocalAddress() { + // Return address metadata. + return this.state.localAddr; + }, + getRemoteAddress() { + return this.state.remoteAddr; + }, + async close() { + const { consummated, localAddr, remoteAddr } = this.state; + const { listening, listenPrefix, currentConnections } = this.state; -/** - * Create a protocol that has a handler. - * - * @param {ProtocolHandler} protocolHandler - * @returns {Protocol} the local capability for connecting and listening - */ -export function makeNetworkProtocol(protocolHandler) { - /** @type {LegacyMap>} */ - // Legacy because we're storing a JS Set - const currentConnections = makeLegacyMap('port'); + if (consummated) { + throw Error(consummated); + } + this.state.consummated = 'Already closed'; - /** - * Currently must be a single listenHandler. TODO: Do something sensible with - * multiple handlers? 
- * - * @type {MapStore} - */ - const listening = makeScalarMapStore('localAddr'); + const [port, listener] = listening.get(listenPrefix); + + const current = currentConnections.get(port); + current.delete(this.self); + + await when( + E(listener).onReject(port, localAddr, remoteAddr, listener), + ).catch(rethrowUnlessMissing); + }, + /** + * @param {object} opts + * @param {string} [opts.localAddress] + * @param {string} [opts.remoteAddress] + * @param {ConnectionHandler} opts.handler + */ + async accept({ localAddress, remoteAddress, handler: rchandler }) { + const { consummated, localAddr, remoteAddr } = this.state; + const { listening, listenPrefix, currentConnections } = this.state; + if (consummated) { + throw Error(consummated); + } + this.state.consummated = 'Already accepted'; + + if (localAddress === undefined) { + localAddress = localAddr; + } + + if (remoteAddress === undefined) { + remoteAddress = remoteAddr; + } + + const [port, listener] = listening.get(listenPrefix); + const current = currentConnections.get(port); - /** @type {MapStore} */ - const boundPorts = makeScalarMapStore('localAddr'); + current.delete(this.self); - /** @param {Endpoint} localAddr */ - const bind = async localAddr => { - // Check if we are underspecified (ends in slash) - const underspecified = localAddr.endsWith(ENDPOINT_SEPARATOR); - for await (const _ of whileTrue(() => underspecified)) { - const portID = await E(protocolHandler).generatePortID( + const lchandler = await when( + E(listener).onAccept(port, localAddress, remoteAddress, listener), + ); + + return crossoverConnection( + zone, + lchandler, + localAddress, + rchandler, + remoteAddress, + makeConnection, + current, + )[1]; + }, + }, + ); + + return makeInboundAttempt; +}; + +/** @enum {number} */ +const RevokeState = /** @type {const} */ ({ + NOT_REVOKED: 0, + REVOKING: 1, + REVOKED: 2, +}); + +/** + * @param {import('@agoric/zone').Zone} zone + * @param {ReturnType} powers + */ +const preparePort = (zone, { when }) => { + const makeIncapable = zone.exoClass('Incapable', undefined, () => ({}), {}); + + const makePort = zone.exoClass( + 'Port', + Shape.PortI, + /** + * @param {object} opts + * @param {Endpoint} opts.localAddr + * @param {MapStore} opts.listening + * @param {SetStore} opts.openConnections + * @param {MapStore>} opts.currentConnections + * @param {MapStore} opts.boundPorts + * @param {ProtocolHandler} opts.protocolHandler + * @param {ProtocolImpl} opts.protocolImpl + */ + ({ + localAddr, + listening, + openConnections, + currentConnections, + boundPorts, + protocolHandler, + protocolImpl, + }) => { + return { + listening, + openConnections, + currentConnections, + boundPorts, localAddr, protocolHandler, - ); - const newAddr = `${localAddr}${portID}`; - if (!boundPorts.has(newAddr)) { - localAddr = newAddr; - break; - } - } - - if (boundPorts.has(localAddr)) { - return boundPorts.get(localAddr); - } - - /** @enum {number} */ - const RevokeState = { - NOT_REVOKED: 0, - REVOKING: 1, - REVOKED: 2, - }; - - /** @type {RevokeState} */ - let revoked = RevokeState.NOT_REVOKED; - const openConnections = new Set(); - - /** @type {Port} */ - const port = Far('Port', { + protocolImpl, + /** @type {RevokeState | undefined} */ + revoked: undefined, + }; + }, + { getLocalAddress() { // Works even after revoke(). 
- return localAddr; + return this.state.localAddr; }, + /** @param {ListenHandler} listenHandler */ async addListener(listenHandler) { - !revoked || Fail`Port ${localAddr} is revoked`; + const { revoked, listening, localAddr, protocolHandler } = this.state; + + !revoked || Fail`Port ${this.state.localAddr} is revoked`; listenHandler || Fail`listenHandler is not defined`; + if (listening.has(localAddr)) { // Last one wins. const [lport, lhandler] = listening.get(localAddr); if (lhandler === listenHandler) { return; } - listening.set(localAddr, [port, listenHandler]); + listening.set(localAddr, [this.self, listenHandler]); E(lhandler).onRemove(lport, lhandler).catch(rethrowUnlessMissing); } else { - listening.init(localAddr, [port, listenHandler]); + listening.init(localAddr, harden([this.self, listenHandler])); } // TODO: Check that the listener defines onAccept. - await E(protocolHandler).onListen( - port, - localAddr, - listenHandler, - protocolHandler, + await when( + E(protocolHandler).onListen( + this.self, + localAddr, + listenHandler, + protocolHandler, + ), + ); + await when(E(listenHandler).onListen(this.self, listenHandler)).catch( + rethrowUnlessMissing, ); - await E(listenHandler) - .onListen(port, listenHandler) - .catch(rethrowUnlessMissing); }, + /** @param {ListenHandler} listenHandler */ async removeListener(listenHandler) { + const { listening, localAddr, protocolHandler } = this.state; listening.has(localAddr) || Fail`Port ${localAddr} is not listening`; listening.get(localAddr)[1] === listenHandler || Fail`Port ${localAddr} handler to remove is not listening`; listening.delete(localAddr); - await E(protocolHandler).onListenRemove( - port, - localAddr, - listenHandler, - protocolHandler, + await when( + E(protocolHandler).onListenRemove( + this.self, + localAddr, + listenHandler, + protocolHandler, + ), + ); + await when(E(listenHandler).onRemove(this.self, listenHandler)).catch( + rethrowUnlessMissing, ); - await E(listenHandler) - .onRemove(port, listenHandler) - .catch(rethrowUnlessMissing); }, - async connect(remotePort, connectionHandler = {}) { + /** + * @param {Endpoint} remotePort + * @param {ConnectionHandler} connectionHandler + */ + async connect( + remotePort, + connectionHandler = /** @type {any} */ (makeIncapable()), + ) { + const { revoked, localAddr, protocolImpl, openConnections } = + this.state; + !revoked || Fail`Port ${localAddr} is revoked`; /** @type {Endpoint} */ const dst = harden(remotePort); // eslint-disable-next-line no-use-before-define - const conn = await protocolImpl.outbound(port, dst, connectionHandler); + const conn = await when( + protocolImpl.outbound(this.self, dst, connectionHandler), + ); if (revoked) { void E(conn).close(); } else { @@ -308,273 +543,481 @@ export function makeNetworkProtocol(protocolHandler) { return conn; }, async revoke() { + const { revoked, localAddr } = this.state; + const { protocolHandler, currentConnections, listening, boundPorts } = + this.state; + revoked !== RevokeState.REVOKED || Fail`Port ${localAddr} is already revoked`; - revoked = RevokeState.REVOKING; - await E(protocolHandler).onRevoke(port, localAddr, protocolHandler); - revoked = RevokeState.REVOKED; + this.state.revoked = RevokeState.REVOKING; + await when( + E(protocolHandler).onRevoke(this.self, localAddr, protocolHandler), + ); + this.state.revoked = RevokeState.REVOKED; // Clean up everything we did. 
- const ps = [...currentConnections.get(port)].map(conn => - E(conn) - .close() - .catch(_ => {}), - ); + const values = [...currentConnections.get(this.self).values()]; + const ps = values.map(conn => when(E(conn).close()).catch(_ => {})); if (listening.has(localAddr)) { const listener = listening.get(localAddr)[1]; - ps.push(port.removeListener(listener)); + ps.push(this.self.removeListener(listener)); } await Promise.all(ps); - currentConnections.delete(port); + currentConnections.delete(this.self); boundPorts.delete(localAddr); - return `Port ${localAddr} revoked`; }, - }); + }, + ); - await E(protocolHandler).onBind(port, localAddr, protocolHandler); - boundPorts.init(localAddr, port); - currentConnections.init(port, new Set()); - return port; - }; + return makePort; +}; - /** @type {ProtocolImpl} */ - const protocolImpl = Far('ProtocolImpl', { - bind, - async inbound(listenAddr, remoteAddr) { - let lastFailure = Error(`No listeners for ${listenAddr}`); - for await (const listenPrefix of getPrefixes(listenAddr)) { - if (!listening.has(listenPrefix)) { - continue; - } - const [port, listener] = listening.get(listenPrefix); - let localAddr; - await (async () => { - // See if our protocol is willing to receive this connection. - const localInstance = await E(protocolHandler) - .onInstantiate(port, listenPrefix, remoteAddr, protocolHandler) - .catch(rethrowUnlessMissing); - localAddr = localInstance - ? `${listenAddr}/${localInstance}` - : listenAddr; - })().catch(e => { - lastFailure = e; - }); - if (!localAddr) { - continue; - } - // We have a legitimate inbound attempt. - let consummated; - const current = currentConnections.get(port); - const inboundAttempt = Far('InboundAttempt', { - getLocalAddress() { - // Return address metadata. - return localAddr; - }, - getRemoteAddress() { - return remoteAddr; - }, - async close() { - if (consummated) { - throw consummated; +/** + * @param {import('@agoric/base-zone').Zone} zone + * @param {ReturnType} powers + */ +const prepareBinder = (zone, powers) => { + const makeConnection = prepareHalfConnection(zone, powers); + const { when } = powers; + const makeInboundAttempt = prepareInboundAttempt( + zone, + makeConnection, + powers, + ); + const makePort = preparePort(zone, powers); + const detached = zone.detached(); + + const makeBinderKit = zone.exoClassKit( + 'binder', + { + protocolImpl: Shape.ProtocolImplI, + binder: M.interface('Binder', { + bind: M.callWhen(Shape.Endpoint).returns(Shape.Port), + }), + }, + /** + * @param {object} opts + * @param { MapStore> } opts.currentConnections + * @param {MapStore} opts.boundPorts + * @param {MapStore} opts.listening + * @param {ProtocolHandler} opts.protocolHandler + */ + ({ currentConnections, boundPorts, listening, protocolHandler }) => { + /** @type {SetStore} */ + const openConnections = detached.setStore('openConnections'); + + return { + currentConnections, + boundPorts, + listening, + revoked: RevokeState.NOT_REVOKED, + openConnections, + protocolHandler, + /** @type {Endpoint | undefined} */ + localAddr: undefined, + }; + }, + { + protocolImpl: { + /** + * @param {Endpoint} listenAddr + * @param {Endpoint} remoteAddr + */ + async inbound(listenAddr, remoteAddr) { + const { listening, protocolHandler, currentConnections } = this.state; + + let lastFailure = Error(`No listeners for ${listenAddr}`); + for await (const listenPrefix of getPrefixes(listenAddr)) { + if (!listening.has(listenPrefix)) { + continue; } - consummated = Error(`Already closed`); - current.delete(inboundAttempt); - await 
E(listener) - .onReject(port, localAddr, remoteAddr, listener) - .catch(rethrowUnlessMissing); - }, - async accept({ - localAddress = localAddr, - remoteAddress = remoteAddr, - handler: rchandler, - }) { - if (consummated) { - throw consummated; + const [port, _] = listening.get(listenPrefix); + let localAddr; + + await (async () => { + // See if our protocol is willing to receive this connection. + const localInstance = await when( + E(protocolHandler).onInstantiate( + port, + listenPrefix, + remoteAddr, + protocolHandler, + ), + ).catch(rethrowUnlessMissing); + localAddr = localInstance + ? `${listenAddr}/${localInstance}` + : listenAddr; + })().catch(e => { + lastFailure = e; + }); + if (!localAddr) { + continue; } - consummated = Error(`Already accepted`); - current.delete(inboundAttempt); + // We have a legitimate inbound attempt. + const current = currentConnections.get(port); + const inboundAttempt = makeInboundAttempt({ + localAddr, + remoteAddr, + currentConnections, + listenPrefix, + listening, + }); - const lchandler = await E(listener).onAccept( + current.add(inboundAttempt); + return inboundAttempt; + } + throw lastFailure; + }, + /** + * @param {Port} port + * @param {Endpoint} remoteAddr + * @param {ConnectionHandler} lchandler + */ + async outbound(port, remoteAddr, lchandler) { + const { protocolHandler, currentConnections } = this.state; + + const localAddr = await E(port).getLocalAddress(); + + // Allocate a local address. + const initialLocalInstance = await when( + E(protocolHandler).onInstantiate( port, localAddr, remoteAddr, - listener, + protocolHandler, + ), + ).catch(rethrowUnlessMissing); + const initialLocalAddr = initialLocalInstance + ? `${localAddr}/${initialLocalInstance}` + : localAddr; + + let lastFailure; + let accepted; + await (async () => { + // Attempt the loopback connection. + const attempt = await when( + this.facets.protocolImpl.inbound(remoteAddr, initialLocalAddr), ); + accepted = await when(attempt.accept({ handler: lchandler })); + })().catch(e => { + lastFailure = e; + }); + if (accepted) { + return accepted; + } - return crossoverConnection( - lchandler, - localAddress, - rchandler, - remoteAddress, - current, - )[1]; - }, - }); - current.add(inboundAttempt); - return inboundAttempt; - } - throw lastFailure; - }, - async outbound(port, remoteAddr, lchandler) { - const localAddr = - /** @type {string} */ - (await E(port).getLocalAddress()); - - // Allocate a local address. - const initialLocalInstance = await E(protocolHandler) - .onInstantiate(port, localAddr, remoteAddr, protocolHandler) - .catch(rethrowUnlessMissing); - const initialLocalAddr = initialLocalInstance - ? `${localAddr}/${initialLocalInstance}` - : localAddr; - - let lastFailure; - let accepted; - await (async () => { - // Attempt the loopback connection. 
- const attempt = await protocolImpl.inbound( - remoteAddr, - initialLocalAddr, - ); - accepted = await attempt.accept({ handler: lchandler }); - })().catch(e => { - lastFailure = e; - }); - if (accepted) { - return accepted; - } - - const { - remoteAddress = remoteAddr, - handler: rchandler, - localAddress = localAddr, - } = - /** @type {Partial} */ - ( - await E(protocolHandler).onConnect( - port, - initialLocalAddr, - remoteAddr, + const { + remoteAddress = remoteAddr, + handler: rchandler, + localAddress = localAddr, + } = + /** @type {Partial} */ + ( + await when( + E(protocolHandler).onConnect( + port, + initialLocalAddr, + remoteAddr, + lchandler, + protocolHandler, + ), + ) + ); + + if (!rchandler) { + throw lastFailure; + } + + const current = currentConnections.get(port); + return crossoverConnection( + zone, lchandler, + localAddress, + rchandler, + remoteAddress, + makeConnection, + current, + )[0]; + }, + async bind(localAddr) { + return this.facets.binder.bind(localAddr); + }, + }, + binder: { + /** @param {string} localAddr */ + async bind(localAddr) { + const { protocolHandler, - ) - ); + boundPorts, + listening, + openConnections, + currentConnections, + } = this.state; + + // Check if we are underspecified (ends in slash) + const underspecified = localAddr.endsWith(ENDPOINT_SEPARATOR); + for await (const _ of whileTrue(() => underspecified)) { + const portID = await when( + E(protocolHandler).generatePortID(localAddr, protocolHandler), + ); + const newAddr = `${localAddr}${portID}`; + if (!boundPorts.has(newAddr)) { + localAddr = newAddr; + break; + } + } - if (!rchandler) { - throw lastFailure; - } + this.state.localAddr = localAddr; - const current = currentConnections.get(port); - return crossoverConnection( - lchandler, - localAddress, - rchandler, - remoteAddress, - current, - )[0]; + if (boundPorts.has(localAddr)) { + return boundPorts.get(localAddr); + } + + const port = makePort({ + localAddr, + listening, + openConnections, + currentConnections, + boundPorts, + protocolHandler, + protocolImpl: this.facets.protocolImpl, + }); + + await when( + E(protocolHandler).onBind(port, localAddr, protocolHandler), + ); + boundPorts.init(localAddr, harden(port)); + currentConnections.init(port, detached.setStore('connections')); + return port; + }, + }, }, - }); + ); - // Wire up the local protocol to the handler. - void E(protocolHandler).onCreate(protocolImpl, protocolHandler); + return makeBinderKit; +}; - // Return the user-facing protocol. - return Far('binder', { bind }); -} +/** + * @param {import('@agoric/base-zone').Zone} zone + * @param {ReturnType} powers + */ +export const prepareNetworkProtocol = (zone, powers) => { + const makeBinderKit = prepareBinder(zone, powers); + + /** + * @param {ProtocolHandler} protocolHandler + * @returns {Protocol} + */ + const makeNetworkProtocol = protocolHandler => { + const detached = zone.detached(); + + /** @type {MapStore>} */ + const currentConnections = detached.mapStore('portToCurrentConnections'); + + /** @type {MapStore} */ + const boundPorts = detached.mapStore('addrToPort'); + + /** @type {MapStore} */ + const listening = detached.mapStore('listening'); + + const { binder, protocolImpl } = makeBinderKit({ + currentConnections, + boundPorts, + listening, + protocolHandler, + }); + + // Wire up the local protocol to the handler. + void E(protocolHandler).onCreate(protocolImpl, protocolHandler); + return binder; + }; + + return makeNetworkProtocol; +}; /** * Create a ConnectionHandler that just echoes its packets. 
* - * @returns {ConnectionHandler} + * @param {import('@agoric/base-zone').Zone} zone */ -export function makeEchoConnectionHandler() { - let closed; - /** @type {Connection} */ - return Far('ConnectionHandler', { - async onReceive(_connection, bytes, _connectionHandler) { - if (closed) { - throw closed; - } - return bytes; +export const prepareEchoConnectionKit = zone => { + const makeEchoConnectionKit = zone.exoClassKit( + 'EchoConnectionKit', + { + handler: M.interface('ConnectionHandler', { + onReceive: M.callWhen( + Shape2.Connection, + Shape2.Bytes, + Shape2.ConnectionHandler, + ) + .optional(Shape2.Opts) + .returns(Shape2.Data), + onClose: M.callWhen(Shape2.Connection) + .optional(M.any(), Shape2.ConnectionHandler) + .returns(M.undefined()), + }), + listener: M.interface('Listener', { + onListen: M.callWhen(Shape.Port, Shape.ListenHandler).returns( + Shape.Vow$(M.undefined()), + ), + onAccept: M.callWhen( + Shape.Port, + Shape.Endpoint, + Shape.Endpoint, + Shape.ListenHandler, + ).returns(Shape.Vow$(Shape.ConnectionHandler)), + }), }, - async onClose(_connection, _reason, _connectionHandler) { - if (closed) { - throw closed; - } - closed = Error('Connection closed'); + () => { + /** @type {string | undefined} */ + let closed; + return { + closed, + }; }, - }); -} + { + handler: { + /** + * @param {Connection} _connection + * @param {Bytes} bytes + * @param {ConnectionHandler} _connectionHandler + */ + async onReceive(_connection, bytes, _connectionHandler) { + const { closed } = this.state; + + if (closed) { + throw closed; + } + return bytes; + }, + /** + * @param {Connection} _connection + * @param {CloseReason} [_reason] + * @param {ConnectionHandler} [_connectionHandler] + */ + async onClose(_connection, _reason, _connectionHandler) { + const { closed } = this.state; + + if (closed) { + throw Error(closed); + } -export function makeNonceMaker(prefix = '', suffix = '') { - let nonce = 0; - return async () => { - nonce += 1; - return `${prefix}${nonce}${suffix}`; - }; -} + this.state.closed = 'Connection closed'; + }, + }, + listener: { + async onAccept(_port, _localAddr, _remoteAddr, _listenHandler) { + return harden(this.facets.handler); + }, + async onListen(port, _listenHandler) { + console.debug(`listening on echo port: ${port}`); + }, + }, + }, + ); + + return makeEchoConnectionKit; +}; /** * Create a protocol handler that just connects to itself. * - * @param {ProtocolHandler['onInstantiate']} [onInstantiate] - * @returns {ProtocolHandler} The localhost handler + * @param {import('@agoric/base-zone').Zone} zone + * @param {ReturnType} powers */ -export function makeLoopbackProtocolHandler( - onInstantiate = makeNonceMaker('nonce/'), -) { - /** @type {MapStore} */ - const listeners = makeScalarMapStore('localAddr'); - - const makePortID = makeNonceMaker('port'); - - return Far('ProtocolHandler', { - // eslint-disable-next-line no-empty-function - async onCreate(_impl, _protocolHandler) { - // TODO - }, - async generatePortID(_protocolHandler) { - return makePortID(); - }, - async onBind(_port, _localAddr, _protocolHandler) { - // TODO: Maybe handle a bind? 
- }, - async onConnect(_port, localAddr, remoteAddr, _chandler, protocolHandler) { - const [lport, lhandler] = listeners.get(remoteAddr); - const rchandler = - /** @type {ConnectionHandler} */ - (await E(lhandler).onAccept(lport, remoteAddr, localAddr, lhandler)); - // console.log(`rchandler is`, rchandler); - const remoteInstance = await E(protocolHandler) - .onInstantiate(lport, remoteAddr, localAddr, protocolHandler) - .catch(rethrowUnlessMissing); +export function prepareLoopbackProtocolHandler(zone, { when }) { + const detached = zone.detached(); + + const makeLoopbackProtocolHandler = zone.exoClass( + 'ProtocolHandler', + Shape.ProtocolHandlerI, + /** @param {string} [instancePrefix] */ + (instancePrefix = 'nonce/') => { + /** @type {MapStore} */ + const listeners = detached.mapStore('localAddr'); + return { - remoteInstance, - handler: rchandler, + listeners, + portNonce: 0n, + instancePrefix, + instanceNonce: 0n, }; }, - onInstantiate, - async onListen(port, localAddr, listenHandler, _protocolHandler) { - // TODO: Implement other listener replacement policies. - if (listeners.has(localAddr)) { - const lhandler = listeners.get(localAddr)[1]; - if (lhandler !== listenHandler) { - // Last-one-wins. - listeners.set(localAddr, [port, listenHandler]); + { + async onCreate(_impl, _protocolHandler) { + // TODO + }, + async generatePortID(_localAddr, _protocolHandler) { + this.state.portNonce += 1n; + return `port${this.state.portNonce}`; + }, + async onBind(_port, _localAddr, _protocolHandler) { + // TODO: Maybe handle a bind? + }, + async onConnect( + _port, + localAddr, + remoteAddr, + _chandler, + protocolHandler, + ) { + const { listeners } = this.state; + const [lport, lhandler] = listeners.get(remoteAddr); + const rchandler = await when( + E(lhandler).onAccept(lport, remoteAddr, localAddr, lhandler), + ); + // console.log(`rchandler is`, rchandler); + const remoteInstance = await when( + E(protocolHandler).onInstantiate( + lport, + remoteAddr, + localAddr, + protocolHandler, + ), + ).catch(rethrowUnlessMissing); + return { + remoteInstance, + handler: rchandler, + }; + }, + async onInstantiate(_port, _localAddr, _remote, _protocol) { + const { instancePrefix } = this.state; + this.state.instanceNonce += 1n; + return `${instancePrefix}${this.state.instanceNonce}`; + }, + async onListen(port, localAddr, listenHandler, _protocolHandler) { + const { listeners } = this.state; + + // TODO: Implement other listener replacement policies. + if (listeners.has(localAddr)) { + const lhandler = listeners.get(localAddr)[1]; + if (lhandler !== listenHandler) { + // Last-one-wins. + listeners.set(localAddr, [port, listenHandler]); + } + } else { + listeners.init(localAddr, harden([port, listenHandler])); } - } else { - listeners.init(localAddr, [port, listenHandler]); - } - }, - async onListenRemove(port, localAddr, listenHandler, _protocolHandler) { - const [lport, lhandler] = listeners.get(localAddr); - lport === port || Fail`Port does not match listener on ${localAddr}`; - lhandler === listenHandler || - Fail`Listen handler does not match listener on ${localAddr}`; - listeners.delete(localAddr); - }, - async onRevoke(_port, _localAddr, _protocolHandler) { - // TODO: maybe clean up? 
+ }, + /** + * @param {Port} port + * @param {Endpoint} localAddr + * @param {ListenHandler} listenHandler + * @param {*} _protocolHandler + */ + async onListenRemove(port, localAddr, listenHandler, _protocolHandler) { + const { listeners } = this.state; + const [lport, lhandler] = listeners.get(localAddr); + lport === port || Fail`Port does not match listener on ${localAddr}`; + lhandler === listenHandler || + Fail`Listen handler does not match listener on ${localAddr}`; + listeners.delete(localAddr); + }, + async onRevoke(_port, _localAddr, _protocolHandler) { + // TODO: maybe clean up? + }, }, - }); + ); + + return makeLoopbackProtocolHandler; } diff --git a/packages/network/src/router.js b/packages/network/src/router.js index 01a3efaf607..81e2cfbc757 100644 --- a/packages/network/src/router.js +++ b/packages/network/src/router.js @@ -1,7 +1,12 @@ -import { Far, E as defaultE } from '@endo/far'; -import { makeScalarMapStore } from '@agoric/store'; +// @ts-check +import { E as defaultE } from '@endo/far'; +import { M } from '@endo/patterns'; import { Fail } from '@agoric/assert'; -import { makeNetworkProtocol, ENDPOINT_SEPARATOR } from './network.js'; +import { + ENDPOINT_SEPARATOR, + Shape, + prepareNetworkProtocol, +} from './network.js'; import '@agoric/store/exported.js'; /// @@ -17,51 +22,81 @@ import '@agoric/store/exported.js'; * prefix->route from the database */ +export const RouterI = M.interface('Router', { + getRoutes: M.call(Shape.Endpoint).returns(M.arrayOf([M.string(), M.any()])), + register: M.call(M.string(), M.any()).returns(M.undefined()), + unregister: M.call(M.string(), M.any()).returns(M.undefined()), +}); + /** - * Create a slash-delimited router. - * * @template T - * @returns {Router} a new Router + * @param {import('@agoric/base-zone').Zone} zone */ -export default function makeRouter() { - /** @type {MapStore} */ - const prefixToRoute = makeScalarMapStore('prefix'); - return Far('Router', { - getRoutes(addr) { - const parts = addr.split(ENDPOINT_SEPARATOR); - /** @type {[string, T][]} */ - const ret = []; - for (let i = parts.length; i > 0; i -= 1) { - // Try most specific match. - const prefix = parts.slice(0, i).join(ENDPOINT_SEPARATOR); - if (prefixToRoute.has(prefix)) { - ret.push([prefix, prefixToRoute.get(prefix)]); - } - // Trim off the last value (after the slash). - const defaultPrefix = prefix.substr( - 0, - prefix.lastIndexOf(ENDPOINT_SEPARATOR) + 1, - ); - if (prefixToRoute.has(defaultPrefix)) { - ret.push([defaultPrefix, prefixToRoute.get(defaultPrefix)]); - } - } - return harden(ret); - }, - register(prefix, route) { - prefixToRoute.init(prefix, route); +export const prepareRouter = zone => { + const detached = zone.detached(); + + const makeRouter = zone.exoClass( + 'Router', + RouterI, + () => { + /** @type {MapStore} */ + const prefixToRoute = detached.mapStore('prefix'); + + return { + prefixToRoute, + }; }, - unregister(prefix, route) { - prefixToRoute.get(prefix) === route || - Fail`Router is not registered at prefix ${prefix}`; - prefixToRoute.delete(prefix); + { + /** @param {Endpoint} addr */ + getRoutes(addr) { + const parts = addr.split(ENDPOINT_SEPARATOR); + /** @type {[string, T][]} */ + const ret = []; + for (let i = parts.length; i > 0; i -= 1) { + // Try most specific match. + const prefix = parts.slice(0, i).join(ENDPOINT_SEPARATOR); + if (this.state.prefixToRoute.has(prefix)) { + ret.push([prefix, this.state.prefixToRoute.get(prefix)]); + } + // Trim off the last value (after the slash). 
+ const defaultPrefix = prefix.substr( + 0, + prefix.lastIndexOf(ENDPOINT_SEPARATOR) + 1, + ); + if (this.state.prefixToRoute.has(defaultPrefix)) { + ret.push([ + defaultPrefix, + this.state.prefixToRoute.get(defaultPrefix), + ]); + } + } + return harden(ret); + }, + /** + * @param {string} prefix + * @param {T} route + */ + register(prefix, route) { + this.state.prefixToRoute.init(prefix, route); + }, + /** + * @param {string} prefix + * @param {T} route + */ + unregister(prefix, route) { + this.state.prefixToRoute.get(prefix) === route || + Fail`Router is not registered at prefix ${prefix}`; + this.state.prefixToRoute.delete(prefix); + }, }, - }); -} + ); + + return makeRouter; +}; /** * @typedef {object} RouterProtocol - * @property {(prefix: string) => Promise} bind + * @property {(prefix: string) => PromiseVow} bind * @property {(paths: string[], protocolHandler: ProtocolHandler) => void} registerProtocolHandler * @property {(prefix: string, protocolHandler: ProtocolHandler) => void} unregisterProtocolHandler */ @@ -69,46 +104,81 @@ export default function makeRouter() { /** * Create a router that behaves like a Protocol. * + * @param {import('@agoric/base-zone').Zone} zone + * @param {ReturnType} powers * @param {typeof defaultE} [E] Eventual sender - * @returns {RouterProtocol} The new delegated protocol */ -export function makeRouterProtocol(E = defaultE) { - const router = makeRouter(); - /** @type {MapStore} */ - const protocols = makeScalarMapStore('prefix'); - /** @type {MapStore} */ - const protocolHandlers = makeScalarMapStore('prefix'); +export const prepareRouterProtocol = (zone, powers, E = defaultE) => { + const detached = zone.detached(); + + const makeRouter = prepareRouter(zone); + const makeNetworkProtocol = prepareNetworkProtocol(zone, powers); + + const makeRouterProtocol = zone.exoClass( + 'RouterProtocol', + M.interface('RouterProtocol', { + registerProtocolHandler: M.call( + M.arrayOf(M.string()), + M.remotable(), + ).returns(), + unregisterProtocolHandler: M.call(M.string(), M.remotable()).returns(), + bind: M.callWhen(Shape.Endpoint).returns(Shape.Vow$(Shape.Port)), + }), + () => { + /** @type {Router} */ + const router = makeRouter(); - function registerProtocolHandler(paths, protocolHandler) { - const protocol = makeNetworkProtocol(protocolHandler); - for (const prefix of paths) { - router.register(prefix, protocol); - protocols.init(prefix, protocol); - protocolHandlers.init(prefix, protocolHandler); - } - } + /** @type {MapStore} */ + const protocols = detached.mapStore('prefix'); - // FIXME: Buggy. - // Needs to account for multiple paths. 
- function unregisterProtocolHandler(prefix, protocolHandler) { - const ph = protocolHandlers.get(prefix); - ph === protocolHandler || - Fail`Protocol handler is not registered at prefix ${prefix}`; - router.unregister(prefix, ph); - protocols.delete(prefix); - protocolHandlers.delete(prefix); - } + /** @type {MapStore} */ + const protocolHandlers = detached.mapStore('prefix'); - /** @type {Protocol['bind']} */ - async function bind(localAddr) { - const [route] = router.getRoutes(localAddr); - route !== undefined || Fail`No registered router for ${localAddr}`; - return E(route[1]).bind(localAddr); - } + return { + router, + protocolHandlers, + protocols, + }; + }, + { + /** + * @param {string[]} paths + * @param {ProtocolHandler} protocolHandler + */ + registerProtocolHandler(paths, protocolHandler) { + const protocol = makeNetworkProtocol(protocolHandler); + for (const prefix of paths) { + this.state.router.register(prefix, protocol); + this.state.protocols.init(prefix, protocol); + this.state.protocolHandlers.init(prefix, protocolHandler); + } + }, + // FIXME: Buggy. + // Needs to account for multiple paths. + /** + * @param {string} prefix + * @param {ProtocolHandler} protocolHandler + */ + unregisterProtocolHandler(prefix, protocolHandler) { + const ph = this.state.protocolHandlers.get(prefix); + ph === protocolHandler || + Fail`Protocol handler is not registered at prefix ${prefix}`; + // TODO: unmap protocol hanlders to their corresponding protocol + // e.g. using a map + // before unregistering + // @ts-expect-error note FIXME above + this.state.router.unregister(prefix, ph); + this.state.protocols.delete(prefix); + this.state.protocolHandlers.delete(prefix); + }, + /** @param {Endpoint} localAddr */ + async bind(localAddr) { + const [route] = this.state.router.getRoutes(localAddr); + route !== undefined || Fail`No registered router for ${localAddr}`; + return E(route[1]).bind(localAddr); + }, + }, + ); - return Far('RouterProtocol', { - bind, - registerProtocolHandler, - unregisterProtocolHandler, - }); -} + return makeRouterProtocol; +}; diff --git a/packages/network/src/types.js b/packages/network/src/types.js index 3b818600b61..6bd4c724a1a 100644 --- a/packages/network/src/types.js +++ b/packages/network/src/types.js @@ -1,3 +1,10 @@ +// @ts-check + +/** + * @template T + * @typedef {Promise>} PromiseVow + */ + /** * @typedef {string | Buffer | ArrayBuffer} Data * @@ -11,12 +18,12 @@ /** * @typedef {object} Closable A closable object - * @property {() => Promise} close Terminate the object + * @property {() => PromiseVow} close Terminate the object */ /** * @typedef {object} Protocol The network Protocol - * @property {(prefix: Endpoint) => Promise} bind Claim a port, or if + * @property {(prefix: Endpoint) => PromiseVow} bind Claim a port, or if * ending in ENDPOINT_SEPARATOR, a fresh name */ @@ -24,14 +31,14 @@ * @typedef {object} Port A port that has been bound to a protocol * @property {() => Endpoint} getLocalAddress Get the locally bound name of this * port - * @property {(acceptHandler: ListenHandler) => Promise} addListener + * @property {(acceptHandler: ListenHandler) => PromiseVow} addListener * Begin accepting incoming connections * @property {( * remote: Endpoint, * connectionHandler?: ConnectionHandler, - * ) => Promise} connect + * ) => PromiseVow} connect * Make an outbound connection - * @property {(acceptHandler: ListenHandler) => Promise} removeListener + * @property {(acceptHandler: ListenHandler) => PromiseVow} removeListener * Remove the currently-bound 
listener * @property {() => void} revoke Deallocate the port entirely, removing all * listeners and closing all active connections @@ -39,25 +46,25 @@ /** * @typedef {object} ListenHandler A handler for incoming connections - * @property {(port: Port, l: ListenHandler) => Promise} [onListen] The + * @property {(port: Port, l: ListenHandler) => PromiseVow} [onListen] The * listener has been registered * @property {( * port: Port, * localAddr: Endpoint, * remoteAddr: Endpoint, * l: ListenHandler, - * ) => Promise} onAccept + * ) => PromiseVow} onAccept * A new connection is incoming * @property {( * port: Port, * localAddr: Endpoint, * remoteAddr: Endpoint, * l: ListenHandler, - * ) => Promise} [onReject] + * ) => PromiseVow} [onReject] * The connection was rejected - * @property {(port: Port, rej: any, l: ListenHandler) => Promise} [onError] + * @property {(port: Port, rej: any, l: ListenHandler) => PromiseVow} [onError] * There was an error while listening - * @property {(port: Port, l: ListenHandler) => Promise} [onRemove] The + * @property {(port: Port, l: ListenHandler) => PromiseVow} [onRemove] The * listener has been removed */ @@ -66,9 +73,9 @@ * @property {( * packetBytes: Data, * opts?: Record, - * ) => Promise} send + * ) => PromiseVow} send * Send a packet on the connection - * @property {() => Promise} close Close both ends of the connection + * @property {() => PromiseVow} close Close both ends of the connection * @property {() => Endpoint} getLocalAddress Get the locally bound name of this * connection * @property {() => Endpoint} getRemoteAddress Get the name of the counterparty @@ -81,20 +88,20 @@ * localAddr: Endpoint, * remoteAddr: Endpoint, * c: ConnectionHandler, - * ) => void} [onOpen] + * ) => PromiseVow} [onOpen] * The connection has been opened * @property {( * connection: Connection, - * packetBytes: Bytes, + * ack: Bytes, * c: ConnectionHandler, * opts?: Record, - * ) => Promise} [onReceive] + * ) => PromiseVow} [onReceive] * The connection received a packet * @property {( * connection: Connection, * reason?: CloseReason, * c?: ConnectionHandler, - * ) => Promise} [onClose] + * ) => PromiseVow} [onClose] * The connection has been closed * * @typedef {any | null} CloseReason The reason a connection was closed @@ -110,36 +117,36 @@ /** * @typedef {object} ProtocolHandler A handler for things the protocol * implementation will invoke - * @property {(protocol: ProtocolImpl, p: ProtocolHandler) => Promise} onCreate + * @property {(protocol: ProtocolImpl, p: ProtocolHandler) => PromiseVow} onCreate * This protocol is created - * @property {(localAddr: Endpoint, p: ProtocolHandler) => Promise} generatePortID + * @property {(localAddr: Endpoint, p: ProtocolHandler) => PromiseVow} generatePortID * Create a fresh port identifier for this protocol * @property {( * port: Port, * localAddr: Endpoint, * p: ProtocolHandler, - * ) => Promise} onBind + * ) => PromiseVow} onBind * A port will be bound * @property {( * port: Port, * localAddr: Endpoint, * listenHandler: ListenHandler, * p: ProtocolHandler, - * ) => Promise} onListen + * ) => PromiseVow} onListen * A port was listening * @property {( * port: Port, * localAddr: Endpoint, * listenHandler: ListenHandler, * p: ProtocolHandler, - * ) => Promise} onListenRemove + * ) => PromiseVow} onListenRemove * A port listener has been reset * @property {( * port: Port, * localAddr: Endpoint, * remote: Endpoint, * p: ProtocolHandler, - * ) => Promise} [onInstantiate] + * ) => PromiseVow} [onInstantiate] * Return unique suffix for local 
address * @property {( * port: Port, @@ -147,36 +154,36 @@ * remote: Endpoint, * c: ConnectionHandler, * p: ProtocolHandler, - * ) => Promise} onConnect + * ) => PromiseVow} onConnect * A port initiates an outbound connection * @property {( * port: Port, * localAddr: Endpoint, * p: ProtocolHandler, - * ) => Promise} onRevoke + * ) => PromiseVow} onRevoke * The port is being completely destroyed * * @typedef {object} InboundAttempt An inbound connection attempt - * @property {(desc: AttemptDescription) => Promise} accept + * @property {(desc: AttemptDescription) => PromiseVow} accept * Establish the connection * @property {() => Endpoint} getLocalAddress Return the local address for this * attempt * @property {() => Endpoint} getRemoteAddress Return the remote address for * this attempt - * @property {() => Promise} close Abort the attempt + * @property {() => PromiseVow} close Abort the attempt * * @typedef {object} ProtocolImpl Things the protocol can do for us - * @property {(prefix: Endpoint) => Promise} bind Claim a port, or if + * @property {(prefix: Endpoint) => PromiseVow} bind Claim a port, or if * ending in ENDPOINT_SEPARATOR, a fresh name * @property {( * listenAddr: Endpoint, * remoteAddr: Endpoint, - * ) => Promise} inbound + * ) => PromiseVow} inbound * Make an attempt to connect into this protocol * @property {( * port: Port, * remoteAddr: Endpoint, * connectionHandler: ConnectionHandler, - * ) => Promise} outbound + * ) => PromiseVow} outbound * Create an outbound connection */ diff --git a/packages/network/test/test-network-misc.js b/packages/network/test/test-network-misc.js index b10714a0dd8..5964cfb6968 100644 --- a/packages/network/test/test-network-misc.js +++ b/packages/network/test/test-network-misc.js @@ -1,133 +1,203 @@ +// @ts-check // eslint-disable-next-line import/order import { test } from '@agoric/swingset-vat/tools/prepare-test-env-ava.js'; +import { reincarnate } from '@agoric/swingset-liveslots/tools/setup-vat-data.js'; -import { makePromiseKit } from '@endo/promise-kit'; -import { Far } from '@endo/far'; +import { E } from '@endo/far'; +import { prepareVowTools } from '@agoric/vat-data/vow.js'; +import { makeDurableZone } from '@agoric/zone/durable.js'; import { parse, unparse, - makeEchoConnectionHandler, - makeLoopbackProtocolHandler, - makeNetworkProtocol, - makeRouter, + prepareEchoConnectionKit, + prepareRouter, + prepareLoopbackProtocolHandler, + prepareNetworkProtocol, } from '../src/index.js'; +import '../src/types.js'; + // eslint-disable-next-line no-constant-condition const log = false ? 
console.log : () => {}; /** - * @param {any} t - * @returns {import('../src.js').ProtocolHandler} A testing handler + * @param {import('@agoric/zone').Zone} zone + * @param {import('ava').ExecutionContext} t + * @param {ReturnType} makeEchoConnectionHandler + * @param {*} powers */ -const makeProtocolHandler = t => { - /** @type {import('../src.js').ListenHandler} */ - let l; - let lp; - let nonce = 0; - return Far('ProtocolHandler', { - async onCreate(_protocol, _impl) { - log('created', _protocol, _impl); - }, - async generatePortID() { - nonce += 1; - return `${nonce}`; - }, - async onBind(port, localAddr) { - t.assert(port, `port is supplied to onBind`); - t.assert(localAddr, `local address is supplied to onBind`); - }, - async onConnect(port, localAddr, remoteAddr) { - t.assert(port, `port is tracked in onConnect`); - t.assert(localAddr, `local address is supplied to onConnect`); - t.assert(remoteAddr, `remote address is supplied to onConnect`); - if (lp) { - return l - .onAccept(lp, localAddr, remoteAddr, l) - .then(ch => [localAddr, ch]); - } - return { handler: makeEchoConnectionHandler() }; - }, - async onListen(port, localAddr, listenHandler) { - t.assert(port, `port is tracked in onListen`); - t.assert(localAddr, `local address is supplied to onListen`); - t.assert(listenHandler, `listen handler is tracked in onListen`); - lp = port; - l = listenHandler; - log('listening', port.getLocalAddress(), listenHandler); - }, - async onListenRemove(port, localAddr, listenHandler) { - t.assert(port, `port is tracked in onListen`); - t.assert(localAddr, `local address is supplied to onListen`); - t.is(listenHandler, l, `listenHandler is tracked in onListenRemove`); - l = undefined; - lp = undefined; - log('port done listening', port.getLocalAddress()); +const prepareProtocolHandler = ( + zone, + t, + makeEchoConnectionHandler, + { when }, +) => { + const makeProtocolHandler = zone.exoClass( + 'ProtocolHandler', + undefined, + () => { + return { + /** @type {ListenHandler | undefined } */ + l: undefined, + lp: undefined, + nonce: 0, + }; }, - async onRevoke(port, localAddr) { - t.assert(port, `port is tracked in onRevoke`); - t.assert(localAddr, `local address is supplied to onRevoke`); - log('port done revoking', port.getLocalAddress()); + { + async onInstantiate(_port, _localAddr, _remote, _protocol) { + return ''; + }, + async onCreate(_protocol, _impl) { + log('created', _protocol, _impl); + }, + async generatePortID() { + this.state.nonce += 1; + return `${this.state.nonce}`; + }, + async onBind(port, localAddr) { + t.assert(port, `port is supplied to onBind`); + t.assert(localAddr, `local address is supplied to onBind`); + }, + async onConnect(port, localAddr, remoteAddr) { + t.assert(port, `port is tracked in onConnect`); + t.assert(localAddr, `local address is supplied to onConnect`); + t.assert(remoteAddr, `remote address is supplied to onConnect`); + if (!this.state.lp) { + return { handler: makeEchoConnectionHandler().handler }; + } + assert(this.state.l); + const ch = await when( + this.state.l.onAccept( + this.state.lp, + localAddr, + remoteAddr, + this.state.l, + ), + ); + return { localAddr, handler: ch }; + }, + async onListen(port, localAddr, listenHandler) { + t.assert(port, `port is tracked in onListen`); + t.assert(localAddr, `local address is supplied to onListen`); + t.assert(listenHandler, `listen handler is tracked in onListen`); + this.state.lp = port; + this.state.l = listenHandler; + log('listening', port.getLocalAddress(), listenHandler); + }, + async 
onListenRemove(port, localAddr, listenHandler) { + t.assert(port, `port is tracked in onListen`); + t.assert(localAddr, `local address is supplied to onListen`); + t.is( + listenHandler, + this.state.lp && this.state.l, + `listenHandler is tracked in onListenRemove`, + ); + this.state.lp = undefined; + log('port done listening', port.getLocalAddress()); + }, + async onRevoke(port, localAddr) { + t.assert(port, `port is tracked in onRevoke`); + t.assert(localAddr, `local address is supplied to onRevoke`); + log('port done revoking', port.getLocalAddress()); + }, }, - }); + ); + + return makeProtocolHandler; +}; + +const { fakeVomKit } = reincarnate({ relaxDurabilityRules: false }); +const provideBaggage = key => { + const root = fakeVomKit.cm.provideBaggage(); + const zone = makeDurableZone(root); + return zone.mapStore(`${key} baggage`); }; test('handled protocol', async t => { - const protocol = makeNetworkProtocol(makeProtocolHandler(t)); - - const closed = makePromiseKit(); - const port = await protocol.bind('/ibc/*/ordered'); - await port.connect( - '/ibc/*/ordered/echo', - Far('ProtocolHandler', { - async onOpen(connection, localAddr, remoteAddr) { - t.is(localAddr, '/ibc/*/ordered'); - t.is(remoteAddr, '/ibc/*/ordered/echo'); - const ack = await connection.send('ping'); - // log(ack); - t.is(`${ack}`, 'ping', 'received pong'); - void connection.close(); - }, - async onClose(_connection, reason) { - t.is(reason, undefined, 'no close reason'); - closed.resolve(); - }, - async onReceive(_connection, bytes) { - t.is(`${bytes}`, 'ping'); - return 'pong'; - }, - }), + const zone = makeDurableZone(provideBaggage('network-handled-protocol')); + const powers = prepareVowTools(zone); + const { makeVowKit, when } = powers; + const makeNetworkProtocol = prepareNetworkProtocol(zone, powers); + const makeEchoConnectionHandler = prepareEchoConnectionKit(zone); + const makeProtocolHandler = prepareProtocolHandler( + zone, + t, + makeEchoConnectionHandler, + powers, ); - await closed.promise; - await port.revoke(); + const protocol = makeNetworkProtocol(makeProtocolHandler()); + + const port = await when(protocol.bind('/ibc/*/ordered')); + + const { vow, resolver } = makeVowKit(); + + const prepareTestProtocolHandler = () => { + const makeTestProtocolHandler = zone.exoClass( + 'TestProtocolHandler', + undefined, + () => ({ resolver }), + { + async onOpen(connection, localAddr, remoteAddr) { + t.is(localAddr, '/ibc/*/ordered'); + t.is(remoteAddr, '/ibc/*/ordered/echo'); + const ack = await E(connection).send('ping'); + // log(ack); + t.is(`${ack}`, 'ping', 'received pong'); + void connection.close(); + }, + async onClose(_connection, reason) { + t.is(reason, undefined, 'no close reason'); + this.state.resolver.resolve(null); + }, + async onReceive(_connection, bytes) { + t.is(`${bytes}`, 'ping'); + return 'pong'; + }, + }, + ); + + return makeTestProtocolHandler; + }; + + const makeTestProtocolHandler = prepareTestProtocolHandler(); + + await port.connect('/ibc/*/ordered/echo', makeTestProtocolHandler()); + await when(vow); + port.revoke(); }); test('protocol connection listen', async t => { - const protocol = makeNetworkProtocol(makeProtocolHandler(t)); + const zone = makeDurableZone(provideBaggage('network-protocol-connection')); + const powers = prepareVowTools(zone); + const { makeVowKit, when } = powers; + const makeNetworkProtocol = prepareNetworkProtocol(zone, powers); + const makeEchoConnectionHandler = prepareEchoConnectionKit(zone); + const makeProtocolHandler = prepareProtocolHandler( + 
zone, + t, + makeEchoConnectionHandler, + powers, + ); + const protocol = makeNetworkProtocol(makeProtocolHandler()); - const closed = makePromiseKit(); + const port = await when(protocol.bind('/net/ordered/ordered/some-portname')); + const { vow, resolver } = makeVowKit(); - const port = await protocol.bind('/net/ordered/ordered/some-portname'); + const prepareConnectionHandler = () => { + let handler; - /** @type {import('../src.js').ListenHandler} */ - const listener = Far('listener', { - async onListen(p, listenHandler) { - t.is(p, port, `port is tracked in onListen`); - t.assert(listenHandler, `listenHandler is tracked in onListen`); - }, - async onAccept(p, localAddr, remoteAddr, listenHandler) { - t.assert(localAddr, `local address is passed to onAccept`); - t.assert(remoteAddr, `remote address is passed to onAccept`); - t.is(p, port, `port is tracked in onAccept`); - t.is(listenHandler, listener, `listenHandler is tracked in onAccept`); - let handler; - return harden({ + const makeConnectionHandler = zone.exoClass( + 'connectionHandler', + undefined, + () => ({ resolver }), + { async onOpen(connection, _localAddr, _remoteAddr, connectionHandler) { t.assert(connectionHandler, `connectionHandler is tracked in onOpen`); handler = connectionHandler; - const ack = await connection.send('ping'); + const ack = await when(connection.send('ping')); t.is(`${ack}`, 'ping', 'received pong'); - connection.close(); + await when(connection.close()); }, async onClose(c, reason, connectionHandler) { t.is( @@ -138,7 +208,7 @@ test('protocol connection listen', async t => { handler = undefined; t.assert(c, 'connection is passed to onClose'); t.is(reason, undefined, 'no close reason'); - closed.resolve(); + this.state.resolver.resolve(null); }, async onReceive(c, packet, connectionHandler) { t.is( @@ -150,82 +220,181 @@ test('protocol connection listen', async t => { t.is(`${packet}`, 'ping', 'expected ping'); return 'pong'; }, - }); - }, - async onError(p, rej, listenHandler) { - t.is(p, port, `port is tracked in onError`); - t.is(listenHandler, listener, `listenHandler is tracked in onError`); - t.isNot(rej, rej, 'unexpected error'); - }, - async onRemove(p, listenHandler) { - t.is(listenHandler, listener, `listenHandler is tracked in onRemove`); - t.is(p, port, `port is passed to onReset`); - }, - }); + }, + ); + return makeConnectionHandler; + }; + + const prepareListenHandler = () => { + const makeListenHandler = zone.exoClass( + 'ListenHandler', + undefined, + () => ({ port }), + { + async onListen(p, listenHandler) { + t.is(p, this.state.port, `port is tracked in onListen`); + t.assert(listenHandler, `listenHandler is tracked in onListen`); + }, + async onAccept(p, localAddr, remoteAddr, listenHandler) { + t.assert(localAddr, `local address is passed to onAccept`); + t.assert(remoteAddr, `remote address is passed to onAccept`); + t.is(p, this.state.port, `port is tracked in onAccept`); + t.is( + listenHandler, + this.self, + `listenHandler is tracked in onAccept`, + ); + + const makeConnectionHandler = prepareConnectionHandler(); + return makeConnectionHandler(); + }, + async onError(p, rej, listenHandler) { + t.is(p, port, `port is tracked in onError`); + t.is(listenHandler, this.self, `listenHandler is tracked in onError`); + t.not(rej, rej, 'unexpected error'); + }, + async onRemove(p, listenHandler) { + t.is( + listenHandler, + this.self, + `listenHandler is tracked in onRemove`, + ); + t.is(p, this.state.port, `port is passed to onReset`); + }, + }, + ); + + return makeListenHandler; + }; 
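The tests above rebuild every heap `Far(...)` handler as a durable exo class: values that used to be captured in closures become `this.state`, and the old self-referencing `listener` binding becomes `this.self`. A minimal sketch of that idiom outside the test, assuming the `@agoric/zone` and `@agoric/vat-data` APIs already used in this patch (class, store, and field names here are illustrative):

```js
import { assert } from '@agoric/assert';
import { makeScalarMapStore } from '@agoric/vat-data';
import { makeDurableZone } from '@agoric/zone/durable.js';

// In a real vat the zone would be rooted in the vat's baggage; a scalar map
// store stands in for it here, as test-peg.js does below.
const zone = makeDurableZone(makeScalarMapStore('demo baggage'));

const makeCountingListener = zone.exoClass(
  'CountingListener',
  undefined, // interface guard elided in this sketch
  port => ({ port, accepted: 0 }), // init record becomes this.state
  {
    async onListen(p, listenHandler) {
      assert.equal(p, this.state.port);
      // `this.self` replaces the old closed-over `listener` variable.
      assert.equal(listenHandler, this.self);
    },
    async onAccept(_p, _localAddr, _remoteAddr, _listenHandler) {
      this.state.accepted += 1;
      // A real listener would return a ConnectionHandler here.
    },
  },
);

// Usage: makeCountingListener(somePort) yields a durable ListenHandler.
```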
+ + const makeListenHandler = prepareListenHandler(); + const listener = makeListenHandler(); await port.addListener(listener); - const port2 = await protocol.bind('/net/ordered'); - const connectionHandler = makeEchoConnectionHandler(); - await port2.connect( - '/net/ordered/ordered/some-portname', - Far('connectionHandlerWithOpen', { - ...connectionHandler, - async onOpen(connection, localAddr, remoteAddr, c) { - if (connectionHandler.onOpen) { - await connectionHandler.onOpen(connection, localAddr, remoteAddr, c); - } - void connection.send('ping'); + const port2 = await when(protocol.bind('/net/ordered')); + const { handler } = makeEchoConnectionHandler(); + + const prepareHandlerWithOpen = () => { + const makeHandlerWithOpen = zone.exoClass( + 'connectionHandlerWithOpen', + undefined, + () => ({}), + { + async onReceive(_connection, bytes, _connectionHandler) { + return handler.onReceive(_connection, bytes, _connectionHandler); + }, + async onClose(_connection, _reason, _connectionHandler) { + return handler.onClose(_connection, _reason, _connectionHandler); + }, + async onOpen(connection, _localAddr, _remoteAddr, _c) { + void connection.send('ping'); + }, }, - }), + ); + + return makeHandlerWithOpen; + }; + + const makeHandlerWithOpen = prepareHandlerWithOpen(); + + await when( + port2.connect('/net/ordered/ordered/some-portname', makeHandlerWithOpen()), ); - await closed.promise; + await when(vow); - await port.removeListener(listener); - await port.revoke(); + await when(port.removeListener(listener)); + await when(port.revoke()); }); test('loopback protocol', async t => { + const zone = makeDurableZone(provideBaggage('network-loopback-protocol')); + const powers = prepareVowTools(zone); + const { makeVowKit, when } = powers; + const makeLoopbackProtocolHandler = prepareLoopbackProtocolHandler( + zone, + powers, + ); + const makeNetworkProtocol = prepareNetworkProtocol(zone, powers); const protocol = makeNetworkProtocol(makeLoopbackProtocolHandler()); + const { vow, resolver } = makeVowKit(); - const closed = makePromiseKit(); - - const port = await protocol.bind('/loopback/foo'); + const port = await when(protocol.bind('/loopback/foo')); - /** @type {import('../src.js').ListenHandler} */ - const listener = Far('listener', { - async onAccept(_p, _localAddr, _remoteAddr, _listenHandler) { - return harden({ + const prepareConnectionHandler = () => { + const makeConnectionHandler = zone.exoClass( + 'connectionHandler', + undefined, + () => ({}), + { async onReceive(c, packet, _connectionHandler) { t.is(`${packet}`, 'ping', 'expected ping'); return 'pingack'; }, - }); - }, - }); - await port.addListener(listener); + }, + ); + return makeConnectionHandler; + }; + + const makeConnectionHandler = prepareConnectionHandler(); + + const prepareListenHandler = () => { + const makeListenHandler = zone.exoClass( + 'listener', + undefined, + () => ({ port }), + { + async onAccept(_p, _localAddr, _remoteAddr, _listenHandler) { + return makeConnectionHandler(); + }, + async onRemove(p, _listenHandler) { + console.log('onRemove', p); + }, + }, + ); + + return makeListenHandler; + }; + + const makeListenHandler = prepareListenHandler(); + const listener = makeListenHandler(); + await when(port.addListener(listener)); - const port2 = await protocol.bind('/loopback/bar'); - await port2.connect( - port.getLocalAddress(), - Far('opener', { - async onOpen(c, localAddr, remoteAddr, _connectionHandler) { - t.is(localAddr, '/loopback/bar/nonce/1'); - t.is(remoteAddr, '/loopback/foo/nonce/2'); - const 
pingack = await c.send('ping'); - t.is(pingack, 'pingack', 'expected pingack'); - closed.resolve(); + const port2 = await when(protocol.bind('/loopback/bar')); + const prepareOpener = () => { + const openerHandler = zone.exoClass( + 'opener', + undefined, + ({ resolver: innerResolver }) => ({ innerResolver }), + { + async onOpen(c, localAddr, remoteAddr, _connectionHandler) { + t.is(localAddr, '/loopback/bar/nonce/1'); + t.is(remoteAddr, '/loopback/foo/nonce/2'); + const pingack = await when(c.send('ping')); + t.is(pingack, 'pingack', 'expected pingack'); + this.state.innerResolver.resolve(null); + }, }, - }), + ); + + return openerHandler; + }; + + const makeOpenerHandler = prepareOpener(); + + await when( + port2.connect(port.getLocalAddress(), makeOpenerHandler({ resolver })), ); - await closed.promise; + await when(vow); await port.removeListener(listener); }); test('routing', async t => { + const zone = makeDurableZone(provideBaggage('network-loopback-protocol')); + const makeRouter = prepareRouter(zone); const router = makeRouter(); t.deepEqual(router.getRoutes('/if/local'), [], 'get routes matches none'); router.register('/if/', 'a'); diff --git a/packages/network/tsconfig.json b/packages/network/tsconfig.json index 369a34eb383..325db156bdc 100644 --- a/packages/network/tsconfig.json +++ b/packages/network/tsconfig.json @@ -2,6 +2,7 @@ "extends": "../../tsconfig.json", "compilerOptions": { "checkJs": false, + "maxNodeModuleJsDepth": 1, }, "include": [ "*.js", diff --git a/packages/network/typedoc.json b/packages/network/typedoc.json new file mode 100644 index 00000000000..fdae9f7f2a6 --- /dev/null +++ b/packages/network/typedoc.json @@ -0,0 +1,9 @@ +{ + "extends": [ + "../../typedoc.base.json" + ], + "entryPoints": [ + "./src/index.js", + "./src/types.js", + ] +} \ No newline at end of file diff --git a/packages/notifier/src/index.js b/packages/notifier/src/index.js index 4402bee7884..6374f5c7b18 100644 --- a/packages/notifier/src/index.js +++ b/packages/notifier/src/index.js @@ -16,6 +16,7 @@ export { makeNotifierFromSubscriber, } from './notifier.js'; export { makeSubscription, makeSubscriptionKit } from './subscriber.js'; +export { makePinnedHistoryTopic } from './topic.js'; export { observeNotifier, observeIterator, diff --git a/packages/pegasus/package.json b/packages/pegasus/package.json index 2e7fa2c7e5c..945d8d634c5 100644 --- a/packages/pegasus/package.json +++ b/packages/pegasus/package.json @@ -36,8 +36,10 @@ "@agoric/store": "^0.9.2", "@agoric/swingset-vat": "^0.32.2", "@agoric/vats": "^0.15.1", + "@agoric/vat-data": "^0.5.2", "@agoric/zoe": "^0.26.2", "@endo/bundle-source": "^3.1.0", + "@agoric/zone": "^0.2.2", "@endo/captp": "^4.0.4", "@endo/far": "^1.0.4", "@endo/init": "^1.0.4", diff --git a/packages/pegasus/src/contract.js b/packages/pegasus/src/contract.js new file mode 100644 index 00000000000..2bd951f38db --- /dev/null +++ b/packages/pegasus/src/contract.js @@ -0,0 +1,34 @@ +import { prepareVowTools } from '@agoric/vat-data/vow.js'; +import { makeDurableZone } from '@agoric/zone/durable.js'; +import { makePegasus } from './pegasus.js'; + +import '@agoric/zoe/exported.js'; + +import '../exported.js'; + +/** + * @type {ContractStartFn, + * namesByAddress: ERef + * }} + */ +export const start = (zcf, privateArgs, baggage) => { + const zone = makeDurableZone(baggage); + + const whenZone = zone.subZone('when'); + const { when } = prepareVowTools(whenZone); + + const { board, namesByAddress } = privateArgs; + + // start requires that the object passed in must be 
durable. Given that we + // haven't made pegasus durable yet, we'll wrap its non-durable methods within + // an exo object to workaround this requirement. + const publicFacet = zone.exo('PublicFacet', undefined, { + ...makePegasus({ zcf, board, namesByAddress, when }), + }); + + return harden({ + publicFacet, + }); +}; +harden(start); diff --git a/packages/pegasus/src/courier.js b/packages/pegasus/src/courier.js index dc15c4d4aad..a7c5cdb637b 100644 --- a/packages/pegasus/src/courier.js +++ b/packages/pegasus/src/courier.js @@ -40,6 +40,7 @@ export const getCourierPK = (key, keyToCourierPK) => { * @property {(zcfSeat: ZCFSeat, amounts: AmountKeywordRecord) => void} retain * @property {(zcfSeat: ZCFSeat, amounts: AmountKeywordRecord) => void} redeem * @property {ERef} transferProtocol + * @property {ReturnType['when']} when * @param {ERef} connection * @returns {(args: CourierArgs) => Courier} */ @@ -54,6 +55,7 @@ export const makeCourierMaker = retain, redeem, transferProtocol, + when, }) => { /** @type {Sender} */ const send = async (zcfSeat, depositAddress, memo, opts) => { @@ -71,8 +73,7 @@ export const makeCourierMaker = retain(zcfSeat, { Transfer: amount }); // The payment is already escrowed, and proposed to retain, so try sending. - return E(connection) - .send(transferPacket) + return when(E(connection).send(transferPacket)) .then(ack => E(transferProtocol).assertTransferPacketAck(ack)) .then( _ => zcfSeat.exit(), diff --git a/packages/pegasus/src/pegasus.js b/packages/pegasus/src/pegasus.js index 9fcc7a27fe8..bae6a05d963 100644 --- a/packages/pegasus/src/pegasus.js +++ b/packages/pegasus/src/pegasus.js @@ -29,13 +29,15 @@ const TRANSFER_PROPOSAL_SHAPE = { /** * Make a Pegasus public API. * - * @param {ZCF} zcf the Zoe Contract Facet - * @param {ERef} board where to find depositFacets by boardID - * @param {ERef} namesByAddress where to find depositFacets by bech32 + * @param {object} powers + * @param {ZCF} powers.zcf the Zoe Contract Facet + * @param {ERef} powers.board where to find depositFacets by boardID + * @param {ERef} powers.namesByAddress where to find depositFacets by bech32 + * @param {ReturnType['when']} powers.when * * @typedef {import('@agoric/vats').NameHub} NameHub */ -const makePegasus = (zcf, board, namesByAddress) => { +export const makePegasus = ({ zcf, board, namesByAddress, when }) => { /** * @typedef {object} LocalDenomState * @property {string} localAddr @@ -190,6 +192,7 @@ const makePegasus = (zcf, board, namesByAddress) => { zcfMint.mintGains(harden(amounts), zcfSeat); }, transferProtocol, + when, }); const courierPK = getCourierPK(receiveDenom, receiveDenomToCourierPK); @@ -282,6 +285,7 @@ const makePegasus = (zcf, board, namesByAddress) => { transferSeat, ), transferProtocol, + when, }); const { receiveDenomToCourierPK } = localDenomState; @@ -489,22 +493,8 @@ const makePegasus = (zcf, board, namesByAddress) => { }, }); }; +harden(makePegasus); /** * @typedef {ReturnType} Pegasus */ - -/** - * @param {ZCF<{board: ERef, namesByAddress: ERef}>} zcf - */ -const start = zcf => { - const { board, namesByAddress } = zcf.getTerms(); - - return { - publicFacet: makePegasus(zcf, board, namesByAddress), - }; -}; - -harden(start); -harden(makePegasus); -export { start, makePegasus }; diff --git a/packages/pegasus/src/proposals/core-proposal.js b/packages/pegasus/src/proposals/core-proposal.js index f11ac823462..dd099e57f99 100644 --- a/packages/pegasus/src/proposals/core-proposal.js +++ b/packages/pegasus/src/proposals/core-proposal.js @@ -1,5 +1,4 @@ // 
@ts-check -/* eslint @typescript-eslint/no-floating-promises: "warn" */ import { E, Far } from '@endo/far'; import { makeNameHubKit } from '@agoric/vats/src/nameHub.js'; import { observeIteration, subscribeEach } from '@agoric/notifier'; @@ -42,12 +41,13 @@ export const startPegasus = async ({ }, }) => { const [board, namesByAddress] = await Promise.all([boardP, namesByAddressP]); - const terms = { board, namesByAddress }; + const privates = { board, namesByAddress }; const { instance } = await E(zoe).startInstance( pegasusInstall, undefined, - terms, + undefined, + privates, ); produceInstance.resolve(instance); @@ -76,7 +76,9 @@ export const addPegasusTransferPort = async ( void E(pegasusConnectionsAdmin).delete(localAddr); } }, - }); + }).catch(err => + console.error('Error observing Pegasus connection kit:', err), + ); return E(port).addListener( Far('listener', { async onAccept(_port, _localAddr, _remoteAddr, _listenHandler) { diff --git a/packages/pegasus/test/test-peg.js b/packages/pegasus/test/test-peg.js index cbd574d7c09..5517ec996eb 100644 --- a/packages/pegasus/test/test-peg.js +++ b/packages/pegasus/test/test-peg.js @@ -4,22 +4,25 @@ import { test } from '@agoric/swingset-vat/tools/prepare-test-env-ava.js'; import path from 'path'; import { E, Far } from '@endo/far'; import { - makeNetworkProtocol, - makeLoopbackProtocolHandler, + prepareNetworkProtocol, + prepareLoopbackProtocolHandler, } from '@agoric/network'; import bundleSource from '@endo/bundle-source'; import { AmountMath } from '@agoric/ertp'; import { makeZoeForTest } from '@agoric/zoe/tools/setup-zoe.js'; import { makeSubscription } from '@agoric/notifier'; +import { prepareVowTools } from '@agoric/vat-data/vow.js'; import '@agoric/ertp/exported.js'; import { makePromiseKit } from '@endo/promise-kit'; +import { makeScalarMapStore } from '@agoric/vat-data'; +import { makeDurableZone } from '@agoric/zone/durable.js'; const filename = new URL(import.meta.url).pathname; const dirname = path.dirname(filename); -const contractPath = `${dirname}/../src/pegasus.js`; +const contractPath = `${dirname}/../src/contract.js`; /** * @template T @@ -31,12 +34,22 @@ const makeAsyncIteratorFromSubscription = sub => Symbol.asyncIterator ](); +const provideBaggage = key => { + const zone = makeDurableZone(makeScalarMapStore()); + return zone.mapStore(`${key} baggage`); +}; + /** * @param {import('ava').Assertions} t */ async function testRemotePeg(t) { t.plan(24); + // const zone = makeHeapZone(); + const zone = makeDurableZone(provideBaggage('peagsus')); + const powers = prepareVowTools(zone); + const { makeVowKit, when } = powers; + /** * @type {PromiseRecord} */ @@ -69,6 +82,7 @@ async function testRemotePeg(t) { const { publicFacet: publicAPI } = await E(zoe).startInstance( installationHandle, {}, + {}, { board: fakeBoard, namesByAddress: fakeNamesByAddress }, ); @@ -76,7 +90,10 @@ async function testRemotePeg(t) { * @type {import('../src/pegasus.js').Pegasus} */ const pegasus = publicAPI; - const network = makeNetworkProtocol(makeLoopbackProtocolHandler()); + + const makeLoopbackHandler = prepareLoopbackProtocolHandler(zone, powers); + const makeNetworkProtocol = prepareNetworkProtocol(zone, powers); + const network = makeNetworkProtocol(makeLoopbackHandler()); const portP = E(network).bind('/ibc-channel/chanabc/ibc-port/portdef'); const portName = await E(portP).getLocalAddress(); @@ -84,50 +101,54 @@ async function testRemotePeg(t) { /** * Pretend we're Gaia. 
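The `board` and `namesByAddress` powers now reach the contract as Zoe privateArgs (the fourth `startInstance` argument) instead of terms, matching the new `contract.js` `start(zcf, privateArgs, baggage)` signature. A sketch of the call shape on the proposal side (variable names illustrative):

```js
import { E } from '@endo/far';

// zoe, pegasusInstall, boardP and namesByAddressP are assumed to come from
// the bootstrap promise space, as in startPegasus above.
const startPegasusSketch = async ({ zoe, pegasusInstall, boardP, namesByAddressP }) => {
  const [board, namesByAddress] = await Promise.all([boardP, namesByAddressP]);
  const { instance, publicFacet } = await E(zoe).startInstance(
    pegasusInstall,
    undefined, // issuerKeywordRecord
    undefined, // customTerms
    { board, namesByAddress }, // privateArgs, consumed by start(zcf, privateArgs, baggage)
  );
  return harden({ instance, publicFacet });
};
harden(startPegasusSketch);
```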
* - * @type {import('@agoric/network/src.js').Connection?} + * @type {Connection} */ let gaiaConnection; - E(portP).addListener( - Far('acceptor', { - async onAccept(_p, _localAddr, _remoteAddr) { - return Far('handler', { - async onOpen(c) { - gaiaConnection = c; - }, - async onReceive(_c, packetBytes) { - const packet = JSON.parse(packetBytes); - if (packet.memo) { - t.deepEqual( - packet, - { - amount: '100000000000000000001', - denom: 'portdef/chanabc/uatom', - memo: 'I am a memo!', - receiver: 'markaccount', - sender: 'agoric1jmd7lwdyykrxm5h83nlhg74fctwnky04ufpqtc', - }, - 'expected transfer packet', - ); - return JSON.stringify({ result: 'AQ==' }); - } else { - t.deepEqual( - packet, - { - amount: '100000000000000000001', - denom: 'portdef/chanabc/uatom', - memo: '', - receiver: 'markaccount', - sender: 'pegasus', - }, - 'expected transfer packet', - ); - return JSON.stringify({ result: 'AQ==' }); - } - }, - }); - }, - }), - ); + E(portP) + .addListener( + Far('acceptor', { + async onAccept(_p, _localAddr, _remoteAddr) { + return Far('handler', { + async onOpen(c) { + gaiaConnection = c; + }, + async onReceive(_c, packetBytes) { + const { resolver, vow } = makeVowKit(); + const packet = JSON.parse(packetBytes); + if (packet.memo) { + t.deepEqual( + packet, + { + amount: '100000000000000000001', + denom: 'portdef/chanabc/uatom', + memo: 'I am a memo!', + receiver: 'markaccount', + sender: 'agoric1jmd7lwdyykrxm5h83nlhg74fctwnky04ufpqtc', + }, + 'expected transfer packet', + ); + resolver.resolve(JSON.stringify({ result: 'AQ==' })); + } else { + t.deepEqual( + packet, + { + amount: '100000000000000000001', + denom: 'portdef/chanabc/uatom', + memo: '', + receiver: 'markaccount', + sender: 'pegasus', + }, + 'expected transfer packet', + ); + resolver.resolve(JSON.stringify({ result: 'AQ==' })); + } + return vow; + }, + }); + }, + }), + ) + .catch(e => t.fail(e)); // Pretend we're Agoric. const { handler: chandler, subscription: connectionSubscription } = @@ -178,7 +199,7 @@ async function testRemotePeg(t) { const localPurseP = E(localIssuerP).makeEmptyPurse(); resolveLocalDepositFacet(E(localPurseP).getDepositFacet()); - const sendAckData = await sendAckDataP; + const sendAckData = await when(sendAckDataP); const sendAck = JSON.parse(sendAckData); t.deepEqual(sendAck, { result: 'AQ==' }, 'Gaia sent the atoms'); if (!sendAck.result) { @@ -199,8 +220,8 @@ async function testRemotePeg(t) { sender: 'FIXME:sender2', }; - const sendAckData2 = await E(gaiaConnection).send( - JSON.stringify(sendPacket2), + const sendAckData2 = await when( + E(gaiaConnection).send(JSON.stringify(sendPacket2)), ); const sendAck2 = JSON.parse(sendAckData2); t.deepEqual(sendAck2, { result: 'AQ==' }, 'Gaia sent more atoms'); @@ -225,9 +246,11 @@ async function testRemotePeg(t) { // Wait for the packet to go through. 
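Because `connection.send(...)` can now answer with a vow rather than a bare promise, the test unwraps results with `when(...)`, and the mock Gaia handler replies with a vow it resolves itself. A reduced round-trip sketch of that pattern (names illustrative; the durable zone is again rooted in a scalar map store for demonstration):

```js
import { makeScalarMapStore } from '@agoric/vat-data';
import { prepareVowTools } from '@agoric/vat-data/vow.js';
import { makeDurableZone } from '@agoric/zone/durable.js';

const zone = makeDurableZone(makeScalarMapStore('vow demo'));
const { makeVowKit, when } = prepareVowTools(zone);

const demoAckRoundTrip = async () => {
  // Producer side (the onReceive handler): hand back a vow immediately...
  const { vow, resolver } = makeVowKit();
  // ...and settle it whenever the acknowledgement is actually known.
  resolver.resolve(JSON.stringify({ result: 'AQ==' }));

  // Consumer side (the test): `when` unwraps vows, promises, or plain values.
  const ack = JSON.parse(await when(vow));
  return ack.result; // 'AQ=='
};
harden(demoAckRoundTrip);
```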
t.deepEqual(await remoteDenomAit.next(), { done: false, value: 'umuon' }); - E(pegConnActions).rejectTransfersWaitingForPegRemote('umuon'); + E(pegConnActions) + .rejectTransfersWaitingForPegRemote('umuon') + .catch(e => t.fail(e)); - const sendAckData3 = await sendAckData3P; + const sendAckData3 = await when(sendAckData3P); const sendAck3 = JSON.parse(sendAckData3); t.deepEqual( sendAck3, diff --git a/packages/vats/src/core/client-behaviors.js b/packages/vats/src/core/client-behaviors.js index 24c9349d589..778ceb8a86a 100644 --- a/packages/vats/src/core/client-behaviors.js +++ b/packages/vats/src/core/client-behaviors.js @@ -87,6 +87,7 @@ async function createLocalBundle(vats, devices, vatAdminSvc, vatPowers) { * BootstrapSpace & { * vatParameters: BootstrapVatParams; * vats: SwingsetVats & SoloVats; + * zone: import('@agoric/base-zone').Zone; * }} powers */ export const startClient = async ({ diff --git a/packages/vats/src/core/types-ambient.d.ts b/packages/vats/src/core/types-ambient.d.ts index dad4d4d9b3b..a0da91a0071 100644 --- a/packages/vats/src/core/types-ambient.d.ts +++ b/packages/vats/src/core/types-ambient.d.ts @@ -377,6 +377,7 @@ type ChainBootstrapSpaceT = { typeof import('@agoric/inter-protocol/src/provisionPool.js').start >; vatStore: import('./utils.js').VatStore; + vatUpgradeInfo: MapStore; zoe: ZoeService; }; diff --git a/packages/vats/src/ibc.js b/packages/vats/src/ibc.js index cd7139cd5df..dcec6802d5f 100644 --- a/packages/vats/src/ibc.js +++ b/packages/vats/src/ibc.js @@ -1,9 +1,8 @@ -import { makeScalarMapStore, makeLegacyMap } from '@agoric/store'; -import { makePromiseKit } from '@endo/promise-kit'; +// @ts-check + import { assert, details as X, Fail } from '@agoric/assert'; -import { Far } from '@endo/far'; +import { E } from '@endo/far'; -import { makeWithQueue } from '@agoric/internal/src/queue.js'; import { dataToBase64, base64ToBytes } from '@agoric/network'; import '@agoric/store/exported.js'; @@ -35,90 +34,57 @@ const DEFAULT_PACKET_TIMEOUT_NS = 10n * 60n * 1_000_000_000n; * @property {IBCPortID} destination_port */ +/** @param {import('@agoric/base-zone').Zone} zone */ +const prepareAckWatcher = zone => { + const makeAckWatcher = zone.exoClass( + 'AckWatcher', + undefined, + (protocolUtils, packet) => ({ protocolUtils, packet }), + { + onFulfilled(ack) { + const { protocolUtils, packet } = this.state; + + const realAck = ack || DEFAULT_ACKNOWLEDGEMENT; + const ack64 = dataToBase64(realAck); + protocolUtils + .downcall('receiveExecuted', { + packet, + ack: ack64, + }) + .catch(e => this.self.onRejected(e)); + }, + onRejected(e) { + console.error(e); + }, + }, + ); + return makeAckWatcher; +}; + /** - * Create a handler for the IBC protocol, both from the network and from the - * bridge. 
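Further down, `prepareAckWatcher` replaces the old `.then(...)` chain on the connection's reply: with `watch(specimen, watcher)` the continuation is itself a durable exo whose `onFulfilled`/`onRejected` methods can be re-attached to the still-pending vow after a vat upgrade. A stripped-down sketch of the same shape (watcher name illustrative):

```js
import { makeScalarMapStore } from '@agoric/vat-data';
import { prepareVowTools } from '@agoric/vat-data/vow.js';
import { makeDurableZone } from '@agoric/zone/durable.js';

const zone = makeDurableZone(makeScalarMapStore('watcher demo'));
const { watch, makeVowKit } = prepareVowTools(zone);

// The watcher's state must be durable-storable (here just a label string).
const makeLogWatcher = zone.exoClass(
  'LogWatcher',
  undefined,
  label => ({ label }),
  {
    onFulfilled(ack) {
      console.info(this.state.label, 'acked:', ack);
    },
    onRejected(reason) {
      console.error(this.state.label, 'failed:', reason);
    },
  },
);

const { vow, resolver } = makeVowKit();
// Unlike vow.then(...), the registered watcher outlives a vat restart.
watch(vow, makeLogWatcher('receiveExecuted'));
resolver.resolve('AQ==');
```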
+ * @typedef {object} Counterparty + * @property {string} port_id + * @property {string} channel_id + * + * @typedef {object} ConnectingInfo + * @property {'ORDERED' | 'UNORDERED'} order + * @property {string[]} connectionHops + * @property {string} portID + * @property {string} channelID + * @property {Counterparty} counterparty + * @property {string} version * - * @param {typeof import('@endo/far').E} E - * @param {(method: string, params: any) => Promise} rawCallIBCDevice - * @returns {ProtocolHandler & BridgeHandler} Protocol/Bridge handler + * @typedef {import('@agoric/vow').VowKit} OnConnectP + * + * @typedef {Omit & { + * localAddr: Endpoint; + * onConnectP: OnConnectP; + * counterparty: { port_id: string }; + * }} Outbound */ -export function makeIBCProtocolHandler(E, rawCallIBCDevice) { - // Nonce for creating port identifiers. - let lastPortID = 0; - - const callIBCDevice = (method, params) => { - console.info('IBC downcall', method, params); - return rawCallIBCDevice(method, params); - }; - /** @type {MapStore>} */ - const channelKeyToConnP = makeScalarMapStore('CHANNEL:PORT'); - - /** - * @typedef {object} Counterparty - * @property {string} port_id - * @property {string} channel_id - * - * @typedef {object} ConnectingInfo - * @property {'ORDERED' | 'UNORDERED'} order - * @property {string[]} connectionHops - * @property {string} portID - * @property {string} channelID - * @property {Counterparty} counterparty - * @property {string} version - * - * @typedef {PromiseRecord} OnConnectP - * - * @typedef {Omit & { - * localAddr: Endpoint; - * onConnectP: OnConnectP; - * counterparty: { port_id: string }; - * }} Outbound - */ - - /** @type {LegacyMap} */ - // Legacy because it holds a mutable Javascript Array - const srcPortToOutbounds = makeLegacyMap('SRC-PORT'); - - /** @type {MapStore} */ - const channelKeyToInfo = makeScalarMapStore('CHANNEL:PORT'); - - /** @type {MapStore>} */ - const channelKeyToAttemptP = makeScalarMapStore('CHANNEL:PORT'); - - /** @type {LegacyMap>>} */ - // Legacy because it holds a LegacyMap - const channelKeyToSeqAck = makeLegacyMap('CHANNEL:PORT'); - - /** - * Send a packet out via the IBC device. - * - * @param {IBCPacket} packet - * @param {LegacyMap>} seqToAck - * @param {bigint} [relativeTimeoutNs] - */ - async function ibcSendPacket( - packet, - seqToAck, - relativeTimeoutNs = DEFAULT_PACKET_TIMEOUT_NS, - ) { - // Make a kernel call to do the send. - const fullPacket = await callIBCDevice('sendPacket', { - packet, - relativeTimeoutNs, - }); - - // Extract the actual sequence number from the return. - const { sequence } = fullPacket; - - /** @type {PromiseRecord} */ - const ackDeferred = makePromiseKit(); - - // Register the ack resolver/rejector with this sequence number. 
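The per-packet acknowledgement bookkeeping changes the same way: instead of a heap `LegacyMap` of promise kits registered at send time, the durable handler keeps a `MapStore` of vow kits keyed by the `${channelID}:${portID}` channel key and the packet sequence, created lazily by the `findAckKit` utility further below. A reduced sketch of that lookup (helper name illustrative; the vow tools are those threaded through this patch):

```js
// channelKeyToSeqAck: MapStore<channelKey, MapStore<sequence, VowKit>>,
// as initialized per channel by the connection handler.
const makeFindAckKit = ({ makeVowKit }, channelKeyToSeqAck) =>
  harden((channelID, portID, sequence) => {
    const channelKey = `${channelID}:${portID}`;
    const seqToAck = channelKeyToSeqAck.get(channelKey);
    if (seqToAck.has(sequence)) {
      // The sender already registered a kit for this sequence number.
      return seqToAck.get(sequence);
    }
    // Otherwise the bridge event raced ahead of the sender; make one now.
    const kit = makeVowKit();
    seqToAck.init(sequence, harden(kit));
    return kit;
  });
harden(makeFindAckKit);
```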
- seqToAck.init(sequence, ackDeferred); - return ackDeferred.promise; - } +/** @param {import('@agoric/base-zone').Zone} zone */ +export const prepareIBCConnectionHandler = zone => { /** * @param {string} channelID * @param {string} portID @@ -127,51 +93,32 @@ export function makeIBCProtocolHandler(E, rawCallIBCDevice) { * @param {'ORDERED' | 'UNORDERED'} order * @returns {ConnectionHandler} */ - function makeIBCConnectionHandler( - channelID, - portID, - rChannelID, - rPortID, - order, - ) { - const channelKey = `${channelID}:${portID}`; - // Legacy because it holds a PromiseRecord - const seqToAck = makeLegacyMap('SEQUENCE'); - channelKeyToSeqAck.init(channelKey, seqToAck); - - /** - * @param {Connection} _conn - * @param {Bytes} packetBytes - * @param {ConnectionHandler} _handler - * @param {object} root0 - * @param {bigint} [root0.relativeTimeoutNs] - * @returns {Promise} Acknowledgement data - */ - let onReceive = async ( - _conn, - packetBytes, - _handler, - { relativeTimeoutNs } = {}, + const makeIBCConnectionHandler = zone.exoClass( + 'IBCConnectionHandler', + undefined, + ( + { protocolUtils, channelKeyToConnP, channelKeyToSeqAck }, + { channelID, portID, rChannelID, rPortID, order }, ) => { - // console.error(`Remote IBC Handler ${portID} ${channelID}`); - const packet = { - source_port: portID, - source_channel: channelID, - destination_port: rPortID, - destination_channel: rChannelID, - data: dataToBase64(packetBytes), - }; - return ibcSendPacket(packet, seqToAck, relativeTimeoutNs); - }; - - if (order === 'ORDERED') { - // We set up a queue on the receiver to enforce ordering. - const withChannelReceiveQueue = makeWithQueue(); - onReceive = withChannelReceiveQueue(onReceive); - } + const channelKey = `${channelID}:${portID}`; + const seqToAck = zone.detached().mapStore('seqToAck'); + channelKeyToSeqAck.init(channelKey, seqToAck); - return Far('IBCConnectionHandler', { + return { + protocolUtils, + channelID, + portID, + rChannelID, + rPortID, + order, + channelKeyToConnP, + channelKeyToSeqAck, + }; + }, + { async onOpen(conn, localAddr, remoteAddr, _handler) { + const { channelID, portID, channelKeyToConnP } = this.state; + console.debug( 'onOpen Remote IBC Connection', channelID, @@ -179,385 +126,607 @@ export function makeIBCProtocolHandler(E, rawCallIBCDevice) { localAddr, remoteAddr, ); - const connP = E.resolve(conn); - channelKeyToConnP.init(channelKey, connP); + const channelKey = `${channelID}:${portID}`; + + channelKeyToConnP.init(channelKey, conn); + }, + /** + * @param {Connection} _conn + * @param {Bytes} packetBytes + * @param {ConnectionHandler} _handler + * @param {object} root0 + * @param {bigint} [root0.relativeTimeoutNs] + * @returns {PromiseVow} Acknowledgement data + */ + async onReceive( + _conn, + packetBytes, + _handler, + { relativeTimeoutNs } = {}, + ) { + const { portID, channelID, rPortID, rChannelID } = this.state; + const { protocolUtils } = this.state; + // console.error(`Remote IBC Handler ${portID} ${channelID}`); + const packet = { + source_port: portID, + source_channel: channelID, + destination_port: rPortID, + destination_channel: rChannelID, + data: dataToBase64(packetBytes), + }; + return protocolUtils.ibcSendPacket(packet, relativeTimeoutNs); }, - onReceive, async onClose(_conn, _reason, _handler) { + const { portID, channelID } = this.state; + const { protocolUtils, channelKeyToSeqAck } = this.state; + const packet = { source_port: portID, source_channel: channelID, }; - await callIBCDevice('startChannelCloseInit', { packet }); + await 
protocolUtils.downcall('startChannelCloseInit', { + packet, + }); const rejectReason = Error('Connection closed'); - for (const ackDeferred of seqToAck.values()) { - ackDeferred.reject(rejectReason); + const channelKey = `${channelID}:${portID}`; + + const seqToAck = channelKeyToSeqAck.get(channelKey); + + for (const ackKit of seqToAck.values()) { + ackKit.resolver.reject(rejectReason); } channelKeyToSeqAck.delete(channelKey); }, - }); - } - - /** @param {string} localAddr */ - const localAddrToPortID = localAddr => { - const m = localAddr.match(/^\/ibc-port\/([-a-zA-Z0-9._+#[\]<>]+)$/); - if (!m) { - throw TypeError( - `Invalid port specification ${localAddr}; expected "/ibc-port/PORT"`, - ); - } - return m[1]; - }; + }, + ); + + return makeIBCConnectionHandler; +}; - /** @type {ProtocolImpl} */ - let protocolImpl; +/** + * @param {import('@agoric/base-zone').Zone} zone + * @param {ReturnType} powers + */ +export const prepareIBCProtocol = (zone, { makeVowKit, watch, when }) => { + const makeAckWatcher = prepareAckWatcher(zone); + const makeIBCConnectionHandler = prepareIBCConnectionHandler(zone); /** - * @typedef {object} OutboundCircuitRecord - * @property {IBCConnectionID} dst - * @property {'ORDERED' | 'UNORDERED'} order - * @property {string} version - * @property {IBCPacket} packet - * @property {PromiseRecord} deferredHandler + * @typedef {object} IBCDevice + * @property {(method: string, args: object) => Promise} downcall */ - /** @type {LegacyMap>>} */ - // Legacy because it holds a raw JavaScript Set - const portToPendingConns = makeLegacyMap('Port'); + /** + * @type {WeakMapStore< + * IBCDevice, + * { + * protocolHandler: ProtocolHandler; + * bridgeHandler: BridgeHandler; + * } + * >} Map + * from IBC device to existing handler + */ + const ibcdevToKit = zone.weakMapStore('ibcdevToHandler'); - /** @type {ProtocolHandler} */ - const protocol = Far('IBCProtocolHandler', { - async onCreate(impl, _protocolHandler) { - console.debug('IBC onCreate'); - protocolImpl = impl; - }, - async onInstantiate() { - // The IBC channel is not known until after handshake. - return ''; - }, - async generatePortID(_localAddr, _protocolHandler) { - lastPortID += 1; - return `port-${lastPortID}`; - }, - async onBind(port, localAddr, _protocolHandler) { - const portID = localAddrToPortID(localAddr); - portToPendingConns.init(port, new Set()); - const packet = { - source_port: portID, + const detached = zone.detached(); + + /** + * Create a handler for the IBC protocol, both from the network and from the + * bridge. + * + * @param {IBCDevice} ibcdev + */ + const makeIBCProtocolKit = zone.exoClassKit( + 'IBCProtocolHandler', + undefined, + ibcdev => { + /** @type {MapStore} */ + const channelKeyToConnP = detached.mapStore('channelKeyToConnP'); + + /** @type {MapStore} */ + const channelKeyToInfo = detached.mapStore('channelKeyToInfo'); + + /** @type {MapStore} */ + const channelKeyToAttempt = detached.mapStore('channelKeyToAttempt'); + + /** @type {MapStore} */ + const srcPortToOutbounds = detached.mapStore('srcPortToOutbounds'); + + /** + * @type {MapStore< + * string, + * MapStore> + * >} + */ + const channelKeyToSeqAck = detached.mapStore('channelKeyToSeqAck'); + + /** + * @type {MapStore< + * string, + * SetStore + * >} + */ + const portToPendingConns = detached.mapStore('portToPendingConns'); + + return { + ibcdev, + channelKeyToConnP, + channelKeyToInfo, + channelKeyToAttempt, + srcPortToOutbounds, + channelKeyToSeqAck, + portToPendingConns, + lastPortID: 0, // Nonce for creating port identifiers. 
+ /** @type {ProtocolImpl | undefined} */ protocolImpl: undefined, }; - return callIBCDevice('bindPort', { packet }); }, - async onConnect(port, localAddr, remoteAddr, _chandler, _protocolHandler) { - console.debug('IBC onConnect', localAddr, remoteAddr); - const portID = localAddrToPortID(localAddr); - const pendingConns = portToPendingConns.get(port); - - const match = remoteAddr.match( - /^(\/ibc-hop\/[^/]+)*\/ibc-port\/([^/]+)\/(ordered|unordered)\/([^/]+)$/s, - ); - if (!match) { - throw TypeError( - `Remote address ${remoteAddr} must be '(/ibc-hop/CONNECTION)*/ibc-port/PORT/(ordered|unordered)/VERSION'`, - ); - } - - const hops = []; - let h = match[1]; - while (h) { - const m = h.match(/^\/ibc-hop\/([^/]+)/); - if (!m) { - throw Error( - `internal: ${JSON.stringify(h)} did not begin with "/ibc-hop/XXX"`, + { + protocolHandler: { + async onCreate(impl, _protocolHandler) { + console.debug('IBC onCreate'); + this.state.protocolImpl = impl; + }, + async onInstantiate() { + // The IBC channel is not known until after handshake. + return ''; + }, + async generatePortID(_localAddr, _protocolHandler) { + this.state.lastPortID += 1; + return `port-${this.state.lastPortID}`; + }, + async onBind(port, localAddr, _protocolHandler) { + const { util } = this.facets; + const { portToPendingConns } = this.state; + + const portID = util.localAddrToPortID(localAddr); + portToPendingConns.init(portID, detached.setStore('pendingConns')); + const packet = { + source_port: portID, + }; + return util.downcall('bindPort', { packet }); + }, + async onConnect( + port, + localAddr, + remoteAddr, + _chandler, + _protocolHandler, + ) { + const { util } = this.facets; + const { portToPendingConns, srcPortToOutbounds } = this.state; + + console.debug('IBC onConnect', localAddr, remoteAddr); + const portID = util.localAddrToPortID(localAddr); + const pendingConns = portToPendingConns.get(portID); + + const match = remoteAddr.match( + /^(\/ibc-hop\/[^/]+)*\/ibc-port\/([^/]+)\/(ordered|unordered)\/([^/]+)$/s, ); - } - h = h.substr(m[0].length); - hops.push(m[1]); - } - - // Generate a circuit. - const rPortID = match[2]; - const order = match[3] === 'ordered' ? 'ORDERED' : 'UNORDERED'; - const version = match[4]; - - const onConnectP = makePromiseKit(); - - pendingConns.add(onConnectP); - /** @type {Outbound[]} */ - let outbounds; - if (srcPortToOutbounds.has(portID)) { - outbounds = [...srcPortToOutbounds.get(portID)]; - } else { - outbounds = []; - srcPortToOutbounds.init(portID, outbounds); - } - outbounds.push({ - portID, - counterparty: { port_id: rPortID }, - connectionHops: hops, - order, - version, - onConnectP, - localAddr, - }); - - // Initialise the channel, which automatic relayers should pick up. 
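`onConnect` still derives the channel-open parameters by parsing the remote endpoint string; only the surrounding bookkeeping became durable. The parse, pulled out as a standalone sketch (same regular expressions as the patch, helper name illustrative):

```js
// parseRemoteIbcAddress('/ibc-hop/connection-0/ibc-port/transfer/ordered/ics20-1')
// -> { hops: ['connection-0'], rPortID: 'transfer', order: 'ORDERED', version: 'ics20-1' }
const parseRemoteIbcAddress = remoteAddr => {
  const match = remoteAddr.match(
    /^(\/ibc-hop\/[^/]+)*\/ibc-port\/([^/]+)\/(ordered|unordered)\/([^/]+)$/s,
  );
  if (!match) {
    throw TypeError(
      `Remote address ${remoteAddr} must be '(/ibc-hop/CONNECTION)*/ibc-port/PORT/(ordered|unordered)/VERSION'`,
    );
  }
  const hops = [];
  let h = match[1];
  while (h) {
    const m = h.match(/^\/ibc-hop\/([^/]+)/);
    if (!m) {
      throw Error(`internal: ${JSON.stringify(h)} did not begin with "/ibc-hop/XXX"`);
    }
    h = h.substr(m[0].length);
    hops.push(m[1]);
  }
  return harden({
    hops,
    rPortID: match[2],
    order: match[3] === 'ordered' ? 'ORDERED' : 'UNORDERED',
    version: match[4],
  });
};
harden(parseRemoteIbcAddress);
```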
- const packet = { - source_port: portID, - destination_port: rPortID, - }; + if (!match) { + throw TypeError( + `Remote address ${remoteAddr} must be '(/ibc-hop/CONNECTION)*/ibc-port/PORT/(ordered|unordered)/VERSION'`, + ); + } - await callIBCDevice('startChannelOpenInit', { - packet, - order, - hops, - version, - }); + const hops = []; + let h = match[1]; + while (h) { + const m = h.match(/^\/ibc-hop\/([^/]+)/); + if (!m) { + throw Error( + `internal: ${JSON.stringify( + h, + )} did not begin with "/ibc-hop/XXX"`, + ); + } + h = h.substr(m[0].length); + hops.push(m[1]); + } - return onConnectP.promise; - }, - async onListen(_port, localAddr, _listenHandler) { - console.debug('IBC onListen', localAddr); - }, - async onListenRemove(_port, localAddr, _listenHandler) { - console.debug('IBC onListenRemove', localAddr); - }, - async onRevoke(port, localAddr, _protocolHandler) { - console.debug('IBC onRevoke', localAddr); - const pendingConns = portToPendingConns.get(port); - portToPendingConns.delete(port); - const revoked = Error(`Port ${localAddr} revoked`); - for (const onConnectP of pendingConns.values()) { - onConnectP.reject(revoked); - } - }, - }); - - return Far('IBCProtocolHandler', { - ...protocol, - async fromBridge(obj) { - console.info('IBC fromBridge', obj); - await null; - switch (obj.event) { - case 'channelOpenTry': { - // They're (more or less politely) asking if we are listening, so make an attempt. - const { - channelID, + // Generate a circuit. + const rPortID = match[2]; + const order = match[3] === 'ordered' ? 'ORDERED' : 'UNORDERED'; + const version = match[4]; + + const kit = makeVowKit(); + + pendingConns.add(kit.resolver); + /** @type {Outbound} */ + const ob = { portID, - counterparty: { port_id: rPortID, channel_id: rChannelID }, + counterparty: { port_id: rPortID }, connectionHops: hops, order, version, - counterpartyVersion: rVersion, - } = obj; - - const localAddr = `/ibc-port/${portID}/${order.toLowerCase()}/${version}`; - const ibcHops = hops.map(hop => `/ibc-hop/${hop}`).join('/'); - const remoteAddr = `${ibcHops}/ibc-port/${rPortID}/${order.toLowerCase()}/${rVersion}/ibc-channel/${rChannelID}`; - - // See if we allow an inbound attempt for this address pair (without - // rejecting). - const attemptP = E(protocolImpl).inbound(localAddr, remoteAddr); - - // Tell what version string we negotiated. - const attemptedLocal = await E(attemptP).getLocalAddress(); - const match = attemptedLocal.match( - // Match: ... /ORDER/VERSION ... - new RegExp('^(/[^/]+/[^/]+)*/(ordered|unordered)/([^/]+)(/|$)'), - ); - - const channelKey = `${channelID}:${portID}`; - if (!match) { - throw Error( - `${channelKey}: cannot determine version from attempted local address ${attemptedLocal}`, - ); + onConnectP: kit, + localAddr, + }; + if (!srcPortToOutbounds.has(portID)) { + srcPortToOutbounds.init(portID, harden([ob])); } - const negotiatedVersion = match[3]; - channelKeyToAttemptP.init(channelKey, attemptP); - channelKeyToInfo.init(channelKey, obj); + // Initialise the channel, which automatic relayers should pick up. + const packet = { + source_port: portID, + destination_port: rPortID, + }; - try { - if (negotiatedVersion !== version) { - // Too late; the relayer gave us a version we didn't like. - throw Error( - `${channelKey}: negotiated version was ${negotiatedVersion}; rejecting ${version}`, - ); - } - } catch (e) { - // Clean up after our failed attempt. 
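The other structural change in `onConnect` is that it no longer holds a promise kit in the heap: it parks the resolver (inside the `Outbound` record) in a durable store, returns `kit.vow`, and lets the later `channelOpenAck` bridge event resolve it. The essence of that handshake, reduced to a sketch that keys by port only (store and method names illustrative; the real code matches on counterparty and hops as well):

```js
// A durable "ask now, answer on a later bridge event" helper.
const preparePendingConnects = (zone, { makeVowKit }) => {
  const pending = zone.mapStore('pendingConnects');
  return harden({
    // onConnect side: register interest, hand the vow back to the caller.
    begin(portID) {
      const kit = makeVowKit();
      pending.init(portID, harden(kit));
      return kit.vow;
    },
    // bridgeHandler side: a matching 'channelOpenAck' arrived.
    complete(portID, { localAddress, remoteAddress, handler }) {
      const kit = pending.get(portID);
      pending.delete(portID);
      kit.resolver.resolve(harden({ localAddress, remoteAddress, handler }));
    },
  });
};
harden(preparePendingConnects);
```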
- channelKeyToAttemptP.delete(channelKey); - channelKeyToInfo.delete(channelKey); - void E(attemptP).close(); - throw e; - } - break; - } + await util.downcall('startChannelOpenInit', { + packet, + order, + hops, + version, + }); - case 'channelOpenAck': { - // Complete the pending outbound connection. + return kit.vow; + }, + async onListen(_port, localAddr, _listenHandler) { + console.debug('IBC onListen', localAddr); + }, + async onListenRemove(_port, localAddr, _listenHandler) { + console.debug('IBC onListenRemove', localAddr); + }, + async onRevoke(_port, localAddr, _protocolHandler) { + const { util } = this.facets; + const { portToPendingConns } = this.state; + + console.debug('IBC onRevoke', localAddr); + const portID = util.localAddrToPortID(localAddr); + + const pendingConns = portToPendingConns.get(portID); + portToPendingConns.delete(portID); + const revoked = Error(`Port ${localAddr} revoked`); + for (const resolver of pendingConns.values()) { + resolver.reject(revoked); + } + }, + }, + bridgeHandler: { + async fromBridge(obj) { const { - portID, - channelID, - counterparty: { port_id: rPortID, channel_id: rChannelID }, - counterpartyVersion: rVersion, - connectionHops: rHops, - } = obj; - const outbounds = srcPortToOutbounds.has(portID) - ? srcPortToOutbounds.get(portID) - : []; - const oidx = outbounds.findIndex( - ({ counterparty: { port_id: iPortID }, connectionHops: iHops }) => { - if (iPortID !== rPortID) { - return false; - } - if (iHops.length !== rHops.length) { - return false; + protocolImpl, + channelKeyToAttempt, + channelKeyToInfo, + srcPortToOutbounds, + channelKeyToConnP, + channelKeyToSeqAck, + } = this.state; + + const { util } = this.facets; + + console.info('IBC fromBridge', obj); + await null; + switch (obj.event) { + case 'channelOpenTry': { + // They're (more or less politely) asking if we are listening, so make an attempt. + const { + channelID, + portID, + counterparty: { port_id: rPortID, channel_id: rChannelID }, + connectionHops: hops, + order, + version, + counterpartyVersion: rVersion, + } = obj; + + const localAddr = `/ibc-port/${portID}/${order.toLowerCase()}/${version}`; + const ibcHops = hops.map(hop => `/ibc-hop/${hop}`).join('/'); + const remoteAddr = `${ibcHops}/ibc-port/${rPortID}/${order.toLowerCase()}/${rVersion}/ibc-channel/${rChannelID}`; + + // See if we allow an inbound attempt for this address pair (without + // rejecting). + // const attempt = await this.state.protocolImpl + // ?.inbound(localAddr, remoteAddr) + // .then( + // ack => console.info('Manual packet', ack, 'acked:', ack), + // e => console.warn('Manual packet', e, 'failed:', e), + // ); + + const attempt = await when( + /** @type {ProtocolImpl} */ (protocolImpl).inbound( + localAddr, + remoteAddr, + ), + ); + + // Tell what version string we negotiated. + const attemptedLocal = await E(attempt).getLocalAddress(); + const match = attemptedLocal.match( + // Match: ... /ORDER/VERSION ... + new RegExp('^(/[^/]+/[^/]+)*/(ordered|unordered)/([^/]+)(/|$)'), + ); + + const channelKey = `${channelID}:${portID}`; + if (!match) { + throw Error( + `${channelKey}: cannot determine version from attempted local address ${attemptedLocal}`, + ); } - for (let i = 0; i < iHops.length; i += 1) { - if (iHops[i] !== rHops[i]) { - return false; + const negotiatedVersion = match[3]; + + channelKeyToAttempt.init(channelKey, attempt); + channelKeyToInfo.init(channelKey, obj); + + try { + if (negotiatedVersion !== version) { + // Too late; the relayer gave us a version we didn't like. 
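The version negotiation itself is unchanged: the handler asks the inbound attempt for its local address and reads the version segment back out of it, rejecting the attempt if it differs from what the relayer proposed. That extraction as a standalone sketch (same regular expression as the patch, helper name illustrative):

```js
// negotiatedVersionOf('/ibc-port/transfer/ordered/ics20-1') === 'ics20-1'
const negotiatedVersionOf = attemptedLocal => {
  const match = attemptedLocal.match(
    // Match: ... /ORDER/VERSION ...
    /^(\/[^/]+\/[^/]+)*\/(ordered|unordered)\/([^/]+)(\/|$)/,
  );
  if (!match) {
    throw Error(
      `cannot determine version from attempted local address ${attemptedLocal}`,
    );
  }
  return match[3];
};
harden(negotiatedVersionOf);
```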
+ throw Error( + `${channelKey}: negotiated version was ${negotiatedVersion}; rejecting ${version}`, + ); } + } catch (e) { + // Clean up after our failed attempt. + channelKeyToAttempt.delete(channelKey); + channelKeyToInfo.delete(channelKey); + void E(attempt).close(); + throw e; } - return true; - }, - ); - oidx >= 0 || Fail`${portID}: did not expect channelOpenAck`; - const { onConnectP, localAddr, ...chanInfo } = outbounds[oidx]; - outbounds.splice(oidx, 1); - if (outbounds.length === 0) { - srcPortToOutbounds.delete(portID); - } + break; + } - // Finish the outbound connection. - const ibcHops = rHops.map(hop => `/ibc-hop/${hop}`).join('/'); - const remoteAddress = `${ibcHops}/ibc-port/${rPortID}/${chanInfo.order.toLowerCase()}/${rVersion}/ibc-channel/${rChannelID}`; - const localAddress = `${localAddr}/ibc-channel/${channelID}`; - const rchandler = makeIBCConnectionHandler( - channelID, - portID, - rChannelID, - rPortID, - chanInfo.order, - ); - onConnectP.resolve({ - localAddress, - remoteAddress, - handler: rchandler, - }); - break; - } + case 'channelOpenAck': { + // Complete the pending outbound connection. + const { + portID, + channelID, + counterparty: { port_id: rPortID, channel_id: rChannelID }, + counterpartyVersion: rVersion, + connectionHops: rHops, + } = obj; + const outbounds = this.state.srcPortToOutbounds.has(portID) + ? [...this.state.srcPortToOutbounds.get(portID)] + : []; + const oidx = outbounds.findIndex( + ({ + counterparty: { port_id: iPortID }, + connectionHops: iHops, + }) => { + if (iPortID !== rPortID) { + return false; + } + if (iHops.length !== rHops.length) { + return false; + } + for (let i = 0; i < iHops.length; i += 1) { + if (iHops[i] !== rHops[i]) { + return false; + } + } + return true; + }, + ); + oidx >= 0 || Fail`${portID}: did not expect channelOpenAck`; + const { onConnectP, localAddr, ...chanInfo } = outbounds[oidx]; + outbounds.splice(oidx, 1); + if (outbounds.length === 0) { + srcPortToOutbounds.delete(portID); + } else { + srcPortToOutbounds.set(portID, harden(outbounds)); + } - case 'channelOpenConfirm': { - const { portID, channelID } = obj; - const channelKey = `${channelID}:${portID}`; - channelKeyToAttemptP.has(channelKey) || - Fail`${channelKey}: did not expect channelOpenConfirm`; - const attemptP = channelKeyToAttemptP.get(channelKey); - channelKeyToAttemptP.delete(channelKey); + // Finish the outbound connection. + const ibcHops = rHops.map(hop => `/ibc-hop/${hop}`).join('/'); + const remoteAddress = `${ibcHops}/ibc-port/${rPortID}/${chanInfo.order.toLowerCase()}/${rVersion}/ibc-channel/${rChannelID}`; + const localAddress = `${localAddr}/ibc-channel/${channelID}`; + const rchandler = makeIBCConnectionHandler( + { + protocolUtils: util, + channelKeyToConnP, + channelKeyToSeqAck, + }, + { + channelID, + portID, + rChannelID, + rPortID, + order: chanInfo.order, + }, + ); + onConnectP.resolver.resolve({ + localAddress, + remoteAddress, + handler: rchandler, + }); + break; + } - // We have the information from our inbound connection, so complete it. 
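One durable-store idiom worth noting in the `channelOpenAck` branch: values held by a durable `MapStore` are hardened, so the outbound list cannot be spliced in place. The handler copies it, edits the copy, and writes it back with `set` (or `delete`s the key once it empties). Isolated as a sketch (store name as in the patch):

```js
// srcPortToOutbounds: MapStore<portID, Outbound[]> holding hardened arrays.
const removeOutbound = (srcPortToOutbounds, portID, oidx) => {
  const outbounds = [...srcPortToOutbounds.get(portID)]; // copy out
  const [removed] = outbounds.splice(oidx, 1); // edit the copy
  if (outbounds.length === 0) {
    srcPortToOutbounds.delete(portID);
  } else {
    srcPortToOutbounds.set(portID, harden(outbounds)); // write back
  }
  return removed;
};
harden(removeOutbound);
```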
- const { - order, - counterparty: { port_id: rPortID, channel_id: rChannelID }, - } = channelKeyToInfo.get(channelKey); - channelKeyToInfo.delete(channelKey); + case 'channelOpenConfirm': { + const { portID, channelID } = obj; + const channelKey = `${channelID}:${portID}`; + channelKeyToAttempt.has(channelKey) || + Fail`${channelKey}: did not expect channelOpenConfirm`; + const attemptP = channelKeyToAttempt.get(channelKey); + channelKeyToAttempt.delete(channelKey); + + // We have the information from our inbound connection, so complete it. + const { + order, + counterparty: { port_id: rPortID, channel_id: rChannelID }, + } = channelKeyToInfo.get(channelKey); + channelKeyToInfo.delete(channelKey); + + // Accept the attempt. + const rchandler = makeIBCConnectionHandler( + { + protocolUtils: util, + channelKeyToConnP, + channelKeyToSeqAck, + }, + { + channelID, + portID, + rChannelID, + rPortID, + order, + }, + ); + const localAddr = await E(attemptP).getLocalAddress(); + void E(attemptP).accept({ + localAddress: `${localAddr}/ibc-channel/${channelID}`, + handler: rchandler, + }); + break; + } - // Accept the attempt. - const rchandler = makeIBCConnectionHandler( - channelID, - portID, - rChannelID, - rPortID, - order, - ); - const localAddr = await E(attemptP).getLocalAddress(); - void E(attemptP).accept({ - localAddress: `${localAddr}/ibc-channel/${channelID}`, - handler: rchandler, - }); - break; - } + case 'receivePacket': { + const { packet } = obj; + const { + data: data64, + destination_port: portID, + destination_channel: channelID, + } = packet; + const channelKey = `${channelID}:${portID}`; + const conn = channelKeyToConnP.get(channelKey); + const data = base64ToBytes(data64); + + watch(conn.send(data), makeAckWatcher(util, packet)); + break; + } - case 'receivePacket': { - const { packet } = obj; - const { - data: data64, - destination_port: portID, - destination_channel: channelID, - } = packet; - const channelKey = `${channelID}:${portID}`; - const connP = channelKeyToConnP.get(channelKey); - const data = base64ToBytes(data64); - - await E(connP) - .send(data) - .then(ack => { - const realAck = ack || DEFAULT_ACKNOWLEDGEMENT; - const ack64 = dataToBase64(realAck); - return callIBCDevice('receiveExecuted', { packet, ack: ack64 }); - }) - .catch(e => console.error(e)); - break; - } + case 'acknowledgementPacket': { + const { packet, acknowledgement } = obj; + const { + sequence, + source_channel: channelID, + source_port: portID, + } = packet; + const ackKit = util.findAckKit(channelID, portID, sequence); + ackKit.resolver.resolve(base64ToBytes(acknowledgement)); + break; + } - case 'acknowledgementPacket': { - const { packet, acknowledgement } = obj; - const { - sequence, - source_channel: channelID, - source_port: portID, - } = packet; - const channelKey = `${channelID}:${portID}`; - const seqToAck = channelKeyToSeqAck.get(channelKey); - const ackDeferred = seqToAck.get(sequence); - ackDeferred.resolve(base64ToBytes(acknowledgement)); - seqToAck.delete(sequence); - break; - } + case 'timeoutPacket': { + const { packet } = obj; + const { + sequence, + source_channel: channelID, + source_port: portID, + } = packet; + const ackKit = util.findAckKit(channelID, portID, sequence); + ackKit.resolver.reject(Error(`Packet timed out`)); + break; + } + + case 'channelCloseInit': + case 'channelCloseConfirm': { + const { portID, channelID } = obj; + const channelKey = `${channelID}:${portID}`; + if (channelKeyToConnP.has(channelKey)) { + const conn = channelKeyToConnP.get(channelKey); + 
channelKeyToConnP.delete(channelKey); + void conn.close(); + } + break; + } + + case 'sendPacket': { + const { packet, relativeTimeoutNs } = obj; + util.ibcSendPacket(packet, relativeTimeoutNs).then( + ack => console.info('Manual packet', packet, 'acked:', ack), + e => console.warn('Manual packet', packet, 'failed:', e), + ); + break; + } + + default: + console.error('Unexpected IBC_EVENT', obj.event); + assert.fail(X`unrecognized method ${obj.event}`, TypeError); + } + }, + }, + util: { + downcall(method, args) { + return E(this.state.ibcdev).downcall(method, args); + }, + /** + * Send a packet out via the IBC device. + * + * @param {IBCPacket} packet + * @param {bigint} [relativeTimeoutNs] + * @returns {PromiseVow} Acknowledgement data + */ + async ibcSendPacket( + packet, + relativeTimeoutNs = DEFAULT_PACKET_TIMEOUT_NS, + ) { + const { util } = this.facets; + // Make a kernel call to do the send. + const fullPacket = await util.downcall('sendPacket', { + packet, + relativeTimeoutNs, + }); - case 'timeoutPacket': { - const { packet } = obj; + // Extract the actual sequence number from the return. const { sequence, source_channel: channelID, source_port: portID, - } = packet; - const channelKey = `${channelID}:${portID}`; - const seqToAck = channelKeyToSeqAck.get(channelKey); - const ackDeferred = seqToAck.get(sequence); - ackDeferred.reject(Error(`Packet timed out`)); - seqToAck.delete(sequence); - break; - } - - case 'channelCloseInit': - case 'channelCloseConfirm': { - const { portID, channelID } = obj; - const channelKey = `${channelID}:${portID}`; - if (channelKeyToConnP.has(channelKey)) { - const connP = channelKeyToConnP.get(channelKey); - channelKeyToConnP.delete(channelKey); - void E(connP).close(); + } = fullPacket; + + /** @type {import('@agoric/vow').VowKit} */ + const { vow } = util.findAckKit(channelID, portID, sequence); + return vow; + }, + /** @param {string} localAddr */ + localAddrToPortID(localAddr) { + const m = localAddr.match(/^\/ibc-port\/([-a-zA-Z0-9._+#[\]<>]+)$/); + if (!m) { + throw TypeError( + `Invalid port specification ${localAddr}; expected "/ibc-port/PORT"`, + ); } - break; - } + return m[1]; + }, - case 'sendPacket': { - const { packet, relativeTimeoutNs } = obj; - const { source_port: portID, source_channel: channelID } = packet; + findAckKit(channelID, portID, sequence) { + const { channelKeyToSeqAck } = this.state; const channelKey = `${channelID}:${portID}`; const seqToAck = channelKeyToSeqAck.get(channelKey); - ibcSendPacket(packet, seqToAck, relativeTimeoutNs).then( - ack => console.info('Manual packet', packet, 'acked:', ack), - e => console.warn('Manual packet', packet, 'failed:', e), - ); - break; - } + if (seqToAck.has(sequence)) { + return seqToAck.get(sequence); + } + const kit = makeVowKit(); + seqToAck.init(sequence, harden(kit)); + return kit; + }, + }, + }, + ); - default: - console.error('Unexpected IBC_EVENT', obj.event); - assert.fail(X`unrecognized method ${obj.event}`, TypeError); - } + const makeIBCProtocolHandlerKit = ibcdev => { + const { protocolHandler, bridgeHandler } = makeIBCProtocolKit(ibcdev); + return harden({ protocolHandler, bridgeHandler }); + }; + + const provideIBCProtocolHandlerKit = ibcdev => { + if (ibcdevToKit.has(ibcdev)) { + return ibcdevToKit.get(ibcdev); + } + const kit = makeIBCProtocolHandlerKit(ibcdev); + ibcdevToKit.init(ibcdev, kit); + return kit; + }; + return provideIBCProtocolHandlerKit; +}; + +harden(prepareIBCProtocol); + +/** @param {import('@agoric/base-zone').Zone} zone */ +export const 
prepareCallbacks = zone => { + return zone.exoClass( + 'callbacks', + undefined, + /** @param {import('@agoric/vats').ScopedBridgeManager} dibcBridgeManager */ + dibcBridgeManager => ({ dibcBridgeManager }), + { + downcall(method, obj) { + const { dibcBridgeManager } = this.state; + return E(dibcBridgeManager).toBridge({ + ...obj, + type: 'IBC_METHOD', + method, + }); + }, }, - }); -} + ); +}; diff --git a/packages/vats/src/proposals/network-proposal.js b/packages/vats/src/proposals/network-proposal.js index 8565ed5df76..729abbf033d 100644 --- a/packages/vats/src/proposals/network-proposal.js +++ b/packages/vats/src/proposals/network-proposal.js @@ -1,10 +1,8 @@ -import { E, Far } from '@endo/far'; +import { E } from '@endo/far'; import { BridgeId as BRIDGE_ID } from '@agoric/internal'; -import { - makeLoopbackProtocolHandler, - makeEchoConnectionHandler, - makeNonceMaker, -} from '@agoric/network'; +import { prepareVowTools } from '@agoric/vat-data/vow.js'; +import { makeScalarMapStore } from '@agoric/store'; +import { makeHeapZone } from '@agoric/zone'; const NUM_IBC_PORTS_PER_CLIENT = 3; const INTERCHAIN_ACCOUNT_CONTROLLER_PORT_PREFIX = 'icacontroller-'; @@ -15,60 +13,37 @@ const INTERCHAIN_ACCOUNT_CONTROLLER_PORT_PREFIX = 'icacontroller-'; */ export const registerNetworkProtocols = async (vats, dibcBridgeManager) => { const ps = []; + + const loopbackHandler = await E(vats.network).makeLoopbackProtocolHandler(); // Every vat has a loopback device. - ps.push( - E(vats.network).registerProtocolHandler( - ['/local'], - makeLoopbackProtocolHandler(), - ), - ); + ps.push(E(vats.network).registerProtocolHandler(['/local'], loopbackHandler)); + if (dibcBridgeManager) { assert('ibc' in vats); // We have access to the bridge, and therefore IBC. - const callbacks = Far('callbacks', { - downcall(method, obj) { - return E(dibcBridgeManager).toBridge({ - ...obj, - type: 'IBC_METHOD', - method, - }); - }, - }); + const settledBridgeManager = await dibcBridgeManager; + const callbacks = await E(vats.ibc).makeCallbacks(settledBridgeManager); ps.push( E(vats.ibc) - .createInstance(callbacks) - .then(ibcHandler => + .createHandlers(callbacks) + .then(({ protocolHandler, bridgeHandler }) => E(dibcBridgeManager) - .initHandler(ibcHandler) + .initHandler(bridgeHandler) .then(() => E(vats.network).registerProtocolHandler( ['/ibc-port', '/ibc-hop'], - ibcHandler, + protocolHandler, ), ), ), ); } else { - const loHandler = makeLoopbackProtocolHandler( - makeNonceMaker('ibc-channel/channel-'), + const loHandler = await E(vats.network).makeLoopbackProtocolHandler( + 'ibc-channel/channel-', ); ps.push(E(vats.network).registerProtocolHandler(['/ibc-port'], loHandler)); } await Promise.all(ps); - - // Add an echo listener on our ibc-port network (whether real or virtual). 
- const echoPort = await E(vats.network).bind('/ibc-port/echo'); - - return E(echoPort).addListener( - Far('listener', { - async onAccept(_port, _localAddr, _remoteAddr, _listenHandler) { - return harden(makeEchoConnectionHandler()); - }, - async onListen(port, _listenHandler) { - console.debug(`listening on echo port: ${port}`); - }, - }), - ); }; /** @@ -95,8 +70,9 @@ export const setupNetworkProtocols = async ( loadCriticalVat, bridgeManager: bridgeManagerP, provisioning, + vatUpgradeInfo: vatUpgradeInfoP, }, - produce: { networkVat }, + produce: { networkVat, vatUpgradeInfo: produceVatUpgradeInfo }, }, options, ) => { @@ -110,6 +86,11 @@ export const setupNetworkProtocols = async ( // don't proceed if loadCriticalVat fails await Promise.all(Object.values(vats)); + produceVatUpgradeInfo.resolve(makeScalarMapStore('vatUpgradeInfo')); + const info = await vatUpgradeInfoP; + info.init('ibc', ibcRef); + info.init('network', networkRef); + networkVat.reset(); networkVat.resolve(vats.network); const bridgeManager = await bridgeManagerP; @@ -139,6 +120,16 @@ export const setupNetworkProtocols = async ( // we need to finish registering handlers for // ibc-port etc. await registerNetworkProtocols(vats, dibcBridgeManager); + + // Heap-based vow resolution is used for this module because the + // bootstrap vat can't yet be upgraded. + const powers = prepareVowTools(makeHeapZone()); + + const { when } = powers; + // Add an echo listener on our ibc-port network (whether real or virtual). + const echoPort = await when(E(vats.network).bind('/ibc-port/echo')); + const { listener } = await E(vats.network).makeEchoConnectionKit(); + await E(echoPort).addListener(listener); return E(client).assignBundle([_a => ({ ibcport: makePorts() })]); }; @@ -151,10 +142,13 @@ export const getManifestForNetwork = (_powers, { networkRef, ibcRef }) => ({ bridgeManager: 'bridge', zoe: 'zoe', provisioning: 'provisioning', + vatUpgradeInfo: true, }, produce: { networkVat: 'network', + vatUpgradeInfo: true, }, + zone: true, }, }, options: { diff --git a/packages/vats/src/proposals/restart-vats-proposal.js b/packages/vats/src/proposals/restart-vats-proposal.js index 02b42cb1342..9332626dae7 100644 --- a/packages/vats/src/proposals/restart-vats-proposal.js +++ b/packages/vats/src/proposals/restart-vats-proposal.js @@ -16,6 +16,8 @@ const vatUpgradeStatus = { bank: 'covered by test-upgrade-vats: upgrade vat-bank', board: 'covered by test-upgrade-vats: upgrade vat-board', bridge: 'covered by test-upgrade-vats: upgrade vat-bridge', + ibc: 'upgradeable', + network: 'upgradeable', priceAuthority: 'covered by test-upgrade-vats: upgrade vat-priceAuthority', provisioning: 'UNTESTED', zoe: 'tested in @agoric/zoe', @@ -117,11 +119,19 @@ export const restartVats = async ({ consume }, { options }) => { } trace('iterating over vatStore'); - for (const [name] of vatStore.entries()) { + for (const [name, { adminNode }] of vatStore.entries()) { const status = vatUpgradeStatus[name]; if (!status) { Fail`unaudited vat ${name}`; } + if (status === 'upgradeable') { + console.log('upgrading vat', name); + const { vatAdminSvc } = consume; + const info = await consume.vatUpgradeInfo; + const { bundleID } = info.get(name); + const bcap = await E(vatAdminSvc).getBundleCap(bundleID); + await E(adminNode).upgrade(bcap); + } console.log('VAT', name, status); } @@ -144,6 +154,8 @@ export const getManifestForRestart = (_powers, options) => ({ loadCriticalVat: true, psmKit: true, vatStore: true, + vatAdminSvc: true, + vatUpgradeInfo: true, zoe: 'zoe', 
provisioning: 'provisioning', vaultFactoryKit: true, diff --git a/packages/vats/src/proposals/zcf-proposal.js b/packages/vats/src/proposals/zcf-proposal.js index 7d974e02460..c7373a0c301 100644 --- a/packages/vats/src/proposals/zcf-proposal.js +++ b/packages/vats/src/proposals/zcf-proposal.js @@ -1,3 +1,4 @@ +// @ts-check import { E } from '@endo/far'; /** diff --git a/packages/vats/src/vat-ibc.js b/packages/vats/src/vat-ibc.js index 1e4c9ea30ed..08e9b0f6088 100644 --- a/packages/vats/src/vat-ibc.js +++ b/packages/vats/src/vat-ibc.js @@ -1,14 +1,25 @@ -import { E, Far } from '@endo/far'; -import { makeIBCProtocolHandler } from './ibc.js'; - -export function buildRootObject() { - function createInstance(callbacks) { - const ibcHandler = makeIBCProtocolHandler(E, (method, params) => - E(callbacks).downcall(method, params), - ); +import { Far } from '@endo/far'; +import { makeDurableZone } from '@agoric/zone/durable.js'; +import { prepareVowTools } from '@agoric/vat-data/vow.js'; +import { prepareCallbacks, prepareIBCProtocol } from './ibc.js'; + +export function buildRootObject(_vatPowers, _args, baggage) { + const zone = makeDurableZone(baggage); + const powers = prepareVowTools(zone.subZone('vow')); + const makeIBCProtocolHandler = prepareIBCProtocol( + zone.subZone('IBC'), + powers, + ); + + const makeCallbacks = prepareCallbacks(zone); + + function createHandlers(callbacks) { + const ibcHandler = makeIBCProtocolHandler(callbacks); return harden(ibcHandler); } + return Far('root', { - createInstance, + createHandlers, + makeCallbacks, }); } diff --git a/packages/vats/src/vat-network.js b/packages/vats/src/vat-network.js index 8b499cec7a0..f1784069f0d 100644 --- a/packages/vats/src/vat-network.js +++ b/packages/vats/src/vat-network.js @@ -1,5 +1,41 @@ -import { makeRouterProtocol } from '@agoric/network'; +// @ts-check +import { makeDurableZone } from '@agoric/zone/durable.js'; +import { + prepareEchoConnectionKit, + prepareLoopbackProtocolHandler, + prepareRouterProtocol, +} from '@agoric/network'; +import { prepareVowTools } from '@agoric/vat-data/vow.js'; +import { Far } from '@endo/far'; -export function buildRootObject() { - return makeRouterProtocol(); // already Far('Router') +export function buildRootObject(_vatPowers, _args, baggage) { + const zone = makeDurableZone(baggage); + const powers = prepareVowTools(zone.subZone('vow')); + + const makeRouterProtocol = prepareRouterProtocol( + zone.subZone('network'), + powers, + ); + const protocol = zone.makeOnce('RouterProtocol', _key => + makeRouterProtocol(), + ); + + const makeLoopbackProtocolHandler = prepareLoopbackProtocolHandler( + zone, + powers, + ); + const makeEchoConnectionKit = prepareEchoConnectionKit(zone); + + return Far('RouterProtocol', { + makeLoopbackProtocolHandler, + makeEchoConnectionKit, + /** @param {Parameters} args */ + registerProtocolHandler: (...args) => + protocol.registerProtocolHandler(...args), + /** @param {Parameters} args */ + unregisterProtocolHandler: (...args) => + protocol.unregisterProtocolHandler(...args), + /** @param {Parameters} args */ + bind: (...args) => protocol.bind(...args), + }); } diff --git a/packages/vats/test/test-network.js b/packages/vats/test/test-network.js index 6e722b08f14..00d200d2c28 100644 --- a/packages/vats/test/test-network.js +++ b/packages/vats/test/test-network.js @@ -1,82 +1,162 @@ import { test } from '@agoric/swingset-vat/tools/prepare-test-env-ava.js'; +import { reincarnate } from '@agoric/swingset-liveslots/tools/setup-vat-data.js'; -import { E, Far } from 
'@endo/far'; -import { makeSubscriptionKit } from '@agoric/notifier'; +import { E } from '@endo/far'; +import { + makePinnedHistoryTopic, + prepareDurablePublishKit, + subscribeEach, +} from '@agoric/notifier'; +import { makeDurableZone } from '@agoric/zone/durable.js'; +import { prepareVowTools } from '@agoric/vat-data/vow.js'; import { buildRootObject as ibcBuildRootObject } from '../src/vat-ibc.js'; import { buildRootObject as networkBuildRootObject } from '../src/vat-network.js'; +import '../src/types.js'; +import { registerNetworkProtocols } from '../src/proposals/network-proposal.js'; + +const { fakeVomKit } = reincarnate({ relaxDurabilityRules: false }); +const provideBaggage = key => { + const root = fakeVomKit.cm.provideBaggage(); + const zone = makeDurableZone(root); + return zone.mapStore(`${key} baggage`); +}; + +const preparePlusOneConnectionHandler = (zone, { makeVowKit }, log) => { + const makePlusOneConnectionHandler = zone.exoClass( + 'plusOne', + undefined, + ({ publisher }) => { + return { + publisher, + }; + }, + { + async onReceive(_c, packetBytes) { + log('Receiving Data', packetBytes); + const { vow, resolver } = makeVowKit(); + resolver.resolve(`${packetBytes}1`); + return vow; + }, + async onOpen(_c, localAddr, remoteAddr, _connectionHandler) { + this.state.publisher.publish([ + 'plusOne-open', + { localAddr, remoteAddr }, + ]); + }, + }, + ); + + return makePlusOneConnectionHandler; +}; + +const prepareIBCListener = (zone, makePlusOne) => { + const makeIBCListener = zone.exoClass( + 'ibcListener', + undefined, + ({ publisher }) => { + return { publisher }; + }, + { + async onAccept(_port, _localAddr, _remoteAddr, _listenHandler) { + return makePlusOne({ publisher: this.state.publisher }); + }, + async onListen(port, _listenHandler) { + console.debug(`listening on echo port: ${port}`); + }, + }, + ); + + return makeIBCListener; +}; + test('network - ibc', async t => { - const networkVat = E(networkBuildRootObject)(); - const ibcVat = E(ibcBuildRootObject)(); + const networkVat = E(networkBuildRootObject)( + null, + null, + provideBaggage('network'), + ); + const ibcVat = E(ibcBuildRootObject)(null, null, provideBaggage('ibc')); + const baggage = provideBaggage('network - ibc'); + const zone = makeDurableZone(baggage); + const powers = prepareVowTools(zone); + const { when } = powers; - const { subscription, publication } = makeSubscriptionKit(); + const makeDurablePublishKit = prepareDurablePublishKit( + baggage, + 'DurablePublishKit', + ); - const events = subscription[Symbol.asyncIterator](); - const callbacks = Far('ibcCallbacks', { - downcall: (method, params) => { - publication.updateState([method, params]); + const { subscriber, publisher } = makeDurablePublishKit(); + + const pinnedHistoryTopic = makePinnedHistoryTopic(subscriber); + const events = subscribeEach(pinnedHistoryTopic)[Symbol.asyncIterator](); + + let hndlr; + /** @type {import('../src/types.js').ScopedBridgeManager} */ + const bridgeHandler = zone.exo('IBC Bridge Manager', undefined, { + toBridge: async obj => { + const { method, type, ...params } = obj; + publisher.publish([method, params]); + t.is(type, 'IBC_METHOD'); if (method === 'sendPacket') { const { packet } = params; return { ...packet, sequence: '39' }; } return undefined; }, + fromBridge: async obj => { + if (!hndlr) throw Error('no handler!'); + await E(hndlr).fromBridge(obj); + }, + initHandler: h => { + if (hndlr) throw Error('already init'); + hndlr = h; + }, + setHandler: h => { + if (!hndlr) throw Error('must init first'); + 
hndlr = h; + }, }); - const ibcHandler = await E(ibcVat).createInstance(callbacks); - await E(networkVat).registerProtocolHandler( - ['/ibc-port', '/ibc-hop'], - ibcHandler, + await registerNetworkProtocols( + { network: networkVat, ibc: ibcVat, provisioning: undefined }, + bridgeHandler, ); // Actually test the ibc port binding. // TODO: Do more tests on the returned Port object. - const p = E(networkVat).bind('/ibc-port/'); - await p; + t.log('Opening a Listening Port'); + const p = await when(E(networkVat).bind('/ibc-port/')); const ev1 = await events.next(); t.assert(!ev1.done); t.deepEqual(ev1.value, ['bindPort', { packet: { source_port: 'port-1' } }]); + const makePlusOne = preparePlusOneConnectionHandler(zone, powers, t.log); + const makeIBCListener = prepareIBCListener(zone, makePlusOne); + const testEcho = async () => { - await E(p).addListener( - Far('ibcListener', { - async onAccept(_port, _localAddr, _remoteAddr, _listenHandler) { - /** @type {ConnectionHandler} */ - const handler = Far('plusOne', { - async onReceive(_c, packetBytes) { - return `${packetBytes}1`; - }, - async onOpen(_c, localAddr, remoteAddr, _connectionHandler) { - publication.updateState([ - 'plusOne-open', - { localAddr, remoteAddr }, - ]); - }, - }); - return handler; - }, - async onListen(port, _listenHandler) { - console.debug(`listening on echo port: ${port}`); - }, - }), - ); + await E(p).addListener(makeIBCListener({ publisher })); - const c = E(p).connect('/ibc-port/port-1/unordered/foo'); + t.log('Accepting an Inbound Connection'); + const c = await when(E(p).connect('/ibc-port/port-1/unordered/foo')); - const ack = await E(c).send('hello198'); + t.log('Sending Data - echo'); + const ack = await when(E(c).send('hello198')); t.is(ack, 'hello1981', 'expected echo'); - await c; - await E(c).close(); + t.log('Closing the Connection'); + await when(E(c).close()); }; await testEcho(); const testIBCOutbound = async () => { - const c = E(p).connect( - '/ibc-hop/connection-11/ibc-port/port-98/unordered/bar', - ); + t.log('Connecting to a Remote Port'); + const [hopName, portName, version] = ['connection-11', 'port-98', 'bar']; + const remoteEndpoint = `/ibc-hop/${hopName}/ibc-port/${portName}/unordered/${version}`; + const cP = E(p).connect(remoteEndpoint); const evopen = await events.next(); t.assert(!evopen.done); @@ -100,7 +180,7 @@ test('network - ibc', async t => { }, ]); - await E(ibcHandler).fromBridge({ + await E(bridgeHandler).fromBridge({ event: 'channelOpenAck', portID: 'port-1', channelID: 'channel-1', @@ -109,7 +189,8 @@ test('network - ibc', async t => { connectionHops: ['connection-11'], }); - await c; + const c = await when(cP); + t.log('Sending Data - transfer'); const ack = E(c).send('some-transfer-message'); const ev3 = await events.next(); @@ -128,7 +209,7 @@ test('network - ibc', async t => { }, ]); - await E(ibcHandler).fromBridge({ + await E(bridgeHandler).fromBridge({ event: 'acknowledgementPacket', packet: { data: 'c29tZS10cmFuc2Zlci1tZXNzYWdl', @@ -141,7 +222,7 @@ test('network - ibc', async t => { acknowledgement: 'YS10cmFuc2Zlci1yZXBseQ==', }); - t.is(await ack, 'a-transfer-reply'); + t.is(await when(ack), 'a-transfer-reply'); await E(c).close(); }; @@ -149,7 +230,7 @@ test('network - ibc', async t => { await testIBCOutbound(); const testIBCInbound = async () => { - await E(ibcHandler).fromBridge({ + await E(bridgeHandler).fromBridge({ event: 'channelOpenTry', channelID: 'channel-2', portID: 'port-1', @@ -160,7 +241,7 @@ test('network - ibc', async t => { counterpartyVersion: 
'bazo', }); - await E(ibcHandler).fromBridge({ + await E(bridgeHandler).fromBridge({ event: 'channelOpenConfirm', portID: 'port-1', channelID: 'channel-2', @@ -177,7 +258,7 @@ test('network - ibc', async t => { }, ]); - await E(ibcHandler).fromBridge({ + await E(bridgeHandler).fromBridge({ event: 'receivePacket', packet: { data: 'aW5ib3VuZC1tc2c=', @@ -208,7 +289,7 @@ test('network - ibc', async t => { await testIBCInbound(); // Verify that we consumed all the published events. - publication.finish([]); + publisher.finish([]); const evend = await events.next(); t.assert(evend.done); t.deepEqual(evend.value, []); diff --git a/packages/vats/tools/boot-test-utils.js b/packages/vats/tools/boot-test-utils.js index ba166db519e..4e938f497d5 100644 --- a/packages/vats/tools/boot-test-utils.js +++ b/packages/vats/tools/boot-test-utils.js @@ -73,7 +73,8 @@ export const makeMock = log => network: Far('network', { registerProtocolHandler: noop, - bind: () => harden({ addListener: noop }), + makeLoopbackProtocolHandler: noop, + bind: () => Far('network - listener', { addListener: noop }), }), }, }); diff --git a/yarn.lock b/yarn.lock index 864d1a4eb43..495d6fb66d6 100644 --- a/yarn.lock +++ b/yarn.lock @@ -591,7 +591,7 @@ "@endo/promise-kit" "^1.0.4" "@fast-check/ava" "^1.1.5" -"@endo/patterns@^1.2.0": +"@endo/patterns@^1.1.0", "@endo/patterns@^1.2.0": version "1.2.0" resolved "https://registry.yarnpkg.com/@endo/patterns/-/patterns-1.2.0.tgz#5932feb051e2b966ed010aa14f0428b1712d91a2" integrity sha512-uZaN7nMSW7wWB1Nhyxpppowmchc//Z+UvilakQqrN8uUzX0qUKmTgviS4vKtCmsFtLXlfI13uqp0k8OPr3ClpQ== From f45a0f59d3613f4fc5bb2ad8e9c204b55f44f74f Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Fri, 16 Feb 2024 15:42:46 -0800 Subject: [PATCH 02/47] lint: loosen for .d.ts --- .eslintrc.cjs | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/.eslintrc.cjs b/.eslintrc.cjs index 31dab462f0f..a4847e907f8 100644 --- a/.eslintrc.cjs +++ b/.eslintrc.cjs @@ -154,6 +154,15 @@ module.exports = { 'no-undef': 'off', }, }, + { + files: ['*.d.ts'], + rules: { + // Irrelevant in a typedef + 'no-use-before-define': 'off', + // Linter confuses the type declaration with value declaration + 'no-redeclare': 'off', + }, + }, { // disable type-aware linting in HTML files: ['*.html'], From 0be5be39047281ec5367f2079d96d7d05cfb024d Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Tue, 27 Feb 2024 17:34:53 -0800 Subject: [PATCH 03/47] chore(deps): pin tsx to work around: Error: tsx must be loaded with --import instead of --loader The --loader flag was deprecated in Node v20.6.0 --- packages/boot/package.json | 2 +- yarn.lock | 38 +++++++++++++++++++------------------- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/packages/boot/package.json b/packages/boot/package.json index b20df47f81b..2bbdf561261 100644 --- a/packages/boot/package.json +++ b/packages/boot/package.json @@ -52,7 +52,7 @@ "@agoric/swingset-liveslots": "^0.10.2", "ava": "^5.3.0", "c8": "^7.13.0", - "tsx": "^3.12.8" + "tsx": "3.12.8" }, "files": [ "CHANGELOG.md", diff --git a/yarn.lock b/yarn.lock index 495d6fb66d6..50418f645aa 100644 --- a/yarn.lock +++ b/yarn.lock @@ -673,28 +673,28 @@ jsdoc-type-pratt-parser "~4.0.0" "@esbuild-kit/cjs-loader@^2.4.2": - version "2.4.2" - resolved "https://registry.yarnpkg.com/@esbuild-kit/cjs-loader/-/cjs-loader-2.4.2.tgz#cb4dde00fbf744a68c4f20162ea15a8242d0fa54" - integrity sha512-BDXFbYOJzT/NBEtp71cvsrGPwGAMGRB/349rwKuoxNSiKjPraNNnlK6MIIabViCjqZugu6j+xeMDlEkWdHHJSg== + version "2.4.4" + resolved 
"https://registry.yarnpkg.com/@esbuild-kit/cjs-loader/-/cjs-loader-2.4.4.tgz#8638177732e2de258a3243597bfdba082993c442" + integrity sha512-NfsJX4PdzhwSkfJukczyUiZGc7zNNWZcEAyqeISpDnn0PTfzMJR1aR8xAIPskBejIxBJbIgCCMzbaYa9SXepIg== dependencies: - "@esbuild-kit/core-utils" "^3.0.0" - get-tsconfig "^4.4.0" + "@esbuild-kit/core-utils" "^3.2.3" + get-tsconfig "^4.7.0" -"@esbuild-kit/core-utils@^3.0.0", "@esbuild-kit/core-utils@^3.2.2": - version "3.2.2" - resolved "https://registry.yarnpkg.com/@esbuild-kit/core-utils/-/core-utils-3.2.2.tgz#ac3fe38d6ddcb3aa4658425034bb7a9cefa83495" - integrity sha512-Ub6LaRaAgF80dTSzUdXpFLM1pVDdmEVB9qb5iAzSpyDlX/mfJTFGOnZ516O05p5uWWteNviMKi4PAyEuRxI5gA== +"@esbuild-kit/core-utils@^3.2.2", "@esbuild-kit/core-utils@^3.2.3", "@esbuild-kit/core-utils@^3.3.2": + version "3.3.2" + resolved "https://registry.yarnpkg.com/@esbuild-kit/core-utils/-/core-utils-3.3.2.tgz#186b6598a5066f0413471d7c4d45828e399ba96c" + integrity sha512-sPRAnw9CdSsRmEtnsl2WXWdyquogVpB3yZ3dgwJfe8zrOzTsV7cJvmwrKVa+0ma5BoiGJ+BoqkMvawbayKUsqQ== dependencies: esbuild "~0.18.20" source-map-support "^0.5.21" "@esbuild-kit/esm-loader@^2.5.5": - version "2.5.5" - resolved "https://registry.yarnpkg.com/@esbuild-kit/esm-loader/-/esm-loader-2.5.5.tgz#b82da14fcee3fc1d219869756c06f43f67d1ca71" - integrity sha512-Qwfvj/qoPbClxCRNuac1Du01r9gvNOT+pMYtJDapfB1eoGN1YlJ1BixLyL9WVENRx5RXgNLdfYdx/CuswlGhMw== + version "2.6.5" + resolved "https://registry.yarnpkg.com/@esbuild-kit/esm-loader/-/esm-loader-2.6.5.tgz#6eedee46095d7d13b1efc381e2211ed1c60e64ea" + integrity sha512-FxEMIkJKnodyA1OaCUoEvbYRkoZlLZ4d/eXFu9Fh8CbBBgP5EmZxrfTRyN0qpXZ4vOvqnE5YdRdcrmUUXuU+dA== dependencies: - "@esbuild-kit/core-utils" "^3.0.0" - get-tsconfig "^4.4.0" + "@esbuild-kit/core-utils" "^3.3.2" + get-tsconfig "^4.7.0" "@esbuild/android-arm64@0.18.20": version "0.18.20" @@ -5157,10 +5157,10 @@ get-symbol-description@^1.0.0: call-bind "^1.0.2" get-intrinsic "^1.1.1" -get-tsconfig@^4.4.0: - version "4.7.0" - resolved "https://registry.yarnpkg.com/get-tsconfig/-/get-tsconfig-4.7.0.tgz#06ce112a1463e93196aa90320c35df5039147e34" - integrity sha512-pmjiZ7xtB8URYm74PlGJozDNyhvsVLUcpBa8DZBG3bWHwaHa9bPiRpiSfovw+fjhwONSCWKRyk+JQHEGZmMrzw== +get-tsconfig@^4.7.0: + version "4.7.2" + resolved "https://registry.yarnpkg.com/get-tsconfig/-/get-tsconfig-4.7.2.tgz#0dcd6fb330391d46332f4c6c1bf89a6514c2ddce" + integrity sha512-wuMsz4leaj5hbGgg4IvDU0bqJagpftG5l5cXIAvo8uZrqn0NJqwtfupTN00VnkQJPcIRrxYrm1Ue24btpCha2A== dependencies: resolve-pkg-maps "^1.0.0" @@ -9388,7 +9388,7 @@ tsutils@3, tsutils@~3.21.0: dependencies: tslib "^1.8.1" -tsx@^3.12.8: +tsx@3.12.8: version "3.12.8" resolved "https://registry.yarnpkg.com/tsx/-/tsx-3.12.8.tgz#e9ec95c6b116e28f0187467f839029a3ce17a851" integrity sha512-Lt9KYaRGF023tlLInPj8rgHwsZU8qWLBj4iRXNWxTfjIkU7canGL806AqKear1j722plHuiYNcL2ZCo6uS9UJA== From bd0e56717a928cc78e30f5b5337db45ec1c9887b Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Wed, 28 Feb 2024 10:47:40 -0800 Subject: [PATCH 04/47] lint: void unhandled when --- packages/agoric-cli/src/bin-agops.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/agoric-cli/src/bin-agops.js b/packages/agoric-cli/src/bin-agops.js index 1f893353055..ab655712870 100755 --- a/packages/agoric-cli/src/bin-agops.js +++ b/packages/agoric-cli/src/bin-agops.js @@ -77,7 +77,7 @@ program.addCommand(makeAuctionCommand(logger, { ...procIO, fetch })); program.addCommand(makeInterCommand(procIO, { fetch })); program.addCommand(makeTestCommand(procIO, { fetch })); 
-E.when(program.parseAsync(process.argv), undefined, err => { +void E.when(program.parseAsync(process.argv), undefined, err => { if (err instanceof CommanderError) { console.error(err.message); } else { From c57996a4fec1fb3a222bd5a2611588f5270630dd Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Wed, 28 Feb 2024 10:48:20 -0800 Subject: [PATCH 05/47] lint: for loop --- packages/agoric-cli/src/install.js | 10 ++++++---- packages/agoric-cli/test/test-inter-cli.js | 18 +++++++++--------- 2 files changed, 15 insertions(+), 13 deletions(-) diff --git a/packages/agoric-cli/src/install.js b/packages/agoric-cli/src/install.js index 09f22372ee5..50511244b54 100644 --- a/packages/agoric-cli/src/install.js +++ b/packages/agoric-cli/src/install.js @@ -41,9 +41,9 @@ export default async function installMain(progname, rawArgs, powers, opts) { p.childProcess.stdout.on('data', out => stdout.push(out)); await p; const d = JSON.parse(Buffer.concat(stdout).toString('utf-8')); - Object.entries(d).forEach(([name, { location }]) => - map.set(name, path.resolve(cwd, location)), - ); + for (const [name, { location }] of Object.entries(d)) { + map.set(name, path.resolve(cwd, location)); + } return map; } @@ -268,7 +268,9 @@ export default async function installMain(progname, rawArgs, powers, opts) { }; await Promise.all(subdirs.map(removeNodeModulesSymlinks)); } else { - DEFAULT_SDK_PACKAGE_NAMES.forEach(name => sdkPackageToPath.set(name, null)); + for (const name of DEFAULT_SDK_PACKAGE_NAMES) { + sdkPackageToPath.set(name, null); + } } if (forceSdkVersion !== undefined) { diff --git a/packages/agoric-cli/test/test-inter-cli.js b/packages/agoric-cli/test/test-inter-cli.js index ee79b967df7..87aed4b8d9c 100644 --- a/packages/agoric-cli/test/test-inter-cli.js +++ b/packages/agoric-cli/test/test-inter-cli.js @@ -130,12 +130,12 @@ const makeProcess = (t, keyring, out) => { t.truthy(cmd); switch (cmd) { case 'keys': { - ['--node', '--chain'].forEach(opt => { + for (const opt of ['--node', '--chain']) { const ix = args.findIndex(a => a.startsWith(opt)); if (ix >= 0) { args.splice(ix, 1); } - }); + } t.deepEqual(args.slice(0, 3), ['keys', 'show', '--address']); const name = args[3]; const addr = keyring[name]; @@ -316,10 +316,10 @@ test('README: inter usage', async t => { const net = makeNet({ ...publishedNames, wallet: govWallets }); const cmd = await makeInterCommand(makeProcess(t, testKeyring, out), net); - subCommands(cmd).forEach(c => { + for (const c of subCommands(cmd)) { c.exitOverride(); c.configureOutput({ writeErr: s => diag.push(s) }); - }); + } await t.throwsAsync(cmd.parseAsync(argv)); const txt = diag.join('').trim(); @@ -343,10 +343,10 @@ test('diagnostic for agd ENOENT', async t => { }, makeNet({}), ); - subCommands(cmd).forEach(c => { + for (const c of subCommands(cmd)) { c.exitOverride(); c.configureOutput({ writeErr: s => diag.push(s) }); - }); + } await t.throwsAsync(cmd.parseAsync(argv), { instanceOf: CommanderError }); t.is( @@ -366,11 +366,11 @@ const usageTest = (words, blurb = 'Command usage:') => { const program = createCommand('agops'); const cmd = await makeInterCommand(makeProcess(t, {}, out), makeNet({})); program.addCommand(cmd); - subCommands(program).forEach(c => + for (const c of subCommands(program)) { c.exitOverride(() => { throw new CommanderError(1, 'usage', ''); - }), - ); + }); + } cmd.configureOutput({ writeOut: s => out.push(s), writeErr: s => out.push(s), From 9975f3857f149685bb4d8e63e6923de2dbe64845 Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Thu, 15 Feb 2024 
09:27:44 -0800 Subject: [PATCH 06/47] chore: telescope generate --- packages/new-cosmic-proto/.editorconfig | 12 + packages/new-cosmic-proto/.eslintignore | 5 + packages/new-cosmic-proto/.eslintrc.js | 70 ++++ packages/new-cosmic-proto/.gitignore | 50 +++ packages/new-cosmic-proto/.npmignore | 32 ++ packages/new-cosmic-proto/.npmrc | 1 + packages/new-cosmic-proto/LICENSE | 21 ++ packages/new-cosmic-proto/README.md | 298 ++++++++++++++++++ .../__tests__/messages.test.js | 3 + packages/new-cosmic-proto/package.json | 64 ++++ packages/new-cosmic-proto/scripts/aminos.js | 29 ++ packages/new-cosmic-proto/scripts/codegen.js | 101 ++++++ packages/new-cosmic-proto/src/index.ts | 1 + packages/new-cosmic-proto/tsconfig.json | 28 ++ 14 files changed, 715 insertions(+) create mode 100644 packages/new-cosmic-proto/.editorconfig create mode 100644 packages/new-cosmic-proto/.eslintignore create mode 100644 packages/new-cosmic-proto/.eslintrc.js create mode 100644 packages/new-cosmic-proto/.gitignore create mode 100644 packages/new-cosmic-proto/.npmignore create mode 100644 packages/new-cosmic-proto/.npmrc create mode 100644 packages/new-cosmic-proto/LICENSE create mode 100644 packages/new-cosmic-proto/README.md create mode 100644 packages/new-cosmic-proto/__tests__/messages.test.js create mode 100644 packages/new-cosmic-proto/package.json create mode 100644 packages/new-cosmic-proto/scripts/aminos.js create mode 100644 packages/new-cosmic-proto/scripts/codegen.js create mode 100644 packages/new-cosmic-proto/src/index.ts create mode 100644 packages/new-cosmic-proto/tsconfig.json diff --git a/packages/new-cosmic-proto/.editorconfig b/packages/new-cosmic-proto/.editorconfig new file mode 100644 index 00000000000..4a7ea3036a2 --- /dev/null +++ b/packages/new-cosmic-proto/.editorconfig @@ -0,0 +1,12 @@ +root = true + +[*] +indent_style = space +indent_size = 2 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true + +[*.md] +trim_trailing_whitespace = false diff --git a/packages/new-cosmic-proto/.eslintignore b/packages/new-cosmic-proto/.eslintignore new file mode 100644 index 00000000000..38ba484990a --- /dev/null +++ b/packages/new-cosmic-proto/.eslintignore @@ -0,0 +1,5 @@ +node_modules/ +dist/ +main/ +module/ +coverage/ \ No newline at end of file diff --git a/packages/new-cosmic-proto/.eslintrc.js b/packages/new-cosmic-proto/.eslintrc.js new file mode 100644 index 00000000000..bb88e34ddbe --- /dev/null +++ b/packages/new-cosmic-proto/.eslintrc.js @@ -0,0 +1,70 @@ +module.exports = { + plugins: ['prettier'], + extends: ['eslint:recommended', 'prettier'], + parserOptions: { + ecmaVersion: 11, + requireConfigFile: false, + sourceType: 'module', + ecmaFeatures: { + jsx: true + } + }, + env: { + es6: true, + browser: true, + node: true, + jest: true + }, + rules: { + 'no-debugger': 2, + 'no-alert': 2, + 'no-await-in-loop': 0, + 'no-prototype-builtins': 0, + 'no-return-assign': ['error', 'except-parens'], + 'no-restricted-syntax': [ + 2, + 'ForInStatement', + 'LabeledStatement', + 'WithStatement' + ], + 'no-unused-vars': [ + 0, + { + ignoreSiblings: true, + argsIgnorePattern: 'React|res|next|^_' + } + ], + 'prefer-const': [ + 'error', + { + destructuring: 'all' + } + ], + 'no-unused-expressions': [ + 2, + { + allowTaggedTemplates: true + } + ], + 'no-console': 1, + 'comma-dangle': 2, + 'jsx-quotes': [2, 'prefer-double'], + 'linebreak-style': ['error', 'unix'], + quotes: [ + 2, + 'single', + { + avoidEscape: true, + allowTemplateLiterals: true + } + ], + 
'prettier/prettier': [ + 'error', + { + trailingComma: 'none', + singleQuote: true, + printWidth: 80 + } + ] + } +}; diff --git a/packages/new-cosmic-proto/.gitignore b/packages/new-cosmic-proto/.gitignore new file mode 100644 index 00000000000..7bba65cc80c --- /dev/null +++ b/packages/new-cosmic-proto/.gitignore @@ -0,0 +1,50 @@ +# Logs +logs +*.log +npm-debug.log* + +# Runtime data +pids +*.pid +*.seed + +# out +dist +mjs +main +module + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage + +# nyc test coverage +.nyc_output + +# Compiled binary addons (http://nodejs.org/api/addons.html) +build/Release + +# Dependency directories +node_modules +jspm_packages + +# Optional npm cache directory +.npm + +# Optional REPL history +.node_repl_history + +# Editors +.idea + +# Lib +lib + +# npm package lock +package-lock.json +yarn.lock + +# others +.DS_Store \ No newline at end of file diff --git a/packages/new-cosmic-proto/.npmignore b/packages/new-cosmic-proto/.npmignore new file mode 100644 index 00000000000..cc2605fa84a --- /dev/null +++ b/packages/new-cosmic-proto/.npmignore @@ -0,0 +1,32 @@ +*.log +npm-debug.log* + +# Coverage directory used by tools like istanbul +coverage +.nyc_output + +# Dependency directories +node_modules + +# npm package lock +package-lock.json +yarn.lock + +# project files +__fixtures__ +__tests__ +.babelrc +.babelrc.js +.editorconfig +.eslintignore +.eslintrc +.eslintrc.js +.gitignore +.travis.yml +.vscode +CHANGELOG.md +examples +jest.config.js +package.json +src +test \ No newline at end of file diff --git a/packages/new-cosmic-proto/.npmrc b/packages/new-cosmic-proto/.npmrc new file mode 100644 index 00000000000..a21347f1bde --- /dev/null +++ b/packages/new-cosmic-proto/.npmrc @@ -0,0 +1 @@ +scripts-prepend-node-path=true \ No newline at end of file diff --git a/packages/new-cosmic-proto/LICENSE b/packages/new-cosmic-proto/LICENSE new file mode 100644 index 00000000000..93b275f6299 --- /dev/null +++ b/packages/new-cosmic-proto/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2023 Agoric OpCo + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/packages/new-cosmic-proto/README.md b/packages/new-cosmic-proto/README.md new file mode 100644 index 00000000000..c568b7882bf --- /dev/null +++ b/packages/new-cosmic-proto/README.md @@ -0,0 +1,298 @@ +# cosmic-proto + +

+
+Protobuf interfaces for Agoric on Cosmos
+ + +## install + +```sh +npm install cosmic-proto +``` +## Table of contents + +- [cosmic-proto](#cosmic-proto) + - [Install](#install) + - [Table of contents](#table-of-contents) +- [Usage](#usage) + - [RPC Clients](#rpc-clients) + - [Composing Messages](#composing-messages) + - Cosmos, CosmWasm, and IBC + - [CosmWasm](#cosmwasm-messages) + - [IBC](#ibc-messages) + - [Cosmos](#cosmos-messages) +- [Wallets and Signers](#connecting-with-wallets-and-signing-messages) + - [Stargate Client](#initializing-the-stargate-client) + - [Creating Signers](#creating-signers) + - [Broadcasting Messages](#broadcasting-messages) +- [Advanced Usage](#advanced-usage) +- [Developing](#developing) +- [Credits](#credits) + +## Usage +### RPC Clients + +```js +import { agoric } from 'cosmic-proto'; + +const { createRPCQueryClient } = agoric.ClientFactory; +const client = await createRPCQueryClient({ rpcEndpoint: RPC_ENDPOINT }); + +// now you can query the cosmos modules +const balance = await client.cosmos.bank.v1beta1 + .allBalances({ address: 'agoric1addresshere' }); + +// you can also query the agoric modules +const balances = await client.agoric.exchange.v1beta1 + .exchangeBalances() +``` + +### Composing Messages + +Import the `agoric` object from `cosmic-proto`. + +```js +import { agoric } from 'cosmic-proto'; + +const { + createSpotLimitOrder, + createSpotMarketOrder, + deposit +} = agoric.exchange.v1beta1.MessageComposer.withTypeUrl; +``` + +#### CosmWasm Messages + +```js +import { cosmwasm } from "cosmic-proto"; + +const { + clearAdmin, + executeContract, + instantiateContract, + migrateContract, + storeCode, + updateAdmin +} = cosmwasm.wasm.v1.MessageComposer.withTypeUrl; +``` + +#### IBC Messages + +```js +import { ibc } from 'cosmic-proto'; + +const { + transfer +} = ibc.applications.transfer.v1.MessageComposer.withTypeUrl +``` + +#### Cosmos Messages + +```js +import { cosmos } from 'cosmic-proto'; + +const { + fundCommunityPool, + setWithdrawAddress, + withdrawDelegatorReward, + withdrawValidatorCommission +} = cosmos.distribution.v1beta1.MessageComposer.fromPartial; + +const { + multiSend, + send +} = cosmos.bank.v1beta1.MessageComposer.fromPartial; + +const { + beginRedelegate, + createValidator, + delegate, + editValidator, + undelegate +} = cosmos.staking.v1beta1.MessageComposer.fromPartial; + +const { + deposit, + submitProposal, + vote, + voteWeighted +} = cosmos.gov.v1beta1.MessageComposer.fromPartial; +``` + +## Connecting with Wallets and Signing Messages + +⚡️ For web interfaces, we recommend using [cosmos-kit](https://github.com/cosmology-tech/cosmos-kit). Continue below to see how to manually construct signers and clients. + +Here are the docs on [creating signers](https://github.com/cosmology-tech/cosmos-kit/tree/main/packages/react#signing-clients) in cosmos-kit that can be used with Keplr and other wallets. + +### Initializing the Stargate Client + +Use `getSigningagoricClient` to get your `SigningStargateClient`, with the proto/amino messages full-loaded. 
No need to manually add amino types, just require and initialize the client:
+
+```js
+import { getSigningagoricClient } from 'cosmic-proto';
+
+const stargateClient = await getSigningagoricClient({
+  rpcEndpoint,
+  signer // OfflineSigner
+});
+```
+
+### Creating Signers
+
+To broadcast messages, you can create signers with a variety of options:
+
+* [cosmos-kit](https://github.com/cosmology-tech/cosmos-kit/tree/main/packages/react#signing-clients) (recommended)
+* [keplr](https://docs.keplr.app/api/cosmjs.html)
+* [cosmjs](https://gist.github.com/webmaster128/8444d42a7eceeda2544c8a59fbd7e1d9)
+
+### Amino Signer
+
+You will most likely want the Amino signer; unless you specifically need proto signing, use this one:
+
+```js
+import { getOfflineSignerAmino as getOfflineSigner } from 'cosmjs-utils';
+```
+
+### Proto Signer
+
+```js
+import { getOfflineSignerProto as getOfflineSigner } from 'cosmjs-utils';
+```
+
+WARNING: using plain-text mnemonics is NOT recommended. Take care of your security and use best practices such as AES encryption and/or methods from 12-factor applications.
+
+```js
+import { chains } from 'chain-registry';
+
+const mnemonic =
+  'unfold client turtle either pilot stock floor glow toward bullet car science';
+const chain = chains.find(({ chain_name }) => chain_name === 'agoric');
+const signer = await getOfflineSigner({
+  mnemonic,
+  chain
+});
+```
+
+### Broadcasting Messages
+
+Now that you have your `stargateClient`, you can broadcast messages:
+
+```js
+const { send } = cosmos.bank.v1beta1.MessageComposer.withTypeUrl;
+
+const msg = send({
+  amount: [
+    {
+      denom: 'coin',
+      amount: '1000'
+    }
+  ],
+  toAddress: address,
+  fromAddress: address
+});
+
+const fee: StdFee = {
+  amount: [
+    {
+      denom: 'coin',
+      amount: '864'
+    }
+  ],
+  gas: '86364'
+};
+const response = await stargateClient.signAndBroadcast(address, [msg], fee);
+```
+
+## Advanced Usage
+
+If you want to manually construct a stargate client:
+
+```js
+import { OfflineSigner, GeneratedType, Registry } from "@cosmjs/proto-signing";
+import { AminoTypes, SigningStargateClient } from "@cosmjs/stargate";
+
+import {
+  cosmosAminoConverters,
+  cosmosProtoRegistry,
+  cosmwasmAminoConverters,
+  cosmwasmProtoRegistry,
+  ibcProtoRegistry,
+  ibcAminoConverters,
+  agoricAminoConverters,
+  agoricProtoRegistry
+} from 'cosmic-proto';
+
+const signer: OfflineSigner = /* create your signer (see above) */
+const rpcEndpoint = 'https://rpc.cosmos.directory/agoric'; // or another URL
+
+const protoRegistry: ReadonlyArray<[string, GeneratedType]> = [
+  ...cosmosProtoRegistry,
+  ...cosmwasmProtoRegistry,
+  ...ibcProtoRegistry,
+  ...agoricProtoRegistry
+];
+
+const aminoConverters = {
+  ...cosmosAminoConverters,
+  ...cosmwasmAminoConverters,
+  ...ibcAminoConverters,
+  ...agoricAminoConverters
+};
+
+const registry = new Registry(protoRegistry);
+const aminoTypes = new AminoTypes(aminoConverters);
+
+const stargateClient = await SigningStargateClient.connectWithSigner(rpcEndpoint, signer, {
+  registry,
+  aminoTypes
+});
+```
+
+## Developing
+
+When first cloning the repo:
+
+```
+yarn
+yarn build
+```
+
+### Codegen
+
+Contract schemas live in `./contracts`, and protos in `./proto`.
Look inside of `scripts/codegen.js` and configure the settings for bundling your SDK and contracts into `cosmic-proto`: + +``` +yarn codegen +``` + +### Publishing + +Build the types and then publish: + +``` +yarn build +yarn publish +``` + +## Related + +Checkout these related projects: + +* [@cosmology/telescope](https://github.com/cosmology-tech/telescope) Your Frontend Companion for Building with TypeScript with Cosmos SDK Modules. +* [@cosmwasm/ts-codegen](https://github.com/CosmWasm/ts-codegen) Convert your CosmWasm smart contracts into dev-friendly TypeScript classes. +* [chain-registry](https://github.com/cosmology-tech/chain-registry) Everything from token symbols, logos, and IBC denominations for all assets you want to support in your application. +* [cosmos-kit](https://github.com/cosmology-tech/cosmos-kit) Experience the convenience of connecting with a variety of web3 wallets through a single, streamlined interface. +* [create-cosmos-app](https://github.com/cosmology-tech/create-cosmos-app) Set up a modern Cosmos app by running one command. +* [interchain-ui](https://github.com/cosmology-tech/interchain-ui) The Interchain Design System, empowering developers with a flexible, easy-to-use UI kit. +* [starship](https://github.com/cosmology-tech/starship) Unified Testing and Development for the Interchain. + +## Credits + +🛠 Built by Cosmology — if you like our tools, please consider delegating to [our validator ⚛️](https://cosmology.zone/validator) + + +## Disclaimer + +AS DESCRIBED IN THE LICENSES, THE SOFTWARE IS PROVIDED “AS IS”, AT YOUR OWN RISK, AND WITHOUT WARRANTIES OF ANY KIND. + +No developer or entity involved in creating this software will be liable for any claims or damages whatsoever associated with your use, inability to use, or your interaction with other users of the code, including any direct, indirect, incidental, special, exemplary, punitive or consequential damages, or loss of profits, cryptocurrencies, tokens, or anything else of value. 
diff --git a/packages/new-cosmic-proto/__tests__/messages.test.js b/packages/new-cosmic-proto/__tests__/messages.test.js new file mode 100644 index 00000000000..72917a97fbb --- /dev/null +++ b/packages/new-cosmic-proto/__tests__/messages.test.js @@ -0,0 +1,3 @@ +it('it works', async () => { + console.log('it works!'); +}); diff --git a/packages/new-cosmic-proto/package.json b/packages/new-cosmic-proto/package.json new file mode 100644 index 00000000000..f8231fbda4c --- /dev/null +++ b/packages/new-cosmic-proto/package.json @@ -0,0 +1,64 @@ +{ + "name": "@turadg/cosmic-proto", + "version": "0.0.1", + "description": "Protobuf interfaces for Agoric on Cosmos", + "author": "Agoric OpCo ", + "homepage": "https://github.com/turadg/cosmic-proto#readme", + "license": "SEE LICENSE IN LICENSE", + "main": "dist/index.js", + "module": "dist/index.mjs", + "typings": "dist/index.d.ts", + "directories": { + "lib": "src" + }, + "files": [ + "dist", + "!CHANGELOG.md" + ], + "scripts": { + "build:cjs": "yarn tsc -p tsconfig.json --outDir dist --module commonjs || true", + "build:mjs": "yarn tsc -p tsconfig.json --outDir mjs --module es2022 --declaration false || true", + "clean:mjs": "rimraf mjs", + "clean:dist": "rimraf dist", + "clean": "npm run clean:mjs && npm run clean:dist", + "build:rename": "publish-scripts --cmd rename --srcDir mjs --outDir dist --findExt js --replaceExt mjs --no-rmDir", + "build": "npm run clean && npm run build:cjs && npm run build:mjs && npm run build:rename", + "codegen": "node scripts/codegen.js", + "prepare": "npm run build", + "lint": "eslint src --fix", + "test": "jest", + "test:watch": "jest --watch", + "test:debug": "node --inspect node_modules/.bin/jest --runInBand" + }, + "publishConfig": { + "access": "restricted" + }, + "repository": { + "type": "git", + "url": "https://github.com/turadg/cosmic-proto" + }, + "keywords": [], + "bugs": { + "url": "https://github.com/turadg/cosmic-proto/issues" + }, + "devDependencies": { + "@cosmology/telescope": "^1.0.1", + "@types/jest": "^29.5.0", + "eslint": "8.45.0", + "eslint-config-prettier": "^8.8.0", + "eslint-plugin-prettier": "^4.2.1", + "jest": "^29.5.0", + "jest-in-case": "^1.0.2", + "prettier": "^2.8.7", + "publish-scripts": "0.1.0", + "rimraf": "^5.0.0", + "ts-jest": "^29.1.0", + "typescript": "^5.0.4" + }, + "dependencies": { + "@cosmjs/amino": "0.29.4", + "@cosmjs/proto-signing": "0.29.4", + "@cosmjs/stargate": "0.29.4", + "@cosmjs/tendermint-rpc": "^0.29.4" + } +} diff --git a/packages/new-cosmic-proto/scripts/aminos.js b/packages/new-cosmic-proto/scripts/aminos.js new file mode 100644 index 00000000000..fe7aee16cb4 --- /dev/null +++ b/packages/new-cosmic-proto/scripts/aminos.js @@ -0,0 +1,29 @@ +module.exports.AMINO_MAP = { + // PUT YOUR AMINO names here... 
+ // Staking + // '/cosmos.staking.v1beta1.MsgCreateValidator': { + // aminoType: 'cosmos-sdk/MsgCreateValidator' + // }, + // '/cosmos.staking.v1beta1.MsgEditValidator': { + // aminoType: 'cosmos-sdk/MsgEditValidator' + // }, + // '/cosmos.staking.v1beta1.MsgDelegate': { + // aminoType: 'cosmos-sdk/MsgDelegate' + // }, + // '/cosmos.staking.v1beta1.MsgUndelegate': { + // aminoType: 'cosmos-sdk/MsgUndelegate' + // }, + // '/cosmos.staking.v1beta1.MsgBeginRedelegate': { + // aminoType: 'cosmos-sdk/MsgBeginRedelegate' + // }, + // '/cosmos.staking.v1beta1.MsgCancelUnbondingDelegation': { + // aminoType: 'cosmos-sdk/MsgCancelUnbondingDelegation' + // }, + // '/cosmos.staking.v1beta1.MsgUpdateParams': { + // aminoType: 'cosmos-sdk/x/staking/MsgUpdateParams' + // }, + // // IBC + // '/ibc.applications.transfer.v1.MsgTransfer': { + // aminoType: 'cosmos-sdk/MsgTransfer' + // } +}; diff --git a/packages/new-cosmic-proto/scripts/codegen.js b/packages/new-cosmic-proto/scripts/codegen.js new file mode 100644 index 00000000000..2a6843676f8 --- /dev/null +++ b/packages/new-cosmic-proto/scripts/codegen.js @@ -0,0 +1,101 @@ +const { join } = require('path'); +const telescope = require('@cosmology/telescope').default; +const rimraf = require('rimraf').rimrafSync; +const { AMINO_MAP } = require('./aminos'); + +const protoDirs = [join(__dirname, '/../proto')]; +const outPath = join(__dirname, '../src/codegen'); +rimraf(outPath); + +telescope({ + protoDirs, + outPath, + options: { + tsDisable: { + files: [ + 'cosmos/authz/v1beta1/tx.amino.ts', + 'cosmos/staking/v1beta1/tx.amino.ts' + ], + patterns: ['**/*amino.ts', '**/*registry.ts'] + }, + prototypes: { + includePackageVar: false, + removeUnusedImports: true, + experimentalGlobalProtoNamespace: true, + interfaces: { + enabled: true, + useUnionTypes: false + }, + excluded: { + packages: [ + 'ibc.applications.fee.v1', // issue with parsing protos (LCD routes with nested objects in params) + + 'cosmos.app.v1alpha1', + 'cosmos.app.v1beta1', + 'cosmos.base.kv.v1beta1', + 'cosmos.base.reflection.v1beta1', + 'cosmos.base.snapshots.v1beta1', + 'cosmos.base.store.v1beta1', + 'cosmos.base.tendermint.v1beta1', + 'cosmos.crisis.v1beta1', + 'cosmos.evidence.v1beta1', + 'cosmos.genutil.v1beta1', + + 'cosmos.autocli.v1', + + 'cosmos.msg.v1', + 'cosmos.nft.v1beta1', + 'cosmos.capability.v1beta1', + 'cosmos.orm.v1alpha1', + 'cosmos.orm.v1', + 'cosmos.slashing.v1beta1', + 'google.api', + 'ibc.core.port.v1', + 'ibc.core.types.v1' + ] + }, + methods: { + fromJSON: false, + toJSON: false, + encode: true, + decode: true, + fromPartial: true, + toAmino: true, + fromAmino: true, + fromProto: true, + toProto: true + }, + parser: { + keepCase: false + } + }, + typingsFormat: { + duration: 'duration', + timestamp: 'date', + useExact: false, + useDeepPartial: false, + num64: 'bigint', + customTypes: { + useCosmosSDKDec: true + } + }, + aminoEncoding: { + enabled: true, + exceptions: AMINO_MAP + }, + lcdClients: { + enabled: false + }, + rpcClients: { + enabled: true, + camelCase: true + } + } +}) + .then(() => { + console.log('✨ all done!'); + }) + .catch((e) => { + console.error(e); + process.exit(1); + }); diff --git a/packages/new-cosmic-proto/src/index.ts b/packages/new-cosmic-proto/src/index.ts new file mode 100644 index 00000000000..646541b5972 --- /dev/null +++ b/packages/new-cosmic-proto/src/index.ts @@ -0,0 +1 @@ +export * from './codegen'; diff --git a/packages/new-cosmic-proto/tsconfig.json b/packages/new-cosmic-proto/tsconfig.json new file mode 100644 index 
00000000000..38eee4ccf88 --- /dev/null +++ b/packages/new-cosmic-proto/tsconfig.json @@ -0,0 +1,28 @@ +{ + "compilerOptions": { + "baseUrl": ".", + "rootDir": "src", + "skipLibCheck": true, + "emitDeclarationOnly": false, + "declaration": true, + "esModuleInterop": true, + "target": "es2022", + "module": "es2022", + "lib": [ + "es2022", + "DOM" + ], + "sourceMap": true, + "isolatedModules": true, + "allowJs": true, + "downlevelIteration": true, + "moduleResolution": "node", + "resolveJsonModule": true + }, + "include": [ + "src/**/*" + ], + "exclude": [ + "node_modules" + ] +} \ No newline at end of file From 7aa4049756afd7de6ad71700d92b8a06d7b7e4d3 Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Thu, 15 Feb 2024 09:29:38 -0800 Subject: [PATCH 07/47] chore: match existing cosmic-proto (until replacing) --- packages/cosmic-proto/package.json | 21 +- packages/new-cosmic-proto/.editorconfig | 12 - packages/new-cosmic-proto/.eslintrc.js | 70 - packages/new-cosmic-proto/.npmrc | 1 - packages/new-cosmic-proto/LICENSE | 222 +- packages/new-cosmic-proto/README.md | 25 +- .../__tests__/messages.test.js | 3 - packages/new-cosmic-proto/package.json | 75 +- .../scripts/{aminos.js => aminos.cjs} | 0 .../scripts/{codegen.js => codegen.cjs} | 33 +- packages/new-cosmic-proto/test/test-dummy.js | 5 + yarn.lock | 3332 +++++++++++++++-- 12 files changed, 3252 insertions(+), 547 deletions(-) delete mode 100644 packages/new-cosmic-proto/.editorconfig delete mode 100644 packages/new-cosmic-proto/.eslintrc.js delete mode 100644 packages/new-cosmic-proto/.npmrc delete mode 100644 packages/new-cosmic-proto/__tests__/messages.test.js rename packages/new-cosmic-proto/scripts/{aminos.js => aminos.cjs} (100%) rename packages/new-cosmic-proto/scripts/{codegen.js => codegen.cjs} (83%) create mode 100644 packages/new-cosmic-proto/test/test-dummy.js diff --git a/packages/cosmic-proto/package.json b/packages/cosmic-proto/package.json index 376b5ca2490..50da4d65979 100644 --- a/packages/cosmic-proto/package.json +++ b/packages/cosmic-proto/package.json @@ -13,24 +13,23 @@ "bugs": { "url": "https://github.com/Agoric/agoric-sdk/issues" }, - "type": "module", "exports": { + ".": { + "types": "./dist/codegen/index.d.ts", + "default": "./dist/codegen/index.js" + }, "./package.json": "./package.json", "./swingset/msgs.js": { - "types": "./dist/agoric/swingset/msgs.d.ts", - "default": "./dist/agoric/swingset/msgs.js" + "types": "./dist/codegen/agoric/swingset/msgs.d.ts", + "default": "./dist/codegen/agoric/swingset/msgs.js" }, "./swingset/query.js": { - "types": "./dist/agoric/swingset/query.d.ts", - "default": "./dist/agoric/swingset/query.js" + "types": "./dist/codegen/agoric/swingset/query.d.ts", + "default": "./dist/codegen/agoric/swingset/query.js" }, "./vstorage/query.js": { - "types": "./dist/agoric/vstorage/query.d.ts", - "default": "./dist/agoric/vstorage/query.js" - }, - "./swingset/swingset.js": { - "types": "./dist/agoric/swingset/swingset.d.ts", - "default": "./dist/agoric/swingset/swingset.js" + "types": "./dist/codegen/agoric/vstorage/query.d.ts", + "default": "./dist/codegen/agoric/vstorage/query.js" } }, "scripts": { diff --git a/packages/new-cosmic-proto/.editorconfig b/packages/new-cosmic-proto/.editorconfig deleted file mode 100644 index 4a7ea3036a2..00000000000 --- a/packages/new-cosmic-proto/.editorconfig +++ /dev/null @@ -1,12 +0,0 @@ -root = true - -[*] -indent_style = space -indent_size = 2 -end_of_line = lf -charset = utf-8 -trim_trailing_whitespace = true -insert_final_newline = true - -[*.md] 
-trim_trailing_whitespace = false diff --git a/packages/new-cosmic-proto/.eslintrc.js b/packages/new-cosmic-proto/.eslintrc.js deleted file mode 100644 index bb88e34ddbe..00000000000 --- a/packages/new-cosmic-proto/.eslintrc.js +++ /dev/null @@ -1,70 +0,0 @@ -module.exports = { - plugins: ['prettier'], - extends: ['eslint:recommended', 'prettier'], - parserOptions: { - ecmaVersion: 11, - requireConfigFile: false, - sourceType: 'module', - ecmaFeatures: { - jsx: true - } - }, - env: { - es6: true, - browser: true, - node: true, - jest: true - }, - rules: { - 'no-debugger': 2, - 'no-alert': 2, - 'no-await-in-loop': 0, - 'no-prototype-builtins': 0, - 'no-return-assign': ['error', 'except-parens'], - 'no-restricted-syntax': [ - 2, - 'ForInStatement', - 'LabeledStatement', - 'WithStatement' - ], - 'no-unused-vars': [ - 0, - { - ignoreSiblings: true, - argsIgnorePattern: 'React|res|next|^_' - } - ], - 'prefer-const': [ - 'error', - { - destructuring: 'all' - } - ], - 'no-unused-expressions': [ - 2, - { - allowTaggedTemplates: true - } - ], - 'no-console': 1, - 'comma-dangle': 2, - 'jsx-quotes': [2, 'prefer-double'], - 'linebreak-style': ['error', 'unix'], - quotes: [ - 2, - 'single', - { - avoidEscape: true, - allowTemplateLiterals: true - } - ], - 'prettier/prettier': [ - 'error', - { - trailingComma: 'none', - singleQuote: true, - printWidth: 80 - } - ] - } -}; diff --git a/packages/new-cosmic-proto/.npmrc b/packages/new-cosmic-proto/.npmrc deleted file mode 100644 index a21347f1bde..00000000000 --- a/packages/new-cosmic-proto/.npmrc +++ /dev/null @@ -1 +0,0 @@ -scripts-prepend-node-path=true \ No newline at end of file diff --git a/packages/new-cosmic-proto/LICENSE b/packages/new-cosmic-proto/LICENSE index 93b275f6299..261eeb9e9f8 100644 --- a/packages/new-cosmic-proto/LICENSE +++ b/packages/new-cosmic-proto/LICENSE @@ -1,21 +1,201 @@ -The MIT License (MIT) - -Copyright (c) 2023 Agoric OpCo - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/packages/new-cosmic-proto/README.md b/packages/new-cosmic-proto/README.md index c568b7882bf..0be2c29b131 100644 --- a/packages/new-cosmic-proto/README.md +++ b/packages/new-cosmic-proto/README.md @@ -1,4 +1,4 @@ -# cosmic-proto +# @agoric/cosmic-proto


@@ -9,13 +9,10 @@ ## install ```sh -npm install cosmic-proto +npm install @agoric/cosmic-proto ``` ## Table of contents -- [cosmic-proto](#cosmic-proto) - - [Install](#install) - - [Table of contents](#table-of-contents) - [Usage](#usage) - [RPC Clients](#rpc-clients) - [Composing Messages](#composing-messages) @@ -35,7 +32,7 @@ npm install cosmic-proto ### RPC Clients ```js -import { agoric } from 'cosmic-proto'; +import { agoric } from '@agoric/cosmic-proto'; const { createRPCQueryClient } = agoric.ClientFactory; const client = await createRPCQueryClient({ rpcEndpoint: RPC_ENDPOINT }); @@ -51,10 +48,10 @@ const balances = await client.agoric.exchange.v1beta1 ### Composing Messages -Import the `agoric` object from `cosmic-proto`. +Import the `agoric` object from `@agoric/cosmic-proto`. ```js -import { agoric } from 'cosmic-proto'; +import { agoric } from '@agoric/cosmic-proto'; const { createSpotLimitOrder, @@ -66,7 +63,7 @@ const { #### CosmWasm Messages ```js -import { cosmwasm } from "cosmic-proto"; +import { cosmwasm } from "@agoric/cosmic-proto"; const { clearAdmin, @@ -81,7 +78,7 @@ const { #### IBC Messages ```js -import { ibc } from 'cosmic-proto'; +import { ibc } from '@agoric/cosmic-proto'; const { transfer @@ -91,7 +88,7 @@ const { #### Cosmos Messages ```js -import { cosmos } from 'cosmic-proto'; +import { cosmos } from '@agoric/cosmic-proto'; const { fundCommunityPool, @@ -132,7 +129,7 @@ Here are the docs on [creating signers](https://github.com/cosmology-tech/cosmos Use `getSigningagoricClient` to get your `SigningStargateClient`, with the proto/amino messages full-loaded. No need to manually add amino types, just require and initialize the client: ```js -import { getSigningagoricClient } from 'cosmic-proto'; +import { getSigningagoricClient } from '@agoric/cosmic-proto'; const stargateClient = await getSigningagoricClient({ rpcEndpoint, @@ -220,7 +217,7 @@ import { ibcAminoConverters, agoricAminoConverters, agoricProtoRegistry -} from 'cosmic-proto'; +} from '@agoric/cosmic-proto'; const signer: OfflineSigner = /* create your signer (see above) */ const rpcEndpint = 'https://rpc.cosmos.directory/agoric'; // or another URL @@ -259,7 +256,7 @@ yarn build ### Codegen -Contract schemas live in `./contracts`, and protos in `./proto`. Look inside of `scripts/codegen.js` and configure the settings for bundling your SDK and contracts into `cosmic-proto`: +Contract schemas live in `./contracts`, and protos in `./proto`. 
Look inside of `scripts/codegen.cjs` and configure the settings for bundling your SDK and contracts into `@agoric/cosmic-proto`: ``` yarn codegen diff --git a/packages/new-cosmic-proto/__tests__/messages.test.js b/packages/new-cosmic-proto/__tests__/messages.test.js deleted file mode 100644 index 72917a97fbb..00000000000 --- a/packages/new-cosmic-proto/__tests__/messages.test.js +++ /dev/null @@ -1,3 +0,0 @@ -it('it works', async () => { - console.log('it works!'); -}); diff --git a/packages/new-cosmic-proto/package.json b/packages/new-cosmic-proto/package.json index f8231fbda4c..e45a78bfa36 100644 --- a/packages/new-cosmic-proto/package.json +++ b/packages/new-cosmic-proto/package.json @@ -1,10 +1,37 @@ { - "name": "@turadg/cosmic-proto", - "version": "0.0.1", - "description": "Protobuf interfaces for Agoric on Cosmos", - "author": "Agoric OpCo ", - "homepage": "https://github.com/turadg/cosmic-proto#readme", - "license": "SEE LICENSE IN LICENSE", + "name": "@agoric/new-cosmic-proto", + "version": "0.4.0", + "description": "Protobuf stubs for the Agoric cosmos-sdk module", + "keywords": [], + "author": "Agoric", + "license": "Apache-2.0", + "homepage": "https://github.com/Agoric/agoric-sdk/tree/HEAD/packages/new-cosmic-proto#readme", + "repository": { + "type": "git", + "url": "git+https://github.com/Agoric/agoric-sdk.git" + }, + "bugs": { + "url": "https://github.com/Agoric/agoric-sdk/issues" + }, + "exports": { + ".": { + "types": "./dist/codegen/index.d.ts", + "default": "./dist/codegen/index.js" + }, + "./package.json": "./package.json", + "./swingset/msgs.js": { + "types": "./dist/codegen/agoric/swingset/msgs.d.ts", + "default": "./dist/codegen/agoric/swingset/msgs.js" + }, + "./swingset/query.js": { + "types": "./dist/codegen/agoric/swingset/query.d.ts", + "default": "./dist/codegen/agoric/swingset/query.js" + }, + "./vstorage/query.js": { + "types": "./dist/codegen/agoric/vstorage/query.d.ts", + "default": "./dist/codegen/agoric/vstorage/query.js" + } + }, "main": "dist/index.js", "module": "dist/index.mjs", "typings": "dist/index.d.ts", @@ -16,8 +43,8 @@ "!CHANGELOG.md" ], "scripts": { - "build:cjs": "yarn tsc -p tsconfig.json --outDir dist --module commonjs || true", - "build:mjs": "yarn tsc -p tsconfig.json --outDir mjs --module es2022 --declaration false || true", + "build:cjs": "yarn tsc --outDir dist --module commonjs || true", + "build:mjs": "yarn tsc --outDir mjs --module es2022 --declaration false || true", "clean:mjs": "rimraf mjs", "clean:dist": "rimraf dist", "clean": "npm run clean:mjs && npm run clean:dist", @@ -25,40 +52,34 @@ "build": "npm run clean && npm run build:cjs && npm run build:mjs && npm run build:rename", "codegen": "node scripts/codegen.js", "prepare": "npm run build", - "lint": "eslint src --fix", - "test": "jest", - "test:watch": "jest --watch", - "test:debug": "node --inspect node_modules/.bin/jest --runInBand" + "lint-fix": "yarn lint:eslint --fix", + "lint": "tsc", + "test": "ava", + "test:xs": "exit 0" }, "publishConfig": { "access": "restricted" }, - "repository": { - "type": "git", - "url": "https://github.com/turadg/cosmic-proto" - }, - "keywords": [], - "bugs": { - "url": "https://github.com/turadg/cosmic-proto/issues" - }, "devDependencies": { "@cosmology/telescope": "^1.0.1", - "@types/jest": "^29.5.0", + "ava": "^5.3.1", "eslint": "8.45.0", "eslint-config-prettier": "^8.8.0", "eslint-plugin-prettier": "^4.2.1", - "jest": "^29.5.0", - "jest-in-case": "^1.0.2", "prettier": "^2.8.7", "publish-scripts": "0.1.0", "rimraf": "^5.0.0", - "ts-jest": 
"^29.1.0", "typescript": "^5.0.4" }, "dependencies": { "@cosmjs/amino": "0.29.4", - "@cosmjs/proto-signing": "0.29.4", - "@cosmjs/stargate": "0.29.4", - "@cosmjs/tendermint-rpc": "^0.29.4" + "@cosmjs/proto-signing": "^0.30.1", + "@cosmjs/stargate": "^0.30.1", + "@cosmjs/tendermint-rpc": "^0.30.1" + }, + "ava": { + "files": [ + "test/**/test-*.js" + ] } } diff --git a/packages/new-cosmic-proto/scripts/aminos.js b/packages/new-cosmic-proto/scripts/aminos.cjs similarity index 100% rename from packages/new-cosmic-proto/scripts/aminos.js rename to packages/new-cosmic-proto/scripts/aminos.cjs diff --git a/packages/new-cosmic-proto/scripts/codegen.js b/packages/new-cosmic-proto/scripts/codegen.cjs similarity index 83% rename from packages/new-cosmic-proto/scripts/codegen.js rename to packages/new-cosmic-proto/scripts/codegen.cjs index 2a6843676f8..73a570ce351 100644 --- a/packages/new-cosmic-proto/scripts/codegen.js +++ b/packages/new-cosmic-proto/scripts/codegen.cjs @@ -1,7 +1,7 @@ const { join } = require('path'); const telescope = require('@cosmology/telescope').default; const rimraf = require('rimraf').rimrafSync; -const { AMINO_MAP } = require('./aminos'); +const { AMINO_MAP } = require('./aminos.cjs'); const protoDirs = [join(__dirname, '/../proto')]; const outPath = join(__dirname, '../src/codegen'); @@ -14,9 +14,9 @@ telescope({ tsDisable: { files: [ 'cosmos/authz/v1beta1/tx.amino.ts', - 'cosmos/staking/v1beta1/tx.amino.ts' + 'cosmos/staking/v1beta1/tx.amino.ts', ], - patterns: ['**/*amino.ts', '**/*registry.ts'] + patterns: ['**/*amino.ts', '**/*registry.ts'], }, prototypes: { includePackageVar: false, @@ -24,7 +24,7 @@ telescope({ experimentalGlobalProtoNamespace: true, interfaces: { enabled: true, - useUnionTypes: false + useUnionTypes: false, }, excluded: { packages: [ @@ -51,8 +51,8 @@ telescope({ 'cosmos.slashing.v1beta1', 'google.api', 'ibc.core.port.v1', - 'ibc.core.types.v1' - ] + 'ibc.core.types.v1', + ], }, methods: { fromJSON: false, @@ -63,11 +63,11 @@ telescope({ toAmino: true, fromAmino: true, fromProto: true, - toProto: true + toProto: true, }, parser: { - keepCase: false - } + keepCase: false, + }, }, typingsFormat: { duration: 'duration', @@ -76,26 +76,25 @@ telescope({ useDeepPartial: false, num64: 'bigint', customTypes: { - useCosmosSDKDec: true - } + useCosmosSDKDec: true, + }, }, aminoEncoding: { enabled: true, - exceptions: AMINO_MAP + exceptions: AMINO_MAP, }, lcdClients: { - enabled: false + enabled: false, }, rpcClients: { enabled: true, - camelCase: true - } - } + }, + }, }) .then(() => { console.log('✨ all done!'); }) - .catch((e) => { + .catch(e => { console.error(e); process.exit(1); }); diff --git a/packages/new-cosmic-proto/test/test-dummy.js b/packages/new-cosmic-proto/test/test-dummy.js new file mode 100644 index 00000000000..7a739f9a58b --- /dev/null +++ b/packages/new-cosmic-proto/test/test-dummy.js @@ -0,0 +1,5 @@ +import test from 'ava'; + +test('it works', async t => { + t.pass(); +}); diff --git a/yarn.lock b/yarn.lock index 50418f645aa..f9857bd4970 100644 --- a/yarn.lock +++ b/yarn.lock @@ -21,7 +21,15 @@ resolved "https://registry.yarnpkg.com/@agoric/wallet-ui/-/wallet-ui-0.1.3-solo.0.tgz#5f05c3dd2820d4f1efcbccbd2dc1292847ecbd2b" integrity sha512-NbhCrTH9u2af+6ituM99M8Mo10VOP1nQRTZoYEXW+esBwJId/7cRniMmAC7qmkbXs8POA31S8EQ5gAhkWq08WA== -"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.22.13", "@babel/code-frame@^7.23.5": +"@ampproject/remapping@^2.1.0", "@ampproject/remapping@^2.2.0": + version "2.2.1" + resolved 
"https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.1.tgz#99e8e11851128b8702cd57c33684f1d0f260b630" + integrity sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg== + dependencies: + "@jridgewell/gen-mapping" "^0.3.0" + "@jridgewell/trace-mapping" "^0.3.9" + +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.18.6", "@babel/code-frame@^7.22.13", "@babel/code-frame@^7.23.5": version "7.23.5" resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.23.5.tgz#9009b69a8c602293476ad598ff53e4562e15c244" integrity sha512-CgH3s1a96LipHCmSUmYFPwY7MNx8C3avkq7i4Wl3cfa662ldtUe4VM1TPXX70pfmrlWTb6jLqTYrZyT2ZTJBgA== @@ -29,7 +37,63 @@ "@babel/highlight" "^7.23.4" chalk "^2.4.2" -"@babel/generator@^7.23.6": +"@babel/compat-data@^7.17.7", "@babel/compat-data@^7.18.8", "@babel/compat-data@^7.20.5", "@babel/compat-data@^7.23.5": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.23.5.tgz#ffb878728bb6bdcb6f4510aa51b1be9afb8cfd98" + integrity sha512-uU27kfDRlhfKl+w1U6vp16IuvSLtjAxdArVXPa9BvLkrr7CYIsxH5adpHObeAGY/41+syctUWOZ140a2Rvkgjw== + +"@babel/core@7.18.10": + version "7.18.10" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.18.10.tgz#39ad504991d77f1f3da91be0b8b949a5bc466fb8" + integrity sha512-JQM6k6ENcBFKVtWvLavlvi/mPcpYZ3+R+2EySDEMSMbp7Mn4FexlbbJVrx2R7Ijhr01T8gyqrOaABWIOgxeUyw== + dependencies: + "@ampproject/remapping" "^2.1.0" + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.18.10" + "@babel/helper-compilation-targets" "^7.18.9" + "@babel/helper-module-transforms" "^7.18.9" + "@babel/helpers" "^7.18.9" + "@babel/parser" "^7.18.10" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.18.10" + "@babel/types" "^7.18.10" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.1" + semver "^6.3.0" + +"@babel/core@^7.11.6", "@babel/core@^7.12.3": + version "7.23.9" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.23.9.tgz#b028820718000f267870822fec434820e9b1e4d1" + integrity sha512-5q0175NOjddqpvvzU+kDiSOAk4PfdO6FvwCWoQ6RO7rTzEe8vlo+4HVfcnAREhD4npMs0e9uZypjTwzZPCf/cw== + dependencies: + "@ampproject/remapping" "^2.2.0" + "@babel/code-frame" "^7.23.5" + "@babel/generator" "^7.23.6" + "@babel/helper-compilation-targets" "^7.23.6" + "@babel/helper-module-transforms" "^7.23.3" + "@babel/helpers" "^7.23.9" + "@babel/parser" "^7.23.9" + "@babel/template" "^7.23.9" + "@babel/traverse" "^7.23.9" + "@babel/types" "^7.23.9" + convert-source-map "^2.0.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.3" + semver "^6.3.1" + +"@babel/generator@7.18.12": + version "7.18.12" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.18.12.tgz#fa58daa303757bd6f5e4bbca91b342040463d9f4" + integrity sha512-dfQ8ebCN98SvyL7IxNMCUtZQSq5R7kxgN+r8qYTGDmmSion1hX2C0zq2yo1bsCDhXixokv1SAWTZUMYbO/V5zg== + dependencies: + "@babel/types" "^7.18.10" + "@jridgewell/gen-mapping" "^0.3.2" + jsesc "^2.5.1" + +"@babel/generator@^7.18.10", "@babel/generator@^7.23.6": version "7.23.6" resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.23.6.tgz#9e1fca4811c77a10580d17d26b57b036133f3c2e" integrity sha512-qrSfCYxYQB5owCmGLbl8XRpX1ytXlpueOb0N0UmQwA073KZxejgQTzAmJezxvpwQD9uGtK2shHdi55QT+MbjIw== @@ -39,12 +103,73 @@ "@jridgewell/trace-mapping" "^0.3.17" jsesc "^2.5.1" -"@babel/helper-environment-visitor@^7.22.20": +"@babel/helper-annotate-as-pure@^7.18.6", "@babel/helper-annotate-as-pure@^7.22.5": + version "7.22.5" + resolved 
"https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.22.5.tgz#e7f06737b197d580a01edf75d97e2c8be99d3882" + integrity sha512-LvBTxu8bQSQkcyKOU+a1btnNFQ1dMAd0R6PyW3arXes06F6QLWLIrd681bxRPIXlrMGR3XYnW9JyML7dP3qgxg== + dependencies: + "@babel/types" "^7.22.5" + +"@babel/helper-builder-binary-assignment-operator-visitor@^7.22.15": + version "7.22.15" + resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.22.15.tgz#5426b109cf3ad47b91120f8328d8ab1be8b0b956" + integrity sha512-QkBXwGgaoC2GtGZRoma6kv7Szfv06khvhFav67ZExau2RaXzy8MpHSMO2PNoP2XtmQphJQRHFfg77Bq731Yizw== + dependencies: + "@babel/types" "^7.22.15" + +"@babel/helper-compilation-targets@^7.17.7", "@babel/helper-compilation-targets@^7.18.9", "@babel/helper-compilation-targets@^7.20.7", "@babel/helper-compilation-targets@^7.22.15", "@babel/helper-compilation-targets@^7.23.6": + version "7.23.6" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.23.6.tgz#4d79069b16cbcf1461289eccfbbd81501ae39991" + integrity sha512-9JB548GZoQVmzrFgp8o7KxdgkTGm6xs9DW0o/Pim72UDjzr5ObUQ6ZzYPqA+g9OTS2bBQoctLJrky0RDCAWRgQ== + dependencies: + "@babel/compat-data" "^7.23.5" + "@babel/helper-validator-option" "^7.23.5" + browserslist "^4.22.2" + lru-cache "^5.1.1" + semver "^6.3.1" + +"@babel/helper-create-class-features-plugin@^7.18.6", "@babel/helper-create-class-features-plugin@^7.21.0", "@babel/helper-create-class-features-plugin@^7.23.6": + version "7.23.10" + resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.23.10.tgz#25d55fafbaea31fd0e723820bb6cc3df72edf7ea" + integrity sha512-2XpP2XhkXzgxecPNEEK8Vz8Asj9aRxt08oKOqtiZoqV2UGZ5T+EkyP9sXQ9nwMxBIG34a7jmasVqoMop7VdPUw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.22.5" + "@babel/helper-environment-visitor" "^7.22.20" + "@babel/helper-function-name" "^7.23.0" + "@babel/helper-member-expression-to-functions" "^7.23.0" + "@babel/helper-optimise-call-expression" "^7.22.5" + "@babel/helper-replace-supers" "^7.22.20" + "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.6" + semver "^6.3.1" + +"@babel/helper-create-regexp-features-plugin@^7.18.6", "@babel/helper-create-regexp-features-plugin@^7.22.15", "@babel/helper-create-regexp-features-plugin@^7.22.5": + version "7.22.15" + resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.22.15.tgz#5ee90093914ea09639b01c711db0d6775e558be1" + integrity sha512-29FkPLFjn4TPEa3RE7GpW+qbE8tlsu3jntNYNfcGsc49LphF1PQIiD+vMZ1z1xVOKt+93khA9tc2JBs3kBjA7w== + dependencies: + "@babel/helper-annotate-as-pure" "^7.22.5" + regexpu-core "^5.3.1" + semver "^6.3.1" + +"@babel/helper-define-polyfill-provider@^0.3.2", "@babel/helper-define-polyfill-provider@^0.3.3": + version "0.3.3" + resolved "https://registry.yarnpkg.com/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.3.tgz#8612e55be5d51f0cd1f36b4a5a83924e89884b7a" + integrity sha512-z5aQKU4IzbqCC1XH0nAqfsFLMVSo22SBKUc0BxGrLkolTdPTructy0ToNnlO2zA4j9Q/7pjMZf0DSY+DSTYzww== + dependencies: + "@babel/helper-compilation-targets" "^7.17.7" + "@babel/helper-plugin-utils" "^7.16.7" + debug "^4.1.1" + lodash.debounce "^4.0.8" + resolve "^1.14.2" + semver "^6.1.2" + +"@babel/helper-environment-visitor@^7.18.9", 
"@babel/helper-environment-visitor@^7.22.20": version "7.22.20" resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz#96159db61d34a29dba454c959f5ae4a649ba9167" integrity sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA== -"@babel/helper-function-name@^7.23.0": +"@babel/helper-function-name@^7.18.9", "@babel/helper-function-name@^7.22.5", "@babel/helper-function-name@^7.23.0": version "7.23.0" resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz#1f9a3cdbd5b2698a670c30d2735f9af95ed52759" integrity sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw== @@ -52,30 +177,122 @@ "@babel/template" "^7.22.15" "@babel/types" "^7.23.0" -"@babel/helper-hoist-variables@^7.22.5": +"@babel/helper-hoist-variables@^7.18.6", "@babel/helper-hoist-variables@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz#c01a007dac05c085914e8fb652b339db50d823bb" integrity sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw== dependencies: "@babel/types" "^7.22.5" -"@babel/helper-split-export-declaration@^7.22.6": +"@babel/helper-member-expression-to-functions@^7.22.15", "@babel/helper-member-expression-to-functions@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.23.0.tgz#9263e88cc5e41d39ec18c9a3e0eced59a3e7d366" + integrity sha512-6gfrPwh7OuT6gZyJZvd6WbTfrqAo7vm4xCzAXOusKqq/vWdKXphTpj5klHKNmRUU6/QRGlBsyU9mAIPaWHlqJA== + dependencies: + "@babel/types" "^7.23.0" + +"@babel/helper-module-imports@^7.18.6", "@babel/helper-module-imports@^7.22.15": + version "7.22.15" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.22.15.tgz#16146307acdc40cc00c3b2c647713076464bdbf0" + integrity sha512-0pYVBnDKZO2fnSPCrgM/6WMc7eS20Fbok+0r88fp+YtWVLZrp4CkafFGIp+W0VKw4a22sgebPT99y+FDNMdP4w== + dependencies: + "@babel/types" "^7.22.15" + +"@babel/helper-module-transforms@^7.18.9", "@babel/helper-module-transforms@^7.23.3": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.23.3.tgz#d7d12c3c5d30af5b3c0fcab2a6d5217773e2d0f1" + integrity sha512-7bBs4ED9OmswdfDzpz4MpWgSrV7FXlc3zIagvLFjS5H+Mk7Snr21vQ6QwrsoCGMfNC4e4LQPdoULEt4ykz0SRQ== + dependencies: + "@babel/helper-environment-visitor" "^7.22.20" + "@babel/helper-module-imports" "^7.22.15" + "@babel/helper-simple-access" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.6" + "@babel/helper-validator-identifier" "^7.22.20" + +"@babel/helper-optimise-call-expression@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.22.5.tgz#f21531a9ccbff644fdd156b4077c16ff0c3f609e" + integrity sha512-HBwaojN0xFRx4yIvpwGqxiV2tUfl7401jlok564NgB9EHS1y6QT17FmKWm4ztqjeVdXLuC4fSvHc5ePpQjoTbw== + dependencies: + "@babel/types" "^7.22.5" + +"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.16.7", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.18.9", "@babel/helper-plugin-utils@^7.20.2", "@babel/helper-plugin-utils@^7.22.5", 
"@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.22.5.tgz#dd7ee3735e8a313b9f7b05a773d892e88e6d7295" + integrity sha512-uLls06UVKgFG9QD4OeFYLEGteMIAa5kpTPcFL28yuCIIzsf6ZyKZMllKVOCZFhiZ5ptnwX4mtKdWCBE/uT4amg== + +"@babel/helper-remap-async-to-generator@^7.18.9", "@babel/helper-remap-async-to-generator@^7.22.20": + version "7.22.20" + resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.22.20.tgz#7b68e1cb4fa964d2996fd063723fb48eca8498e0" + integrity sha512-pBGyV4uBqOns+0UvhsTO8qgl8hO89PmiDYv+/COyp1aeMcmfrfruz+/nCMFiYyFF/Knn0yfrC85ZzNFjembFTw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.22.5" + "@babel/helper-environment-visitor" "^7.22.20" + "@babel/helper-wrap-function" "^7.22.20" + +"@babel/helper-replace-supers@^7.22.20": + version "7.22.20" + resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.22.20.tgz#e37d367123ca98fe455a9887734ed2e16eb7a793" + integrity sha512-qsW0In3dbwQUbK8kejJ4R7IHVGwHJlV6lpG6UA7a9hSa2YEiAib+N1T2kr6PEeUT+Fl7najmSOS6SmAwCHK6Tw== + dependencies: + "@babel/helper-environment-visitor" "^7.22.20" + "@babel/helper-member-expression-to-functions" "^7.22.15" + "@babel/helper-optimise-call-expression" "^7.22.5" + +"@babel/helper-simple-access@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.22.5.tgz#4938357dc7d782b80ed6dbb03a0fba3d22b1d5de" + integrity sha512-n0H99E/K+Bika3++WNL17POvo4rKWZ7lZEp1Q+fStVbUi8nxPQEBOlTmCOxW/0JsS56SKKQ+ojAe2pHKJHN35w== + dependencies: + "@babel/types" "^7.22.5" + +"@babel/helper-skip-transparent-expression-wrappers@^7.20.0", "@babel/helper-skip-transparent-expression-wrappers@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.22.5.tgz#007f15240b5751c537c40e77abb4e89eeaaa8847" + integrity sha512-tK14r66JZKiC43p8Ki33yLBVJKlQDFoA8GYN67lWCDCqoL6EMMSuM9b+Iff2jHaM/RRFYl7K+iiru7hbRqNx8Q== + dependencies: + "@babel/types" "^7.22.5" + +"@babel/helper-split-export-declaration@^7.18.6", "@babel/helper-split-export-declaration@^7.22.6": version "7.22.6" resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz#322c61b7310c0997fe4c323955667f18fcefb91c" integrity sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g== dependencies: "@babel/types" "^7.22.5" -"@babel/helper-string-parser@^7.23.4": +"@babel/helper-string-parser@^7.18.10", "@babel/helper-string-parser@^7.23.4": version "7.23.4" resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.23.4.tgz#9478c707febcbbe1ddb38a3d91a2e054ae622d83" integrity sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ== -"@babel/helper-validator-identifier@^7.22.20": +"@babel/helper-validator-identifier@^7.18.6", "@babel/helper-validator-identifier@^7.22.20": version "7.22.20" resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz#c4ae002c61d2879e724581d96665583dbc1dc0e0" integrity sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A== +"@babel/helper-validator-option@^7.18.6", 
"@babel/helper-validator-option@^7.22.15", "@babel/helper-validator-option@^7.23.5": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.23.5.tgz#907a3fbd4523426285365d1206c423c4c5520307" + integrity sha512-85ttAOMLsr53VgXkTbkx8oA6YTfT4q7/HzXSLEYmjcSTJPMPQtvq1BD79Byep5xMUYbGRzEpDsjUf3dyp54IKw== + +"@babel/helper-wrap-function@^7.22.20": + version "7.22.20" + resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.22.20.tgz#15352b0b9bfb10fc9c76f79f6342c00e3411a569" + integrity sha512-pms/UwkOpnQe/PDAEdV/d7dVCoBbB+R4FvYoHGZz+4VPcg7RtYy2KP7S2lbuWM6FCSgob5wshfGESbC/hzNXZw== + dependencies: + "@babel/helper-function-name" "^7.22.5" + "@babel/template" "^7.22.15" + "@babel/types" "^7.22.19" + +"@babel/helpers@^7.18.9", "@babel/helpers@^7.23.9": + version "7.23.9" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.23.9.tgz#c3e20bbe7f7a7e10cb9b178384b4affdf5995c7d" + integrity sha512-87ICKgU5t5SzOT7sBMfCOZQ2rHjRU+Pcb9BoILMYz600W6DkVRLFBPwQ18gwUVvggqXivaUakpnxWQGbpywbBQ== + dependencies: + "@babel/template" "^7.23.9" + "@babel/traverse" "^7.23.9" + "@babel/types" "^7.23.9" + "@babel/highlight@^7.23.4": version "7.23.4" resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.23.4.tgz#edaadf4d8232e1a961432db785091207ead0621b" @@ -85,11 +302,708 @@ chalk "^2.4.2" js-tokens "^4.0.0" +"@babel/parser@7.18.11": + version "7.18.11" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.18.11.tgz#68bb07ab3d380affa9a3f96728df07969645d2d9" + integrity sha512-9JKn5vN+hDt0Hdqn1PiJ2guflwP+B6Ga8qbDuoF0PzzVhrzsKIJo8yGqVk6CmMHiMei9w1C1Bp9IMJSIK+HPIQ== + +"@babel/parser@^7.14.7", "@babel/parser@^7.18.10", "@babel/parser@^7.18.11", "@babel/parser@^7.23.9": + version "7.23.9" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.9.tgz#7b903b6149b0f8fa7ad564af646c4c38a77fc44b" + integrity sha512-9tcKgqKbs3xGJ+NtKF2ndOBBLVwPjl1SHxPQkd36r3Dlirw3xWUeGaTbqr7uGZcTaxkVNwc+03SVP7aCdWrTlA== + "@babel/parser@^7.22.15", "@babel/parser@^7.23.6", "@babel/parser@^7.7.0": version "7.23.6" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.6.tgz#ba1c9e512bda72a47e285ae42aff9d2a635a9e3b" integrity sha512-Z2uID7YJ7oNvAI20O9X0bblw7Qqs8Q2hFy0R9tAfnfLkp5MW0UH9eUvnDSnFwKZ0AvgS1ucqR4KzvVHgnke1VQ== +"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.18.6": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.23.3.tgz#5cd1c87ba9380d0afb78469292c954fee5d2411a" + integrity sha512-iRkKcCqb7iGnq9+3G6rZ+Ciz5VywC4XNRHe57lKM+jOeYAoR0lVqdeeDRfh0tQcTfw/+vBhHn926FmQhLtlFLQ== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.18.9": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.23.3.tgz#f6652bb16b94f8f9c20c50941e16e9756898dc5d" + integrity sha512-WwlxbfMNdVEpQjZmK5mhm7oSwD3dS6eU+Iwsi4Knl9wAletWem7kaRsGOG+8UEbRyqxY4SS5zvtfXwX+jMxUwQ== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" + "@babel/plugin-transform-optional-chaining" "^7.23.3" + +"@babel/plugin-proposal-async-generator-functions@^7.18.10": + version 
"7.20.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.20.7.tgz#bfb7276d2d573cb67ba379984a2334e262ba5326" + integrity sha512-xMbiLsn/8RK7Wq7VeVytytS2L6qE69bXPB10YCmMdDZbKF4okCqY74pI/jJQ/8U0b/F6NrT2+14b8/P9/3AMGA== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-remap-async-to-generator" "^7.18.9" + "@babel/plugin-syntax-async-generators" "^7.8.4" + +"@babel/plugin-proposal-class-properties@7.18.6", "@babel/plugin-proposal-class-properties@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz#b110f59741895f7ec21a6fff696ec46265c446a3" + integrity sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-proposal-class-static-block@^7.18.6": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.21.0.tgz#77bdd66fb7b605f3a61302d224bdfacf5547977d" + integrity sha512-XP5G9MWNUskFuP30IfFSEFB0Z6HzLIUcjYM4bYOPHXl7eiJ9HFv8tWj6TXTN5QODiEhDZAeI4hLok2iHFFV4hw== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.21.0" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/plugin-syntax-class-static-block" "^7.14.5" + +"@babel/plugin-proposal-dynamic-import@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.18.6.tgz#72bcf8d408799f547d759298c3c27c7e7faa4d94" + integrity sha512-1auuwmK+Rz13SJj36R+jqFPMJWyKEDd7lLSdOj4oJK0UTgGueSAtkrCvz9ewmgyU/P941Rv2fQwZJN8s6QruXw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" + +"@babel/plugin-proposal-export-default-from@7.18.10": + version "7.18.10" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-export-default-from/-/plugin-proposal-export-default-from-7.18.10.tgz#091f4794dbce4027c03cf4ebc64d3fb96b75c206" + integrity sha512-5H2N3R2aQFxkV4PIBUR/i7PUSwgTZjouJKzI8eKswfIjT0PhvzkPn0t0wIS5zn6maQuvtT0t1oHtMUz61LOuow== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/plugin-syntax-export-default-from" "^7.18.6" + +"@babel/plugin-proposal-export-namespace-from@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.18.9.tgz#5f7313ab348cdb19d590145f9247540e94761203" + integrity sha512-k1NtHyOMvlDDFeb9G5PhUXuGj8m/wiwojgQVEhJ/fsVsMCpLyOP4h0uGEjYJKrRI+EVPlb5Jk+Gt9P97lOGwtA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + +"@babel/plugin-proposal-json-strings@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.18.6.tgz#7e8788c1811c393aff762817e7dbf1ebd0c05f0b" + integrity sha512-lr1peyn9kOdbYc0xr0OdHTZ5FMqS6Di+H0Fz2I/JwMzGmzJETNeOFq2pBySw6X/KFL5EWDjlJuMsUGRFb8fQgQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-json-strings" "^7.8.3" + +"@babel/plugin-proposal-logical-assignment-operators@^7.18.9": + version "7.20.7" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.20.7.tgz#dfbcaa8f7b4d37b51e8bfb46d94a5aea2bb89d83" + integrity sha512-y7C7cZgpMIjWlKE5T7eJwp+tnRYM89HmRvWM5EQuB5BoHEONjmQ8lSNmBUwOyy/GFRsohJED51YBF79hE1djug== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + +"@babel/plugin-proposal-nullish-coalescing-operator@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.18.6.tgz#fdd940a99a740e577d6c753ab6fbb43fdb9467e1" + integrity sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + +"@babel/plugin-proposal-numeric-separator@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.18.6.tgz#899b14fbafe87f053d2c5ff05b36029c62e13c75" + integrity sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" + +"@babel/plugin-proposal-object-rest-spread@7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.18.9.tgz#f9434f6beb2c8cae9dfcf97d2a5941bbbf9ad4e7" + integrity sha512-kDDHQ5rflIeY5xl69CEqGEZ0KY369ehsCIEbTGb4siHG5BE9sga/T0r0OUwyZNLMmZE79E1kbsqAjwFCW4ds6Q== + dependencies: + "@babel/compat-data" "^7.18.8" + "@babel/helper-compilation-targets" "^7.18.9" + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-transform-parameters" "^7.18.8" + +"@babel/plugin-proposal-object-rest-spread@^7.18.9": + version "7.20.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.20.7.tgz#aa662940ef425779c75534a5c41e9d936edc390a" + integrity sha512-d2S98yCiLxDVmBmE8UjGcfPvNEUbA1U5q5WxaWFUGRzJSVAZqm5W6MbPct0jxnegUZ0niLeNX+IOzEs7wYg9Dg== + dependencies: + "@babel/compat-data" "^7.20.5" + "@babel/helper-compilation-targets" "^7.20.7" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-transform-parameters" "^7.20.7" + +"@babel/plugin-proposal-optional-catch-binding@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.18.6.tgz#f9400d0e6a3ea93ba9ef70b09e72dd6da638a2cb" + integrity sha512-Q40HEhs9DJQyaZfUjjn6vE8Cv4GmMHCYuMGIWUnlxH6400VGxOuwWsPt4FxXxJkC/5eOzgn0z21M9gMT4MOhbw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + +"@babel/plugin-proposal-optional-chaining@^7.18.9": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.21.0.tgz#886f5c8978deb7d30f678b2e24346b287234d3ea" + integrity sha512-p4zeefM72gpmEe2fkUr/OnOXpWEf8nAgk7ZYVqqfFiyIG7oFfVZcCrU64hWn5xp4tQ9LkV4bTIa5rD0KANpKNA== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-skip-transparent-expression-wrappers" "^7.20.0" + 
"@babel/plugin-syntax-optional-chaining" "^7.8.3" + +"@babel/plugin-proposal-private-methods@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.18.6.tgz#5209de7d213457548a98436fa2882f52f4be6bea" + integrity sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-proposal-private-property-in-object@^7.18.6": + version "7.21.11" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.11.tgz#69d597086b6760c4126525cfa154f34631ff272c" + integrity sha512-0QZ8qP/3RLDVBwBFoWAwCtgcDZJVwA5LUJRZU8x2YFfKNuFq161wK3cuGrALu5yiPu+vzwTAg/sMWVNeWeNyaw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-create-class-features-plugin" "^7.21.0" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/plugin-syntax-private-property-in-object" "^7.14.5" + +"@babel/plugin-proposal-unicode-property-regex@^7.18.6", "@babel/plugin-proposal-unicode-property-regex@^7.4.4": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.18.6.tgz#af613d2cd5e643643b65cded64207b15c85cb78e" + integrity sha512-2BShG/d5yoZyXZfVePH91urL5wTG6ASZU9M4o03lKK8u8UW1y08OMttBSOADTcJrnPMpvDXRG3G8fyLh4ovs8w== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-syntax-async-generators@^7.8.4": + version "7.8.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" + integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-class-properties@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10" + integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== + dependencies: + "@babel/helper-plugin-utils" "^7.12.13" + +"@babel/plugin-syntax-class-static-block@^7.14.5": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz#195df89b146b4b78b3bf897fd7a257c84659d406" + integrity sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-dynamic-import@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz#62bf98b2da3cd21d626154fc96ee5b3cb68eacb3" + integrity sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-export-default-from@^7.18.6": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-export-default-from/-/plugin-syntax-export-default-from-7.23.3.tgz#7e6d4bf595d5724230200fb2b7401d4734b15335" + integrity 
sha512-KeENO5ck1IeZ/l2lFZNy+mpobV3D2Zy5C1YFnWm+YuY5mQiAWc4yAp13dqgguwsBsFVLh4LPCEqCa5qW13N+hw== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-syntax-export-namespace-from@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz#028964a9ba80dbc094c915c487ad7c4e7a66465a" + integrity sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.3" + +"@babel/plugin-syntax-import-assertions@^7.18.6": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.23.3.tgz#9c05a7f592982aff1a2768260ad84bcd3f0c77fc" + integrity sha512-lPgDSU+SJLK3xmFDTV2ZRQAiM7UuUjGidwBywFavObCiZc1BeAAcMtHJKUya92hPHO+at63JJPLygilZard8jw== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-syntax-json-strings@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" + integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-jsx@^7.23.3": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.23.3.tgz#8f2e4f8a9b5f9aa16067e142c1ac9cd9f810f473" + integrity sha512-EB2MELswq55OHUoRZLGg/zC7QWUKfNLpE57m/S2yr1uEneIgsTgrSzXP3NXEsMkVn76OlaVVnzN+ugObuYGwhg== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-syntax-logical-assignment-operators@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" + integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" + integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-numeric-separator@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" + integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-object-rest-spread@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" + integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-catch-binding@^7.8.3": + version "7.8.3" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" + integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-chaining@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" + integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-private-property-in-object@^7.14.5": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz#0dc6671ec0ea22b6e94a1114f857970cd39de1ad" + integrity sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-top-level-await@^7.14.5": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c" + integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-typescript@^7.23.3": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.23.3.tgz#24f460c85dbbc983cd2b9c4994178bcc01df958f" + integrity sha512-9EiNjVJOMwCO+43TqoTrgQ8jMwcAd0sWyXi9RPfIsLTj4R2MADDDQXELhffaUx/uJv2AYcxBgPwH6j4TIA4ytQ== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-arrow-functions@^7.18.6": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.23.3.tgz#94c6dcfd731af90f27a79509f9ab7fb2120fc38b" + integrity sha512-NzQcQrzaQPkaEwoTm4Mhyl8jI1huEL/WWIEvudjTCMJ9aBZNpsJbMASx7EQECtQQPS/DcnFpo0FIh3LvEO9cxQ== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-async-to-generator@^7.18.6": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.23.3.tgz#d1f513c7a8a506d43f47df2bf25f9254b0b051fa" + integrity sha512-A7LFsKi4U4fomjqXJlZg/u0ft/n8/7n7lpffUP/ZULx/DtV9SGlNKZolHH6PE8Xl1ngCc0M11OaeZptXVkfKSw== + dependencies: + "@babel/helper-module-imports" "^7.22.15" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-remap-async-to-generator" "^7.22.20" + +"@babel/plugin-transform-block-scoped-functions@^7.18.6": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.23.3.tgz#fe1177d715fb569663095e04f3598525d98e8c77" + integrity sha512-vI+0sIaPIO6CNuM9Kk5VmXcMVRiOpDh7w2zZt9GXzmE/9KD70CUEVhvPR/etAeNK/FAEkhxQtXOzVF3EuRL41A== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-block-scoping@^7.18.9": + version "7.23.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.23.4.tgz#b2d38589531c6c80fbe25e6b58e763622d2d3cf5" + integrity 
sha512-0QqbP6B6HOh7/8iNR4CQU2Th/bbRtBp4KS9vcaZd1fZ0wSh5Fyssg0UCIHwxh+ka+pNDREbVLQnHCMHKZfPwfw== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-classes@^7.18.9": + version "7.23.8" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.23.8.tgz#d08ae096c240347badd68cdf1b6d1624a6435d92" + integrity sha512-yAYslGsY1bX6Knmg46RjiCiNSwJKv2IUC8qOdYKqMMr0491SXFhcHqOdRDeCRohOOIzwN/90C6mQ9qAKgrP7dg== + dependencies: + "@babel/helper-annotate-as-pure" "^7.22.5" + "@babel/helper-compilation-targets" "^7.23.6" + "@babel/helper-environment-visitor" "^7.22.20" + "@babel/helper-function-name" "^7.23.0" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-replace-supers" "^7.22.20" + "@babel/helper-split-export-declaration" "^7.22.6" + globals "^11.1.0" + +"@babel/plugin-transform-computed-properties@^7.18.9": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.23.3.tgz#652e69561fcc9d2b50ba4f7ac7f60dcf65e86474" + integrity sha512-dTj83UVTLw/+nbiHqQSFdwO9CbTtwq1DsDqm3CUEtDrZNET5rT5E6bIdTlOftDTDLMYxvxHNEYO4B9SLl8SLZw== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/template" "^7.22.15" + +"@babel/plugin-transform-destructuring@^7.18.9": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.23.3.tgz#8c9ee68228b12ae3dff986e56ed1ba4f3c446311" + integrity sha512-n225npDqjDIr967cMScVKHXJs7rout1q+tt50inyBCPkyZ8KxeI6d+GIbSBTT/w/9WdlWDOej3V9HE5Lgk57gw== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-dotall-regex@^7.18.6", "@babel/plugin-transform-dotall-regex@^7.4.4": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.23.3.tgz#3f7af6054882ede89c378d0cf889b854a993da50" + integrity sha512-vgnFYDHAKzFaTVp+mneDsIEbnJ2Np/9ng9iviHw3P/KVcgONxpNULEW/51Z/BaFojG2GI2GwwXck5uV1+1NOYQ== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.22.15" + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-duplicate-keys@^7.18.9": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.23.3.tgz#664706ca0a5dfe8d066537f99032fc1dc8b720ce" + integrity sha512-RrqQ+BQmU3Oyav3J+7/myfvRCq7Tbz+kKLLshUmMwNlDHExbGL7ARhajvoBJEvc+fCguPPu887N+3RRXBVKZUA== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-exponentiation-operator@^7.18.6": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.23.3.tgz#ea0d978f6b9232ba4722f3dbecdd18f450babd18" + integrity sha512-5fhCsl1odX96u7ILKHBj4/Y8vipoqwsJMh4csSA8qFfxrZDEA4Ssku2DyNvMJSmZNOEBT750LfFPbtrnTP90BQ== + dependencies: + "@babel/helper-builder-binary-assignment-operator-visitor" "^7.22.15" + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-for-of@^7.18.8": + version "7.23.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.23.6.tgz#81c37e24171b37b370ba6aaffa7ac86bcb46f94e" + integrity sha512-aYH4ytZ0qSuBbpfhuofbg/e96oQ7U2w1Aw/UQmKT+1l39uEhUPoFS3fHevDc1G0OvewyDudfMKY1OulczHzWIw== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" + 
+"@babel/plugin-transform-function-name@^7.18.9": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.23.3.tgz#8f424fcd862bf84cb9a1a6b42bc2f47ed630f8dc" + integrity sha512-I1QXp1LxIvt8yLaib49dRW5Okt7Q4oaxao6tFVKS/anCdEOMtYwWVKoiOA1p34GOWIZjUK0E+zCp7+l1pfQyiw== + dependencies: + "@babel/helper-compilation-targets" "^7.22.15" + "@babel/helper-function-name" "^7.23.0" + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-literals@^7.18.9": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.23.3.tgz#8214665f00506ead73de157eba233e7381f3beb4" + integrity sha512-wZ0PIXRxnwZvl9AYpqNUxpZ5BiTGrYt7kueGQ+N5FiQ7RCOD4cm8iShd6S6ggfVIWaJf2EMk8eRzAh52RfP4rQ== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-member-expression-literals@^7.18.6": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.23.3.tgz#e37b3f0502289f477ac0e776b05a833d853cabcc" + integrity sha512-sC3LdDBDi5x96LA+Ytekz2ZPk8i/Ck+DEuDbRAll5rknJ5XRTSaPKEYwomLcs1AA8wg9b3KjIQRsnApj+q51Ag== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-modules-amd@^7.18.6": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.23.3.tgz#e19b55436a1416829df0a1afc495deedfae17f7d" + integrity sha512-vJYQGxeKM4t8hYCKVBlZX/gtIY2I7mRGFNcm85sgXGMTBcoV3QdVtdpbcWEbzbfUIUZKwvgFT82mRvaQIebZzw== + dependencies: + "@babel/helper-module-transforms" "^7.23.3" + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-modules-commonjs@^7.18.6", "@babel/plugin-transform-modules-commonjs@^7.23.3": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.23.3.tgz#661ae831b9577e52be57dd8356b734f9700b53b4" + integrity sha512-aVS0F65LKsdNOtcz6FRCpE4OgsP2OFnW46qNxNIX9h3wuzaNcSQsJysuMwqSibC98HPrf2vCgtxKNwS0DAlgcA== + dependencies: + "@babel/helper-module-transforms" "^7.23.3" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-simple-access" "^7.22.5" + +"@babel/plugin-transform-modules-systemjs@^7.18.9": + version "7.23.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.23.9.tgz#105d3ed46e4a21d257f83a2f9e2ee4203ceda6be" + integrity sha512-KDlPRM6sLo4o1FkiSlXoAa8edLXFsKKIda779fbLrvmeuc3itnjCtaO6RrtoaANsIJANj+Vk1zqbZIMhkCAHVw== + dependencies: + "@babel/helper-hoist-variables" "^7.22.5" + "@babel/helper-module-transforms" "^7.23.3" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-validator-identifier" "^7.22.20" + +"@babel/plugin-transform-modules-umd@^7.18.6": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.23.3.tgz#5d4395fccd071dfefe6585a4411aa7d6b7d769e9" + integrity sha512-zHsy9iXX2nIsCBFPud3jKn1IRPWg3Ing1qOZgeKV39m1ZgIdpJqvlWVeiHBZC6ITRG0MfskhYe9cLgntfSFPIg== + dependencies: + "@babel/helper-module-transforms" "^7.23.3" + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-named-capturing-groups-regex@^7.18.6": + version "7.22.5" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.22.5.tgz#67fe18ee8ce02d57c855185e27e3dc959b2e991f" + integrity sha512-YgLLKmS3aUBhHaxp5hi1WJTgOUb/NCuDHzGT9z9WTt3YG+CPRhJs6nprbStx6DnWM4dh6gt7SU3sZodbZ08adQ== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.22.5" + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-new-target@^7.18.6": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.23.3.tgz#5491bb78ed6ac87e990957cea367eab781c4d980" + integrity sha512-YJ3xKqtJMAT5/TIZnpAR3I+K+WaDowYbN3xyxI8zxx/Gsypwf9B9h0VB+1Nh6ACAAPRS5NSRje0uVv5i79HYGQ== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-object-super@^7.18.6": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.23.3.tgz#81fdb636dcb306dd2e4e8fd80db5b2362ed2ebcd" + integrity sha512-BwQ8q0x2JG+3lxCVFohg+KbQM7plfpBwThdW9A6TMtWwLsbDA01Ek2Zb/AgDN39BiZsExm4qrXxjk+P1/fzGrA== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-replace-supers" "^7.22.20" + +"@babel/plugin-transform-optional-chaining@^7.23.3": + version "7.23.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.23.4.tgz#6acf61203bdfc4de9d4e52e64490aeb3e52bd017" + integrity sha512-ZU8y5zWOfjM5vZ+asjgAPwDaBjJzgufjES89Rs4Lpq63O300R/kOz30WCLo6BxxX6QVEilwSlpClnG5cZaikTA== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + +"@babel/plugin-transform-parameters@^7.18.8", "@babel/plugin-transform-parameters@^7.20.7": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.23.3.tgz#83ef5d1baf4b1072fa6e54b2b0999a7b2527e2af" + integrity sha512-09lMt6UsUb3/34BbECKVbVwrT9bO6lILWln237z7sLaWnMsTi7Yc9fhX5DLpkJzAGfaReXI22wP41SZmnAA3Vw== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-property-literals@^7.18.6": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.23.3.tgz#54518f14ac4755d22b92162e4a852d308a560875" + integrity sha512-jR3Jn3y7cZp4oEWPFAlRsSWjxKe4PZILGBSd4nis1TsC5qeSpb+nrtihJuDhNI7QHiVbUaiXa0X2RZY3/TI6Nw== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-regenerator@^7.18.6": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.23.3.tgz#141afd4a2057298602069fce7f2dc5173e6c561c" + integrity sha512-KP+75h0KghBMcVpuKisx3XTu9Ncut8Q8TuvGO4IhY+9D5DFEckQefOuIsB/gQ2tG71lCke4NMrtIPS8pOj18BQ== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + regenerator-transform "^0.15.2" + +"@babel/plugin-transform-reserved-words@^7.18.6": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.23.3.tgz#4130dcee12bd3dd5705c587947eb715da12efac8" + integrity sha512-QnNTazY54YqgGxwIexMZva9gqbPa15t/x9VS+0fsEFWplwVpXYZivtgl43Z1vMpc1bdPP2PP8siFeVcnFvA3Cg== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-runtime@7.18.10": + version "7.18.10" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.18.10.tgz#37d14d1fa810a368fd635d4d1476c0154144a96f" + integrity sha512-q5mMeYAdfEbpBAgzl7tBre/la3LeCxmDO1+wMXRdPWbcoMjR3GiXlCLk7JBZVVye0bqTGNMbt0yYVXX1B1jEWQ== + dependencies: + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.9" + babel-plugin-polyfill-corejs2 "^0.3.2" + babel-plugin-polyfill-corejs3 "^0.5.3" + babel-plugin-polyfill-regenerator "^0.4.0" + semver "^6.3.0" + +"@babel/plugin-transform-shorthand-properties@^7.18.6": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.23.3.tgz#97d82a39b0e0c24f8a981568a8ed851745f59210" + integrity sha512-ED2fgqZLmexWiN+YNFX26fx4gh5qHDhn1O2gvEhreLW2iI63Sqm4llRLCXALKrCnbN4Jy0VcMQZl/SAzqug/jg== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-spread@^7.18.9": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.23.3.tgz#41d17aacb12bde55168403c6f2d6bdca563d362c" + integrity sha512-VvfVYlrlBVu+77xVTOAoxQ6mZbnIq5FM0aGBSFEcIh03qHf+zNqA4DC/3XMUozTg7bZV3e3mZQ0i13VB6v5yUg== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-skip-transparent-expression-wrappers" "^7.22.5" + +"@babel/plugin-transform-sticky-regex@^7.18.6": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.23.3.tgz#dec45588ab4a723cb579c609b294a3d1bd22ff04" + integrity sha512-HZOyN9g+rtvnOU3Yh7kSxXrKbzgrm5X4GncPY1QOquu7epga5MxKHVpYu2hvQnry/H+JjckSYRb93iNfsioAGg== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-template-literals@^7.18.9": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.23.3.tgz#5f0f028eb14e50b5d0f76be57f90045757539d07" + integrity sha512-Flok06AYNp7GV2oJPZZcP9vZdszev6vPBkHLwxwSpaIqx75wn6mUd3UFWsSsA0l8nXAKkyCmL/sR02m8RYGeHg== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-typeof-symbol@^7.18.9": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.23.3.tgz#9dfab97acc87495c0c449014eb9c547d8966bca4" + integrity sha512-4t15ViVnaFdrPC74be1gXBSMzXk3B4Us9lP7uLRQHTFpV5Dvt33pn+2MyyNxmN3VTTm3oTrZVMUmuw3oBnQ2oQ== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/plugin-transform-typescript@^7.23.3": + version "7.23.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.23.6.tgz#aa36a94e5da8d94339ae3a4e22d40ed287feb34c" + integrity sha512-6cBG5mBvUu4VUD04OHKnYzbuHNP8huDsD3EDqqpIpsswTDoqHCjLoHb6+QgsV1WsT2nipRqCPgxD3LXnEO7XfA== + dependencies: + "@babel/helper-annotate-as-pure" "^7.22.5" + "@babel/helper-create-class-features-plugin" "^7.23.6" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-typescript" "^7.23.3" + +"@babel/plugin-transform-unicode-escapes@^7.18.10": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.23.3.tgz#1f66d16cab01fab98d784867d24f70c1ca65b925" + integrity sha512-OMCUx/bU6ChE3r4+ZdylEqAjaQgHAgipgW8nsCfu5pGqDcFytVd91AwRvUJSBZDz0exPGgnjoqhgRYLRjFZc9Q== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + 
+"@babel/plugin-transform-unicode-regex@^7.18.6": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.23.3.tgz#26897708d8f42654ca4ce1b73e96140fbad879dc" + integrity sha512-wMHpNA4x2cIA32b/ci3AfwNgheiva2W0WUKWTK7vBHBhDKfPsc5cFGNWm69WBqpwd86u1qwZ9PWevKqm1A3yAw== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.22.15" + "@babel/helper-plugin-utils" "^7.22.5" + +"@babel/preset-env@7.18.10": + version "7.18.10" + resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.18.10.tgz#83b8dfe70d7eea1aae5a10635ab0a5fe60dfc0f4" + integrity sha512-wVxs1yjFdW3Z/XkNfXKoblxoHgbtUF7/l3PvvP4m02Qz9TZ6uZGxRVYjSQeR87oQmHco9zWitW5J82DJ7sCjvA== + dependencies: + "@babel/compat-data" "^7.18.8" + "@babel/helper-compilation-targets" "^7.18.9" + "@babel/helper-plugin-utils" "^7.18.9" + "@babel/helper-validator-option" "^7.18.6" + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.18.6" + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.18.9" + "@babel/plugin-proposal-async-generator-functions" "^7.18.10" + "@babel/plugin-proposal-class-properties" "^7.18.6" + "@babel/plugin-proposal-class-static-block" "^7.18.6" + "@babel/plugin-proposal-dynamic-import" "^7.18.6" + "@babel/plugin-proposal-export-namespace-from" "^7.18.9" + "@babel/plugin-proposal-json-strings" "^7.18.6" + "@babel/plugin-proposal-logical-assignment-operators" "^7.18.9" + "@babel/plugin-proposal-nullish-coalescing-operator" "^7.18.6" + "@babel/plugin-proposal-numeric-separator" "^7.18.6" + "@babel/plugin-proposal-object-rest-spread" "^7.18.9" + "@babel/plugin-proposal-optional-catch-binding" "^7.18.6" + "@babel/plugin-proposal-optional-chaining" "^7.18.9" + "@babel/plugin-proposal-private-methods" "^7.18.6" + "@babel/plugin-proposal-private-property-in-object" "^7.18.6" + "@babel/plugin-proposal-unicode-property-regex" "^7.18.6" + "@babel/plugin-syntax-async-generators" "^7.8.4" + "@babel/plugin-syntax-class-properties" "^7.12.13" + "@babel/plugin-syntax-class-static-block" "^7.14.5" + "@babel/plugin-syntax-dynamic-import" "^7.8.3" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + "@babel/plugin-syntax-import-assertions" "^7.18.6" + "@babel/plugin-syntax-json-strings" "^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + "@babel/plugin-syntax-private-property-in-object" "^7.14.5" + "@babel/plugin-syntax-top-level-await" "^7.14.5" + "@babel/plugin-transform-arrow-functions" "^7.18.6" + "@babel/plugin-transform-async-to-generator" "^7.18.6" + "@babel/plugin-transform-block-scoped-functions" "^7.18.6" + "@babel/plugin-transform-block-scoping" "^7.18.9" + "@babel/plugin-transform-classes" "^7.18.9" + "@babel/plugin-transform-computed-properties" "^7.18.9" + "@babel/plugin-transform-destructuring" "^7.18.9" + "@babel/plugin-transform-dotall-regex" "^7.18.6" + "@babel/plugin-transform-duplicate-keys" "^7.18.9" + "@babel/plugin-transform-exponentiation-operator" "^7.18.6" + "@babel/plugin-transform-for-of" "^7.18.8" + "@babel/plugin-transform-function-name" "^7.18.9" + "@babel/plugin-transform-literals" "^7.18.9" + "@babel/plugin-transform-member-expression-literals" "^7.18.6" + 
"@babel/plugin-transform-modules-amd" "^7.18.6" + "@babel/plugin-transform-modules-commonjs" "^7.18.6" + "@babel/plugin-transform-modules-systemjs" "^7.18.9" + "@babel/plugin-transform-modules-umd" "^7.18.6" + "@babel/plugin-transform-named-capturing-groups-regex" "^7.18.6" + "@babel/plugin-transform-new-target" "^7.18.6" + "@babel/plugin-transform-object-super" "^7.18.6" + "@babel/plugin-transform-parameters" "^7.18.8" + "@babel/plugin-transform-property-literals" "^7.18.6" + "@babel/plugin-transform-regenerator" "^7.18.6" + "@babel/plugin-transform-reserved-words" "^7.18.6" + "@babel/plugin-transform-shorthand-properties" "^7.18.6" + "@babel/plugin-transform-spread" "^7.18.9" + "@babel/plugin-transform-sticky-regex" "^7.18.6" + "@babel/plugin-transform-template-literals" "^7.18.9" + "@babel/plugin-transform-typeof-symbol" "^7.18.9" + "@babel/plugin-transform-unicode-escapes" "^7.18.10" + "@babel/plugin-transform-unicode-regex" "^7.18.6" + "@babel/preset-modules" "^0.1.5" + "@babel/types" "^7.18.10" + babel-plugin-polyfill-corejs2 "^0.3.2" + babel-plugin-polyfill-corejs3 "^0.5.3" + babel-plugin-polyfill-regenerator "^0.4.0" + core-js-compat "^3.22.1" + semver "^6.3.0" + +"@babel/preset-modules@^0.1.5": + version "0.1.6" + resolved "https://registry.yarnpkg.com/@babel/preset-modules/-/preset-modules-0.1.6.tgz#31bcdd8f19538437339d17af00d177d854d9d458" + integrity sha512-ID2yj6K/4lKfhuU3+EX4UvNbIt7eACFbHmNUjzA+ep+B5971CknnA/9DEWKbRokfbbtblxxxXFJJrH47UEAMVg== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/plugin-proposal-unicode-property-regex" "^7.4.4" + "@babel/plugin-transform-dotall-regex" "^7.4.4" + "@babel/types" "^7.4.4" + esutils "^2.0.2" + +"@babel/preset-typescript@^7.18.6": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/preset-typescript/-/preset-typescript-7.23.3.tgz#14534b34ed5b6d435aa05f1ae1c5e7adcc01d913" + integrity sha512-17oIGVlqz6CchO9RFYn5U6ZpWRZIngayYCtrPRSgANSwC2V1Jb+iP74nVxzzXJte8b8BYxrL1yY96xfhTBrNNQ== + dependencies: + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/helper-validator-option" "^7.22.15" + "@babel/plugin-syntax-jsx" "^7.23.3" + "@babel/plugin-transform-modules-commonjs" "^7.23.3" + "@babel/plugin-transform-typescript" "^7.23.3" + +"@babel/regjsgen@^0.8.0": + version "0.8.0" + resolved "https://registry.yarnpkg.com/@babel/regjsgen/-/regjsgen-0.8.0.tgz#f0ba69b075e1f05fb2825b7fad991e7adbb18310" + integrity sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA== + +"@babel/runtime@^7.11.2", "@babel/runtime@^7.18.9", "@babel/runtime@^7.19.0", "@babel/runtime@^7.8.4": + version "7.23.9" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.23.9.tgz#47791a15e4603bb5f905bc0753801cf21d6345f7" + integrity sha512-0CX6F+BI2s9dkUqr08KFrAIZgNFj75rdBU/DjCyYLIaV/quFjkk6T+EJ2LkZHyZTbEV4L5p97mNkUsHl2wLFAw== + dependencies: + regenerator-runtime "^0.14.0" + "@babel/runtime@^7.20.7": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.22.5.tgz#8564dd588182ce0047d55d7a75e93921107b57ec" @@ -97,6 +1011,15 @@ dependencies: regenerator-runtime "^0.13.11" +"@babel/template@^7.18.10", "@babel/template@^7.23.9": + version "7.23.9" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.23.9.tgz#f881d0487cba2828d3259dcb9ef5005a9731011a" + integrity sha512-+xrD2BWLpvHKNmX2QbpdpsBaWnRxahMwJjO+KZk2JOElj5nSmKezyS1B4u+QbHMTX69t4ukm6hh9lsYQ7GHCKA== + dependencies: + "@babel/code-frame" "^7.23.5" + "@babel/parser" "^7.23.9" + "@babel/types" "^7.23.9" + 
"@babel/template@^7.22.15": version "7.22.15" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.15.tgz#09576efc3830f0430f4548ef971dde1350ef2f38" @@ -106,7 +1029,23 @@ "@babel/parser" "^7.22.15" "@babel/types" "^7.22.15" -"@babel/traverse@^7.23.6", "@babel/traverse@^7.7.0": +"@babel/traverse@7.18.11": + version "7.18.11" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.18.11.tgz#3d51f2afbd83ecf9912bcbb5c4d94e3d2ddaa16f" + integrity sha512-TG9PiM2R/cWCAy6BPJKeHzNbu4lPzOSZpeMfeNErskGpTJx6trEvFaVCbDvpcxwy49BKWmEPwiW8mrysNiDvIQ== + dependencies: + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.18.10" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.18.9" + "@babel/helper-hoist-variables" "^7.18.6" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/parser" "^7.18.11" + "@babel/types" "^7.18.10" + debug "^4.1.0" + globals "^11.1.0" + +"@babel/traverse@7.23.6", "@babel/traverse@^7.23.6", "@babel/traverse@^7.7.0": version "7.23.6" resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.23.6.tgz#b53526a2367a0dd6edc423637f3d2d0f2521abc5" integrity sha512-czastdK1e8YByZqezMPFiZ8ahwVMh/ESl9vPgvgdB9AmFMGP5jfpFax74AQgl5zj4XHzqeYAg2l8PuUeRS1MgQ== @@ -122,7 +1061,32 @@ debug "^4.3.1" globals "^11.1.0" -"@babel/types@^7.17.0", "@babel/types@^7.22.15", "@babel/types@^7.22.5", "@babel/types@^7.23.0", "@babel/types@^7.23.6", "@babel/types@^7.7.0": +"@babel/traverse@^7.18.10", "@babel/traverse@^7.23.9": + version "7.23.9" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.23.9.tgz#2f9d6aead6b564669394c5ce0f9302bb65b9d950" + integrity sha512-I/4UJ9vs90OkBtY6iiiTORVMyIhJ4kAVmsKo9KFc8UOxMeUfi2hvtIBsET5u9GizXE6/GFSuKCTNfgCswuEjRg== + dependencies: + "@babel/code-frame" "^7.23.5" + "@babel/generator" "^7.23.6" + "@babel/helper-environment-visitor" "^7.22.20" + "@babel/helper-function-name" "^7.23.0" + "@babel/helper-hoist-variables" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.6" + "@babel/parser" "^7.23.9" + "@babel/types" "^7.23.9" + debug "^4.3.1" + globals "^11.1.0" + +"@babel/types@7.18.10": + version "7.18.10" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.18.10.tgz#4908e81b6b339ca7c6b7a555a5fc29446f26dde6" + integrity sha512-MJvnbEiiNkpjo+LknnmRrqbY1GPUUggjv+wQVjetM/AONoupqRALB7I6jGqNUAZsKcRIEu2J6FRFvsczljjsaQ== + dependencies: + "@babel/helper-string-parser" "^7.18.10" + "@babel/helper-validator-identifier" "^7.18.6" + to-fast-properties "^2.0.0" + +"@babel/types@7.23.6", "@babel/types@^7.17.0", "@babel/types@^7.22.15", "@babel/types@^7.22.5", "@babel/types@^7.23.0", "@babel/types@^7.23.6", "@babel/types@^7.7.0": version "7.23.6" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.23.6.tgz#be33fdb151e1f5a56877d704492c240fc71c7ccd" integrity sha512-+uarb83brBzPKN38NX1MkB6vb6+mwvR6amUulqAE7ccQw1pEl+bCia9TbdG1lsnFP7lZySvUn37CHyXQdfTwzg== @@ -131,6 +1095,15 @@ "@babel/helper-validator-identifier" "^7.22.20" to-fast-properties "^2.0.0" +"@babel/types@^7.18.10", "@babel/types@^7.22.19", "@babel/types@^7.23.9", "@babel/types@^7.4.4": + version "7.23.9" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.23.9.tgz#1dd7b59a9a2b5c87f8b41e52770b5ecbf492e002" + integrity sha512-dQjSq/7HaSjRM43FFGnv5keM2HsxpmyV1PfaSVm0nzzjwwTmjOe6J4bC8e3+pTEIgHaHj+1ZlLThRJ2auc/w1Q== + dependencies: + "@babel/helper-string-parser" "^7.23.4" + "@babel/helper-validator-identifier" "^7.22.20" + to-fast-properties "^2.0.0" + "@bcoe/v8-coverage@^0.2.3": version 
"0.2.3" resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" @@ -177,6 +1150,16 @@ triple-beam "1.3.0" winston "3.3.3" +"@cosmjs/amino@0.29.4": + version "0.29.4" + resolved "https://registry.yarnpkg.com/@cosmjs/amino/-/amino-0.29.4.tgz#93d5f90033cb2af1573627582cd2cf8a515c3ef4" + integrity sha512-FBjaJ4oUKFtH34O7XjUk370x8sF7EbXD29miXrm0Rl5GEtEORJgQwutXQllHo5gBkpOxC+ZQ40CibXhPzH7G7A== + dependencies: + "@cosmjs/crypto" "^0.29.4" + "@cosmjs/encoding" "^0.29.4" + "@cosmjs/math" "^0.29.4" + "@cosmjs/utils" "^0.29.4" + "@cosmjs/amino@^0.30.1": version "0.30.1" resolved "https://registry.yarnpkg.com/@cosmjs/amino/-/amino-0.30.1.tgz#7c18c14627361ba6c88e3495700ceea1f76baace" @@ -204,6 +1187,19 @@ long "^4.0.0" pako "^2.0.2" +"@cosmjs/crypto@^0.29.4": + version "0.29.5" + resolved "https://registry.yarnpkg.com/@cosmjs/crypto/-/crypto-0.29.5.tgz#ab99fc382b93d8a8db075780cf07487a0f9519fd" + integrity sha512-2bKkaLGictaNL0UipQCL6C1afaisv6k8Wr/GCLx9FqiyFkh9ZgRHDyetD64ZsjnWV/N/D44s/esI+k6oPREaiQ== + dependencies: + "@cosmjs/encoding" "^0.29.5" + "@cosmjs/math" "^0.29.5" + "@cosmjs/utils" "^0.29.5" + "@noble/hashes" "^1" + bn.js "^5.2.0" + elliptic "^6.5.4" + libsodium-wrappers "^0.7.6" + "@cosmjs/crypto@^0.30.0", "@cosmjs/crypto@^0.30.1": version "0.30.1" resolved "https://registry.yarnpkg.com/@cosmjs/crypto/-/crypto-0.30.1.tgz#21e94d5ca8f8ded16eee1389d2639cb5c43c3eb5" @@ -217,6 +1213,15 @@ elliptic "^6.5.4" libsodium-wrappers "^0.7.6" +"@cosmjs/encoding@^0.29.4", "@cosmjs/encoding@^0.29.5": + version "0.29.5" + resolved "https://registry.yarnpkg.com/@cosmjs/encoding/-/encoding-0.29.5.tgz#009a4b1c596cdfd326f30ccfa79f5e56daa264f2" + integrity sha512-G4rGl/Jg4dMCw5u6PEZHZcoHnUBlukZODHbm/wcL4Uu91fkn5jVo5cXXZcvs4VCkArVGrEj/52eUgTZCmOBGWQ== + dependencies: + base64-js "^1.3.0" + bech32 "^1.1.4" + readonly-date "^1.0.0" + "@cosmjs/encoding@^0.30.0", "@cosmjs/encoding@^0.30.1": version "0.30.1" resolved "https://registry.yarnpkg.com/@cosmjs/encoding/-/encoding-0.30.1.tgz#b5c4e0ef7ceb1f2753688eb96400ed70f35c6058" @@ -241,6 +1246,13 @@ "@cosmjs/stream" "^0.30.1" xstream "^11.14.0" +"@cosmjs/math@^0.29.4", "@cosmjs/math@^0.29.5": + version "0.29.5" + resolved "https://registry.yarnpkg.com/@cosmjs/math/-/math-0.29.5.tgz#722c96e080d6c2b62215ce9f4c70da7625b241b6" + integrity sha512-2GjKcv+A9f86MAWYLUkjhw1/WpRl2R1BTb3m9qPG7lzMA7ioYff9jY5SPCfafKdxM4TIQGxXQlYGewQL16O68Q== + dependencies: + bn.js "^5.2.0" + "@cosmjs/math@^0.30.0", "@cosmjs/math@^0.30.1": version "0.30.1" resolved "https://registry.yarnpkg.com/@cosmjs/math/-/math-0.30.1.tgz#8b816ef4de5d3afa66cb9fdfb5df2357a7845b8a" @@ -312,11 +1324,138 @@ readonly-date "^1.0.0" xstream "^11.14.0" +"@cosmjs/utils@^0.29.4", "@cosmjs/utils@^0.29.5": + version "0.29.5" + resolved "https://registry.yarnpkg.com/@cosmjs/utils/-/utils-0.29.5.tgz#3fed1b3528ae8c5f1eb5d29b68755bebfd3294ee" + integrity sha512-m7h+RXDUxOzEOGt4P+3OVPX7PuakZT3GBmaM/Y2u+abN3xZkziykD/NvedYFvvCCdQo714XcGl33bwifS9FZPQ== + "@cosmjs/utils@^0.30.0", "@cosmjs/utils@^0.30.1": version "0.30.1" resolved "https://registry.yarnpkg.com/@cosmjs/utils/-/utils-0.30.1.tgz#6d92582341be3c2ec8d82090253cfa4b7f959edb" integrity sha512-KvvX58MGMWh7xA+N+deCfunkA/ZNDvFLw4YbOmX3f/XBIkqrVY7qlotfy2aNb1kgp6h4B6Yc8YawJPDTfvWX7g== +"@cosmology/ast@^1.4.8": + version "1.4.8" + resolved "https://registry.yarnpkg.com/@cosmology/ast/-/ast-1.4.8.tgz#9c6312de9fc4177d861a8723e4121cfdd4988533" + integrity 
sha512-o039BHTB7HU/XC3ZzTnIPfrB5a0G/FlQoJPBhtSVslLYXmi6aXxvJKtTLuvZzT1+bmzg/izVfo6ja55rAn7cZg== + dependencies: + "@babel/parser" "^7.23.6" + "@babel/types" "7.23.6" + "@cosmology/types" "^1.4.2" + "@cosmology/utils" "^1.4.2" + case "1.6.3" + dotty "0.1.2" + +"@cosmology/proto-parser@^1.4.4": + version "1.4.4" + resolved "https://registry.yarnpkg.com/@cosmology/proto-parser/-/proto-parser-1.4.4.tgz#bb5c9515513cf9a1e70298c3861e049b70d15ee3" + integrity sha512-VFDHrg6mQj0dKLcQOtU1icYoLN28BAxjwig7fpJ8ud2fZleLeOm09rlrcT9ZwwBBmL6Gsv+bhB1gK8q2EfDcTw== + dependencies: + "@cosmology/protobufjs" "6.11.6" + "@cosmology/types" "^1.4.2" + "@cosmology/utils" "^1.4.2" + dotty "0.1.2" + glob "8.0.3" + minimatch "5.1.0" + mkdirp "3.0.0" + +"@cosmology/protobufjs@6.11.6": + version "6.11.6" + resolved "https://registry.yarnpkg.com/@cosmology/protobufjs/-/protobufjs-6.11.6.tgz#6f7bd340ab4a27969b1f75b4bff21a74e03b971a" + integrity sha512-k1opGC9CTX5vD2447pUqLmleVv0Kb8RasBUxkZHudVOvuXs2qAAGONmMIEGRCROKTodhTY9fdTnGU2lCZqAwNw== + dependencies: + "@protobufjs/aspromise" "^1.1.2" + "@protobufjs/base64" "^1.1.2" + "@protobufjs/codegen" "^2.0.4" + "@protobufjs/eventemitter" "^1.1.0" + "@protobufjs/fetch" "^1.1.0" + "@protobufjs/float" "^1.0.2" + "@protobufjs/inquire" "^1.1.0" + "@protobufjs/path" "^1.1.2" + "@protobufjs/pool" "^1.1.0" + "@protobufjs/utf8" "^1.1.0" + "@types/long" "^4.0.1" + "@types/node" ">=13.7.0" + long "^4.0.0" + +"@cosmology/telescope@^1.0.1": + version "1.4.12" + resolved "https://registry.yarnpkg.com/@cosmology/telescope/-/telescope-1.4.12.tgz#5696b6670586cf83297d015ab2da2d760ca0e5fa" + integrity sha512-e6HlViIrzae0y5IhxTLFtNgNsACGtOhwE4v1ahmmvZWqmVkfRl0VxddVqrrCFXPNsJQy0mQUlcVggJ6lzKeuDA== + dependencies: + "@babel/generator" "^7.23.6" + "@babel/parser" "^7.23.6" + "@babel/traverse" "7.23.6" + "@babel/types" "7.23.6" + "@cosmology/ast" "^1.4.8" + "@cosmology/proto-parser" "^1.4.4" + "@cosmology/types" "^1.4.2" + "@cosmology/utils" "^1.4.2" + "@cosmwasm/ts-codegen" "0.34.0" + "@types/parse-package-name" "0.1.0" + case "1.6.3" + dargs "7.0.0" + deepmerge "4.3.1" + dotty "0.1.2" + fuzzy "0.1.3" + glob "8.0.3" + inquirerer "0.1.3" + long "^5.2.1" + minimatch "5.1.0" + minimist "1.2.8" + mkdirp "3.0.0" + parse-package-name "1.0.0" + rimraf "5.0.0" + shelljs "0.8.5" + +"@cosmology/types@^1.4.2": + version "1.4.2" + resolved "https://registry.yarnpkg.com/@cosmology/types/-/types-1.4.2.tgz#e5771f139c8e44735968ed87583e832e410696b9" + integrity sha512-GkzC1eJFJWnQmS4Vkje53qBFledKNB3YYEQnV/sJ28sS9Ud020AvLuXkG8eqRsmdpDNxGI7F+hzWQaaahDk1Ng== + dependencies: + case "1.6.3" + +"@cosmology/utils@^1.4.2": + version "1.4.2" + resolved "https://registry.yarnpkg.com/@cosmology/utils/-/utils-1.4.2.tgz#a66a9a9a0c1bc3ddd8b21be6beae864bd4603610" + integrity sha512-GWACbn2EjHvqmAOxm9MMFdfoweznB6b3kN6Agoj+bu/3EF35roIhEqUF7ku0LhyQuoFU4euxe0YwkkXgWUvCDA== + dependencies: + "@cosmology/types" "^1.4.2" + dotty "0.1.2" + +"@cosmwasm/ts-codegen@0.34.0": + version "0.34.0" + resolved "https://registry.yarnpkg.com/@cosmwasm/ts-codegen/-/ts-codegen-0.34.0.tgz#659bb69fc5f74cbd623567f3090e69072f08b23c" + integrity sha512-Mt/aYj0be9GnA51Arysqr9L8KQNWUJJHoJvtru7hkHmukPSewZDilwHCOJAaehk5gjsP7LFey2o2KEdfzkANhA== + dependencies: + "@babel/core" "7.18.10" + "@babel/generator" "7.18.12" + "@babel/parser" "7.18.11" + "@babel/plugin-proposal-class-properties" "7.18.6" + "@babel/plugin-proposal-export-default-from" "7.18.10" + "@babel/plugin-proposal-object-rest-spread" "7.18.9" + "@babel/plugin-transform-runtime" "7.18.10" + 
"@babel/preset-env" "7.18.10" + "@babel/preset-typescript" "^7.18.6" + "@babel/runtime" "^7.18.9" + "@babel/traverse" "7.18.11" + "@babel/types" "7.18.10" + "@pyramation/json-schema-to-typescript" " 11.0.4" + case "1.6.3" + dargs "7.0.0" + deepmerge "4.2.2" + dotty "0.1.2" + fuzzy "0.1.3" + glob "8.0.3" + inquirerer "0.1.3" + long "^5.2.0" + minimist "1.2.6" + mkdirp "1.0.4" + parse-package-name "1.0.0" + rimraf "3.0.2" + shelljs "0.8.5" + wasm-ast-types "^0.25.0" + "@dabh/diagnostics@^2.0.2": version "2.0.3" resolved "https://registry.yarnpkg.com/@dabh/diagnostics/-/diagnostics-2.0.3.tgz#7f7e97ee9a725dffc7808d93668cc984e1dc477a" @@ -813,11 +1952,31 @@ dependencies: eslint-visitor-keys "^3.3.0" +"@eslint-community/regexpp@^4.4.0": + version "4.10.0" + resolved "https://registry.yarnpkg.com/@eslint-community/regexpp/-/regexpp-4.10.0.tgz#548f6de556857c8bb73bbee70c35dc82a2e74d63" + integrity sha512-Cu96Sd2By9mCNTx2iyKOmq10v22jUVQv0lQnlGNy16oE9589yE+QADPbrMGCkA51cKZSg3Pu/aTJVTGfL/qjUA== + "@eslint-community/regexpp@^4.5.1", "@eslint-community/regexpp@^4.6.1": version "4.6.2" resolved "https://registry.yarnpkg.com/@eslint-community/regexpp/-/regexpp-4.6.2.tgz#1816b5f6948029c5eaacb0703b850ee0cb37d8f8" integrity sha512-pPTNuaAG3QMH+buKyBIGJs3g/S5y0caxw0ygM3YyE6yJFySwiGGSzA+mM3KJ8QQvzeLh3blwgSonkFjgQdxzMw== +"@eslint/eslintrc@^2.1.0": + version "2.1.4" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-2.1.4.tgz#388a269f0f25c1b6adc317b5a2c55714894c70ad" + integrity sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ== + dependencies: + ajv "^6.12.4" + debug "^4.3.2" + espree "^9.6.0" + globals "^13.19.0" + ignore "^5.2.0" + import-fresh "^3.2.1" + js-yaml "^4.1.0" + minimatch "^3.1.2" + strip-json-comments "^3.1.1" + "@eslint/eslintrc@^2.1.2": version "2.1.2" resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-2.1.2.tgz#c6936b4b328c64496692f76944e755738be62396" @@ -833,6 +1992,11 @@ minimatch "^3.1.2" strip-json-comments "^3.1.1" +"@eslint/js@8.44.0": + version "8.44.0" + resolved "https://registry.yarnpkg.com/@eslint/js/-/js-8.44.0.tgz#961a5903c74139390478bdc808bcde3fc45ab7af" + integrity sha512-Ag+9YM4ocKQx9AarydN0KY2j0ErMHNIocPDrVo8zAE44xLTjEtz81OdR68/cydGtk6m6jDb5Za3r2useMzYmSw== + "@eslint/js@^8.47.0": version "8.47.0" resolved "https://registry.yarnpkg.com/@eslint/js/-/js-8.47.0.tgz#5478fdf443ff8158f9de171c704ae45308696c7d" @@ -901,6 +2065,17 @@ resolved "https://registry.yarnpkg.com/@isaacs/string-locale-compare/-/string-locale-compare-1.1.0.tgz#291c227e93fd407a96ecd59879a35809120e432b" integrity sha512-SQ7Kzhh9+D+ZW9MA0zkYv3VXhIDNx+LzM6EJ+/65I3QY+enU6Itte7E5XX7EWrqLW2FN4n06GWzBnPoC3th2aQ== +"@istanbuljs/load-nyc-config@^1.0.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" + integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== + dependencies: + camelcase "^5.3.1" + find-up "^4.1.0" + get-package-type "^0.1.0" + js-yaml "^3.13.1" + resolve-from "^5.0.0" + "@istanbuljs/schema@^0.1.2", "@istanbuljs/schema@^0.1.3": version "0.1.3" resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" @@ -913,6 +2088,13 @@ dependencies: requireindex "~1.1.0" +"@jest/schemas@^28.1.3": + version "28.1.3" + resolved 
"https://registry.yarnpkg.com/@jest/schemas/-/schemas-28.1.3.tgz#ad8b86a66f11f33619e3d7e1dcddd7f2d40ff905" + integrity sha512-/l/VWsdt/aBXgjshLWOFyFt3IVdYypu5y2Wn2rOO1un6nkqIn8SLXzgIMYXFyYsRWDyF5EthmKJMIdJvk08grg== + dependencies: + "@sinclair/typebox" "^0.24.1" + "@jest/schemas@^29.4.3": version "29.4.3" resolved "https://registry.yarnpkg.com/@jest/schemas/-/schemas-29.4.3.tgz#39cf1b8469afc40b6f5a2baaa146e332c4151788" @@ -920,6 +2102,48 @@ dependencies: "@sinclair/typebox" "^0.25.16" +"@jest/transform@28.1.3": + version "28.1.3" + resolved "https://registry.yarnpkg.com/@jest/transform/-/transform-28.1.3.tgz#59d8098e50ab07950e0f2fc0fc7ec462371281b0" + integrity sha512-u5dT5di+oFI6hfcLOHGTAfmUxFRrjK+vnaP0kkVow9Md/M7V/MxqQMOz/VV25UZO8pzeA9PjfTpOu6BDuwSPQA== + dependencies: + "@babel/core" "^7.11.6" + "@jest/types" "^28.1.3" + "@jridgewell/trace-mapping" "^0.3.13" + babel-plugin-istanbul "^6.1.1" + chalk "^4.0.0" + convert-source-map "^1.4.0" + fast-json-stable-stringify "^2.0.0" + graceful-fs "^4.2.9" + jest-haste-map "^28.1.3" + jest-regex-util "^28.0.2" + jest-util "^28.1.3" + micromatch "^4.0.4" + pirates "^4.0.4" + slash "^3.0.0" + write-file-atomic "^4.0.1" + +"@jest/types@^28.1.3": + version "28.1.3" + resolved "https://registry.yarnpkg.com/@jest/types/-/types-28.1.3.tgz#b05de80996ff12512bc5ceb1d208285a7d11748b" + integrity sha512-RyjiyMUZrKz/c+zlMFO1pm70DcIlST8AeWTkoUdZevew44wcNZQHsEVOiCVtgVnlFFD82FPaXycys58cf2muVQ== + dependencies: + "@jest/schemas" "^28.1.3" + "@types/istanbul-lib-coverage" "^2.0.0" + "@types/istanbul-reports" "^3.0.0" + "@types/node" "*" + "@types/yargs" "^17.0.8" + chalk "^4.0.0" + +"@jridgewell/gen-mapping@^0.3.0": + version "0.3.3" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz#7e02e6eb5df901aaedb08514203b096614024098" + integrity sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ== + dependencies: + "@jridgewell/set-array" "^1.0.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.9" + "@jridgewell/gen-mapping@^0.3.2": version "0.3.2" resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz#c1aedc61e853f2bb9f5dfe6d4442d3b565b253b9" @@ -934,6 +2158,11 @@ resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== +"@jridgewell/resolve-uri@^3.1.0": + version "3.1.2" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz#7a0ee601f60f99a20c7c7c5ff0c80388c1189bd6" + integrity sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw== + "@jridgewell/set-array@^1.0.1": version "1.1.2" resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" @@ -944,6 +2173,11 @@ resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== +"@jridgewell/sourcemap-codec@^1.4.14": + version "1.4.15" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32" + integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg== + 
"@jridgewell/trace-mapping@^0.3.12", "@jridgewell/trace-mapping@^0.3.17", "@jridgewell/trace-mapping@^0.3.9": version "0.3.18" resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz#25783b2086daf6ff1dcb53c9249ae480e4dd4cd6" @@ -952,6 +2186,19 @@ "@jridgewell/resolve-uri" "3.1.0" "@jridgewell/sourcemap-codec" "1.4.14" +"@jridgewell/trace-mapping@^0.3.13": + version "0.3.22" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.22.tgz#72a621e5de59f5f1ef792d0793a82ee20f645e4c" + integrity sha512-Wf963MzWtA2sjrNt+g18IAln9lKnlRp+K2eH4jjIoF1wYeq3aMREpG09xhlhdzS0EjwU7qmUJYangWa+151vZw== + dependencies: + "@jridgewell/resolve-uri" "^3.1.0" + "@jridgewell/sourcemap-codec" "^1.4.14" + +"@jsdevtools/ono@^7.1.3": + version "7.1.3" + resolved "https://registry.yarnpkg.com/@jsdevtools/ono/-/ono-7.1.3.tgz#9df03bbd7c696a5c58885c34aa06da41c8543796" + integrity sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg== + "@lerna/add@5.6.2": version "5.6.2" resolved "https://registry.yarnpkg.com/@lerna/add/-/add-5.6.2.tgz#d0e25fd4900b6f8a9548f940cc016ce8a3e2d2ba" @@ -2099,17 +3346,10 @@ resolved "https://registry.yarnpkg.com/@pkgjs/parseargs/-/parseargs-0.11.0.tgz#a77ea742fab25775145434eb1d2328cf5013ac33" integrity sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg== -"@pkgr/utils@^2.3.1": - version "2.4.2" - resolved "https://registry.yarnpkg.com/@pkgr/utils/-/utils-2.4.2.tgz#9e638bbe9a6a6f165580dc943f138fd3309a2cbc" - integrity sha512-POgTXhjrTfbTV63DiFXav4lBHiICLKKwDeaKn9Nphwj7WH6m0hMMCaJkMyRWjgtPFyRKRVoMXXjczsTQRDEhYw== - dependencies: - cross-spawn "^7.0.3" - fast-glob "^3.3.0" - is-glob "^4.0.3" - open "^9.1.0" - picocolors "^1.0.0" - tslib "^2.6.0" +"@pkgr/core@^0.1.0": + version "0.1.1" + resolved "https://registry.yarnpkg.com/@pkgr/core/-/core-0.1.1.tgz#1ec17e2edbec25c8306d424ecfbf13c7de1aaa31" + integrity sha512-cq8o4cWH0ibXh9VGi5P20Tu9XF/0fFXl9EUinr9QfTM7a7p0oTA4iJRCQWppXR1Pg8dSM0UCItCkPwsk9qWWYA== "@protobufjs/aspromise@^1.1.1", "@protobufjs/aspromise@^1.1.2": version "1.1.2" @@ -2164,6 +3404,35 @@ resolved "https://registry.yarnpkg.com/@protobufjs/utf8/-/utf8-1.1.0.tgz#a777360b5b39a1a2e5106f8e858f2fd2d060c570" integrity sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw== +"@pyramation/json-schema-ref-parser@9.0.6": + version "9.0.6" + resolved "https://registry.yarnpkg.com/@pyramation/json-schema-ref-parser/-/json-schema-ref-parser-9.0.6.tgz#556e416ce7dcc15a3c1afd04d6a059e03ed09aeb" + integrity sha512-L5kToHAEc1Q87R8ZwWFaNa4tPHr8Hnm+U+DRdUVq3tUtk+EX4pCqSd34Z6EMxNi/bjTzt1syAG9J2Oo1YFlqSg== + dependencies: + "@jsdevtools/ono" "^7.1.3" + call-me-maybe "^1.0.1" + js-yaml "^3.13.1" + +"@pyramation/json-schema-to-typescript@ 11.0.4": + version "11.0.4" + resolved "https://registry.yarnpkg.com/@pyramation/json-schema-to-typescript/-/json-schema-to-typescript-11.0.4.tgz#959bdb631dad336e1fdbf608a9b5908ab0da1d6b" + integrity sha512-+aSzXDLhMHOEdV2cJ7Tjg/9YenjHU5BCmClVygzwxJZ1R16NOfEn7lTAwVzb/2jivOSnhjHzMJbnSf8b6rd1zg== + dependencies: + "@pyramation/json-schema-ref-parser" "9.0.6" + "@types/json-schema" "^7.0.11" + "@types/lodash" "^4.14.182" + "@types/prettier" "^2.6.1" + cli-color "^2.0.2" + get-stdin "^8.0.0" + glob "^7.1.6" + glob-promise "^4.2.2" + is-glob "^4.0.3" + lodash "^4.17.21" + minimist "^1.2.6" + mkdirp "^1.0.4" + mz "^2.7.0" + prettier "^2.6.2" + "@rollup/plugin-commonjs@^19.0.0": version "19.0.0" 
resolved "https://registry.yarnpkg.com/@rollup/plugin-commonjs/-/plugin-commonjs-19.0.0.tgz#8c3e71f9a66908e60d70cc1be205834ef3e45f71" @@ -2198,6 +3467,11 @@ estree-walker "^1.0.1" picomatch "^2.2.2" +"@sinclair/typebox@^0.24.1": + version "0.24.51" + resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.24.51.tgz#645f33fe4e02defe26f2f5c0410e1c094eac7f5f" + integrity sha512-1P1OROm/rdubP5aFDSZQILU0vrLCJ4fvHt6EoqHEM+2D/G5MK3bIaymUKLit8Js9gbns5UyJnkP/TZROLw4tUA== + "@sinclair/typebox@^0.25.16": version "0.25.24" resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.25.24.tgz#8c7688559979f7079aacaf31aa881c3aa410b718" @@ -2275,6 +3549,14 @@ "@types/qs" "*" "@types/serve-static" "*" +"@types/glob@^7.1.3": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@types/glob/-/glob-7.2.0.tgz#bc1b5bf3aa92f25bd5dd39f35c57361bdce5b2eb" + integrity sha512-ZUxbzKl0IfJILTS6t7ip5fQQM/J3TJYubDm3nMbgubNNYS62eXeUpoLUC8/7fJNiFYHTrGPQn7hspDUzIHX3UA== + dependencies: + "@types/minimatch" "*" + "@types/node" "*" + "@types/glob@^8.1.0": version "8.1.0" resolved "https://registry.yarnpkg.com/@types/glob/-/glob-8.1.0.tgz#b63e70155391b0584dce44e7ea25190bbc38f2fc" @@ -2283,6 +3565,13 @@ "@types/minimatch" "^5.1.2" "@types/node" "*" +"@types/graceful-fs@^4.1.3": + version "4.1.9" + resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.9.tgz#2a06bc0f68a20ab37b3e36aa238be6abdf49e8b4" + integrity sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ== + dependencies: + "@types/node" "*" + "@types/http-proxy@^1.17.8": version "1.17.9" resolved "https://registry.yarnpkg.com/@types/http-proxy/-/http-proxy-1.17.9.tgz#7f0e7931343761efde1e2bf48c40f02f3f75705a" @@ -2290,21 +3579,50 @@ dependencies: "@types/node" "*" +"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0": + version "2.0.6" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz#7739c232a1fee9b4d3ce8985f314c0c6d33549d7" + integrity sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w== + "@types/istanbul-lib-coverage@^2.0.1": version "2.0.4" resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44" integrity sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g== +"@types/istanbul-lib-report@*": + version "3.0.3" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.3.tgz#53047614ae72e19fc0401d872de3ae2b4ce350bf" + integrity sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA== + dependencies: + "@types/istanbul-lib-coverage" "*" + +"@types/istanbul-reports@^3.0.0": + version "3.0.4" + resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz#0f03e3d2f670fbdac586e34b433783070cc16f54" + integrity sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ== + dependencies: + "@types/istanbul-lib-report" "*" + "@types/json-schema@*", "@types/json-schema@^7.0.12": version "7.0.12" resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.12.tgz#d70faba7039d5fca54c83c7dbab41051d2b6f6cb" integrity sha512-Hr5Jfhc9eYOQNPYO5WLDq/n4jqijdHNlDXjuAQkkt+mWdQR+XJToOHrsD4cPaMXpn6KO7y2+wM8AZEs8VpBLVA== +"@types/json-schema@^7.0.11": + version "7.0.15" + resolved 
"https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.15.tgz#596a1747233694d50f6ad8a7869fcb6f56cf5841" + integrity sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA== + "@types/json5@^0.0.29": version "0.0.29" resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" integrity sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ== +"@types/lodash@^4.14.182": + version "4.14.202" + resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.202.tgz#f09dbd2fb082d507178b2f2a5c7e74bd72ff98f8" + integrity sha512-OvlIYQK9tNneDlS0VN54LLd5uiPCBOp7gS5Z0f1mjoJYBrtStzgmJBxONW3U6OZqdtNzZPmn9BS/7WI7BFFcFQ== + "@types/long@^4.0.1": version "4.0.2" resolved "https://registry.yarnpkg.com/@types/long/-/long-4.0.2.tgz#b74129719fc8d11c01868010082d483b7545591a" @@ -2332,16 +3650,16 @@ resolved "https://registry.yarnpkg.com/@types/mime/-/mime-1.3.2.tgz#93e25bf9ee75fe0fd80b594bc4feb0e862111b5a" integrity sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw== +"@types/minimatch@*", "@types/minimatch@^5.1.2": + version "5.1.2" + resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-5.1.2.tgz#07508b45797cb81ec3f273011b054cd0755eddca" + integrity sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA== + "@types/minimatch@^3.0.3": version "3.0.5" resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.5.tgz#1001cc5e6a3704b83c236027e77f2f58ea010f40" integrity sha512-Klz949h02Gz2uZCMGwDUSDS1YBlTdDDgbWHi+81l29tQALUtvz4rAYi5uoVhE5Lagoq6DeqAUlbrHvW/mXDgdQ== -"@types/minimatch@^5.1.2": - version "5.1.2" - resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-5.1.2.tgz#07508b45797cb81ec3f273011b054cd0755eddca" - integrity sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA== - "@types/minimist@^1.2.0": version "1.2.2" resolved "https://registry.yarnpkg.com/@types/minimist/-/minimist-1.2.2.tgz#ee771e2ba4b3dc5b372935d549fd9617bf345b8c" @@ -2379,16 +3697,21 @@ resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.0.tgz#e486d0d97396d79beedd0a6e33f4534ff6b4973e" integrity sha512-f5j5b/Gf71L+dbqxIpQ4Z2WlmI/mPJ0fOkGGmFgtb6sAu97EPczzbS3/tJKxmcYDj55OX6ssqwDAWOHIYDRDGA== -"@types/object-hash@^1.3.0": - version "1.3.4" - resolved "https://registry.yarnpkg.com/@types/object-hash/-/object-hash-1.3.4.tgz#079ba142be65833293673254831b5e3e847fe58b" - integrity sha512-xFdpkAkikBgqBdG9vIlsqffDV8GpvnPEzs0IUtr1v3BEB97ijsFQ4RXVbUZwjFThhB4MDSTUfvmxUD5PGx0wXA== - "@types/parse-json@^4.0.0": version "4.0.0" resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0" integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== +"@types/parse-package-name@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@types/parse-package-name/-/parse-package-name-0.1.0.tgz#a4e54e3eef677d8b9d931b54b94ed77e8ae52a4f" + integrity sha512-+vF4M3Cd3Ec22Uwb+OKhDrSAcXQ5I6evRx+1letx4KzfzycU+AOEDHnCifus8In11i8iYNFXPfzg9HWTcC1h+Q== + +"@types/prettier@^2.6.1": + version "2.7.3" + resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.7.3.tgz#3e51a17e291d01d17d3fc61422015a933af7a08f" + integrity 
sha512-+68kP9yzs4LMp7VNh8gdzMSPZFL44MLGqiHWvttYJe+6qnuVr4Ek9wSBQoveqY/r+LwjCcU29kNVkidwim+kYA== + "@types/proper-lockfile@^4.1.2": version "4.1.2" resolved "https://registry.yarnpkg.com/@types/proper-lockfile/-/proper-lockfile-4.1.2.tgz#49537cee7134055ee13a1833b76a1c298f39bb26" @@ -2454,12 +3777,41 @@ resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.6.tgz#250a7b16c3b91f672a24552ec64678eeb1d3a08d" integrity sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ== +"@types/yargs-parser@*": + version "21.0.3" + resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.3.tgz#815e30b786d2e8f0dcd85fd5bcf5e1a04d008f15" + integrity sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ== + "@types/yargs-parser@^21.0.0": version "21.0.0" resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b" integrity sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA== -"@typescript-eslint/eslint-plugin@^6.0.0", "@typescript-eslint/eslint-plugin@^6.20.0": +"@types/yargs@^17.0.8": + version "17.0.32" + resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.32.tgz#030774723a2f7faafebf645f4e5a48371dca6229" + integrity sha512-xQ67Yc/laOG5uMfX/093MRlGGCIBzZMarVa+gfNKJxWAIgykYpVGkBdbqEzGDDfCrVUj6Hiff4mTZ5BA6TmAog== + dependencies: + "@types/yargs-parser" "*" + +"@typescript-eslint/eslint-plugin@^6.0.0": + version "6.13.2" + resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.13.2.tgz#2e03506c5362a65e43cb132c37c9ce2d3cb51470" + integrity sha512-3+9OGAWHhk4O1LlcwLBONbdXsAhLjyCFogJY/cWy2lxdVJ2JrcTF2pTGMaLl2AE7U1l31n8Py4a8bx5DLf/0dQ== + dependencies: + "@eslint-community/regexpp" "^4.5.1" + "@typescript-eslint/scope-manager" "6.13.2" + "@typescript-eslint/type-utils" "6.13.2" + "@typescript-eslint/utils" "6.13.2" + "@typescript-eslint/visitor-keys" "6.13.2" + debug "^4.3.4" + graphemer "^1.4.0" + ignore "^5.2.4" + natural-compare "^1.4.0" + semver "^7.5.4" + ts-api-utils "^1.0.1" + +"@typescript-eslint/eslint-plugin@^6.20.0": version "6.20.0" resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.20.0.tgz#9cf31546d2d5e884602626d89b0e0d2168ac25ed" integrity sha512-fTwGQUnjhoYHeSF6m5pWNkzmDDdsKELYrOBxhjMrofPqCkoC2k3B2wvGHFxa1CTIqkEn88nlW1HVMztjo2K8Hg== @@ -2487,6 +3839,14 @@ "@typescript-eslint/visitor-keys" "6.20.0" debug "^4.3.4" +"@typescript-eslint/scope-manager@6.13.2": + version "6.13.2" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-6.13.2.tgz#5fa4e4adace028dafac212c770640b94e7b61052" + integrity sha512-CXQA0xo7z6x13FeDYCgBkjWzNqzBn8RXaE3QVQVIUm74fWJLkJkaHmHdKStrxQllGh6Q4eUGyNpMe0b1hMkXFA== + dependencies: + "@typescript-eslint/types" "6.13.2" + "@typescript-eslint/visitor-keys" "6.13.2" + "@typescript-eslint/scope-manager@6.20.0": version "6.20.0" resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-6.20.0.tgz#8a926e60f6c47feb5bab878246dc2ae465730151" @@ -2495,6 +3855,16 @@ "@typescript-eslint/types" "6.20.0" "@typescript-eslint/visitor-keys" "6.20.0" +"@typescript-eslint/type-utils@6.13.2": + version "6.13.2" + resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-6.13.2.tgz#ebec2da14a6bb7122e0fd31eea72a382c39c6102" + integrity 
sha512-Qr6ssS1GFongzH2qfnWKkAQmMUyZSyOr0W54nZNU1MDfo+U4Mv3XveeLZzadc/yq8iYhQZHYT+eoXJqnACM1tw== + dependencies: + "@typescript-eslint/typescript-estree" "6.13.2" + "@typescript-eslint/utils" "6.13.2" + debug "^4.3.4" + ts-api-utils "^1.0.1" + "@typescript-eslint/type-utils@6.20.0": version "6.20.0" resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-6.20.0.tgz#d395475cd0f3610dd80c7d8716fa0db767da3831" @@ -2505,11 +3875,29 @@ debug "^4.3.4" ts-api-utils "^1.0.1" +"@typescript-eslint/types@6.13.2": + version "6.13.2" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-6.13.2.tgz#c044aac24c2f6cefb8e921e397acad5417dd0ae6" + integrity sha512-7sxbQ+EMRubQc3wTfTsycgYpSujyVbI1xw+3UMRUcrhSy+pN09y/lWzeKDbvhoqcRbHdc+APLs/PWYi/cisLPg== + "@typescript-eslint/types@6.20.0": version "6.20.0" resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-6.20.0.tgz#5ccd74c29011ae7714ae6973e4ec0c634708b448" integrity sha512-MM9mfZMAhiN4cOEcUOEx+0HmuaW3WBfukBZPCfwSqFnQy0grXYtngKCqpQN339X3RrwtzspWJrpbrupKYUSBXQ== +"@typescript-eslint/typescript-estree@6.13.2": + version "6.13.2" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-6.13.2.tgz#ae556ee154c1acf025b48d37c3ef95a1d55da258" + integrity sha512-SuD8YLQv6WHnOEtKv8D6HZUzOub855cfPnPMKvdM/Bh1plv1f7Q/0iFUDLKKlxHcEstQnaUU4QZskgQq74t+3w== + dependencies: + "@typescript-eslint/types" "6.13.2" + "@typescript-eslint/visitor-keys" "6.13.2" + debug "^4.3.4" + globby "^11.1.0" + is-glob "^4.0.3" + semver "^7.5.4" + ts-api-utils "^1.0.1" + "@typescript-eslint/typescript-estree@6.20.0": version "6.20.0" resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-6.20.0.tgz#5b2d0975949e6bdd8d45ee1471461ef5fadc5542" @@ -2524,6 +3912,19 @@ semver "^7.5.4" ts-api-utils "^1.0.1" +"@typescript-eslint/utils@6.13.2": + version "6.13.2" + resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-6.13.2.tgz#8eb89e53adc6d703a879b131e528807245486f89" + integrity sha512-b9Ptq4eAZUym4idijCRzl61oPCwwREcfDI8xGk751Vhzig5fFZR9CyzDz4Sp/nxSLBYxUPyh4QdIDqWykFhNmQ== + dependencies: + "@eslint-community/eslint-utils" "^4.4.0" + "@types/json-schema" "^7.0.12" + "@types/semver" "^7.5.0" + "@typescript-eslint/scope-manager" "6.13.2" + "@typescript-eslint/types" "6.13.2" + "@typescript-eslint/typescript-estree" "6.13.2" + semver "^7.5.4" + "@typescript-eslint/utils@6.20.0", "@typescript-eslint/utils@^6.18.1": version "6.20.0" resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-6.20.0.tgz#0e52afcfaa51af5656490ba4b7437cc3aa28633d" @@ -2537,6 +3938,14 @@ "@typescript-eslint/typescript-estree" "6.20.0" semver "^7.5.4" +"@typescript-eslint/visitor-keys@6.13.2": + version "6.13.2" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-6.13.2.tgz#e0a4a80cf842bb08e6127b903284166ac4a5594c" + integrity sha512-OGznFs0eAQXJsp+xSd6k/O1UbFi/K/L7WjqeRoFE7vadjAF9y0uppXhYNQNEqygjou782maGClOoZwPqF0Drlw== + dependencies: + "@typescript-eslint/types" "6.13.2" + eslint-visitor-keys "^3.4.1" + "@typescript-eslint/visitor-keys@6.20.0": version "6.20.0" resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-6.20.0.tgz#f7ada27f2803de89df0edd9fd7be22c05ce6a498" @@ -2653,7 +4062,7 @@ ajv@7.1.1: require-from-string "^2.0.2" uri-js "^4.2.2" -ajv@^6.12.4: +ajv@^6.10.0, ajv@^6.12.4: version "6.12.6" resolved 
"https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== @@ -2678,6 +4087,16 @@ ansi-colors@^4.1.1: resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.3.tgz#37611340eb2243e70cc604cad35d63270d48781b" integrity sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw== +ansi-escapes@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-2.0.0.tgz#5bae52be424878dd9783e8910e3fc2922e83c81b" + integrity sha512-tH/fSoQp4DrEodDK3QpdiWiZTSe7sBJ9eOqcQBZ0o9HTM+5M/viSEn+sPMoTuPjQQ8n++w3QJoPEjt8LVPcrCg== + +ansi-escapes@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-3.2.0.tgz#8780b98ff9dbf5638152d1f1fe5c1d7b4442976b" + integrity sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ== + ansi-escapes@^4.2.1: version "4.3.2" resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" @@ -2685,6 +4104,21 @@ ansi-escapes@^4.2.1: dependencies: type-fest "^0.21.3" +ansi-regex@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" + integrity sha512-TIGnTpdo+E3+pCyAluZvtED5p5wCqLdezCyhPZzKPcxvFplEt4i+W7OONCKgeZFT3+y5NZZfOOS/Bdcanm1MYA== + +ansi-regex@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.1.tgz#123d6479e92ad45ad897d4054e3c7ca7db4944e1" + integrity sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw== + +ansi-regex@^4.1.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.1.tgz#164daac87ab2d6f6db3a29875e2d1766582dabed" + integrity sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g== + ansi-regex@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" @@ -2700,6 +4134,11 @@ ansi-sequence-parser@^1.1.0: resolved "https://registry.yarnpkg.com/ansi-sequence-parser/-/ansi-sequence-parser-1.1.1.tgz#e0aa1cdcbc8f8bb0b5bca625aac41f5f056973cf" integrity sha512-vJXt3yiaUL4UU546s3rPXlsry/RnM730G1+HkpKE012AN0sx1eOrxSu95oKDIonskeLTijMgqWZ3uDEe3NFvyg== +ansi-styles@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" + integrity sha512-kmCevFghRiWM7HB5zTPULl4r9bVFSWjz62MhqizDGUrq2NWuNMQyuv4tHHoKJHs69M/MF64lEcHdYIocrdWQYA== + ansi-styles@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" @@ -2724,12 +4163,17 @@ ansi-styles@^6.0.0, ansi-styles@^6.1.0, ansi-styles@^6.2.1: resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-6.2.1.tgz#0e62320cf99c21afff3b3012192546aacbfb05c5" integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug== +any-promise@^1.0.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f" + integrity sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A== + anylogger@^0.21.0: version "0.21.0" resolved 
"https://registry.yarnpkg.com/anylogger/-/anylogger-0.21.0.tgz#b6cbea631cd5e1c884e5c0fa007d80bde1b22bd4" integrity sha512-XJVplwflEff43l7aE48lW9gNoS0fpb1Ha4WttzjfTFlN3uJUIKALZ5oNWtwgRXPm/Q2dbp1EIddMbQ/AGHVX1A== -anymatch@~3.1.2: +anymatch@^3.0.3, anymatch@~3.1.2: version "3.1.3" resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.3.tgz#790c58b19ba1720a84205b57c618d5ad8524973e" integrity sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw== @@ -2782,6 +4226,14 @@ array-buffer-byte-length@^1.0.0: call-bind "^1.0.2" is-array-buffer "^3.0.1" +array-buffer-byte-length@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz#1e5583ec16763540a27ae52eed99ff899223568f" + integrity sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg== + dependencies: + call-bind "^1.0.5" + is-array-buffer "^3.0.4" + array-differ@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/array-differ/-/array-differ-3.0.0.tgz#3cbb3d0f316810eafcc47624734237d6aee4ae6b" @@ -2838,6 +4290,20 @@ array.prototype.flatmap@^1.3.1: es-abstract "^1.20.4" es-shim-unscopables "^1.0.0" +arraybuffer.prototype.slice@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.3.tgz#097972f4255e41bc3425e37dc3f6421cf9aefde6" + integrity sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A== + dependencies: + array-buffer-byte-length "^1.0.1" + call-bind "^1.0.5" + define-properties "^1.2.1" + es-abstract "^1.22.3" + es-errors "^1.2.1" + get-intrinsic "^1.2.3" + is-array-buffer "^3.0.4" + is-shared-array-buffer "^1.0.2" + arrgv@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/arrgv/-/arrgv-1.0.2.tgz#025ed55a6a433cad9b604f8112fc4292715a6ec0" @@ -2863,6 +4329,13 @@ asap@^2.0.0: resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY= +ast-stringify@0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/ast-stringify/-/ast-stringify-0.1.0.tgz#5c6439fbfb4513dcc26c7d34464ccd084ed91cb7" + integrity sha512-J1PgFYV3RG6r37+M6ySZJH406hR82okwGvFM9hLXpOvdx4WC4GEW8/qiw6pi1hKTrqcRvoHP8a7mp87egYr6iA== + dependencies: + "@babel/runtime" "^7.11.2" + ast-types-flow@^0.0.7: version "0.0.7" resolved "https://registry.yarnpkg.com/ast-types-flow/-/ast-types-flow-0.0.7.tgz#f70b735c6bca1a5c9c22d982c3e39e7feba3bdad" @@ -2937,11 +4410,65 @@ ava@^5.3.0: write-file-atomic "^5.0.1" yargs "^17.7.2" +ava@^5.3.1: + version "5.3.1" + resolved "https://registry.yarnpkg.com/ava/-/ava-5.3.1.tgz#335737dd963b7941b90214836cea2e8de1f4d5f4" + integrity sha512-Scv9a4gMOXB6+ni4toLuhAm9KYWEjsgBglJl+kMGI5+IVDt120CCDZyB5HNU9DjmLI2t4I0GbnxGLmmRfGTJGg== + dependencies: + acorn "^8.8.2" + acorn-walk "^8.2.0" + ansi-styles "^6.2.1" + arrgv "^1.0.2" + arrify "^3.0.0" + callsites "^4.0.0" + cbor "^8.1.0" + chalk "^5.2.0" + chokidar "^3.5.3" + chunkd "^2.0.1" + ci-info "^3.8.0" + ci-parallel-vars "^1.0.1" + clean-yaml-object "^0.1.0" + cli-truncate "^3.1.0" + code-excerpt "^4.0.0" + common-path-prefix "^3.0.0" + concordance "^5.0.4" + currently-unhandled "^0.4.1" + debug "^4.3.4" + emittery "^1.0.1" + figures "^5.0.0" + globby "^13.1.4" + ignore-by-default "^2.1.0" + indent-string "^5.0.0" + is-error "^2.2.2" + is-plain-object "^5.0.0" + is-promise "^4.0.0" + matcher "^5.0.0" + mem "^9.0.2" + ms 
"^2.1.3" + p-event "^5.0.1" + p-map "^5.5.0" + picomatch "^2.3.1" + pkg-conf "^4.0.0" + plur "^5.1.0" + pretty-ms "^8.0.0" + resolve-cwd "^3.0.0" + stack-utils "^2.0.6" + strip-ansi "^7.0.1" + supertap "^3.0.1" + temp-dir "^3.0.0" + write-file-atomic "^5.0.1" + yargs "^17.7.2" + available-typed-arrays@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz#92f95616501069d07d10edb2fc37d3e1c65123b7" integrity sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw== +available-typed-arrays@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/available-typed-arrays/-/available-typed-arrays-1.0.6.tgz#ac812d8ce5a6b976d738e1c45f08d0b00bc7d725" + integrity sha512-j1QzY8iPNPG4o4xmO3ptzpRxTciqD3MgEHtifP/YnJpIo58Xu+ne4BejlbkuaLfXn/nz6HFiw29bLpj2PNMdGg== + axe-core@^4.6.2: version "4.7.2" resolved "https://registry.yarnpkg.com/axe-core/-/axe-core-4.7.2.tgz#040a7342b20765cb18bb50b628394c21bccc17a0" @@ -2982,6 +4509,41 @@ babel-eslint@^10.0.3: eslint-visitor-keys "^1.0.0" resolve "^1.12.0" +babel-plugin-istanbul@^6.1.1: + version "6.1.1" + resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73" + integrity sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@istanbuljs/load-nyc-config" "^1.0.0" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-instrument "^5.0.4" + test-exclude "^6.0.0" + +babel-plugin-polyfill-corejs2@^0.3.2: + version "0.3.3" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.3.tgz#5d1bd3836d0a19e1b84bbf2d9640ccb6f951c122" + integrity sha512-8hOdmFYFSZhqg2C/JgLUQ+t52o5nirNwaWM2B9LWteozwIvM14VSwdsCAUET10qT+kmySAlseadmfeeSWFCy+Q== + dependencies: + "@babel/compat-data" "^7.17.7" + "@babel/helper-define-polyfill-provider" "^0.3.3" + semver "^6.1.1" + +babel-plugin-polyfill-corejs3@^0.5.3: + version "0.5.3" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.5.3.tgz#d7e09c9a899079d71a8b670c6181af56ec19c5c7" + integrity sha512-zKsXDh0XjnrUEW0mxIHLfjBfnXSMr5Q/goMe/fxpQnLm07mcOZiIZHBNWCMx60HmdvjxfXcalac0tfFg0wqxyw== + dependencies: + "@babel/helper-define-polyfill-provider" "^0.3.2" + core-js-compat "^3.21.0" + +babel-plugin-polyfill-regenerator@^0.4.0: + version "0.4.1" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.4.1.tgz#390f91c38d90473592ed43351e801a9d3e0fd747" + integrity sha512-NtQGmyQDXjQqQ+IzRkBVwEOz9lQ4zxAQZgoAYEtU9dJjnl1Oc98qnN7jcp+bE7O7aYzVpavXE3/VKXNzUbh7aw== + dependencies: + "@babel/helper-define-polyfill-provider" "^0.3.3" + balanced-match@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" @@ -3017,11 +4579,6 @@ better-sqlite3@^9.1.1: bindings "^1.5.0" prebuild-install "^7.1.1" -big-integer@^1.6.44: - version "1.6.51" - resolved "https://registry.yarnpkg.com/big-integer/-/big-integer-1.6.51.tgz#0df92a5d9880560d3ff2d5fd20245c889d130686" - integrity sha512-GPEid2Y9QU1Exl1rpO9B2IPJGHPSupF5GnVIP0blYvNOMer2bTvSWs1jGOUg04hTmu67nmLsQ9TBo1puaotBHg== - bin-links@^3.0.0: version "3.0.3" resolved "https://registry.yarnpkg.com/bin-links/-/bin-links-3.0.3.tgz#3842711ef3db2cd9f16a5f404a996a12db355a6e" @@ -3098,13 +4655,6 @@ 
body-parser@1.20.0: type-is "~1.6.18" unpipe "1.0.0" -bplist-parser@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/bplist-parser/-/bplist-parser-0.2.0.tgz#43a9d183e5bf9d545200ceac3e712f79ebbe8d0e" - integrity sha512-z0M+byMThzQmD9NILRniCUXYsYpjwnlO8N5uCFaCqIOpqRsJCrQL9NK3JsD67CN5a08nF5oIL2bD6loTdHOuKw== - dependencies: - big-integer "^1.6.44" - brace-expansion@^1.1.7: version "1.1.11" resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" @@ -3142,6 +4692,23 @@ browserslist@^4.21.0: node-releases "^2.0.12" update-browserslist-db "^1.0.11" +browserslist@^4.22.2, browserslist@^4.22.3: + version "4.23.0" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.23.0.tgz#8f3acc2bbe73af7213399430890f86c63a5674ab" + integrity sha512-QW8HiM1shhT2GuzkvklfjcKDiWFXHOeFCIA/huJPwHsslwcydgk7X+z2zXpEijP98UCY7HbubZt5J2Zgvf0CaQ== + dependencies: + caniuse-lite "^1.0.30001587" + electron-to-chromium "^1.4.668" + node-releases "^2.0.14" + update-browserslist-db "^1.0.13" + +bser@2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" + integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== + dependencies: + node-int64 "^0.4.0" + buffer-from@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" @@ -3172,13 +4739,6 @@ builtins@^5.0.0: dependencies: semver "^7.0.0" -bundle-name@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/bundle-name/-/bundle-name-3.0.0.tgz#ba59bcc9ac785fb67ccdbf104a2bf60c099f0e1a" - integrity sha512-PKA4BeSvBpQKQ8iPOGCSiell+N8P+Tf1DlwqmYhpe2gAhKPHn8EYOxVT+ShuGmhg8lN8XiSlS80yiExKXrURlw== - dependencies: - run-applescript "^5.0.0" - byte-size@^7.0.0: version "7.0.1" resolved "https://registry.yarnpkg.com/byte-size/-/byte-size-7.0.1.tgz#b1daf3386de7ab9d706b941a748dbfc71130dee3" @@ -3257,6 +4817,22 @@ call-bind@^1.0.0, call-bind@^1.0.2: function-bind "^1.1.1" get-intrinsic "^1.0.2" +call-bind@^1.0.5, call-bind@^1.0.6, call-bind@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.7.tgz#06016599c40c56498c18769d2730be242b6fa3b9" + integrity sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w== + dependencies: + es-define-property "^1.0.0" + es-errors "^1.3.0" + function-bind "^1.1.2" + get-intrinsic "^1.2.4" + set-function-length "^1.2.1" + +call-me-maybe@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/call-me-maybe/-/call-me-maybe-1.0.2.tgz#03f964f19522ba643b1b0693acb9152fe2074baa" + integrity sha512-HpX65o1Hnr9HH25ojC1YGs7HCQLq0GCOibSaWER0eNpgJ/Z1MZv2mTc7+xh6WOPxbRVcmgbv4hGU+uSQ/2xFZQ== + callsites@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" @@ -3286,6 +4862,21 @@ caniuse-lite@^1.0.30001503: resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001507.tgz#fae53f6286e7564783eadea9b447819410a59534" integrity sha512-SFpUDoSLCaE5XYL2jfqe9ova/pbQHEmbheDf5r4diNwbAgR3qxM9NQtfsiSscjqoya5K7kFcHPUQ+VsUkIJR4A== +caniuse-lite@^1.0.30001587: + version "1.0.30001587" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001587.tgz#a0bce920155fa56a1885a69c74e1163fc34b4881" + integrity 
sha512-HMFNotUmLXn71BQxg8cijvqxnIAofforZOwGsxyXJ0qugTdspUF4sPSJ2vhgprHCB996tIDzEq1ubumPDV8ULA== + +case-anything@^2.1.13: + version "2.1.13" + resolved "https://registry.yarnpkg.com/case-anything/-/case-anything-2.1.13.tgz#0cdc16278cb29a7fcdeb072400da3f342ba329e9" + integrity sha512-zlOQ80VrQ2Ue+ymH5OuM/DlDq64mEm+B9UTdHULv5osUMD6HalNTblf2b1u/m6QecjsnOkBpqVZ+XPwIVsy7Ng== + +case@1.6.3: + version "1.6.3" + resolved "https://registry.yarnpkg.com/case/-/case-1.6.3.tgz#0a4386e3e9825351ca2e6216c60467ff5f1ea1c9" + integrity sha512-mzDSXIPaFwVDvZAHqZ9VlbyF4yyXRuX6IvB06WvPYkqJVO24kX1PPhv9bfpKNFZyxYFmmgo03HUiD8iklmJYRQ== + cbor@^8.1.0: version "8.1.0" resolved "https://registry.yarnpkg.com/cbor/-/cbor-8.1.0.tgz#cfc56437e770b73417a2ecbfc9caf6b771af60d5" @@ -3293,6 +4884,17 @@ cbor@^8.1.0: dependencies: nofilter "^3.1.0" +chalk@^1.0.0, chalk@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" + integrity sha512-U3lRVLMSlsCfjqYPbLyVv11M9CPW4I728d6TCKMAOJueEeB9/8o+eSsMnxPJD+Q+K909sdESg7C+tIkoH6on1A== + dependencies: + ansi-styles "^2.2.1" + escape-string-regexp "^1.0.2" + has-ansi "^2.0.0" + strip-ansi "^3.0.0" + supports-color "^2.0.0" + chalk@^2.4.1, chalk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" @@ -3320,6 +4922,11 @@ character-entities@^2.0.0: resolved "https://registry.yarnpkg.com/character-entities/-/character-entities-2.0.2.tgz#2d09c2e72cd9523076ccb21157dff66ad43fcc22" integrity sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ== +chardet@^0.4.0: + version "0.4.2" + resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.4.2.tgz#b5473b33dc97c424e5d98dc87d55d4d8a29c8bf2" + integrity sha512-j/Toj7f1z98Hh2cYo2BVr85EpIRWqUi7rtRSGxh/cqUjqrnJe9l9UE7IUGd2vQ2p+kSHLkSzObQPZPLUC6TQwg== + chardet@^0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e" @@ -3360,6 +4967,11 @@ ci-info@^2.0.0: resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46" integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ== +ci-info@^3.2.0: + version "3.9.0" + resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.9.0.tgz#4279a62028a7b1f262f3473fc9605f5e218c59b4" + integrity sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ== + ci-info@^3.8.0: version "3.8.0" resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.8.0.tgz#81408265a5380c929f0bc665d62256628ce9ef91" @@ -3392,6 +5004,17 @@ clean-yaml-object@^0.1.0: resolved "https://registry.yarnpkg.com/clean-yaml-object/-/clean-yaml-object-0.1.0.tgz#63fb110dc2ce1a84dc21f6d9334876d010ae8b68" integrity sha512-3yONmlN9CSAkzNwnRCiJQ7Q2xK5mWuEfL3PuTZcAUzhObbXsfsnMptJzXwz93nc5zn9V9TwCVMmV7w4xsm43dw== +cli-color@^2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/cli-color/-/cli-color-2.0.3.tgz#73769ba969080629670f3f2ef69a4bf4e7cc1879" + integrity sha512-OkoZnxyC4ERN3zLzZaY9Emb7f/MhBOIpePv0Ycok0fJYT+Ouo00UBEIwsVsr0yoow++n5YWlSUgST9GKhNHiRQ== + dependencies: + d "^1.0.1" + es5-ext "^0.10.61" + es6-iterator "^2.0.3" + memoizee "^0.4.15" + timers-ext "^0.1.7" + cli-cursor@3.1.0, cli-cursor@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-3.1.0.tgz#264305a7ae490d1d03bf0c9ba7c925d1753af307" @@ -3399,6 
+5022,13 @@ cli-cursor@3.1.0, cli-cursor@^3.1.0: dependencies: restore-cursor "^3.1.0" +cli-cursor@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-2.1.0.tgz#b35dac376479facc3e94747d41d0d0f5238ffcb5" + integrity sha512-8lgKz8LmCRYZZQDpRyT2m5rKJ08TnU4tR9FFFW2rxpxR1FzWi4PQ/NfyODchAatHaUgnSPVcx/R5w6NuTBzFiw== + dependencies: + restore-cursor "^2.0.0" + cli-spinners@2.6.1, cli-spinners@^2.5.0: version "2.6.1" resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.6.1.tgz#adc954ebe281c37a6319bfa401e6dd2488ffb70d" @@ -3412,6 +5042,11 @@ cli-truncate@^3.1.0: slice-ansi "^5.0.0" string-width "^5.0.0" +cli-width@^2.0.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-2.2.1.tgz#b0433d0b4e9c847ef18868a4ef16fd5fc8271c48" + integrity sha512-GRMWDxpOB6Dgk2E5Uo+3eEBvtOOlimMmpbFiKuLFnQzYDavtLFY3K5ona41jgN/WdRZtG7utuVSVTL4HbZHGkw== + cli-width@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-3.0.0.tgz#a2f48437a2caa9a22436e794bf071ec9e61cedf6" @@ -3508,6 +5143,11 @@ color@^3.1.3: color-convert "^1.9.3" color-string "^1.6.0" +colors@^1.1.2: + version "1.4.0" + resolved "https://registry.yarnpkg.com/colors/-/colors-1.4.0.tgz#c50491479d4c1bdaed2c9ced32cf7c7dc2360f78" + integrity sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA== + colorspace@1.1.x: version "1.1.4" resolved "https://registry.yarnpkg.com/colorspace/-/colorspace-1.1.4.tgz#8d442d1186152f60453bf8070cd66eb364e59243" @@ -3541,11 +5181,6 @@ commander@^11.1.0: resolved "https://registry.yarnpkg.com/commander/-/commander-11.1.0.tgz#62fdce76006a68e5c1ab3314dc92e800eb83d906" integrity sha512-yPVavfyCcRhmorC7rWlkHn15b4wDVgVmBA7kV4QVBsF7kv/9TKJAbAXVTxvTnwP8HHKjRCJDClKbciiYS7p0DQ== -commander@~2.20.3: - version "2.20.3" - resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" - integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== - comment-parser@1.3.1, comment-parser@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/comment-parser/-/comment-parser-1.3.1.tgz#3d7ea3adaf9345594aedee6563f422348f165c1b" @@ -3724,12 +5359,15 @@ conventional-recommended-bump@^6.1.0: meow "^8.0.0" q "^1.5.1" -convert-source-map@^1.6.0: - version "1.8.0" - resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369" - integrity sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA== - dependencies: - safe-buffer "~5.1.1" +convert-source-map@^1.4.0, convert-source-map@^1.7.0: + version "1.9.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f" + integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== + +convert-source-map@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-2.0.0.tgz#4b560f649fc4e918dd0ab75cf4961e8bc882d82a" + integrity sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg== convert-to-spaces@^2.0.1: version "2.0.1" @@ -3746,6 +5384,13 @@ cookie@0.5.0: resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b" integrity 
sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== +core-js-compat@^3.21.0, core-js-compat@^3.22.1: + version "3.36.0" + resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.36.0.tgz#087679119bc2fdbdefad0d45d8e5d307d45ba190" + integrity sha512-iV9Pd/PsgjNWBXeq8XRtWVSgz2tKAfhfvBs7qxYty+RlRd+OCksaWmOnc4JKrTc1cToXL1N0s3l/vwlxPtdElw== + dependencies: + browserslist "^4.22.3" + core-util-is@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" @@ -3802,21 +5447,24 @@ currently-unhandled@^0.4.1: dependencies: array-find-index "^1.0.1" +d@1, d@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a" + integrity sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA== + dependencies: + es5-ext "^0.10.50" + type "^1.0.1" + damerau-levenshtein@^1.0.8: version "1.0.8" resolved "https://registry.yarnpkg.com/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz#b43d286ccbd36bc5b2f7ed41caf2d0aba1f8a6e7" integrity sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA== -dargs@^7.0.0: +dargs@7.0.0, dargs@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/dargs/-/dargs-7.0.0.tgz#04015c41de0bcb69ec84050f3d9be0caf8d6d5cc" integrity sha512-2iy1EkLdlBzQGvbweYRFxmFath8+K7+AKB0TlhHWkNuH+TmovaMH/Wp7V7R4u7f4SnX3OgLsU9t1NI9ioDnUpg== -dataloader@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/dataloader/-/dataloader-1.4.0.tgz#bca11d867f5d3f1b9ed9f737bd15970c65dff5c8" - integrity sha512-68s5jYdlvasItOJnCuI2Q9s4q98g0pCyL3HrcKJu8KNugUl8ahgmZYg38ysLTgQjjXX3H8CJLkAvWrclWfcalw== - date-time@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/date-time/-/date-time-3.1.0.tgz#0d1e934d170579f481ed8df1e2b8ff70ee845e1e" @@ -3939,28 +5587,15 @@ deep-object-diff@^1.1.9: resolved "https://registry.yarnpkg.com/deep-object-diff/-/deep-object-diff-1.1.9.tgz#6df7ef035ad6a0caa44479c536ed7b02570f4595" integrity sha512-Rn+RuwkmkDwCi2/oXOFS9Gsr5lJZu/yTGpK7wAaAIE75CC+LCGEZHpY6VQJa/RoJcrmaA/docWJZvYohlNkWPA== -deepmerge@^4.2.2: +deepmerge@4.2.2, deepmerge@^4.2.2: version "4.2.2" resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== -default-browser-id@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/default-browser-id/-/default-browser-id-3.0.0.tgz#bee7bbbef1f4e75d31f98f4d3f1556a14cea790c" - integrity sha512-OZ1y3y0SqSICtE8DE4S8YOE9UZOJ8wO16fKWVP5J1Qz42kV9jcnMVFrEE/noXb/ss3Q4pZIH79kxofzyNNtUNA== - dependencies: - bplist-parser "^0.2.0" - untildify "^4.0.0" - -default-browser@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/default-browser/-/default-browser-4.0.0.tgz#53c9894f8810bf86696de117a6ce9085a3cbc7da" - integrity sha512-wX5pXO1+BrhMkSbROFsyxUm0i/cJEScyNhA4PPxc41ICuv05ZZB/MX28s8aZx6xjmatvebIapF6hLEKEcpneUA== - dependencies: - bundle-name "^3.0.0" - default-browser-id "^3.0.0" - execa "^7.1.1" - titleize "^3.0.0" +deepmerge@4.3.1: + version "4.3.1" + resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.3.1.tgz#44b5f2147cd3b00d4b56137685966f26fd25dd4a" + integrity sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A== defaults@^1.0.3: version "1.0.3" @@ 
-3969,16 +5604,20 @@ defaults@^1.0.3: dependencies: clone "^1.0.2" +define-data-property@^1.0.1, define-data-property@^1.1.2: + version "1.1.4" + resolved "https://registry.yarnpkg.com/define-data-property/-/define-data-property-1.1.4.tgz#894dc141bb7d3060ae4366f6a0107e68fbe48c5e" + integrity sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A== + dependencies: + es-define-property "^1.0.0" + es-errors "^1.3.0" + gopd "^1.0.1" + define-lazy-prop@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" integrity sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og== -define-lazy-prop@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-3.0.0.tgz#dbb19adfb746d7fc6d734a06b72f4a00d021255f" - integrity sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg== - define-properties@^1.1.3, define-properties@^1.1.4: version "1.2.0" resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.2.0.tgz#52988570670c9eacedd8064f4a990f2405849bd5" @@ -3987,6 +5626,15 @@ define-properties@^1.1.3, define-properties@^1.1.4: has-property-descriptors "^1.0.0" object-keys "^1.1.1" +define-properties@^1.2.0, define-properties@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.2.1.tgz#10781cc616eb951a80a034bafcaa7377f6af2b6c" + integrity sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg== + dependencies: + define-data-property "^1.0.1" + has-property-descriptors "^1.0.0" + object-keys "^1.1.1" + delay@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/delay/-/delay-5.0.0.tgz#137045ef1b96e5071060dd5be60bf9334436bd1d" @@ -4112,10 +5760,15 @@ dotenv@~10.0.0: resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-10.0.0.tgz#3d4227b8fb95f81096cdd2b66653fb2c7085ba81" integrity sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q== -dprint-node@^1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/dprint-node/-/dprint-node-1.0.7.tgz#f571eaf61affb3a696cff1bdde78a021875ba540" - integrity sha512-NTZOW9A7ipb0n7z7nC3wftvsbceircwVHSgzobJsEQa+7RnOMbhrfX5IflA6CtC4GA63DSAiHYXa4JKEy9F7cA== +dotty@0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/dotty/-/dotty-0.1.2.tgz#512d44cc4111a724931226259297f235e8484f6f" + integrity sha512-V0EWmKeH3DEhMwAZ+8ZB2Ao4OK6p++Z0hsDtZq3N0+0ZMVqkzrcEGROvOnZpLnvBg5PTNG23JEDLAm64gPaotQ== + +dprint-node@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/dprint-node/-/dprint-node-1.0.8.tgz#a02470722d8208a7d7eb3704328afda1d6758625" + integrity sha512-iVKnUtYfGrYcW1ZAlfR/F59cUVL8QIhWoBJoSjkkdua/dkWIgjZfiLMeTjiB06X0ZLkQ0M2C1VbUj/CxkIf1zg== dependencies: detect-libc "^1.0.3" @@ -4146,6 +5799,11 @@ electron-to-chromium@^1.4.431: resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.440.tgz#d3b1eeb36b717eb479a240c0406ac1fa67901762" integrity sha512-r6dCgNpRhPwiWlxbHzZQ/d9swfPaEJGi8ekqRBwQYaR3WmA5VkqQfBWSDDjuJU1ntO+W9tHx8OHV/96Q8e0dVw== +electron-to-chromium@^1.4.668: + version "1.4.670" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.670.tgz#0fc5ac92ada8371e898ea72d577ffc888167a017" + integrity 
sha512-hcijYOWjOtjKrKPtNA6tuLlA/bTLO3heFG8pQA6mLpq7dRydSWicXova5lyxDzp1iVJaYhK7J2OQlGE52KYn7A== + elliptic@^6.5.4: version "6.5.4" resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.4.tgz#da37cebd31e79a1367e941b592ed1fbebd58abbb" @@ -4234,7 +5892,7 @@ error-ex@^1.3.1: dependencies: is-arrayish "^0.2.1" -es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.20.4: +es-abstract@^1.19.0, es-abstract@^1.20.4: version "1.21.2" resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.21.2.tgz#a56b9695322c8a185dc25975aa3b8ec31d0e7eff" integrity sha512-y/B5POM2iBnIxCiernH1G7rC9qQoM77lLIMQLuob0zhp8C56Po81+2Nj0WFKnd0pNReDTnkYryc+zhOzpEIROg== @@ -4274,6 +5932,65 @@ es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.20.4: unbox-primitive "^1.0.2" which-typed-array "^1.1.9" +es-abstract@^1.22.1, es-abstract@^1.22.3: + version "1.22.4" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.22.4.tgz#26eb2e7538c3271141f5754d31aabfdb215f27bf" + integrity sha512-vZYJlk2u6qHYxBOTjAeg7qUxHdNfih64Uu2J8QqWgXZ2cri0ZpJAkzDUK/q593+mvKwlxyaxr6F1Q+3LKoQRgg== + dependencies: + array-buffer-byte-length "^1.0.1" + arraybuffer.prototype.slice "^1.0.3" + available-typed-arrays "^1.0.6" + call-bind "^1.0.7" + es-define-property "^1.0.0" + es-errors "^1.3.0" + es-set-tostringtag "^2.0.2" + es-to-primitive "^1.2.1" + function.prototype.name "^1.1.6" + get-intrinsic "^1.2.4" + get-symbol-description "^1.0.2" + globalthis "^1.0.3" + gopd "^1.0.1" + has-property-descriptors "^1.0.2" + has-proto "^1.0.1" + has-symbols "^1.0.3" + hasown "^2.0.1" + internal-slot "^1.0.7" + is-array-buffer "^3.0.4" + is-callable "^1.2.7" + is-negative-zero "^2.0.2" + is-regex "^1.1.4" + is-shared-array-buffer "^1.0.2" + is-string "^1.0.7" + is-typed-array "^1.1.13" + is-weakref "^1.0.2" + object-inspect "^1.13.1" + object-keys "^1.1.1" + object.assign "^4.1.5" + regexp.prototype.flags "^1.5.2" + safe-array-concat "^1.1.0" + safe-regex-test "^1.0.3" + string.prototype.trim "^1.2.8" + string.prototype.trimend "^1.0.7" + string.prototype.trimstart "^1.0.7" + typed-array-buffer "^1.0.1" + typed-array-byte-length "^1.0.0" + typed-array-byte-offset "^1.0.0" + typed-array-length "^1.0.4" + unbox-primitive "^1.0.2" + which-typed-array "^1.1.14" + +es-define-property@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/es-define-property/-/es-define-property-1.0.0.tgz#c7faefbdff8b2696cf5f46921edfb77cc4ba3845" + integrity sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ== + dependencies: + get-intrinsic "^1.2.4" + +es-errors@^1.2.1, es-errors@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/es-errors/-/es-errors-1.3.0.tgz#05f75a25dab98e4fb1dcd5e1472c0546d5057c8f" + integrity sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw== + es-set-tostringtag@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz#338d502f6f674301d710b80c8592de8a15f09cd8" @@ -4283,6 +6000,15 @@ es-set-tostringtag@^2.0.1: has "^1.0.3" has-tostringtag "^1.0.0" +es-set-tostringtag@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/es-set-tostringtag/-/es-set-tostringtag-2.0.2.tgz#11f7cc9f63376930a5f20be4915834f4bc74f9c9" + integrity sha512-BuDyupZt65P9D2D2vA/zqcI3G5xRsklm5N3xCwuiy+/vKy8i0ifdsQP1sLgO4tZDSCaQUSnmC48khknGMV3D2Q== + dependencies: + get-intrinsic "^1.2.2" + has-tostringtag "^1.0.0" + hasown "^2.0.0" + es-shim-unscopables@^1.0.0: version "1.0.0" 
resolved "https://registry.yarnpkg.com/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz#702e632193201e3edf8713635d083d378e510241" @@ -4299,6 +6025,42 @@ es-to-primitive@^1.2.1: is-date-object "^1.0.1" is-symbol "^1.0.2" +es5-ext@^0.10.35, es5-ext@^0.10.46, es5-ext@^0.10.50, es5-ext@^0.10.53, es5-ext@^0.10.61, es5-ext@~0.10.14, es5-ext@~0.10.2, es5-ext@~0.10.46: + version "0.10.62" + resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.62.tgz#5e6adc19a6da524bf3d1e02bbc8960e5eb49a9a5" + integrity sha512-BHLqn0klhEpnOKSrzn/Xsz2UIW8j+cGmo9JLzr8BiUapV8hPL9+FliFqjwr9ngW7jWdnxv6eO+/LqyhJVqgrjA== + dependencies: + es6-iterator "^2.0.3" + es6-symbol "^3.1.3" + next-tick "^1.1.0" + +es6-iterator@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7" + integrity sha512-zw4SRzoUkd+cl+ZoE15A9o1oQd920Bb0iOJMQkQhl3jNc03YqVjAhG7scf9C5KWRU/R13Orf588uCC6525o02g== + dependencies: + d "1" + es5-ext "^0.10.35" + es6-symbol "^3.1.1" + +es6-symbol@^3.1.1, es6-symbol@^3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18" + integrity sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA== + dependencies: + d "^1.0.1" + ext "^1.1.2" + +es6-weak-map@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/es6-weak-map/-/es6-weak-map-2.0.3.tgz#b6da1f16cc2cc0d9be43e6bdbfc5e7dfcdf31d53" + integrity sha512-p5um32HOTO1kP+w7PRnB+5lQ43Z6muuMuIMffvDN8ZB4GcnjLBV6zGStpbASIMk4DCAvEaamhe2zhyCb/QXXsA== + dependencies: + d "1" + es5-ext "^0.10.46" + es6-iterator "^2.0.3" + es6-symbol "^3.1.1" + esbuild@~0.18.20: version "0.18.20" resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.18.20.tgz#4709f5a34801b43b799ab7d6d82f7284a9b7a7a6" @@ -4342,7 +6104,7 @@ escape-string-regexp@5.0.0, escape-string-regexp@^5.0.0: resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz#4683126b500b61762f2dbebace1806e8be31b1c8" integrity sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw== -escape-string-regexp@^1.0.5: +escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= @@ -4377,6 +6139,11 @@ eslint-config-prettier@>=8.0.0, eslint-config-prettier@^9.0.0: resolved "https://registry.yarnpkg.com/eslint-config-prettier/-/eslint-config-prettier-9.0.0.tgz#eb25485946dd0c66cd216a46232dc05451518d1f" integrity sha512-IcJsTkJae2S35pRsRAwoCE+925rJJStOdkKnLVgtE+tEpqU0EVVM7OqrwxqgptKdX29NUwC82I5pXsGFIgSevw== +eslint-config-prettier@^8.8.0: + version "8.10.0" + resolved "https://registry.yarnpkg.com/eslint-config-prettier/-/eslint-config-prettier-8.10.0.tgz#3a06a662130807e2502fc3ff8b4143d8a0658e11" + integrity sha512-SM8AMJdeQqRYT9O9zguiruQZaN7+z+E4eAP9oiLNGKMtomwaB1E9dcgUD6ZAn/eQAb52USbvezbiljfZUhbJcg== + eslint-formatter-pretty@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/eslint-formatter-pretty/-/eslint-formatter-pretty-4.1.0.tgz#7a6877c14ffe2672066c853587d89603e97c7708" @@ -4537,6 +6304,13 @@ eslint-plugin-no-only-tests@^3.0.0: resolved "https://registry.yarnpkg.com/eslint-plugin-no-only-tests/-/eslint-plugin-no-only-tests-3.1.0.tgz#f38e4935c6c6c4842bf158b64aaa20c366fe171b" integrity 
sha512-Lf4YW/bL6Un1R6A76pRZyE1dl1vr31G/ev8UzIc/geCgFWyrKil8hVjYqWVKGB/UIGmb6Slzs9T0wNezdSVegw== +eslint-plugin-prettier@^4.2.1: + version "4.2.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-prettier/-/eslint-plugin-prettier-4.2.1.tgz#651cbb88b1dab98bfd42f017a12fa6b2d993f94b" + integrity sha512-f/0rXLXUt0oFYs8ra4w49wYZBG5GKZpAYsJSm6rnYL5uVDjd+zowwMwVZHnAjf4edNrKpCDYfXDgmRE/Ak7QyQ== + dependencies: + prettier-linter-helpers "^1.0.0" + eslint-plugin-prettier@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/eslint-plugin-prettier/-/eslint-plugin-prettier-5.0.0.tgz#6887780ed95f7708340ec79acfdf60c35b9be57a" @@ -4555,7 +6329,7 @@ eslint-rule-documentation@>=1.0.0: resolved "https://registry.yarnpkg.com/eslint-rule-documentation/-/eslint-rule-documentation-1.0.23.tgz#4e0886145597a78d24524ec7e0cf18c6fedc23a8" integrity sha512-pWReu3fkohwyvztx/oQWWgld2iad25TfUdi6wvhhaDPIQjHU/pyvlKgXFw1kX31SQK2Nq9MH+vRDWB0ZLy8fYw== -eslint-scope@^7.2.2: +eslint-scope@^7.2.0, eslint-scope@^7.2.2: version "7.2.2" resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.2.2.tgz#deb4f92563390f32006894af62a22dba1c46423f" integrity sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg== @@ -4585,6 +6359,49 @@ eslint-visitor-keys@^3.3.0, eslint-visitor-keys@^3.4.1, eslint-visitor-keys@^3.4 resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz#0cd72fe8550e3c2eae156a96a4dddcd1c8ac5800" integrity sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag== +eslint@8.45.0: + version "8.45.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.45.0.tgz#bab660f90d18e1364352c0a6b7c6db8edb458b78" + integrity sha512-pd8KSxiQpdYRfYa9Wufvdoct3ZPQQuVuU5O6scNgMuOMYuxvH0IGaYK0wUFjo4UYYQQCUndlXiMbnxopwvvTiw== + dependencies: + "@eslint-community/eslint-utils" "^4.2.0" + "@eslint-community/regexpp" "^4.4.0" + "@eslint/eslintrc" "^2.1.0" + "@eslint/js" "8.44.0" + "@humanwhocodes/config-array" "^0.11.10" + "@humanwhocodes/module-importer" "^1.0.1" + "@nodelib/fs.walk" "^1.2.8" + ajv "^6.10.0" + chalk "^4.0.0" + cross-spawn "^7.0.2" + debug "^4.3.2" + doctrine "^3.0.0" + escape-string-regexp "^4.0.0" + eslint-scope "^7.2.0" + eslint-visitor-keys "^3.4.1" + espree "^9.6.0" + esquery "^1.4.2" + esutils "^2.0.2" + fast-deep-equal "^3.1.3" + file-entry-cache "^6.0.1" + find-up "^5.0.0" + glob-parent "^6.0.2" + globals "^13.19.0" + graphemer "^1.4.0" + ignore "^5.2.0" + imurmurhash "^0.1.4" + is-glob "^4.0.0" + is-path-inside "^3.0.3" + js-yaml "^4.1.0" + json-stable-stringify-without-jsonify "^1.0.1" + levn "^0.4.1" + lodash.merge "^4.6.2" + minimatch "^3.1.2" + natural-compare "^1.4.0" + optionator "^0.9.3" + strip-ansi "^6.0.1" + text-table "^0.2.0" + eslint@^8.47.0: version "8.47.0" resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.47.0.tgz#c95f9b935463fb4fad7005e626c7621052e90806" @@ -4695,6 +6512,14 @@ etag@~1.8.1: resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc= +event-emitter@^0.3.5: + version "0.3.5" + resolved "https://registry.yarnpkg.com/event-emitter/-/event-emitter-0.3.5.tgz#df8c69eef1647923c7157b9ce83840610b02cc39" + integrity sha512-D9rRn9y7kLPnJ+hMq7S/nhvoKwwvVJahBi2BPmx3bvbsEdK3W9ii8cBSGjP+72/LnM4n6fo3+dkCX5FeTQruXA== + dependencies: + d "1" + es5-ext "~0.10.14" + event-lite@^0.1.1: version "0.1.3" resolved 
"https://registry.yarnpkg.com/event-lite/-/event-lite-0.1.3.tgz#3dfe01144e808ac46448f0c19b4ab68e403a901d" @@ -4720,21 +6545,6 @@ execa@^5.0.0: signal-exit "^3.0.3" strip-final-newline "^2.0.0" -execa@^7.1.1: - version "7.1.1" - resolved "https://registry.yarnpkg.com/execa/-/execa-7.1.1.tgz#3eb3c83d239488e7b409d48e8813b76bb55c9c43" - integrity sha512-wH0eMf/UXckdUYnO21+HDztteVv05rq2GXksxT4fCGeHkBhw1DROXh40wcjMcRqDOWE7iPJ4n3M7e2+YFP+76Q== - dependencies: - cross-spawn "^7.0.3" - get-stream "^6.0.1" - human-signals "^4.3.0" - is-stream "^3.0.0" - merge-stream "^2.0.0" - npm-run-path "^5.1.0" - onetime "^6.0.0" - signal-exit "^3.0.7" - strip-final-newline "^3.0.0" - expand-template@^2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/expand-template/-/expand-template-2.0.3.tgz#6e14b3fcee0f3a6340ecb57d2e8918692052a47c" @@ -4787,6 +6597,22 @@ express@^4.17.1: utils-merge "1.0.1" vary "~1.1.2" +ext@^1.1.2: + version "1.7.0" + resolved "https://registry.yarnpkg.com/ext/-/ext-1.7.0.tgz#0ea4383c0103d60e70be99e9a7f11027a33c4f5f" + integrity sha512-6hxeJYaL110a9b5TEJSj0gojyHQAmA2ch5Os+ySCiA1QGdS697XWY1pzsrSjqA9LDEEgdB/KypIlR59RcLuHYw== + dependencies: + type "^2.7.2" + +external-editor@^2.0.4: + version "2.2.0" + resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-2.2.0.tgz#045511cfd8d133f3846673d1047c154e214ad3d5" + integrity sha512-bSn6gvGxKt+b7+6TKEv1ZycHleA7aHhRHyAqJyp5pbUFuYYNIzpZnQDk7AsYckyWdEnTeAnay0aCy2aV6iTk9A== + dependencies: + chardet "^0.4.0" + iconv-lite "^0.4.17" + tmp "^0.0.33" + external-editor@^3.0.3: version "3.1.0" resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-3.1.0.tgz#cb03f740befae03ea4d283caed2741a83f335495" @@ -4813,7 +6639,7 @@ fast-diff@^1.1.2, fast-diff@^1.2.0: resolved "https://registry.yarnpkg.com/fast-diff/-/fast-diff-1.2.0.tgz#73ee11982d86caaf7959828d519cfe927fac5f03" integrity sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w== -fast-glob@3, fast-glob@^3.2.11, fast-glob@^3.2.9, fast-glob@^3.3.0: +fast-glob@3, fast-glob@^3.2.11, fast-glob@^3.2.9: version "3.3.1" resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.3.1.tgz#784b4e897340f3dbbef17413b3f11acf03c874c4" integrity sha512-kNFPyjhh5cKjrUltxs+wFx+ZkbRaxxmZ+X0ZU31SOsxCEtP9VPgtq2teZw1DebupL5GmDaNQ6yKMMVcM41iqDg== @@ -4857,6 +6683,13 @@ fastq@^1.6.0: dependencies: reusify "^1.0.4" +fb-watchman@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.2.tgz#e9524ee6b5c77e9e5001af0f85f3adbb8623255c" + integrity sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA== + dependencies: + bser "2.1.1" + fecha@^4.2.0: version "4.2.3" resolved "https://registry.yarnpkg.com/fecha/-/fecha-4.2.3.tgz#4d9ccdbc61e8629b259fdca67e65891448d569fd" @@ -4869,6 +6702,13 @@ figures@3.2.0, figures@^3.0.0: dependencies: escape-string-regexp "^1.0.5" +figures@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/figures/-/figures-2.0.0.tgz#3ab1a2d2a62c8bfb431a0c94cb797a2fce27c962" + integrity sha512-Oa2M9atig69ZkfwiApY8F2Yy+tzMbazyvqv21R0NsSC8floSOC09BbT1ITWAdoMGQvJ/aZnR1KMwdx9tvHnTNA== + dependencies: + escape-string-regexp "^1.0.5" + figures@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/figures/-/figures-5.0.0.tgz#126cd055052dea699f8a54e8c9450e6ecfc44d5f" @@ -5076,6 +6916,11 @@ fs.realpath@^1.0.0: resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" integrity 
sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== +fsevents@^2.3.2: + version "2.3.3" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" + integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== + fsevents@~2.3.2: version "2.3.2" resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" @@ -5086,6 +6931,11 @@ function-bind@^1.1.1: resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== +function-bind@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" + integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== + function.prototype.name@^1.1.5: version "1.1.5" resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.5.tgz#cce0505fe1ffb80503e6f9e46cc64e46a12a9621" @@ -5096,11 +6946,31 @@ function.prototype.name@^1.1.5: es-abstract "^1.19.0" functions-have-names "^1.2.2" +function.prototype.name@^1.1.6: + version "1.1.6" + resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.6.tgz#cdf315b7d90ee77a4c6ee216c3c3362da07533fd" + integrity sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.2.0" + es-abstract "^1.22.1" + functions-have-names "^1.2.3" + functions-have-names@^1.2.2: version "1.2.2" resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.2.tgz#98d93991c39da9361f8e50b337c4f6e41f120e21" integrity sha512-bLgc3asbWdwPbx2mNk2S49kmJCuQeu0nfmaOgbs8WIyzzkw3r4htszdIi9Q9EMezDPTYuJx2wvjZ/EwgAthpnA== +functions-have-names@^1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" + integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== + +fuzzy@0.1.3, fuzzy@^0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/fuzzy/-/fuzzy-0.1.3.tgz#4c76ec2ff0ac1a36a9dccf9a00df8623078d4ed8" + integrity sha512-/gZffu4ykarLrCiP3Ygsa86UAo1E5vEVlvTrpkKywXSbP9Xhln3oSp9QSV57gEq3JFFpGJ4GZ+5zdEp3FcUh4w== + gauge@^4.0.3: version "4.0.4" resolved "https://registry.yarnpkg.com/gauge/-/gauge-4.0.4.tgz#52ff0652f2bbf607a989793d53b751bef2328dce" @@ -5115,6 +6985,11 @@ gauge@^4.0.3: strip-ansi "^6.0.1" wide-align "^1.1.5" +gensync@^1.0.0-beta.2: + version "1.0.0-beta.2" + resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" + integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== + get-caller-file@^2.0.5: version "2.0.5" resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" @@ -5129,6 +7004,22 @@ get-intrinsic@^1.0.2, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3, get-intrinsic@ has "^1.0.3" has-symbols "^1.0.3" +get-intrinsic@^1.2.1, get-intrinsic@^1.2.2, get-intrinsic@^1.2.3, get-intrinsic@^1.2.4: + version "1.2.4" + resolved 
"https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.4.tgz#e385f5a4b5227d449c3eabbad05494ef0abbeadd" + integrity sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ== + dependencies: + es-errors "^1.3.0" + function-bind "^1.1.2" + has-proto "^1.0.1" + has-symbols "^1.0.3" + hasown "^2.0.0" + +get-package-type@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" + integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== + get-pkg-repo@^4.0.0: version "4.2.1" resolved "https://registry.yarnpkg.com/get-pkg-repo/-/get-pkg-repo-4.2.1.tgz#75973e1c8050c73f48190c52047c4cee3acbf385" @@ -5144,7 +7035,12 @@ get-port@^5.1.1: resolved "https://registry.yarnpkg.com/get-port/-/get-port-5.1.1.tgz#0469ed07563479de6efb986baf053dcd7d4e3193" integrity sha512-g/Q1aTSDOxFpchXC4i8ZWvxA1lnPqx/JHqcpIw0/LX9T8x/GBbi6YnlN5nhaKIFkT8oFsscUKgDJYxfwfS6QsQ== -get-stream@^6.0.0, get-stream@^6.0.1: +get-stdin@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-8.0.0.tgz#cbad6a73feb75f6eeb22ba9e01f89aa28aa97a53" + integrity sha512-sY22aA6xchAzprjyqmSEQv4UbAAzRN0L2dQB0NlN5acTTK9Don6nhoc3eAbUnpZiCANAMfd/+40kVdKfFygohg== + +get-stream@^6.0.0: version "6.0.1" resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== @@ -5157,6 +7053,15 @@ get-symbol-description@^1.0.0: call-bind "^1.0.2" get-intrinsic "^1.1.1" +get-symbol-description@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/get-symbol-description/-/get-symbol-description-1.0.2.tgz#533744d5aa20aca4e079c8e5daf7fd44202821f5" + integrity sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg== + dependencies: + call-bind "^1.0.5" + es-errors "^1.3.0" + get-intrinsic "^1.2.4" + get-tsconfig@^4.7.0: version "4.7.2" resolved "https://registry.yarnpkg.com/get-tsconfig/-/get-tsconfig-4.7.2.tgz#0dcd6fb330391d46332f4c6c1bf89a6514c2ddce" @@ -5232,6 +7137,13 @@ glob-parent@^6.0.2: dependencies: is-glob "^4.0.3" +glob-promise@^4.2.2: + version "4.2.2" + resolved "https://registry.yarnpkg.com/glob-promise/-/glob-promise-4.2.2.tgz#15f44bcba0e14219cd93af36da6bb905ff007877" + integrity sha512-xcUzJ8NWN5bktoTIX7eOclO1Npxd/dyVqUJxlLIDasT4C7KZyqlPIwkdJ0Ypiy3p2ZKahTjK4M9uC3sNSfNMzw== + dependencies: + "@types/glob" "^7.1.3" + glob@7.1.4: version "7.1.4" resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.4.tgz#aa608a2f6c577ad357e1ae5a5c26d9a8d1969255" @@ -5244,6 +7156,28 @@ glob@7.1.4: once "^1.3.0" path-is-absolute "^1.0.0" +glob@8.0.3: + version "8.0.3" + resolved "https://registry.yarnpkg.com/glob/-/glob-8.0.3.tgz#415c6eb2deed9e502c68fa44a272e6da6eeca42e" + integrity sha512-ull455NHSHI/Y1FqGaaYFaLGkNMMJbavMrEGFXG/PGrg6y7sutWHUHrz6gy6WEBH6akM1M414dWKCNs+IhKdiQ== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^5.0.1" + once "^1.3.0" + +glob@^10.0.0, glob@^10.3.3, glob@^10.3.7: + version "10.3.10" + resolved "https://registry.yarnpkg.com/glob/-/glob-10.3.10.tgz#0351ebb809fd187fe421ab96af83d3a70715df4b" + integrity sha512-fa46+tv1Ak0UPK1TOy/pZrIybNNt4HCv7SDzwyfiOZkvZLEbjsZkJBPtDHVshZjbecAoAGSC20MjLDG/qr679g== + dependencies: + foreground-child "^3.1.0" + jackspeak "^2.3.5" + minimatch "^9.0.1" + 
minipass "^5.0.0 || ^6.0.2 || ^7.0.0" + path-scurry "^1.10.1" + glob@^10.2.2: version "10.3.3" resolved "https://registry.yarnpkg.com/glob/-/glob-10.3.3.tgz#8360a4ffdd6ed90df84aa8d52f21f452e86a123b" @@ -5255,7 +7189,7 @@ glob@^10.2.2: minipass "^5.0.0 || ^6.0.2 || ^7.0.0" path-scurry "^1.10.1" -glob@^7.1.3, glob@^7.1.4, glob@^7.1.6: +glob@^7.0.0, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6: version "7.2.3" resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== @@ -5327,7 +7261,7 @@ gopd@^1.0.1: dependencies: get-intrinsic "^1.1.3" -graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6: +graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6, graceful-fs@^4.2.9: version "4.2.11" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== @@ -5354,6 +7288,13 @@ hard-rejection@^2.1.0: resolved "https://registry.yarnpkg.com/hard-rejection/-/hard-rejection-2.1.0.tgz#1c6eda5c1685c63942766d79bb40ae773cecd883" integrity sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA== +has-ansi@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" + integrity sha512-C8vBJ8DwUCx19vhm7urhTuUsr4/IyP6l4VzNQDv+ryHQObW3TTTp9yB68WpYgRe2bbaGuZ/se74IqFeVnMnLZg== + dependencies: + ansi-regex "^2.0.0" + has-bigints@^1.0.1, has-bigints@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" @@ -5376,6 +7317,13 @@ has-property-descriptors@^1.0.0: dependencies: get-intrinsic "^1.1.1" +has-property-descriptors@^1.0.1, has-property-descriptors@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz#963ed7d071dc7bf5f084c5bfbe0d1b6222586854" + integrity sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg== + dependencies: + es-define-property "^1.0.0" + has-proto@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/has-proto/-/has-proto-1.0.1.tgz#1885c1305538958aff469fef37937c22795408e0" @@ -5393,6 +7341,13 @@ has-tostringtag@^1.0.0: dependencies: has-symbols "^1.0.2" +has-tostringtag@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/has-tostringtag/-/has-tostringtag-1.0.2.tgz#2cdc42d40bef2e5b4eeab7c01a73c54ce7ab5abc" + integrity sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw== + dependencies: + has-symbols "^1.0.3" + has-unicode@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" @@ -5413,6 +7368,13 @@ hash.js@^1.0.0, hash.js@^1.0.3: inherits "^2.0.3" minimalistic-assert "^1.0.1" +hasown@^2.0.0, hasown@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.1.tgz#26f48f039de2c0f8d3356c223fb8d50253519faa" + integrity sha512-1/th4MHjnwncwXsIW6QMzlvYL9kG5e/CpVvLRZe4XPa8TOUNbCELqmvhDmnkNsAjwaG4+I8gJJL0JBvTTLO9qA== + dependencies: + 
function-bind "^1.1.2" + hmac-drbg@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1" @@ -5511,11 +7473,6 @@ human-signals@^2.1.0: resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== -human-signals@^4.3.0: - version "4.3.1" - resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-4.3.1.tgz#ab7f811e851fca97ffbd2c1fe9a958964de321b2" - integrity sha512-nZXjEF2nbo7lIw3mgYjItAfgQXog3OjJogSbKa2CQIIvSGWcKgeJnQlNXip6NglNzYH45nSRiEVimMvYL8DDqQ== - humanize-ms@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/humanize-ms/-/humanize-ms-1.2.1.tgz#c46e3159a293f6b896da29316d8b6fe8bb79bbed" @@ -5523,7 +7480,7 @@ humanize-ms@^1.2.1: dependencies: ms "^2.0.0" -iconv-lite@0.4.24, iconv-lite@^0.4.24: +iconv-lite@0.4.24, iconv-lite@^0.4.17, iconv-lite@^0.4.24: version "0.4.24" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== @@ -5628,6 +7585,11 @@ inherits@2, inherits@2.0.4, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.3: resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== +inherits@2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" + integrity sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw== + ini@^1.3.2, ini@^1.3.4, ini@~1.3.0: version "1.3.8" resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" @@ -5646,6 +7608,58 @@ init-package-json@^3.0.2: validate-npm-package-license "^3.0.4" validate-npm-package-name "^4.0.0" +inquirer-autocomplete-prompt@^0.11.1: + version "0.11.1" + resolved "https://registry.yarnpkg.com/inquirer-autocomplete-prompt/-/inquirer-autocomplete-prompt-0.11.1.tgz#f90ca9510a4c489882e9be294934bd8c2e575e09" + integrity sha512-VM4eNiyRD4CeUc2cyKni+F8qgHwL9WC4LdOr+mEC85qP/QNsDV+ysVqUrJYhw1TmDQu1QVhc8hbaL7wfk8SJxw== + dependencies: + ansi-escapes "^2.0.0" + chalk "^1.1.3" + figures "^2.0.0" + inquirer "3.1.1" + lodash "^4.17.4" + run-async "^2.3.0" + util "^0.10.3" + +inquirer@3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-3.1.1.tgz#87621c4fba4072f48a8dd71c9f9df6f100b2d534" + integrity sha512-H50sHQwgvvaTBd3HpKMVtL/u6LoHDvYym51gd7bGQe/+9HkCE+J0/3N5FJLfd6O6oz44hHewC2Pc2LodzWVafQ== + dependencies: + ansi-escapes "^2.0.0" + chalk "^1.0.0" + cli-cursor "^2.1.0" + cli-width "^2.0.0" + external-editor "^2.0.4" + figures "^2.0.0" + lodash "^4.3.0" + mute-stream "0.0.7" + run-async "^2.2.0" + rx-lite "^4.0.8" + rx-lite-aggregates "^4.0.8" + string-width "^2.0.0" + strip-ansi "^3.0.0" + through "^2.3.6" + +inquirer@^6.0.0: + version "6.5.2" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-6.5.2.tgz#ad50942375d036d327ff528c08bd5fab089928ca" + integrity sha512-cntlB5ghuB0iuO65Ovoi8ogLHiWGs/5yNrtUcKjFhSSiVeAIVpD7koaSU9RM8mpXw5YDi9RdYXGQMaOURB7ycQ== + dependencies: + ansi-escapes "^3.2.0" + chalk "^2.4.2" + cli-cursor "^2.1.0" + cli-width "^2.0.0" + 
external-editor "^3.0.3" + figures "^2.0.0" + lodash "^4.17.12" + mute-stream "0.0.7" + run-async "^2.2.0" + rxjs "^6.4.0" + string-width "^2.1.0" + strip-ansi "^5.1.0" + through "^2.3.6" + inquirer@^8.2.2, inquirer@^8.2.4: version "8.2.5" resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-8.2.5.tgz#d8654a7542c35a9b9e069d27e2df4858784d54f8" @@ -5667,6 +7681,15 @@ inquirer@^8.2.2, inquirer@^8.2.4: through "^2.3.6" wrap-ansi "^7.0.0" +inquirerer@0.1.3, inquirerer@^0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/inquirerer/-/inquirerer-0.1.3.tgz#ecf91dc672b3bf45211d7f64bf5e8d5e171fd2ad" + integrity sha512-yGgLUOqPxTsINBjZNZeLi3cv2zgxXtw9feaAOSJf2j6AqIT5Uxs5ZOqOrfAf+xP65Sicla1FD3iDxa3D6TsCAQ== + dependencies: + colors "^1.1.2" + inquirer "^6.0.0" + inquirer-autocomplete-prompt "^0.11.1" + int64-buffer@^0.1.9: version "0.1.10" resolved "https://registry.yarnpkg.com/int64-buffer/-/int64-buffer-0.1.10.tgz#277b228a87d95ad777d07c13832022406a473423" @@ -5681,10 +7704,27 @@ internal-slot@^1.0.5: has "^1.0.3" side-channel "^1.0.4" -ip@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/ip/-/ip-2.0.0.tgz#4cf4ab182fee2314c75ede1276f8c80b479936da" - integrity sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ== +internal-slot@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.7.tgz#c06dcca3ed874249881007b0a5523b172a190802" + integrity sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g== + dependencies: + es-errors "^1.3.0" + hasown "^2.0.0" + side-channel "^1.0.4" + +interpret@^1.0.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.4.0.tgz#665ab8bc4da27a774a40584e812e3e0fa45b1a1e" + integrity sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA== + +ip-address@^9.0.5: + version "9.0.5" + resolved "https://registry.yarnpkg.com/ip-address/-/ip-address-9.0.5.tgz#117a960819b08780c3bd1f14ef3c1cc1d3f3ea5a" + integrity sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g== + dependencies: + jsbn "1.1.0" + sprintf-js "^1.1.3" ipaddr.js@1.9.1: version "1.9.1" @@ -5710,6 +7750,14 @@ is-array-buffer@^3.0.1, is-array-buffer@^3.0.2: get-intrinsic "^1.2.0" is-typed-array "^1.1.10" +is-array-buffer@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/is-array-buffer/-/is-array-buffer-3.0.4.tgz#7a1f92b3d61edd2bc65d24f130530ea93d7fae98" + integrity sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.2.1" + is-arrayish@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" @@ -5768,6 +7816,13 @@ is-core-module@^2.11.0, is-core-module@^2.5.0, is-core-module@^2.8.1, is-core-mo dependencies: has "^1.0.3" +is-core-module@^2.13.0: + version "2.13.1" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.13.1.tgz#ad0d7532c6fea9da1ebdc82742d74525c6273384" + integrity sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw== + dependencies: + hasown "^2.0.0" + is-date-object@^1.0.1: version "1.0.5" resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f" @@ -5780,11 +7835,6 @@ is-docker@^2.0.0, is-docker@^2.1.1: resolved 
"https://registry.yarnpkg.com/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa" integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== -is-docker@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-3.0.0.tgz#90093aa3106277d8a77a5910dbae71747e15a200" - integrity sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ== - is-error@^2.2.2: version "2.2.2" resolved "https://registry.yarnpkg.com/is-error/-/is-error-2.2.2.tgz#c10ade187b3c93510c5470a5567833ee25649843" @@ -5795,6 +7845,11 @@ is-extglob@^2.1.1: resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== +is-fullwidth-code-point@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" + integrity sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w== + is-fullwidth-code-point@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" @@ -5812,13 +7867,6 @@ is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: dependencies: is-extglob "^2.1.1" -is-inside-container@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-inside-container/-/is-inside-container-1.0.0.tgz#e81fba699662eb31dbdaf26766a61d4814717ea4" - integrity sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA== - dependencies: - is-docker "^3.0.0" - is-interactive@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-interactive/-/is-interactive-1.0.0.tgz#cea6e6ae5c870a7b0a0004070b7b587e0252912e" @@ -5888,6 +7936,11 @@ is-plain-object@^5.0.0: resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-5.0.0.tgz#4427f50ab3429e9025ea7d52e9043a9ef4159344" integrity sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q== +is-promise@^2.2.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-2.2.2.tgz#39ab959ccbf9a774cf079f7b40c7a26f763135f1" + integrity sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ== + is-promise@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/is-promise/-/is-promise-4.0.0.tgz#42ff9f84206c1991d26debf520dd5c01042dd2f3" @@ -5927,11 +7980,6 @@ is-stream@^2.0.0: resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== -is-stream@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-3.0.0.tgz#e6bfd7aa6bef69f4f472ce9bb681e3e57b4319ac" - integrity sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA== - is-string@^1.0.5, is-string@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" @@ -5964,6 +8012,13 @@ is-typed-array@^1.1.10, is-typed-array@^1.1.9: gopd "^1.0.1" has-tostringtag "^1.0.0" +is-typed-array@^1.1.13: + version "1.1.13" + resolved 
"https://registry.yarnpkg.com/is-typed-array/-/is-typed-array-1.1.13.tgz#d6c5ca56df62334959322d7d7dd1cca50debe229" + integrity sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw== + dependencies: + which-typed-array "^1.1.14" + is-typedarray@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" @@ -5998,6 +8053,11 @@ isarray@^1.0.0, isarray@~1.0.0: resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= +isarray@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-2.0.5.tgz#8af1e4c1221244cc62459faf38940d4e644a5723" + integrity sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw== + isexe@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" @@ -6018,6 +8078,17 @@ istanbul-lib-coverage@3.2.0, istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== +istanbul-lib-instrument@^5.0.4: + version "5.2.1" + resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz#d10c8885c2125574e1c231cacadf955675e1ce3d" + integrity sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg== + dependencies: + "@babel/core" "^7.12.3" + "@babel/parser" "^7.14.7" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-coverage "^3.2.0" + semver "^6.3.0" + istanbul-lib-report@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6" @@ -6044,6 +8115,15 @@ jackspeak@^2.0.3: optionalDependencies: "@pkgjs/parseargs" "^0.11.0" +jackspeak@^2.3.5: + version "2.3.6" + resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-2.3.6.tgz#647ecc472238aee4b06ac0e461acc21a8c505ca8" + integrity sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ== + dependencies: + "@isaacs/cliui" "^8.0.2" + optionalDependencies: + "@pkgjs/parseargs" "^0.11.0" + jake@^10.8.5: version "10.8.7" resolved "https://registry.yarnpkg.com/jake/-/jake-10.8.7.tgz#63a32821177940c33f356e0ba44ff9d34e1c7d8f" @@ -6076,6 +8156,51 @@ jest-get-type@^29.4.3: resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-29.4.3.tgz#1ab7a5207c995161100b5187159ca82dd48b3dd5" integrity sha512-J5Xez4nRRMjk8emnTpWrlkyb9pfRQQanDrvWHhsR1+VUfbwxi30eVcZFlcdGInRibU4G5LwHXpI7IRHU0CY+gg== +jest-haste-map@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-28.1.3.tgz#abd5451129a38d9841049644f34b034308944e2b" + integrity sha512-3S+RQWDXccXDKSWnkHa/dPwt+2qwA8CJzR61w3FoYCvoo3Pn8tvGcysmMF0Bj0EX5RYvAI2EIvC57OmotfdtKA== + dependencies: + "@jest/types" "^28.1.3" + "@types/graceful-fs" "^4.1.3" + "@types/node" "*" + anymatch "^3.0.3" + fb-watchman "^2.0.0" + graceful-fs "^4.2.9" + jest-regex-util "^28.0.2" + jest-util "^28.1.3" + jest-worker "^28.1.3" + micromatch "^4.0.4" + walker "^1.0.8" + optionalDependencies: + fsevents "^2.3.2" + +jest-regex-util@^28.0.2: + version "28.0.2" + resolved 
"https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-28.0.2.tgz#afdc377a3b25fb6e80825adcf76c854e5bf47ead" + integrity sha512-4s0IgyNIy0y9FK+cjoVYoxamT7Zeo7MhzqRGx7YDYmaQn1wucY9rotiGkBzzcMXTtjrCAP/f7f+E0F7+fxPNdw== + +jest-util@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-28.1.3.tgz#f4f932aa0074f0679943220ff9cbba7e497028b0" + integrity sha512-XdqfpHwpcSRko/C35uLYFM2emRAltIIKZiJ9eAmhjsj0CqZMa0p1ib0R5fWIqGhn1a103DebTbpqIaP1qCQ6tQ== + dependencies: + "@jest/types" "^28.1.3" + "@types/node" "*" + chalk "^4.0.0" + ci-info "^3.2.0" + graceful-fs "^4.2.9" + picomatch "^2.2.3" + +jest-worker@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-28.1.3.tgz#7e3c4ce3fa23d1bb6accb169e7f396f98ed4bb98" + integrity sha512-CqRA220YV/6jCo8VWvAt1KKx6eek1VIHMPeLEbpcfSfkEeWyBNppynM/o6q+Wmw+sOhos2ml34wZbSX3G13//g== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^8.0.0" + js-string-escape@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/js-string-escape/-/js-string-escape-1.0.1.tgz#e2625badbc0d67c7533e9edc1068c587ae4137ef" @@ -6100,7 +8225,7 @@ js-yaml@4.1.0, js-yaml@^4.1.0: dependencies: argparse "^2.0.1" -js-yaml@^3.10.0, js-yaml@^3.14.1: +js-yaml@^3.10.0, js-yaml@^3.13.1, js-yaml@^3.14.1: version "3.14.1" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== @@ -6108,6 +8233,11 @@ js-yaml@^3.10.0, js-yaml@^3.14.1: argparse "^1.0.7" esprima "^4.0.0" +jsbn@1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-1.1.0.tgz#b01307cb29b618a1ed26ec79e911f803c4da0040" + integrity sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A== + jsdoc-type-pratt-parser@~4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/jsdoc-type-pratt-parser/-/jsdoc-type-pratt-parser-4.0.0.tgz#136f0571a99c184d84ec84662c45c29ceff71114" @@ -6118,6 +8248,11 @@ jsesc@^2.5.1: resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== +jsesc@~0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" + integrity sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA== + json-parse-better-errors@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" @@ -6167,7 +8302,7 @@ json5@^1.0.2: dependencies: minimist "^1.2.0" -json5@^2.2.2: +json5@^2.2.1, json5@^2.2.2, json5@^2.2.3: version "2.2.3" resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== @@ -6397,6 +8532,11 @@ lodash.camelcase@4.3.0: resolved "https://registry.yarnpkg.com/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz#b28aa6288a2b9fc651035c7711f65ab6190331a6" integrity sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA== +lodash.debounce@^4.0.8: + version "4.0.8" + resolved 
"https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af" + integrity sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow== + lodash.ismatch@^4.4.0: version "4.4.0" resolved "https://registry.yarnpkg.com/lodash.ismatch/-/lodash.ismatch-4.4.0.tgz#756cb5150ca3ba6f11085a78849645f188f85f37" @@ -6442,7 +8582,7 @@ lodash.upperfirst@4.3.1: resolved "https://registry.yarnpkg.com/lodash.upperfirst/-/lodash.upperfirst-4.3.1.tgz#1365edf431480481ef0d1c68957a5ed99d49f7ce" integrity sha512-sReKOYJIJf74dhJONhU4e0/shzi1trVbSWDOhKYE5XV2O+H7Sb2Dihwuc7xWxVl+DgFPyTqIN3zMfT9cq5iWDg== -lodash@4.17.21, lodash@^4.13.1, lodash@^4.17.15, lodash@^4.17.21: +lodash@4.17.21, lodash@^4.13.1, lodash@^4.17.12, lodash@^4.17.15, lodash@^4.17.21, lodash@^4.17.4, lodash@^4.3.0: version "4.17.21" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== @@ -6477,6 +8617,18 @@ long@^5.0.0: resolved "https://registry.yarnpkg.com/long/-/long-5.2.0.tgz#2696dadf4b4da2ce3f6f6b89186085d94d52fd61" integrity sha512-9RTUNjK60eJbx3uz+TEGF7fUr29ZDxR5QzXcyDpeSfeH28S9ycINflOgOlppit5U+4kNTe83KQnMEerw7GmE8w== +long@^5.2.0, long@^5.2.1, long@^5.2.3: + version "5.2.3" + resolved "https://registry.yarnpkg.com/long/-/long-5.2.3.tgz#a3ba97f3877cf1d778eccbcb048525ebb77499e1" + integrity sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q== + +lru-cache@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" + integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== + dependencies: + yallist "^3.0.2" + lru-cache@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" @@ -6494,6 +8646,13 @@ lru-cache@^7.14.0, lru-cache@^7.4.4, lru-cache@^7.5.1, lru-cache@^7.7.1: resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.0.0.tgz#b9e2a6a72a129d81ab317202d93c7691df727e61" integrity sha512-svTf/fzsKHffP42sujkO/Rjs37BCIsQVRCeNYIm9WN8rgT7ffoUnRtZCqU+6BqcSBdv8gwJeTz8knJpgACeQMw== +lru-queue@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/lru-queue/-/lru-queue-0.1.0.tgz#2738bd9f0d3cf4f84490c5736c48699ac632cda3" + integrity sha512-BpdYkt9EvGl8OfWHDQPISVpcl5xZthb+XPsbELj5AQXxIC8IriDZIQYjBJPEm5rS420sjZ0TLEzRcq5KdBhYrQ== + dependencies: + es5-ext "~0.10.2" + lunr@^2.3.9: version "2.3.9" resolved "https://registry.yarnpkg.com/lunr/-/lunr-2.3.9.tgz#18b123142832337dd6e964df1a5a7707b25d35e1" @@ -6564,6 +8723,13 @@ make-fetch-happen@^11.0.3: socks-proxy-agent "^7.0.0" ssri "^10.0.0" +makeerror@1.0.12: + version "1.0.12" + resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" + integrity sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg== + dependencies: + tmpl "1.0.5" + map-age-cleaner@^0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz#7d583a7306434c055fe474b0f45078e6e1b4b92a" @@ -6638,6 +8804,20 @@ mem@^9.0.2: map-age-cleaner "^0.1.3" mimic-fn "^4.0.0" +memoizee@^0.4.15: + version "0.4.15" + resolved 
"https://registry.yarnpkg.com/memoizee/-/memoizee-0.4.15.tgz#e6f3d2da863f318d02225391829a6c5956555b72" + integrity sha512-UBWmJpLZd5STPm7PMUlOw/TSy972M+z8gcyQ5veOnSDRREz/0bmpyTfKt3/51DhEBqCZQn1udM/5flcSPYhkdQ== + dependencies: + d "^1.0.1" + es5-ext "^0.10.53" + es6-weak-map "^2.0.3" + event-emitter "^0.3.5" + is-promise "^2.2.2" + lru-queue "^0.1.0" + next-tick "^1.1.0" + timers-ext "^0.1.7" + memorystream@^0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/memorystream/-/memorystream-0.3.1.tgz#86d7090b30ce455d63fbae12dda51a47ddcaf9b2" @@ -6930,6 +9110,11 @@ mime@1.6.0: resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== +mimic-fn@^1.0.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.2.0.tgz#820c86a39334640e99516928bd03fca88057d022" + integrity sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ== + mimic-fn@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" @@ -6967,6 +9152,13 @@ minimatch@3.0.5: dependencies: brace-expansion "^1.1.7" +minimatch@5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.1.0.tgz#1717b464f4971b144f6aabe8f2d0b8e4511e09c7" + integrity sha512-9TPBGGak4nHfGZsPBohm9AWg6NoT7QTCehS3BIJABslyZbzxfV78QM2Y6+i741OPZIafFAaiiEMh5OyIrJPgtg== + dependencies: + brace-expansion "^2.0.1" + "minimatch@6 || 7 || 8 || 9", minimatch@9.0.3, minimatch@^9.0.1, minimatch@^9.0.3: version "9.0.3" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.3.tgz#a6e00c3de44c3a542bfaae70abfc22420a6da825" @@ -6997,11 +9189,16 @@ minimist-options@4.1.0: is-plain-obj "^1.1.0" kind-of "^6.0.3" -minimist@1, minimist@^1.2.0, minimist@^1.2.3, minimist@^1.2.5, minimist@^1.2.6: +minimist@1, minimist@1.2.8, minimist@^1.2.0, minimist@^1.2.3, minimist@^1.2.5, minimist@^1.2.6, minimist@^1.2.8: version "1.2.8" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== +minimist@1.2.6: + version "1.2.6" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" + integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== + minipass-collect@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/minipass-collect/-/minipass-collect-1.0.2.tgz#22b813bf745dc6edba2576b940022ad6edc8c617" @@ -7077,6 +9274,11 @@ minipass@^5.0.0: resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.0.2.tgz#58a82b7d81c7010da5bd4b2c0c85ac4b4ec5131e" integrity sha512-eL79dXrE1q9dBbDCLg7xfn/vl7MS4F1gvJAgjJrQli/jbQWdUttuVawphqpffoIYfRdq78LHx6GP4bU/EQ2ATA== +minipass@^7.0.3: + version "7.0.4" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.0.4.tgz#dbce03740f50a4786ba994c1fb908844d27b038c" + integrity sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ== + minizlib@^2.1.1, minizlib@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.2.tgz#e90d3466ba209b932451508a11ce3d3632145931" @@ -7099,11 +9301,28 @@ mkdirp-infer-owner@^2.0.0: infer-owner "^1.0.4" mkdirp "^1.0.3" -mkdirp@^1.0.3, mkdirp@^1.0.4: +mkdirp@1.0.4, 
mkdirp@^1.0.3, mkdirp@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== +mkdirp@3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-3.0.0.tgz#758101231418bda24435c0888a91d9bd91f1372d" + integrity sha512-7+JDnNsyCvZXoUJdkMR0oUE2AmAdsNXGTmRbiOjYIwQ6q+bL6NwrozGQdPcmYaNcrhH37F50HHBUzoaBV6FITQ== + +mkdirp@^0.5.1: + version "0.5.6" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" + integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== + dependencies: + minimist "^1.2.6" + +mkdirp@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-3.0.1.tgz#e44e4c5607fb279c168241713cc6e0fea9adcb50" + integrity sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg== + modify-values@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/modify-values/-/modify-values-1.0.1.tgz#b3939fa605546474e3e3e3c63d64bd43b4ee6022" @@ -7166,11 +9385,25 @@ multimatch@^5.0.0: arrify "^2.0.1" minimatch "^3.0.4" +mute-stream@0.0.7: + version "0.0.7" + resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.7.tgz#3075ce93bc21b8fab43e1bc4da7e8115ed1e7bab" + integrity sha512-r65nCZhrbXXb6dXOACihYApHw2Q6pV0M3V0PSxd74N0+D8nzAdEAITq2oAjA1jVnKI+tGvEBUpqiMh0+rW6zDQ== + mute-stream@0.0.8, mute-stream@~0.0.4: version "0.0.8" resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.8.tgz#1630c42b2251ff81e2a283de96a5497ea92e5e0d" integrity sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA== +mz@^2.7.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/mz/-/mz-2.7.0.tgz#95008057a56cafadc2bc63dde7f9ff6955948e32" + integrity sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q== + dependencies: + any-promise "^1.0.0" + object-assign "^4.0.1" + thenify-all "^1.0.0" + n-readlines@^1.0.0, n-readlines@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/n-readlines/-/n-readlines-1.0.1.tgz#bbb7364d38bc31a170a199f986fcacfa76b95f6e" @@ -7201,6 +9434,11 @@ neo-async@^2.6.0: resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== +next-tick@1, next-tick@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.1.0.tgz#1836ee30ad56d67ef281b22bd199f709449b35eb" + integrity sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ== + nice-try@^1.0.4: version "1.0.5" resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" @@ -7272,11 +9510,21 @@ node-gyp@^9.0.0: tar "^6.1.2" which "^2.0.2" +node-int64@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" + integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw== + node-releases@^2.0.12: version "2.0.12" resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.12.tgz#35627cc224a23bfb06fb3380f2b3afaaa7eb1039" integrity 
sha512-QzsYKWhXTWx8h1kIvqfnC++o0pEmpRQA/aenALsL2F4pqNVr7YzcdMlDij5WBnwftRbJCNJL/O7zdKaxKPHqgQ== +node-releases@^2.0.14: + version "2.0.14" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.14.tgz#2ffb053bceb8b2be8495ece1ab6ce600c4461b0b" + integrity sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw== + nofilter@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/nofilter/-/nofilter-3.1.0.tgz#c757ba68801d41ff930ba2ec55bab52ca184aa66" @@ -7436,13 +9684,6 @@ npm-run-path@^4.0.1: dependencies: path-key "^3.0.0" -npm-run-path@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-5.1.0.tgz#bc62f7f3f6952d9894bd08944ba011a6ee7b7e00" - integrity sha512-sJOdmRGrY2sjNTRMbSvluQqg+8X7ZK61yvzBEIDhz4f8z1TZFYABsqjjCBd/0PUNE9M6QDgHJXQkGUEm7Q+l9Q== - dependencies: - path-key "^4.0.0" - npmlog@^6.0.0, npmlog@^6.0.2: version "6.0.2" resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-6.0.2.tgz#c8166017a42f2dea92d6453168dd865186a70830" @@ -7504,16 +9745,21 @@ nx@15.9.4, "nx@>=14.8.1 < 16": "@nrwl/nx-win32-arm64-msvc" "15.9.4" "@nrwl/nx-win32-x64-msvc" "15.9.4" -object-hash@^1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/object-hash/-/object-hash-1.3.1.tgz#fde452098a951cb145f039bb7d455449ddc126df" - integrity sha512-OSuu/pU4ENM9kmREg0BdNrUDIl1heYa4mBZacJc+vVWz4GtAwu7jO8s4AIt2aGRUTqxykpWzI3Oqnsm13tTMDA== +object-assign@^4.0.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== -object-inspect@^1.12.3, object-inspect@^1.9.0: +object-inspect@^1.12.3: version "1.12.3" resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.3.tgz#ba62dffd67ee256c8c086dfae69e016cd1f198b9" integrity sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g== +object-inspect@^1.13.1, object-inspect@^1.9.0: + version "1.13.1" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.13.1.tgz#b96c6109324ccfef6b12216a956ca4dc2ff94bc2" + integrity sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ== + object-keys@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" @@ -7529,6 +9775,16 @@ object.assign@^4.1.2, object.assign@^4.1.3, object.assign@^4.1.4: has-symbols "^1.0.3" object-keys "^1.1.1" +object.assign@^4.1.5: + version "4.1.5" + resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.5.tgz#3a833f9ab7fdb80fc9e8d2300c803d216d8fdbb0" + integrity sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ== + dependencies: + call-bind "^1.0.5" + define-properties "^1.2.1" + has-symbols "^1.0.3" + object-keys "^1.1.1" + object.entries@^1.1.5, object.entries@^1.1.6: version "1.1.6" resolved "https://registry.yarnpkg.com/object.entries/-/object.entries-1.1.6.tgz#9737d0e5b8291edd340a3e3264bb8a3b00d5fa23" @@ -7589,6 +9845,13 @@ one-time@^1.0.0: dependencies: fn.name "1.x.x" +onetime@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-2.0.1.tgz#067428230fd67443b2794b22bba528b6867962d4" + integrity sha512-oyyPpiMaKARvvcgip+JV+7zci5L8D1W9RZIz2l1o08AM3pfspitVWnPt3mzHcBPp12oYMTy0pqrFs/C+m3EwsQ== + dependencies: + mimic-fn 
"^1.0.0" + onetime@^5.1.0, onetime@^5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" @@ -7596,13 +9859,6 @@ onetime@^5.1.0, onetime@^5.1.2: dependencies: mimic-fn "^2.1.0" -onetime@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/onetime/-/onetime-6.0.0.tgz#7c24c18ed1fd2e9bca4bd26806a33613c77d34b4" - integrity sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ== - dependencies: - mimic-fn "^4.0.0" - open@^7.4.2: version "7.4.2" resolved "https://registry.yarnpkg.com/open/-/open-7.4.2.tgz#b8147e26dcf3e426316c730089fd71edd29c2321" @@ -7620,16 +9876,6 @@ open@^8.4.0: is-docker "^2.1.1" is-wsl "^2.2.0" -open@^9.1.0: - version "9.1.0" - resolved "https://registry.yarnpkg.com/open/-/open-9.1.0.tgz#684934359c90ad25742f5a26151970ff8c6c80b6" - integrity sha512-OS+QTnw1/4vrf+9hh1jc1jnYjzSG4ttTBB8UxOwAnInG3Uo4ssetzC1ihqaIHjLJnA5GGlRl6QlZXOTQhRBUvg== - dependencies: - default-browser "^4.0.0" - define-lazy-prop "^3.0.0" - is-inside-container "^1.0.0" - is-wsl "^2.2.0" - opener@^1.5.2: version "1.5.2" resolved "https://registry.yarnpkg.com/opener/-/opener-1.5.2.tgz#5d37e1f35077b9dcac4301372271afdeb2a13598" @@ -7882,6 +10128,11 @@ parse-ms@^3.0.0: resolved "https://registry.yarnpkg.com/parse-ms/-/parse-ms-3.0.0.tgz#3ea24a934913345fcc3656deda72df921da3a70e" integrity sha512-Tpb8Z7r7XbbtBTrM9UhpkzzaMrqA2VXMT3YChzYltwV3P3pM6t8wl7TvpMnSTosz1aQAdVib7kdoys7vYOPerw== +parse-package-name@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/parse-package-name/-/parse-package-name-1.0.0.tgz#1a108757e4ffc6889d5e78bcc4932a97c097a5a7" + integrity sha512-kBeTUtcj+SkyfaW4+KBe0HtsloBJ/mKTPoxpVdA57GZiPerREsUWJOhVj9anXweFiJkm5y8FG1sxFZkZ0SN6wg== + parse-path@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/parse-path/-/parse-path-7.0.0.tgz#605a2d58d0a749c8594405d8cc3a2bf76d16099b" @@ -7951,11 +10202,6 @@ path-key@^3.0.0, path-key@^3.1.0: resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== -path-key@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/path-key/-/path-key-4.0.0.tgz#295588dc3aee64154f877adb9d780b81c554bf18" - integrity sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ== - path-parse@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" @@ -7991,7 +10237,7 @@ picocolors@^1.0.0: resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== -picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.3.1: +picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.2.3, picomatch@^2.3.1: version "2.3.1" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== @@ -8009,7 +10255,7 @@ pify@^2.3.0: pify@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176" - integrity sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY= + integrity 
sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg== pify@^4.0.1: version "4.0.1" @@ -8021,6 +10267,11 @@ pify@^5.0.0: resolved "https://registry.yarnpkg.com/pify/-/pify-5.0.0.tgz#1f5eca3f5e87ebec28cc6d54a0e4aaf00acc127f" integrity sha512-eW/gHNMlxdSP6dmG6uJip6FXN0EQBwm2clYYd8Wul42Cwu/DK8HEftzsapcNdYe2MfLiIwZqsDk2RDEsTE79hA== +pirates@^4.0.4: + version "4.0.6" + resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.6.tgz#3018ae32ecfcff6c29ba2267cbf21166ac1f36b9" + integrity sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg== + pkg-conf@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/pkg-conf/-/pkg-conf-4.0.0.tgz#63ace00cbacfa94c2226aee133800802d3e3b80c" @@ -8101,6 +10352,11 @@ prettier-plugin-jsdoc@^1.0.0: comment-parser "^1.3.1" mdast-util-from-markdown "^1.2.0" +prettier@^2.6.2, prettier@^2.8.7: + version "2.8.8" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.8.8.tgz#e8c5d7e98a4305ffe3de2e1fc4aca1a71c28b1da" + integrity sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q== + prettier@^3.0.0, prettier@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/prettier/-/prettier-3.0.3.tgz#432a51f7ba422d1469096c0fdc28e235db8f9643" @@ -8183,7 +10439,7 @@ proto-list@~1.2.1: resolved "https://registry.yarnpkg.com/proto-list/-/proto-list-1.2.4.tgz#212d5bfe1318306a420f6402b8e26ff39647a849" integrity sha1-IS1b/hMYMGpCD2QCuOJv85ZHqEk= -protobufjs@^6.10.3, protobufjs@^6.11.3, protobufjs@^6.8.8, protobufjs@~6.11.2, protobufjs@~6.11.3: +protobufjs@^6.10.3, protobufjs@^6.8.8, protobufjs@~6.11.2, protobufjs@~6.11.3: version "6.11.3" resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-6.11.3.tgz#637a527205a35caa4f3e2a9a4a13ddffe0e7af74" integrity sha512-xL96WDdCZYdU7Slin569tFX712BxsxslWwAfAhCYjQKGTq7dAU91Lomy6nLLhh/dyGhk/YH4TwTSRxTzhuHyZg== @@ -8238,6 +10494,19 @@ proxy-from-env@^1.1.0: resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2" integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg== +publish-scripts@0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/publish-scripts/-/publish-scripts-0.1.0.tgz#544af8dfb8ebafbd0e9541d5200e8618d85584dd" + integrity sha512-cF4AYXPUKbU6Cw1PWY4b1CCl0+tsM13WdppqoBbD37lNvXf8vX6jgie/JDLH3jlSHDNCYuOolqLVMmdNlWWRjQ== + dependencies: + "@babel/runtime" "^7.19.0" + fuzzy "^0.1.3" + glob "^10.3.3" + inquirerer "^0.1.3" + minimist "^1.2.8" + mkdirp "^3.0.1" + rimraf "^5.0.1" + pump@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" @@ -8423,6 +10692,13 @@ readonly-date@^1.0.0: resolved "https://registry.yarnpkg.com/readonly-date/-/readonly-date-1.0.0.tgz#5af785464d8c7d7c40b9d738cbde8c646f97dcd9" integrity sha512-tMKIV7hlk0h4mO3JTmmVuIlJVXjKk3Sep9Bf5OH0O+758ruuVkUy2J9SttDLm91IEX/WHlXPSpxMGjPj4beMIQ== +rechoir@^0.6.2: + version "0.6.2" + resolved "https://registry.yarnpkg.com/rechoir/-/rechoir-0.6.2.tgz#85204b54dba82d5742e28c96756ef43af50e3384" + integrity sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw== + dependencies: + resolve "^1.1.6" + redent@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/redent/-/redent-3.0.0.tgz#e557b7998316bb53c9f1f56fa626352c6963059f" @@ -8431,11 +10707,35 @@ redent@^3.0.0: indent-string "^4.0.0" 
strip-indent "^3.0.0" +regenerate-unicode-properties@^10.1.0: + version "10.1.1" + resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.1.tgz#6b0e05489d9076b04c436f318d9b067bba459480" + integrity sha512-X007RyZLsCJVVrjgEFVpLUTZwyOZk3oiL75ZcuYjlIWd6rNJtOjkBwQc5AsRrpbKVkxN6sklw/k/9m2jJYOf8Q== + dependencies: + regenerate "^1.4.2" + +regenerate@^1.4.2: + version "1.4.2" + resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a" + integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A== + regenerator-runtime@^0.13.11: version "0.13.11" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz#f6dca3e7ceec20590d07ada785636a90cdca17f9" integrity sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg== +regenerator-runtime@^0.14.0: + version "0.14.1" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz#356ade10263f685dda125100cd862c1db895327f" + integrity sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw== + +regenerator-transform@^0.15.2: + version "0.15.2" + resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.15.2.tgz#5bbae58b522098ebdf09bca2f83838929001c7a4" + integrity sha512-hfMp2BoF0qOk3uc5V20ALGDS2ddjQaLrdl7xrGXvAIow7qeWRM2VA2HuCHkUKk9slq3VwEwLNK3DFBqDfPGYtg== + dependencies: + "@babel/runtime" "^7.8.4" + regexp.prototype.flags@^1.4.3: version "1.4.3" resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz#87cab30f80f66660181a3bb7bf5981a872b367ac" @@ -8445,6 +10745,35 @@ regexp.prototype.flags@^1.4.3: define-properties "^1.1.3" functions-have-names "^1.2.2" +regexp.prototype.flags@^1.5.2: + version "1.5.2" + resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz#138f644a3350f981a858c44f6bb1a61ff59be334" + integrity sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw== + dependencies: + call-bind "^1.0.6" + define-properties "^1.2.1" + es-errors "^1.3.0" + set-function-name "^2.0.1" + +regexpu-core@^5.3.1: + version "5.3.2" + resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-5.3.2.tgz#11a2b06884f3527aec3e93dbbf4a3b958a95546b" + integrity sha512-RAM5FlZz+Lhmo7db9L298p2vHP5ZywrVXmVXpmAD9GuL5MPH6t9ROw1iA/wfHkQ76Qe7AaPF0nGuim96/IrQMQ== + dependencies: + "@babel/regjsgen" "^0.8.0" + regenerate "^1.4.2" + regenerate-unicode-properties "^10.1.0" + regjsparser "^0.9.1" + unicode-match-property-ecmascript "^2.0.0" + unicode-match-property-value-ecmascript "^2.1.0" + +regjsparser@^0.9.1: + version "0.9.1" + resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.9.1.tgz#272d05aa10c7c1f67095b1ff0addae8442fc5709" + integrity sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ== + dependencies: + jsesc "~0.5.0" + require-directory@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" @@ -8487,6 +10816,15 @@ resolve-pkg-maps@^1.0.0: resolved "https://registry.yarnpkg.com/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz#616b3dc2c57056b5588c31cdf4b3d64db133720f" integrity sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw== 
+resolve@^1.1.6, resolve@^1.14.2: + version "1.22.8" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.8.tgz#b6c87a9f2aa06dfab52e3d70ac8cde321fa5a48d" + integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw== + dependencies: + is-core-module "^2.13.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + resolve@^1.10.0, resolve@^1.12.0, resolve@^1.17.0, resolve@^1.19.0, resolve@^1.22.1: version "1.22.1" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177" @@ -8496,6 +10834,14 @@ resolve@^1.10.0, resolve@^1.12.0, resolve@^1.17.0, resolve@^1.19.0, resolve@^1.2 path-parse "^1.0.7" supports-preserve-symlinks-flag "^1.0.0" +restore-cursor@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-2.0.0.tgz#9f7ee287f82fd326d4fd162923d62129eee0dfaf" + integrity sha512-6IzJLuGi4+R14vwagDHX+JrXmPVtPpn4mffDJ1UdR7/Edm87fl6yi8mMBIVvFtJaNTUvjughmW4hwLhRG7gC1Q== + dependencies: + onetime "^2.0.0" + signal-exit "^3.0.2" + restore-cursor@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-3.1.0.tgz#39f67c54b3a7a58cea5236d95cf0034239631f7e" @@ -8519,6 +10865,20 @@ reusify@^1.0.4: resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== +rimraf@3.0.2, rimraf@^3.0.0, rimraf@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" + integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== + dependencies: + glob "^7.1.3" + +rimraf@5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-5.0.0.tgz#5bda14e410d7e4dd522154891395802ce032c2cb" + integrity sha512-Jf9llaP+RvaEVS5nPShYFhtXIrb3LRKP281ib3So0KkeZKo2wIKyq0Re7TOSwanasA423PSr6CCIL4bP6T040g== + dependencies: + glob "^10.0.0" + rimraf@^2.6.3, rimraf@~2.6.2: version "2.6.3" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.3.tgz#b2d104fe0d8fb27cf9e0a1cda8262dd3833c6cab" @@ -8526,12 +10886,12 @@ rimraf@^2.6.3, rimraf@~2.6.2: dependencies: glob "^7.1.3" -rimraf@^3.0.0, rimraf@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" - integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== +rimraf@^5.0.0, rimraf@^5.0.1: + version "5.0.5" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-5.0.5.tgz#9be65d2d6e683447d2e9013da2bf451139a61ccf" + integrity sha512-CqDakW+hMe/Bz202FPEymy68P+G50RfMQK+Qo5YUqc9SPipvbGjCGKd0RSKEelbsfQuw3g5NZDSrlZZAJurH1A== dependencies: - glob "^7.1.3" + glob "^10.3.7" rollup-plugin-string@^3.0.0: version "3.0.0" @@ -8554,14 +10914,7 @@ rollup@^2.58.0, rollup@^2.79.1: optionalDependencies: fsevents "~2.3.2" -run-applescript@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/run-applescript/-/run-applescript-5.0.0.tgz#e11e1c932e055d5c6b40d98374e0268d9b11899c" - integrity sha512-XcT5rBksx1QdIhlFOCtgZkB99ZEouFZ1E2Kc2LHqNW13U3/74YGdkQRmThTwxy4QIyookibDKYZOPqX//6BlAg== - dependencies: - execa "^5.0.0" - -run-async@^2.4.0: +run-async@^2.2.0, run-async@^2.3.0, run-async@^2.4.0: version "2.4.1" resolved 
"https://registry.yarnpkg.com/run-async/-/run-async-2.4.1.tgz#8440eccf99ea3e70bd409d49aab88e10c189a455" integrity sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ== @@ -8573,6 +10926,25 @@ run-parallel@^1.1.9: dependencies: queue-microtask "^1.2.2" +rx-lite-aggregates@^4.0.8: + version "4.0.8" + resolved "https://registry.yarnpkg.com/rx-lite-aggregates/-/rx-lite-aggregates-4.0.8.tgz#753b87a89a11c95467c4ac1626c4efc4e05c67be" + integrity sha512-3xPNZGW93oCjiO7PtKxRK6iOVYBWBvtf9QHDfU23Oc+dLIQmAV//UnyXV/yihv81VS/UqoQPk4NegS8EFi55Hg== + dependencies: + rx-lite "*" + +rx-lite@*, rx-lite@^4.0.8: + version "4.0.8" + resolved "https://registry.yarnpkg.com/rx-lite/-/rx-lite-4.0.8.tgz#0b1e11af8bc44836f04a6407e92da42467b79444" + integrity sha512-Cun9QucwK6MIrp3mry/Y7hqD1oFqTYLQ4pGxaHTjIdaFDWRGGLikqp6u8LcWJnzpoALg9hap+JGk8sFIUuEGNA== + +rxjs@^6.4.0: + version "6.6.7" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.7.tgz#90ac018acabf491bf65044235d5863c4dab804c9" + integrity sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ== + dependencies: + tslib "^1.9.0" + rxjs@^7.5.5: version "7.5.5" resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.5.5.tgz#2ebad89af0f560f460ad5cc4213219e1f7dd4e9f" @@ -8587,12 +10959,22 @@ sade@^1.7.3: dependencies: mri "^1.1.0" +safe-array-concat@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/safe-array-concat/-/safe-array-concat-1.1.0.tgz#8d0cae9cb806d6d1c06e08ab13d847293ebe0692" + integrity sha512-ZdQ0Jeb9Ofti4hbt5lX3T2JcAamT9hfzYU1MNB+z/jaEbB6wfFfPIR/zEORmZqobkCCJhSjodobH6WHNmJ97dg== + dependencies: + call-bind "^1.0.5" + get-intrinsic "^1.2.2" + has-symbols "^1.0.3" + isarray "^2.0.5" + safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== -safe-buffer@5.2.1, safe-buffer@^5.0.1: +safe-buffer@5.2.1, safe-buffer@^5.0.1, safe-buffer@~5.2.0: version "5.2.1" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== @@ -8606,6 +10988,15 @@ safe-regex-test@^1.0.0: get-intrinsic "^1.1.3" is-regex "^1.1.4" +safe-regex-test@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/safe-regex-test/-/safe-regex-test-1.0.3.tgz#a5b4c0f06e0ab50ea2c395c14d8371232924c377" + integrity sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw== + dependencies: + call-bind "^1.0.6" + es-errors "^1.3.0" + is-regex "^1.1.4" + safe-stable-stringify@^2.3.1: version "2.4.2" resolved "https://registry.yarnpkg.com/safe-stable-stringify/-/safe-stable-stringify-2.4.2.tgz#ec7b037768098bf65310d1d64370de0dc02353aa" @@ -8633,6 +11024,11 @@ semver@^6.0.0, semver@^6.3.0: resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== +semver@^6.1.1, semver@^6.1.2, semver@^6.3.1: + version "6.3.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" + integrity 
sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== + semver@^7.0.0, semver@^7.1.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.5.1, semver@^7.5.4: version "7.5.4" resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" @@ -8695,6 +11091,27 @@ set-blocking@^2.0.0: resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= +set-function-length@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/set-function-length/-/set-function-length-1.2.1.tgz#47cc5945f2c771e2cf261c6737cf9684a2a5e425" + integrity sha512-j4t6ccc+VsKwYHso+kElc5neZpjtq9EnRICFZtWyBsLojhmeF/ZBd/elqm22WJh/BziDe/SBiOeAt0m2mfLD0g== + dependencies: + define-data-property "^1.1.2" + es-errors "^1.3.0" + function-bind "^1.1.2" + get-intrinsic "^1.2.3" + gopd "^1.0.1" + has-property-descriptors "^1.0.1" + +set-function-name@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/set-function-name/-/set-function-name-2.0.1.tgz#12ce38b7954310b9f61faa12701620a0c882793a" + integrity sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA== + dependencies: + define-data-property "^1.0.1" + functions-have-names "^1.2.3" + has-property-descriptors "^1.0.0" + setprototypeof@1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" @@ -8736,6 +11153,15 @@ shell-quote@^1.6.1: resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.7.3.tgz#aa40edac170445b9a431e17bb62c0b881b9c4123" integrity sha512-Vpfqwm4EnqGdlsBFNmHhxhElJYrdfcxPThu+ryKS5J8L/fhAwLazFZtq+S+TWZ9ANj2piSQLGj6NQg+lKPmxrw== +shelljs@0.8.5: + version "0.8.5" + resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.8.5.tgz#de055408d8361bed66c669d2f000538ced8ee20c" + integrity sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow== + dependencies: + glob "^7.0.0" + interpret "^1.0.0" + rechoir "^0.6.2" + shiki@^0.14.7: version "0.14.7" resolved "https://registry.yarnpkg.com/shiki/-/shiki-0.14.7.tgz#c3c9e1853e9737845f1d2ef81b31bcfb07056d4e" @@ -8761,9 +11187,9 @@ signal-exit@^3.0.2, signal-exit@^3.0.3, signal-exit@^3.0.7: integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== signal-exit@^4.0.1: - version "4.0.2" - resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-4.0.2.tgz#ff55bb1d9ff2114c13b400688fa544ac63c36967" - integrity sha512-MY2/qGx4enyjprQnFaZsHib3Yadh3IXyV2C321GY0pjGfVBu4un0uDJkwgdxqO+Rdx8JMT8IfJIRwbYVz3Ob3Q== + version "4.1.0" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-4.1.0.tgz#952188c1cbd546070e2dd20d0f41c0ae0530cb04" + integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw== simple-concat@^1.0.0: version "1.0.1" @@ -8833,17 +11259,17 @@ socks-proxy-agent@^7.0.0: socks "^2.6.2" socks@^2.6.2: - version "2.7.1" - resolved "https://registry.yarnpkg.com/socks/-/socks-2.7.1.tgz#d8e651247178fde79c0663043e07240196857d55" - integrity sha512-7maUZy1N7uo6+WVEX6psASxtNlKaNVMlGQKkG/63nEDdLOWNbiUMoLK7X4uYoLhQstau72mLgfEWcXcwsaHbYQ== + version "2.7.3" + resolved "https://registry.yarnpkg.com/socks/-/socks-2.7.3.tgz#7d8a75d7ce845c0a96f710917174dba0d543a785" + integrity 
sha512-vfuYK48HXCTFD03G/1/zkIls3Ebr2YNa4qU9gHDZdblHLiqhJrJGkY3+0Nx0JpN9qBhJbVObc1CNciT1bIZJxw== dependencies: - ip "^2.0.0" + ip-address "^9.0.5" smart-buffer "^4.2.0" sort-keys@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/sort-keys/-/sort-keys-2.0.0.tgz#658535584861ec97d730d6cf41822e1f56684128" - integrity sha1-ZYU1WEhh7JfXMNbPQYIuH1ZoQSg= + integrity sha512-/dPCrG1s3ePpWm6yBbxZq5Be1dXGLyLn9Z791chDC3NFrpkVbWGzkBwPN1knaciexFXgRJ7hzdnwZ4stHSDmjg== dependencies: is-plain-obj "^1.0.0" @@ -8870,30 +11296,30 @@ source-map@0.7.4, source-map@^0.7.4: source-map@^0.5.0: version "0.5.7" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" - integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= + integrity sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ== -source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.1: +source-map@^0.6.0, source-map@^0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== sourcemap-codec@^1.4.4: - version "1.4.6" - resolved "https://registry.yarnpkg.com/sourcemap-codec/-/sourcemap-codec-1.4.6.tgz#e30a74f0402bad09807640d39e971090a08ce1e9" - integrity sha512-1ZooVLYFxC448piVLBbtOxFcXwnymH9oUF8nRd3CuYDVvkRBxRl6pB4Mtas5a4drtL+E8LDgFkQNcgIw6tc8Hg== + version "1.4.8" + resolved "https://registry.yarnpkg.com/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz#ea804bd94857402e6992d05a38ef1ae35a9ab4c4" + integrity sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA== spdx-correct@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.0.tgz#fb83e504445268f154b074e218c87c003cd31df4" - integrity sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q== + version "3.2.0" + resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.2.0.tgz#4f5ab0668f0059e34f9c00dce331784a12de4e9c" + integrity sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA== dependencies: spdx-expression-parse "^3.0.0" spdx-license-ids "^3.0.0" spdx-exceptions@^2.1.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz#2ea450aee74f2a89bfb94519c07fcd6f41322977" - integrity sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA== + version "2.5.0" + resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz#5d607d27fc806f66d7b64a766650fa890f04ed66" + integrity sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w== spdx-expression-parse@^3.0.0, spdx-expression-parse@^3.0.1: version "3.0.1" @@ -8904,9 +11330,9 @@ spdx-expression-parse@^3.0.0, spdx-expression-parse@^3.0.1: spdx-license-ids "^3.0.0" spdx-license-ids@^3.0.0: - version "3.0.5" - resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz#3694b5804567a458d3c8045842a6358632f62654" - integrity sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q== + version "3.0.17" + resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.17.tgz#887da8aa73218e51a1d917502d79863161a93f9c" + integrity 
sha512-sh8PWc/ftMqAAdFiBu6Fy6JUOYjqDJBJvIhpfDMyHrr0Rbp5liZqd4TjtQ/RgfLjKFZb+LMx5hpml5qOWy0qvg== split2@^3.0.0: version "3.2.2" @@ -8922,17 +11348,22 @@ split@^1.0.0: dependencies: through "2" +sprintf-js@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.1.3.tgz#4914b903a2f8b685d17fdf78a70e917e872e444a" + integrity sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA== + sprintf-js@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== ssri@^10.0.0: - version "10.0.4" - resolved "https://registry.yarnpkg.com/ssri/-/ssri-10.0.4.tgz#5a20af378be586df139ddb2dfb3bf992cf0daba6" - integrity sha512-12+IR2CB2C28MMAw0Ncqwj5QbTcs0nGIhgJzYWzDkb21vWmfNI83KS4f3Ci6GI98WreIfG7o9UXp3C0qbpA8nQ== + version "10.0.5" + resolved "https://registry.yarnpkg.com/ssri/-/ssri-10.0.5.tgz#e49efcd6e36385196cb515d3a2ad6c3f0265ef8c" + integrity sha512-bSf16tAFkGeRlUNDjXu8FzaMQt6g2HZJrun7mtMbIPOddxt3GLMSz5VWUWcqTJUPfLEaDIepGxv+bYQW49596A== dependencies: - minipass "^5.0.0" + minipass "^7.0.3" ssri@^9.0.0, ssri@^9.0.1: version "9.0.1" @@ -8967,6 +11398,14 @@ statuses@2.0.1: is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.1" +string-width@^2.0.0, string-width@^2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" + integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== + dependencies: + is-fullwidth-code-point "^2.0.0" + strip-ansi "^4.0.0" + string-width@^5.0.0, string-width@^5.0.1, string-width@^5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-5.1.2.tgz#14f8daec6d81e7221d2a357e668cab73bdbca794" @@ -8977,42 +11416,49 @@ string-width@^5.0.0, string-width@^5.0.1, string-width@^5.1.2: strip-ansi "^7.0.1" string.prototype.padend@^3.0.0: - version "3.1.3" - resolved "https://registry.yarnpkg.com/string.prototype.padend/-/string.prototype.padend-3.1.3.tgz#997a6de12c92c7cb34dc8a201a6c53d9bd88a5f1" - integrity sha512-jNIIeokznm8SD/TZISQsZKYu7RJyheFNt84DUPrh482GC8RVp2MKqm2O5oBRdGxbDQoXrhhWtPIWQOiy20svUg== + version "3.1.5" + resolved "https://registry.yarnpkg.com/string.prototype.padend/-/string.prototype.padend-3.1.5.tgz#311ef3a4e3c557dd999cdf88fbdde223f2ac0f95" + integrity sha512-DOB27b/2UTTD+4myKUFh+/fXWcu/UDyASIXfg+7VzoCNNGOfWvoyU/x5pvVHr++ztyt/oSYI1BcWBBG/hmlNjA== dependencies: call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.19.1" + define-properties "^1.2.0" + es-abstract "^1.22.1" -string.prototype.trim@^1.2.7: - version "1.2.7" - resolved "https://registry.yarnpkg.com/string.prototype.trim/-/string.prototype.trim-1.2.7.tgz#a68352740859f6893f14ce3ef1bb3037f7a90533" - integrity sha512-p6TmeT1T3411M8Cgg9wBTMRtY2q9+PNy9EV1i2lIXUN/btt763oIfxwN3RR8VU6wHX8j/1CFy0L+YuThm6bgOg== +string.prototype.trim@^1.2.7, string.prototype.trim@^1.2.8: + version "1.2.8" + resolved "https://registry.yarnpkg.com/string.prototype.trim/-/string.prototype.trim-1.2.8.tgz#f9ac6f8af4bd55ddfa8895e6aea92a96395393bd" + integrity sha512-lfjY4HcixfQXOfaqCvcBuOIapyaroTXhbkfJN3gcB1OtyupngWK4sEET9Knd0cXd28kTUqu/kHoV4HKSJdnjiQ== dependencies: call-bind "^1.0.2" - define-properties "^1.1.4" - es-abstract "^1.20.4" + define-properties "^1.2.0" + es-abstract "^1.22.1" 
-string.prototype.trimend@^1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.6.tgz#c4a27fa026d979d79c04f17397f250a462944533" - integrity sha512-JySq+4mrPf9EsDBEDYMOb/lM7XQLulwg5R/m1r0PXEFqrV0qHvl58sdTilSXtKOflCsK2E8jxf+GKC0T07RWwQ== +string.prototype.trimend@^1.0.6, string.prototype.trimend@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.7.tgz#1bb3afc5008661d73e2dc015cd4853732d6c471e" + integrity sha512-Ni79DqeB72ZFq1uH/L6zJ+DKZTkOtPIHovb3YZHQViE+HDouuU4mBrLOLDn5Dde3RF8qw5qVETEjhu9locMLvA== dependencies: call-bind "^1.0.2" - define-properties "^1.1.4" - es-abstract "^1.20.4" + define-properties "^1.2.0" + es-abstract "^1.22.1" -string.prototype.trimstart@^1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.6.tgz#e90ab66aa8e4007d92ef591bbf3cd422c56bdcf4" - integrity sha512-omqjMDaY92pbn5HOX7f9IccLA+U1tA9GvtU4JrodiXFfYB7jPzzHpRzpglLAjtUV6bB557zwClJezTqnAiYnQA== +string.prototype.trimstart@^1.0.6, string.prototype.trimstart@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.7.tgz#d4cdb44b83a4737ffbac2d406e405d43d0184298" + integrity sha512-NGhtDFu3jCEm7B4Fy0DpLewdJQOZcQ0rGbwQ/+stjnrp2i+rlKeCvos9hOIeCmqwratM47OBxY7uFZzjxHXmrg== dependencies: call-bind "^1.0.2" - define-properties "^1.1.4" - es-abstract "^1.20.4" + define-properties "^1.2.0" + es-abstract "^1.22.1" + +string_decoder@^1.1.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" + integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== + dependencies: + safe-buffer "~5.2.0" -string_decoder@^1.1.1, string_decoder@~1.1.1: +string_decoder@~1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== @@ -9026,10 +11472,31 @@ string_decoder@^1.1.1, string_decoder@~1.1.1: dependencies: ansi-regex "^5.0.1" +strip-ansi@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" + integrity sha512-VhumSSbBqDTP8p2ZLKj40UjBCV4+v8bUSEpUb4KjRgWk9pbqGF4REFj6KEagidb2f/M6AzC0EmFyDNGaw9OCzg== + dependencies: + ansi-regex "^2.0.0" + +strip-ansi@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" + integrity sha512-4XaJ2zQdCzROZDivEVIDPkcQn8LMFSa8kj8Gxb/Lnwzv9A8VctNZ+lfivC/sV3ivW8ElJTERXZoPBRrZKkNKow== + dependencies: + ansi-regex "^3.0.0" + +strip-ansi@^5.1.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" + integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA== + dependencies: + ansi-regex "^4.1.0" + strip-ansi@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.0.1.tgz#61740a08ce36b61e50e65653f07060d000975fb2" - integrity sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw== + version "7.1.0" + resolved 
"https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" + integrity sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ== dependencies: ansi-regex "^6.0.1" @@ -9048,11 +11515,6 @@ strip-final-newline@^2.0.0: resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== -strip-final-newline@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-3.0.0.tgz#52894c313fbff318835280aed60ff71ebf12b8fd" - integrity sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw== - strip-indent@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-3.0.0.tgz#c32e1cee940b6b3432c771bc2c54bcce73cd3001" @@ -9068,7 +11530,7 @@ strip-json-comments@^3.1.1: strip-json-comments@~2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" - integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= + integrity sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ== strong-log-transformer@^2.1.0: version "2.1.0" @@ -9089,6 +11551,11 @@ supertap@^3.0.1: serialize-error "^7.0.1" strip-ansi "^7.0.1" +supports-color@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" + integrity sha512-KKNVtd6pCYgPIKU4cp2733HWYCpplQhddZLBUryaAHou723x+FRzQ5Df824Fj+IyyuiQTRoub4SnIFfIcrp70g== + supports-color@^5.3.0: version "5.5.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" @@ -9103,10 +11570,17 @@ supports-color@^7.0.0, supports-color@^7.1.0: dependencies: has-flag "^4.0.0" +supports-color@^8.0.0: + version "8.1.1" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" + integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== + dependencies: + has-flag "^4.0.0" + supports-hyperlinks@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/supports-hyperlinks/-/supports-hyperlinks-2.2.0.tgz#4f77b42488765891774b70c79babd87f9bd594bb" - integrity sha512-6sXEzV5+I5j8Bmq9/vUphGRM/RJNT9SCURJLjwfOg51heRtguGWDzcaBlgAzKhQa0EVNpPEKzQuBwZ8S8WaCeQ== + version "2.3.0" + resolved "https://registry.yarnpkg.com/supports-hyperlinks/-/supports-hyperlinks-2.3.0.tgz#3943544347c1ff90b15effb03fc14ae45ec10624" + integrity sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA== dependencies: has-flag "^4.0.0" supports-color "^7.0.0" @@ -9127,12 +11601,12 @@ symbol-observable@^2.0.3: integrity sha512-sQV7phh2WCYAn81oAkakC5qjq2Ml0g8ozqz03wOGnx9dDlG1de6yrF+0RAzSJD8fPUow3PTSMf2SAbOGxb93BA== synckit@^0.8.5: - version "0.8.5" - resolved "https://registry.yarnpkg.com/synckit/-/synckit-0.8.5.tgz#b7f4358f9bb559437f9f167eb6bc46b3c9818fa3" - integrity sha512-L1dapNV6vu2s/4Sputv8xGsCdAVlb5nRDMFU/E27D44l5U6cw1g0dGd45uLc+OXjNMmF4ntiMdCimzcjFKQI8Q== + version "0.8.8" + resolved "https://registry.yarnpkg.com/synckit/-/synckit-0.8.8.tgz#fe7fe446518e3d3d49f5e429f443cf08b6edfcd7" + integrity 
sha512-HwOKAP7Wc5aRGYdKH+dw0PRRpbO841v2DENBtjnR5HFWoiNByAl7vrx3p0G/rCyYXQsrxqtX48TImFtPcIHSpQ== dependencies: - "@pkgr/utils" "^2.3.1" - tslib "^2.5.0" + "@pkgr/core" "^0.1.0" + tslib "^2.6.2" table@^6.7.1: version "6.8.1" @@ -9167,9 +11641,9 @@ tar-stream@^2.1.4, tar-stream@~2.2.0: readable-stream "^3.1.1" tar@^6.1.0, tar@^6.1.11, tar@^6.1.2: - version "6.1.15" - resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.15.tgz#c9738b0b98845a3b344d334b8fa3041aaba53a69" - integrity sha512-/zKt9UyngnxIT/EAGYuxaMYgOIJiP81ab9ZfkILq4oNLPFX50qyYmu7jRj9qeXoxmJHjGlbH0+cm2uy1WCs10A== + version "6.2.0" + resolved "https://registry.yarnpkg.com/tar/-/tar-6.2.0.tgz#b14ce49a79cb1cd23bc9b016302dea5474493f73" + integrity sha512-/Wo7DcT0u5HUV486xg675HtjNd3BXZ6xDbzsCUZPt5iw8bTQ63bP0Raut3mvro9u+CUyq7YQd8Cx55fsZXxqLQ== dependencies: chownr "^2.0.0" fs-minipass "^2.0.0" @@ -9188,7 +11662,7 @@ tdigest@^0.1.1: temp-dir@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/temp-dir/-/temp-dir-1.0.0.tgz#0a7c0ea26d3a39afa7e0ebea9c1fc0bc4daa011d" - integrity sha1-CnwOom06Oa+n4OvqnB/AvE2qAR0= + integrity sha512-xZFXEGbG7SNC3itwBzI3RYjq/cEhBkx2hJuKGIUOcEULmkQExXiHat2z/qkISYsuR+IKumhEfKKbV5qXmhICFQ== temp-dir@^3.0.0: version "3.0.0" @@ -9196,10 +11670,11 @@ temp-dir@^3.0.0: integrity sha512-nHc6S/bwIilKHNRgK/3jlhDoIHcp45YgyiwcAk46Tr0LfEqGBVpmiAyuiuxeVE44m3mXnEeVhaipLOEWmH+Njw== temp@^0.9.0, temp@^0.9.1: - version "0.9.1" - resolved "https://registry.yarnpkg.com/temp/-/temp-0.9.1.tgz#2d666114fafa26966cd4065996d7ceedd4dd4697" - integrity sha512-WMuOgiua1xb5R56lE0eH6ivpVmg/lq2OHm4+LtT/xtEtPQ+sz6N3bBM6WZ5FvO1lO4IKIOb43qnhoc4qxP5OeA== + version "0.9.4" + resolved "https://registry.yarnpkg.com/temp/-/temp-0.9.4.tgz#cd20a8580cb63635d0e4e9d4bd989d44286e7620" + integrity sha512-yYrrsWnrXMcdsnu/7YMYAofM1ktpL5By7vZhf15CrXijWWrEYZks5AXBudalfSWJLlnen/QUJUB5aoB0kqZUGA== dependencies: + mkdirp "^0.5.1" rimraf "~2.6.2" test-exclude@^6.0.0: @@ -9224,7 +11699,21 @@ text-hex@1.0.x: text-table@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" - integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ= + integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== + +thenify-all@^1.0.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/thenify-all/-/thenify-all-1.6.0.tgz#1a1918d402d8fc3f98fbf234db0bcc8cc10e9726" + integrity sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA== + dependencies: + thenify ">= 3.1.0 < 4" + +"thenify@>= 3.1.0 < 4": + version "3.3.1" + resolved "https://registry.yarnpkg.com/thenify/-/thenify-3.3.1.tgz#8932e686a4066038a016dd9e2ca46add9838a95f" + integrity sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw== + dependencies: + any-promise "^1.0.0" through2@^2.0.0: version "2.0.5" @@ -9244,17 +11733,20 @@ through2@^4.0.0: through@2, "through@>=2.2.7 <3", through@^2.3.4, through@^2.3.6: version "2.3.8" resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" - integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU= + integrity sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg== time-zone@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/time-zone/-/time-zone-1.0.0.tgz#99c5bf55958966af6d06d83bdf3800dc82faec5d" integrity 
sha512-TIsDdtKo6+XrPtiTm1ssmMngN1sAhyKnTO2kunQWqNPWIVvCm15Wmw4SWInwTVgJ5u/Tr04+8Ei9TNcw4x4ONA== -titleize@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/titleize/-/titleize-3.0.0.tgz#71c12eb7fdd2558aa8a44b0be83b8a76694acd53" - integrity sha512-KxVu8EYHDPBdUYdKZdKtU2aj2XfEx9AfjXxE/Aj0vT06w2icA09Vus1rh6eSu1y01akYg6BjIK/hxyLJINoMLQ== +timers-ext@^0.1.7: + version "0.1.7" + resolved "https://registry.yarnpkg.com/timers-ext/-/timers-ext-0.1.7.tgz#6f57ad8578e07a3fb9f91d9387d65647555e25c6" + integrity sha512-b85NUNzTSdodShTIbky6ZF02e8STtVVfD+fu4aXXShEELpozH+bCpJLYMPZbsABN2wDH7fJpqIoXxJpzbf0NqQ== + dependencies: + es5-ext "~0.10.46" + next-tick "1" tmp@^0.0.33: version "0.0.33" @@ -9270,10 +11762,15 @@ tmp@^0.2.1, tmp@~0.2.1: dependencies: rimraf "^3.0.0" +tmpl@1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" + integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== + to-fast-properties@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" - integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= + integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== to-regex-range@^5.0.1: version "5.0.1" @@ -9290,7 +11787,7 @@ toidentifier@1.0.1: tr46@~0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" - integrity sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o= + integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== treeverse@^2.0.0: version "2.0.0" @@ -9302,47 +11799,55 @@ trim-newlines@^3.0.0: resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-3.0.1.tgz#260a5d962d8b752425b32f3a7db0dcacd176c144" integrity sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw== -triple-beam@1.3.0, triple-beam@^1.3.0: +triple-beam@1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/triple-beam/-/triple-beam-1.3.0.tgz#a595214c7298db8339eeeee083e4d10bd8cb8dd9" integrity sha512-XrHUvV5HpdLmIj4uVMxHggLbFSZYIn7HEWsqePZcI50pco+MPqJ50wMGY794X7AOOhxOBAjbkqfAbEe/QMp2Lw== -ts-api-utils@^1.0.1, ts-api-utils@~1.0.1: +triple-beam@^1.3.0: + version "1.4.1" + resolved "https://registry.yarnpkg.com/triple-beam/-/triple-beam-1.4.1.tgz#6fde70271dc6e5d73ca0c3b24e2d92afb7441984" + integrity sha512-aZbgViZrg1QNcG+LULa7nhZpJTZSLm/mXnHXnbAbjmN5aSa0y7V+wvv6+4WaBtpISJzThKy+PIPxc1Nq1EJ9mg== + +ts-api-utils@^1.0.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/ts-api-utils/-/ts-api-utils-1.2.1.tgz#f716c7e027494629485b21c0df6180f4d08f5e8b" + integrity sha512-RIYA36cJn2WiH9Hy77hdF9r7oEwxAtB/TS9/S4Qd90Ap4z5FSiin5zEiTL44OII1Y3IIlEvxwxFUVgrHSZ/UpA== + +ts-api-utils@~1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/ts-api-utils/-/ts-api-utils-1.0.3.tgz#f12c1c781d04427313dbac808f453f050e54a331" integrity sha512-wNMeqtMz5NtwpT/UZGY5alT+VoKdSsOOP/kqHFcUW1P/VRhH2wJ48+DN2WwUliNbQ976ETwDL0Ifd2VVvgonvg== -ts-poet@^6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/ts-poet/-/ts-poet-6.1.0.tgz#c5c3d679dfce1fe39acb5f5415275c5d6a598cb7" - integrity sha512-PFwbNJjGrb44wzHUGQicG2/nhjR+3+k7zYLDTa8D61NVUitl7K/JgIc9/P+8oMNenntKzLc8tjLDOkPrxIhm6A== +ts-poet@^6.7.0: + version "6.7.0" + resolved 
"https://registry.yarnpkg.com/ts-poet/-/ts-poet-6.7.0.tgz#6b2ff3b7b0c70ea650d6d570dfe6899f73fb3c38" + integrity sha512-A0wvFtpkTCWPw7ftTIwbEH+L+7ul4CU0x3jXKQ+kCnmEQIAOwhpUaBmcAYKxZCxHae9/MUl4LbyTqw25BpzW5Q== dependencies: - dprint-node "^1.0.7" + dprint-node "^1.0.8" -ts-proto-descriptors@1.7.1: - version "1.7.1" - resolved "https://registry.yarnpkg.com/ts-proto-descriptors/-/ts-proto-descriptors-1.7.1.tgz#685d00305b06adfa929fd5a016a419382cd64c50" - integrity sha512-oIKUh3K4Xts4v29USGLfUG+2mEk32MsqpgZAOUyUlkrcIdv34yE+k2oZ2Nzngm6cV/JgFdOxRCqeyvmWHuYAyw== +ts-proto-descriptors@1.15.0: + version "1.15.0" + resolved "https://registry.yarnpkg.com/ts-proto-descriptors/-/ts-proto-descriptors-1.15.0.tgz#e859e3a2887da2d954c552524719b80bdb6ee355" + integrity sha512-TYyJ7+H+7Jsqawdv+mfsEpZPTIj9siDHS6EMCzG/z3b/PZiphsX+mWtqFfFVe5/N0Th6V3elK9lQqjnrgTOfrg== dependencies: - long "^4.0.0" - protobufjs "^6.8.8" + long "^5.2.3" + protobufjs "^7.2.4" ts-proto@^1.131.0: - version "1.131.0" - resolved "https://registry.yarnpkg.com/ts-proto/-/ts-proto-1.131.0.tgz#bc5906c9d35755a086d7e47e8357bc7b858f9fc6" - integrity sha512-oj/kWrDz3JkkPQwumQDKfwHroH9iNz2alvrbCYzNYJlb/2MAUzFNfh9T+SZUPDfoXXK6oJoW1hGCHznj67yHNg== + version "1.167.5" + resolved "https://registry.yarnpkg.com/ts-proto/-/ts-proto-1.167.5.tgz#98f1a26c211b2c167ce05b4a1a2b9c998ee23d80" + integrity sha512-46ci2eWiLk+rA9rwYya98eUivYP3VRi+978yzt15I34BvPlQlfplMxW2bWJa3yJgdGtNWdT1WLkGn53uhSpjCw== dependencies: - "@types/object-hash" "^1.3.0" - dataloader "^1.4.0" - object-hash "^1.3.1" - protobufjs "^6.11.3" - ts-poet "^6.1.0" - ts-proto-descriptors "1.7.1" + case-anything "^2.1.13" + protobufjs "^7.2.4" + ts-poet "^6.7.0" + ts-proto-descriptors "1.15.0" tsconfig-paths@^3.14.1: - version "3.14.2" - resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.14.2.tgz#6e32f1f79412decd261f92d633a9dc1cfa99f088" - integrity sha512-o/9iXgCYc5L/JxCHPe3Hvh8Q/2xm5Z+p18PESBU6Ff33695QnCHBEjcytY2q19ua7Mbl/DavtBOLq+oG0RCL+g== + version "3.15.0" + resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz#5299ec605e55b1abb23ec939ef15edaf483070d4" + integrity sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg== dependencies: "@types/json5" "^0.0.29" json5 "^1.0.2" @@ -9371,15 +11876,15 @@ tsd@^0.30.4: path-exists "^4.0.0" read-pkg-up "^7.0.0" -"tslib@1 || 2", tslib@^2.1.0, tslib@^2.3.0, tslib@^2.4.0, tslib@^2.5.0, tslib@^2.6.0: +"tslib@1 || 2", tslib@^2.1.0, tslib@^2.3.0, tslib@^2.4.0, tslib@^2.6.2: version "2.6.2" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q== -tslib@^1.8.1: - version "1.13.0" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.13.0.tgz#c881e13cc7015894ed914862d276436fa9a47043" - integrity sha512-i/6DQjL8Xf3be4K/E6Wgpekn5Qasl1usyw++dAA35Ue5orEn65VIxOA+YvNNl9HV3qv70T7CNwjODHZrLwvd1Q== +tslib@^1.8.1, tslib@^1.9.0: + version "1.14.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" + integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== tsutils@3, tsutils@~3.21.0: version "3.21.0" @@ -9402,7 +11907,7 @@ tsx@3.12.8: tunnel-agent@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" - integrity 
sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0= + integrity sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w== dependencies: safe-buffer "^5.0.1" @@ -9475,6 +11980,46 @@ type-is@~1.6.18: media-typer "0.3.0" mime-types "~2.1.24" +type@^1.0.1: + version "1.2.0" + resolved "https://registry.yarnpkg.com/type/-/type-1.2.0.tgz#848dd7698dafa3e54a6c479e759c4bc3f18847a0" + integrity sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg== + +type@^2.7.2: + version "2.7.2" + resolved "https://registry.yarnpkg.com/type/-/type-2.7.2.tgz#2376a15a3a28b1efa0f5350dcf72d24df6ef98d0" + integrity sha512-dzlvlNlt6AXU7EBSfpAscydQ7gXB+pPGsPnfJnZpiNJBDj7IaJzQlBZYGdEi4R9HmPdBv2XmWJ6YUtoTa7lmCw== + +typed-array-buffer@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/typed-array-buffer/-/typed-array-buffer-1.0.1.tgz#0608ffe6bca71bf15a45bff0ca2604107a1325f5" + integrity sha512-RSqu1UEuSlrBhHTWC8O9FnPjOduNs4M7rJ4pRKoEjtx1zUNOPN2sSXHLDX+Y2WPbHIxbvg4JFo2DNAEfPIKWoQ== + dependencies: + call-bind "^1.0.6" + es-errors "^1.3.0" + is-typed-array "^1.1.13" + +typed-array-byte-length@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/typed-array-byte-length/-/typed-array-byte-length-1.0.0.tgz#d787a24a995711611fb2b87a4052799517b230d0" + integrity sha512-Or/+kvLxNpeQ9DtSydonMxCx+9ZXOswtwJn17SNLvhptaXYDJvkFFP5zbfU/uLmvnBJlI4yrnXRxpdWH/M5tNA== + dependencies: + call-bind "^1.0.2" + for-each "^0.3.3" + has-proto "^1.0.1" + is-typed-array "^1.1.10" + +typed-array-byte-offset@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/typed-array-byte-offset/-/typed-array-byte-offset-1.0.0.tgz#cbbe89b51fdef9cd6aaf07ad4707340abbc4ea0b" + integrity sha512-RD97prjEt9EL8YgAgpOkf3O4IF9lhJFr9g0htQkm0rchFp/Vx7LW5Q8fSXXub7BXAODyUQohRMyOc3faCPd0hg== + dependencies: + available-typed-arrays "^1.0.5" + call-bind "^1.0.2" + for-each "^0.3.3" + has-proto "^1.0.1" + is-typed-array "^1.1.10" + typed-array-length@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/typed-array-length/-/typed-array-length-1.0.4.tgz#89d83785e5c4098bec72e08b319651f0eac9c1bb" @@ -9494,12 +12039,12 @@ typedarray-to-buffer@^3.1.5: typedarray@^0.0.6: version "0.0.6" resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" - integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= + integrity sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA== typedoc@^0.25.7: - version "0.25.7" - resolved "https://registry.yarnpkg.com/typedoc/-/typedoc-0.25.7.tgz#11e3f527ca80ca3c029cb8e15f362e6d9f715e25" - integrity sha512-m6A6JjQRg39p2ZVRIN3NKXgrN8vzlHhOS+r9ymUYtcUP/TIQPvWSq7YgE5ZjASfv5Vd5BW5xrir6Gm2XNNcOow== + version "0.25.8" + resolved "https://registry.yarnpkg.com/typedoc/-/typedoc-0.25.8.tgz#7d0e1bf12d23bf1c459fd4893c82cb855911ff12" + integrity sha512-mh8oLW66nwmeB9uTa0Bdcjfis+48bAjSH3uqdzSuSawfduROQLlXw//WSNZLYDdhmMVB7YcYZicq6e8T0d271A== dependencies: lunr "^2.3.9" marked "^4.3.0" @@ -9511,18 +12056,15 @@ typedoc@^0.25.7: resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.9.5.tgz#095979f9bcc0d09da324d58d03ce8f8374cbe65a" integrity sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g== -typescript@^5.3.3, typescript@~5.3.3: +typescript@^5.0.4, typescript@^5.3.3, typescript@~5.3.3: version "5.3.3" resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.3.3.tgz#b3ce6ba258e72e6305ba66f5c9b452aaee3ffe37" integrity 
sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw== uglify-js@^3.1.4: - version "3.8.0" - resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.8.0.tgz#f3541ae97b2f048d7e7e3aa4f39fd8a1f5d7a805" - integrity sha512-ugNSTT8ierCsDHso2jkBHXYrU8Y5/fY2ZUprfrJUiD7YpuFvV4jODLFmb3h4btQjqr5Nh4TX4XtgDfCU1WdioQ== - dependencies: - commander "~2.20.3" - source-map "~0.6.1" + version "3.17.4" + resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.17.4.tgz#61678cf5fa3f5b7eb789bb345df29afb8257c22c" + integrity sha512-T9q82TJI9e/C1TAxYvfb16xO120tMVFZrGA3f9/P4424DNu6ypK103y0GPFVa17yotwSyZW5iYXgjYHkGrJW/g== unbox-primitive@^1.0.2: version "1.0.2" @@ -9539,6 +12081,29 @@ undici-types@~5.26.4: resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617" integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA== +unicode-canonical-property-names-ecmascript@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz#301acdc525631670d39f6146e0e77ff6bbdebddc" + integrity sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ== + +unicode-match-property-ecmascript@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz#54fd16e0ecb167cf04cf1f756bdcc92eba7976c3" + integrity sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q== + dependencies: + unicode-canonical-property-names-ecmascript "^2.0.0" + unicode-property-aliases-ecmascript "^2.0.0" + +unicode-match-property-value-ecmascript@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.1.0.tgz#cb5fffdcd16a05124f5a4b0bf7c3770208acbbe0" + integrity sha512-qxkjQt6qjg/mYscYMC0XKRn3Rh0wFPlfxB0xkt9CfyTvpX1Ra0+rAmdX2QyAobptSEvuy4RtpPRui6XkV+8wjA== + +unicode-property-aliases-ecmascript@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz#43d41e3be698bd493ef911077c9b131f827e8ccd" + integrity sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w== + unique-filename@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/unique-filename/-/unique-filename-2.0.1.tgz#e785f8675a9a7589e0ac77e0b5c34d2eaeac6da2" @@ -9575,54 +12140,56 @@ unist-util-stringify-position@^3.0.0: "@types/unist" "^2.0.0" universal-user-agent@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/universal-user-agent/-/universal-user-agent-6.0.0.tgz#3381f8503b251c0d9cd21bc1de939ec9df5480ee" - integrity sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w== + version "6.0.1" + resolved "https://registry.yarnpkg.com/universal-user-agent/-/universal-user-agent-6.0.1.tgz#15f20f55da3c930c57bddbf1734c6654d5fd35aa" + integrity sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ== universalify@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" - integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== + 
version "2.0.1" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.1.tgz#168efc2180964e6386d061e094df61afe239b18d" + integrity sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw== unpipe@1.0.0, unpipe@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" - integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw= - -untildify@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/untildify/-/untildify-4.0.0.tgz#2bc947b953652487e4600949fb091e3ae8cd919b" - integrity sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw== + integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== upath@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/upath/-/upath-2.0.1.tgz#50c73dea68d6f6b990f51d279ce6081665d61a8b" integrity sha512-1uEe95xksV1O0CYKXo8vQvN1JEbtJp7lb7C5U9HMsIp6IVwntkH/oNUzyVNQSd4S1sYk2FpSSW44FqMc8qee5w== -update-browserslist-db@^1.0.11: - version "1.0.11" - resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.11.tgz#9a2a641ad2907ae7b3616506f4b977851db5b940" - integrity sha512-dCwEFf0/oT85M1fHBg4F0jtLwJrutGoHSQXCh7u4o2t1drG+c0a9Flnqww6XUKSfQMPpJBRjU8d4RXB09qtvaA== +update-browserslist-db@^1.0.11, update-browserslist-db@^1.0.13: + version "1.0.13" + resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.13.tgz#3c5e4f5c083661bd38ef64b6328c26ed6c8248c4" + integrity sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg== dependencies: escalade "^3.1.1" picocolors "^1.0.0" uri-js@^4.2.2: - version "4.2.2" - resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.2.2.tgz#94c540e1ff772956e2299507c010aea6c8838eb0" - integrity sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ== + version "4.4.1" + resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== dependencies: punycode "^2.1.0" util-deprecate@^1.0.1, util-deprecate@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" - integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= + integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== + +util@^0.10.3: + version "0.10.4" + resolved "https://registry.yarnpkg.com/util/-/util-0.10.4.tgz#3aa0125bfe668a4672de58857d3ace27ecb76901" + integrity sha512-0Pm9hTQ3se5ll1XihRic3FDIku70C+iHUdT/W926rSgHV5QgXsYbKZN8MSC3tJtSkhuROzvsQjAaFENRXr+19A== + dependencies: + inherits "2.0.3" utils-merge@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" - integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM= + integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== uuid@^8.3.2: version "8.3.2" @@ -9645,13 +12212,13 @@ v8-compile-cache@2.3.0: integrity sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA== v8-to-istanbul@^9.0.0: - version "9.0.1" - resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-9.0.1.tgz#b6f994b0b5d4ef255e17a0d17dc444a9f5132fa4" - integrity 
sha512-74Y4LqY74kLE6IFyIjPtkSTWzUZmj8tdHT9Ii/26dvQ6K9Dl2NbEfj0XgU2sHCtKgt5VupqhlO/5aWuqS+IY1w== + version "9.2.0" + resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-9.2.0.tgz#2ed7644a245cddd83d4e087b9b33b3e62dfd10ad" + integrity sha512-/EH/sDgxU2eGxajKdwLCDmQ4FWq+kpi3uCmBGpw1xJtnAxEjlD8j8PEiGWpCIMIs3ciNAgH0d3TTJiUkYzyZjA== dependencies: "@jridgewell/trace-mapping" "^0.3.12" "@types/istanbul-lib-coverage" "^2.0.1" - convert-source-map "^1.6.0" + convert-source-map "^2.0.0" validate-npm-package-license@^3.0.1, validate-npm-package-license@^3.0.4: version "3.0.4" @@ -9664,7 +12231,7 @@ validate-npm-package-license@^3.0.1, validate-npm-package-license@^3.0.4: validate-npm-package-name@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/validate-npm-package-name/-/validate-npm-package-name-3.0.0.tgz#5fa912d81eb7d0c74afc140de7317f0ca7df437e" - integrity sha1-X6kS2B630MdK/BQN5zF/DKffQ34= + integrity sha512-M6w37eVCMMouJ9V/sdPGnC5H4uDr73/+xdq0FBLO3TFFX1+7wiUY6Es328NN+y43tmY+doUdN9g9J21vqB7iLw== dependencies: builtins "^1.0.3" @@ -9678,7 +12245,7 @@ validate-npm-package-name@^4.0.0: vary@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" - integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw= + integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== vscode-oniguruma@^1.7.0: version "1.7.0" @@ -9695,17 +12262,36 @@ walk-up-path@^1.0.0: resolved "https://registry.yarnpkg.com/walk-up-path/-/walk-up-path-1.0.0.tgz#d4745e893dd5fd0dbb58dd0a4c6a33d9c9fec53e" integrity sha512-hwj/qMDUEjCU5h0xr90KGCf0tg0/LgJbmOWgrWKYlcJZM7XvquvUJZ0G/HMGr7F7OQMOUuPHWP9JpriinkAlkg== +walker@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f" + integrity sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ== + dependencies: + makeerror "1.0.12" + +wasm-ast-types@^0.25.0: + version "0.25.0" + resolved "https://registry.yarnpkg.com/wasm-ast-types/-/wasm-ast-types-0.25.0.tgz#9f4056803c99fae6899c1f226e46cf730d9fe2db" + integrity sha512-ZTjXuBqRf3ntxXmskO1TyTTr8UbmAZGr72JGGctizzpdokMtuLsk5q8jHaFUv/qgQni1KuBxZnHIyaj1lnZegQ== + dependencies: + "@babel/runtime" "^7.18.9" + "@babel/types" "7.18.10" + "@jest/transform" "28.1.3" + ast-stringify "0.1.0" + case "1.6.3" + deepmerge "4.2.2" + wcwidth@^1.0.0, wcwidth@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/wcwidth/-/wcwidth-1.0.1.tgz#f0b0dcf915bc5ff1528afadb2c0e17b532da2fe8" - integrity sha1-8LDc+RW8X/FSivrbLA4XtTLaL+g= + integrity sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg== dependencies: defaults "^1.0.3" webidl-conversions@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" - integrity sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE= + integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== well-known-symbols@^2.0.0: version "2.0.0" @@ -9715,7 +12301,7 @@ well-known-symbols@^2.0.0: whatwg-url@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" - integrity sha1-lmRU6HZUYuN2RNNib2dCzotwll0= + integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw== dependencies: tr46 "~0.0.3" 
webidl-conversions "^3.0.0" @@ -9731,17 +12317,16 @@ which-boxed-primitive@^1.0.2: is-string "^1.0.5" is-symbol "^1.0.3" -which-typed-array@^1.1.9: - version "1.1.9" - resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.9.tgz#307cf898025848cf995e795e8423c7f337efbde6" - integrity sha512-w9c4xkx6mPidwp7180ckYWfMmvxpjlZuIudNtDf4N/tTAUB8VJbX25qZoAsrtGuYNnGw3pa0AXgbGKRB8/EceA== +which-typed-array@^1.1.14, which-typed-array@^1.1.9: + version "1.1.14" + resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.14.tgz#1f78a111aee1e131ca66164d8bdc3ab062c95a06" + integrity sha512-VnXFiIW8yNn9kIHN88xvZ4yOWchftKDsRJ8fEPacX/wl1lOvBrhsJ/OeJCXq7B0AaijRuqgzSKalJoPk+D8MPg== dependencies: - available-typed-arrays "^1.0.5" - call-bind "^1.0.2" + available-typed-arrays "^1.0.6" + call-bind "^1.0.5" for-each "^0.3.3" gopd "^1.0.1" - has-tostringtag "^1.0.0" - is-typed-array "^1.1.10" + has-tostringtag "^1.0.1" which@^1.2.9: version "1.3.1" @@ -9765,9 +12350,9 @@ wide-align@^1.1.5: string-width "^1.0.2 || 2 || 3 || 4" winston-transport@^4.4.0: - version "4.5.0" - resolved "https://registry.yarnpkg.com/winston-transport/-/winston-transport-4.5.0.tgz#6e7b0dd04d393171ed5e4e4905db265f7ab384fa" - integrity sha512-YpZzcUzBedhlTAfJg6vJDlyEai/IFMIVcaEZZyl3UXIl4gmqRpU7AE89AHLkbzLUsv0NVmw7ts+iztqKxxPW1Q== + version "4.7.0" + resolved "https://registry.yarnpkg.com/winston-transport/-/winston-transport-4.7.0.tgz#e302e6889e6ccb7f383b926df6936a5b781bd1f0" + integrity sha512-ajBj65K5I7denzer2IYW6+2bNIVqLGDHqDw3Ow8Ohh+vdW+rv4MZ6eiDvHoKhfJFZ2auyN8byXieDDJ96ViONg== dependencies: logform "^2.3.2" readable-stream "^3.6.0" @@ -9791,7 +12376,7 @@ winston@3.3.3: wordwrap@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" - integrity sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus= + integrity sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q== "wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0: version "7.0.0" @@ -9885,9 +12470,9 @@ write-pkg@^4.0.0: write-json-file "^3.2.0" ws@^7, ws@^7.2.0: - version "7.5.7" - resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.7.tgz#9e0ac77ee50af70d58326ecff7e85eb3fa375e67" - integrity sha512-KMvVuFzpKBuiIXW3E4u3mySRO2/mCHSyZDJQM5NQ9Q9KHWHWh0NHgfbRMLLrceUK5qAL4ytALJbpRMjixFZh8A== + version "7.5.9" + resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591" + integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q== xstream@^11.14.0: version "11.14.0" @@ -9907,6 +12492,11 @@ y18n@^5.0.5: resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== +yallist@^3.0.2: + version "3.1.1" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" + integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== + yallist@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" From 317defa5c65ee209261d260adc8c72c78a180b2c Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Tue, 27 Feb 2024 16:29:00 -0800 Subject: [PATCH 08/47] build: ts-nocheck for 'util' package something recently started reporting TS errors in there. 
just ignore --- patches/util+0.10.4.patch | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 patches/util+0.10.4.patch diff --git a/patches/util+0.10.4.patch b/patches/util+0.10.4.patch new file mode 100644 index 00000000000..740203c1379 --- /dev/null +++ b/patches/util+0.10.4.patch @@ -0,0 +1,9 @@ +diff --git a/node_modules/util/util.js b/node_modules/util/util.js +index e0ea321..876b412 100644 +--- a/node_modules/util/util.js ++++ b/node_modules/util/util.js +@@ -1,3 +1,4 @@ ++// @ts-nocheck + // Copyright Joyent, Inc. and other Node contributors. + // + // Permission is hereby granted, free of charge, to any person obtaining a From 4cfc9eb10dc43a7b06788d4311dfb6057729f1fe Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Thu, 15 Feb 2024 12:03:18 -0800 Subject: [PATCH 09/47] test: Ava with TypeScript --- package.json | 2 +- packages/new-cosmic-proto/.gitignore | 3 +- packages/new-cosmic-proto/package.json | 10 +- packages/new-cosmic-proto/src/index.ts | 2 +- packages/new-cosmic-proto/test/test-dummy.js | 5 - packages/new-cosmic-proto/test/test-dummy.ts | 20 ++++ yarn.lock | 116 ++++++++++++++++++- 7 files changed, 145 insertions(+), 13 deletions(-) delete mode 100644 packages/new-cosmic-proto/test/test-dummy.js create mode 100644 packages/new-cosmic-proto/test/test-dummy.ts diff --git a/package.json b/package.json index 38e922a3286..6c340b8dcf9 100644 --- a/package.json +++ b/package.json @@ -84,7 +84,7 @@ }, "ava": { "files": [ - "packages/*/test/**/test-*.js" + "packages/*/test/**/test-*.ts" ], "timeout": "30m" }, diff --git a/packages/new-cosmic-proto/.gitignore b/packages/new-cosmic-proto/.gitignore index 7bba65cc80c..dbc43e10a41 100644 --- a/packages/new-cosmic-proto/.gitignore +++ b/packages/new-cosmic-proto/.gitignore @@ -7,6 +7,7 @@ npm-debug.log* pids *.pid *.seed +.tsimp # out dist @@ -47,4 +48,4 @@ package-lock.json yarn.lock # others -.DS_Store \ No newline at end of file +.DS_Store diff --git a/packages/new-cosmic-proto/package.json b/packages/new-cosmic-proto/package.json index e45a78bfa36..710c34b194d 100644 --- a/packages/new-cosmic-proto/package.json +++ b/packages/new-cosmic-proto/package.json @@ -69,6 +69,7 @@ "prettier": "^2.8.7", "publish-scripts": "0.1.0", "rimraf": "^5.0.0", + "tsimp": "^2.0.11", "typescript": "^5.0.4" }, "dependencies": { @@ -78,8 +79,15 @@ "@cosmjs/tendermint-rpc": "^0.30.1" }, "ava": { + "extensions": { + "ts": "module" + }, "files": [ - "test/**/test-*.js" + "test/**/test-*.ts" + ], + "nodeArguments": [ + "--import=tsimp", + "--no-warnings" ] } } diff --git a/packages/new-cosmic-proto/src/index.ts b/packages/new-cosmic-proto/src/index.ts index 646541b5972..8d1f9d19631 100644 --- a/packages/new-cosmic-proto/src/index.ts +++ b/packages/new-cosmic-proto/src/index.ts @@ -1 +1 @@ -export * from './codegen'; +export * from './codegen/index.js'; diff --git a/packages/new-cosmic-proto/test/test-dummy.js b/packages/new-cosmic-proto/test/test-dummy.js deleted file mode 100644 index 7a739f9a58b..00000000000 --- a/packages/new-cosmic-proto/test/test-dummy.js +++ /dev/null @@ -1,5 +0,0 @@ -import test from 'ava'; - -test('it works', async t => { - t.pass(); -}); diff --git a/packages/new-cosmic-proto/test/test-dummy.ts b/packages/new-cosmic-proto/test/test-dummy.ts new file mode 100644 index 00000000000..a52996e91bc --- /dev/null +++ b/packages/new-cosmic-proto/test/test-dummy.ts @@ -0,0 +1,20 @@ +import test from 'ava'; +import { coins } from '@cosmjs/amino'; + +// FIXME does not work after SES init (add this back to package.json: +// 
"require": [ +// "@endo/init/debug.js" +// ]) +// import * as index from '../src/index.js'; + +import { agoric } from '../src/index.js'; + +console.log(agoric); + +test('it works', async t => { + const fee = { + amount: coins(0, 'uosmo'), + gas: '250000', + }; + t.pass(); +}); diff --git a/yarn.lock b/yarn.lock index f9857bd4970..40ea5f3264d 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2048,6 +2048,18 @@ resolved "https://registry.yarnpkg.com/@iarna/toml/-/toml-2.2.3.tgz#f060bf6eaafae4d56a7dac618980838b0696e2ab" integrity sha512-FmuxfCuolpLl0AnQ2NHSzoUKWEJDFl63qXjzdoWBVyFCXzMGm1spBzk7LeHNoVCiWCF7mRVms9e6jEV9+MoPbg== +"@isaacs/cached@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@isaacs/cached/-/cached-1.0.1.tgz#b6ad07c346f843fb3f117a0f3401ea8b7f7d4eea" + integrity sha512-7kGcJ9Hc1f4qpTApWz3swxbF9Qv1NF/GxuPtXeTptbsgvJIoufSd0h854Nq/2bw80F5C1onsFgEI05l+q0e4vw== + dependencies: + "@isaacs/catcher" "^1.0.0" + +"@isaacs/catcher@^1.0.0", "@isaacs/catcher@^1.0.4": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@isaacs/catcher/-/catcher-1.0.4.tgz#fa5aa6fa43d255b9fe32e1e1f40db6623de2c80d" + integrity sha512-g2klMwbnguClWNnCeQ1zYaDJsvPbIbnjdJPDE0z09MqoejJDZSLK5vIKiClq2Bkg5ubuI8vaN6wfIUi5GYzMVA== + "@isaacs/cliui@^8.0.2": version "8.0.2" resolved "https://registry.yarnpkg.com/@isaacs/cliui/-/cliui-8.0.2.tgz#b37667b7bc181c168782259bab42474fbf52b550" @@ -3404,6 +3416,68 @@ resolved "https://registry.yarnpkg.com/@protobufjs/utf8/-/utf8-1.1.0.tgz#a777360b5b39a1a2e5106f8e858f2fd2d060c570" integrity sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw== +"@protobufs/amino@^0.0.11": + version "0.0.11" + resolved "https://registry.yarnpkg.com/@protobufs/amino/-/amino-0.0.11.tgz#5eb6b8193da8ea818484818dff2800a981cb7a61" + integrity sha512-JRIkW6/YGIUfbdDuASt3wsuxzC0Xj3U2sV0Arqa9iNwCvv4HtOpbqdWVVVgvQBnR0/ZkwQeXnt+GH7yT/DvsYQ== + dependencies: + "@protobufs/google" "^0.0.10" + +"@protobufs/confio@^0.0.6": + version "0.0.6" + resolved "https://registry.yarnpkg.com/@protobufs/confio/-/confio-0.0.6.tgz#a6ddf44eca2cbe535384228312ae7ef5dff29644" + integrity sha512-abZ0ntTJBuB8q2aMBvOerAFk8CSzafB09YdttKFEqwxokZsLFZ3+o7YaH3RIk863oeM//8sonwTaxRV8r4rmSA== + +"@protobufs/cosmos@^0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@protobufs/cosmos/-/cosmos-0.1.0.tgz#492251de16be3e0a89820f48637cd3f42114f24c" + integrity sha512-L3NZ+z0kI6GMTiD2HASNe3WbopPhQlaQaKZNRue+8LiGEv/vbbxD1lox8cwOqes3AN5dHiT0i3+gvzIbKBb7gw== + dependencies: + "@protobufs/amino" "^0.0.11" + "@protobufs/cosmos_proto" "^0.0.10" + "@protobufs/gogoproto" "^0.0.10" + "@protobufs/google" "^0.0.10" + "@protobufs/tendermint" "^0.0.10" + +"@protobufs/cosmos_proto@^0.0.10": + version "0.0.10" + resolved "https://registry.yarnpkg.com/@protobufs/cosmos_proto/-/cosmos_proto-0.0.10.tgz#622726ee227f220f608df180f938e5d8ebb1534a" + integrity sha512-4nMopXxN23udy1HEe+vS49zD9dxrA7i0E3n15QUz1x0tbrowYLHzJKeyCUNlsh5PKpEIXGxHXpPZWXs7vVCwUw== + dependencies: + "@protobufs/google" "^0.0.10" + +"@protobufs/gogoproto@^0.0.10": + version "0.0.10" + resolved "https://registry.yarnpkg.com/@protobufs/gogoproto/-/gogoproto-0.0.10.tgz#0181e17142c800b60c7ca5f92c76a614d86c5c54" + integrity sha512-u3eK1aSO3KOuX4RVFpqKPTaT/WLV50GFLuIC3slVGfD7Z1CfZ5ivHbFYUib96gihu1Mq2OZpNVj3dNws9YsVoQ== + dependencies: + "@protobufs/google" "^0.0.10" + +"@protobufs/google@^0.0.10": + version "0.0.10" + resolved "https://registry.yarnpkg.com/@protobufs/google/-/google-0.0.10.tgz#820f741b0c53f688550c74c7ddb25a5ee131a6bf" 
+ integrity sha512-3yo+liabFM1519smwwfzh1C535CntXVsS7zT98xmo21tZUX7vxeFpQDMx38EzMGYSy/Reo8wEMWJUHqZzYsCUw== + +"@protobufs/ibc@^0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@protobufs/ibc/-/ibc-0.1.0.tgz#36aeadc9f09d185d683f66a650dad9dc40437875" + integrity sha512-GmGkX81yyd55Tm34SCOmcOiB0QRwFBHGmZpDRAsks33TBx4efAtT9rKAdtn/oPujx9sha1TqU2s3trnMPVvKyg== + dependencies: + "@protobufs/amino" "^0.0.11" + "@protobufs/confio" "^0.0.6" + "@protobufs/cosmos" "^0.1.0" + "@protobufs/gogoproto" "^0.0.10" + "@protobufs/google" "^0.0.10" + "@protobufs/tendermint" "^0.0.10" + +"@protobufs/tendermint@^0.0.10": + version "0.0.10" + resolved "https://registry.yarnpkg.com/@protobufs/tendermint/-/tendermint-0.0.10.tgz#816b27410afcecd8b6d403df149f3c2b9b80655e" + integrity sha512-hAAMLFhKdAovslKeWnLTp2gGn5bxSTDVcQLKs4C4cC91R/KfHOh+Klt4PqSGUv/APINAmREzsX2LDUbIQ2dCpg== + dependencies: + "@protobufs/gogoproto" "^0.0.10" + "@protobufs/google" "^0.0.10" + "@pyramation/json-schema-ref-parser@9.0.6": version "9.0.6" resolved "https://registry.yarnpkg.com/@pyramation/json-schema-ref-parser/-/json-schema-ref-parser-9.0.6.tgz#556e416ce7dcc15a3c1afd04d6a059e03ed09aeb" @@ -6837,7 +6911,7 @@ foreground-child@^2.0.0: cross-spawn "^7.0.0" signal-exit "^3.0.2" -foreground-child@^3.1.0: +foreground-child@^3.1.0, foreground-child@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/foreground-child/-/foreground-child-3.1.1.tgz#1d173e776d75d2772fed08efe4a0de1ea1b12d0d" integrity sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg== @@ -10267,7 +10341,7 @@ pify@^5.0.0: resolved "https://registry.yarnpkg.com/pify/-/pify-5.0.0.tgz#1f5eca3f5e87ebec28cc6d54a0e4aaf00acc127f" integrity sha512-eW/gHNMlxdSP6dmG6uJip6FXN0EQBwm2clYYd8Wul42Cwu/DK8HEftzsapcNdYe2MfLiIwZqsDk2RDEsTE79hA== -pirates@^4.0.4: +pirates@^4.0.4, pirates@^4.0.6: version "4.0.6" resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.6.tgz#3018ae32ecfcff6c29ba2267cbf21166ac1f36b9" integrity sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg== @@ -10886,7 +10960,7 @@ rimraf@^2.6.3, rimraf@~2.6.2: dependencies: glob "^7.1.3" -rimraf@^5.0.0, rimraf@^5.0.1: +rimraf@^5.0.0, rimraf@^5.0.1, rimraf@^5.0.5: version "5.0.5" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-5.0.5.tgz#9be65d2d6e683447d2e9013da2bf451139a61ccf" integrity sha512-CqDakW+hMe/Bz202FPEymy68P+G50RfMQK+Qo5YUqc9SPipvbGjCGKd0RSKEelbsfQuw3g5NZDSrlZZAJurH1A== @@ -11186,7 +11260,7 @@ signal-exit@^3.0.2, signal-exit@^3.0.3, signal-exit@^3.0.7: resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== -signal-exit@^4.0.1: +signal-exit@^4.0.1, signal-exit@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-4.1.0.tgz#952188c1cbd546070e2dd20d0f41c0ae0530cb04" integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw== @@ -11249,6 +11323,20 @@ smart-buffer@^4.2.0: resolved "https://registry.yarnpkg.com/smart-buffer/-/smart-buffer-4.2.0.tgz#6e1d71fa4f18c05f7d0ff216dd16a481d0e8d9ae" integrity sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg== +sock-daemon@^1.4.2: + version "1.4.2" + resolved "https://registry.yarnpkg.com/sock-daemon/-/sock-daemon-1.4.2.tgz#b9d5d1f8b156b20a7c1ceba095da8b8745fac405" + integrity 
sha512-IzbegWshWWR+UzQ7487mbdYNmfJ1jXUXQBUHooqtpylO+aW0vMVbFN2d2ug3CSPZ0wbG7ZTTGwpUuthIDFIOGg== + dependencies: + rimraf "^5.0.5" + signal-exit "^4.1.0" + socket-post-message "^1.0.3" + +socket-post-message@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/socket-post-message/-/socket-post-message-1.0.3.tgz#638dfca32064eee9a784bb5be9634b19e649ac39" + integrity sha512-UhJaB3xR2oF+HvddFOq2cBZi4zVKOHvdiBo+BaScNxsEUg3TLWSP8BkweKfe07kfH1thjn1hJR0af/w1EtBFjg== + socks-proxy-agent@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/socks-proxy-agent/-/socks-proxy-agent-7.0.0.tgz#dc069ecf34436621acb41e3efa66ca1b5fed15b6" @@ -11876,6 +11964,21 @@ tsd@^0.30.4: path-exists "^4.0.0" read-pkg-up "^7.0.0" +tsimp@^2.0.11: + version "2.0.11" + resolved "https://registry.yarnpkg.com/tsimp/-/tsimp-2.0.11.tgz#28b7efb609a070554cedb4309c1a7def662e93ab" + integrity sha512-wRhMmvar8tWHN3ZmykD8f4B4sjCn/f8DFM67LRY+stf/LPa2Kq8ATE2PIi570/DiDJA8kjjxzos3EgP0LmnFLA== + dependencies: + "@isaacs/cached" "^1.0.1" + "@isaacs/catcher" "^1.0.4" + foreground-child "^3.1.1" + mkdirp "^3.0.1" + pirates "^4.0.6" + rimraf "^5.0.5" + signal-exit "^4.1.0" + sock-daemon "^1.4.2" + walk-up-path "^3.0.1" + "tslib@1 || 2", tslib@^2.1.0, tslib@^2.3.0, tslib@^2.4.0, tslib@^2.6.2: version "2.6.2" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" @@ -12262,6 +12365,11 @@ walk-up-path@^1.0.0: resolved "https://registry.yarnpkg.com/walk-up-path/-/walk-up-path-1.0.0.tgz#d4745e893dd5fd0dbb58dd0a4c6a33d9c9fec53e" integrity sha512-hwj/qMDUEjCU5h0xr90KGCf0tg0/LgJbmOWgrWKYlcJZM7XvquvUJZ0G/HMGr7F7OQMOUuPHWP9JpriinkAlkg== +walk-up-path@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/walk-up-path/-/walk-up-path-3.0.1.tgz#c8d78d5375b4966c717eb17ada73dbd41490e886" + integrity sha512-9YlCL/ynK3CTlrSRrDxZvUauLzAswPCrsaCgilqFevUYpeEW0/3ScEjaa3kbW/T0ghhkEr7mv+fpjqn1Y1YuTA== + walker@^1.0.8: version "1.0.8" resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f" From d2467e72470c2ac335060f4b90194aa5e3b1d12b Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Thu, 15 Feb 2024 09:42:05 -0800 Subject: [PATCH 10/47] build: ESM compat --- packages/new-cosmic-proto/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/new-cosmic-proto/package.json b/packages/new-cosmic-proto/package.json index 710c34b194d..2c9eaf22cb4 100644 --- a/packages/new-cosmic-proto/package.json +++ b/packages/new-cosmic-proto/package.json @@ -50,7 +50,7 @@ "clean": "npm run clean:mjs && npm run clean:dist", "build:rename": "publish-scripts --cmd rename --srcDir mjs --outDir dist --findExt js --replaceExt mjs --no-rmDir", "build": "npm run clean && npm run build:cjs && npm run build:mjs && npm run build:rename", - "codegen": "node scripts/codegen.js", + "codegen": "node scripts/codegen.cjs", "prepare": "npm run build", "lint-fix": "yarn lint:eslint --fix", "lint": "tsc", From 733d5c75b26140a386e9caca8db4d6afcdbc8049 Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Tue, 20 Feb 2024 14:37:05 -0800 Subject: [PATCH 11/47] build: source protos without symlink --- packages/new-cosmic-proto/.gitignore | 3 +++ packages/new-cosmic-proto/package.json | 4 +++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/packages/new-cosmic-proto/.gitignore b/packages/new-cosmic-proto/.gitignore index dbc43e10a41..fe1ba73a67c 100644 --- a/packages/new-cosmic-proto/.gitignore +++ 
b/packages/new-cosmic-proto/.gitignore @@ -49,3 +49,6 @@ yarn.lock # others .DS_Store + +# Copy as needed from golang tree +proto diff --git a/packages/new-cosmic-proto/package.json b/packages/new-cosmic-proto/package.json index 2c9eaf22cb4..5c9328aa1ca 100644 --- a/packages/new-cosmic-proto/package.json +++ b/packages/new-cosmic-proto/package.json @@ -50,8 +50,9 @@ "clean": "npm run clean:mjs && npm run clean:dist", "build:rename": "publish-scripts --cmd rename --srcDir mjs --outDir dist --findExt js --replaceExt mjs --no-rmDir", "build": "npm run clean && npm run build:cjs && npm run build:mjs && npm run build:rename", - "codegen": "node scripts/codegen.cjs", + "codegen": "yarn protos-update && node scripts/codegen.cjs", "prepare": "npm run build", + "protos-update": "cp -rf ../../golang/cosmos/third_party/proto . && cp -rf ../../golang/cosmos/proto/agoric proto", "lint-fix": "yarn lint:eslint --fix", "lint": "tsc", "test": "ava", @@ -61,6 +62,7 @@ "access": "restricted" }, "devDependencies": { + "@agoric/cosmos": "^0.34.1", "@cosmology/telescope": "^1.0.1", "ava": "^5.3.1", "eslint": "8.45.0", From 23232135f3d1508d4a0ed9e0a84063dd8db19e4d Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Thu, 15 Feb 2024 12:16:12 -0800 Subject: [PATCH 12/47] chore: revise bad generation https://github.com/cosmology-tech/telescope/issues/525 --- packages/new-cosmic-proto/scripts/aminos.cjs | 29 ------------------- packages/new-cosmic-proto/scripts/codegen.cjs | 12 ++++---- 2 files changed, 7 insertions(+), 34 deletions(-) delete mode 100644 packages/new-cosmic-proto/scripts/aminos.cjs diff --git a/packages/new-cosmic-proto/scripts/aminos.cjs b/packages/new-cosmic-proto/scripts/aminos.cjs deleted file mode 100644 index fe7aee16cb4..00000000000 --- a/packages/new-cosmic-proto/scripts/aminos.cjs +++ /dev/null @@ -1,29 +0,0 @@ -module.exports.AMINO_MAP = { - // PUT YOUR AMINO names here... 
- // Staking - // '/cosmos.staking.v1beta1.MsgCreateValidator': { - // aminoType: 'cosmos-sdk/MsgCreateValidator' - // }, - // '/cosmos.staking.v1beta1.MsgEditValidator': { - // aminoType: 'cosmos-sdk/MsgEditValidator' - // }, - // '/cosmos.staking.v1beta1.MsgDelegate': { - // aminoType: 'cosmos-sdk/MsgDelegate' - // }, - // '/cosmos.staking.v1beta1.MsgUndelegate': { - // aminoType: 'cosmos-sdk/MsgUndelegate' - // }, - // '/cosmos.staking.v1beta1.MsgBeginRedelegate': { - // aminoType: 'cosmos-sdk/MsgBeginRedelegate' - // }, - // '/cosmos.staking.v1beta1.MsgCancelUnbondingDelegation': { - // aminoType: 'cosmos-sdk/MsgCancelUnbondingDelegation' - // }, - // '/cosmos.staking.v1beta1.MsgUpdateParams': { - // aminoType: 'cosmos-sdk/x/staking/MsgUpdateParams' - // }, - // // IBC - // '/ibc.applications.transfer.v1.MsgTransfer': { - // aminoType: 'cosmos-sdk/MsgTransfer' - // } -}; diff --git a/packages/new-cosmic-proto/scripts/codegen.cjs b/packages/new-cosmic-proto/scripts/codegen.cjs index 73a570ce351..fcc9a5d857d 100644 --- a/packages/new-cosmic-proto/scripts/codegen.cjs +++ b/packages/new-cosmic-proto/scripts/codegen.cjs @@ -1,7 +1,7 @@ const { join } = require('path'); +const process = require('process'); const telescope = require('@cosmology/telescope').default; const rimraf = require('rimraf').rimrafSync; -const { AMINO_MAP } = require('./aminos.cjs'); const protoDirs = [join(__dirname, '/../proto')]; const outPath = join(__dirname, '../src/codegen'); @@ -12,6 +12,8 @@ telescope({ outPath, options: { tsDisable: { + // FIXME types aren't resolving correctly + disableAll: true, files: [ 'cosmos/authz/v1beta1/tx.amino.ts', 'cosmos/staking/v1beta1/tx.amino.ts', @@ -20,6 +22,7 @@ telescope({ }, prototypes: { includePackageVar: false, + // @ts-expect-error this is actually part of TelescopeOpts so huH? removeUnusedImports: true, experimentalGlobalProtoNamespace: true, interfaces: { @@ -55,8 +58,8 @@ telescope({ ], }, methods: { - fromJSON: false, - toJSON: false, + fromJSON: true, + toJSON: true, encode: true, decode: true, fromPartial: true, @@ -80,8 +83,7 @@ telescope({ }, }, aminoEncoding: { - enabled: true, - exceptions: AMINO_MAP, + enabled: false, }, lcdClients: { enabled: false, From 4250edfbd157ea8c99f04b24ff1471cd6cebee03 Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Fri, 16 Feb 2024 15:24:09 -0800 Subject: [PATCH 13/47] chore: tweak Telescope config (no change in output) --- packages/new-cosmic-proto/scripts/codegen.cjs | 33 +++++++++---------- 1 file changed, 15 insertions(+), 18 deletions(-) diff --git a/packages/new-cosmic-proto/scripts/codegen.cjs b/packages/new-cosmic-proto/scripts/codegen.cjs index fcc9a5d857d..3ce7945b9d5 100644 --- a/packages/new-cosmic-proto/scripts/codegen.cjs +++ b/packages/new-cosmic-proto/scripts/codegen.cjs @@ -20,15 +20,12 @@ telescope({ ], patterns: ['**/*amino.ts', '**/*registry.ts'], }, + interfaces: { + enabled: true, + useUnionTypes: false, + }, prototypes: { includePackageVar: false, - // @ts-expect-error this is actually part of TelescopeOpts so huH? 
- removeUnusedImports: true, - experimentalGlobalProtoNamespace: true, - interfaces: { - enabled: true, - useUnionTypes: false, - }, excluded: { packages: [ 'ibc.applications.fee.v1', // issue with parsing protos (LCD routes with nested objects in params) @@ -63,23 +60,23 @@ telescope({ encode: true, decode: true, fromPartial: true, - toAmino: true, - fromAmino: true, + toAmino: false, + fromAmino: false, fromProto: true, toProto: true, }, parser: { keepCase: false, }, - }, - typingsFormat: { - duration: 'duration', - timestamp: 'date', - useExact: false, - useDeepPartial: false, - num64: 'bigint', - customTypes: { - useCosmosSDKDec: true, + typingsFormat: { + duration: 'duration', + timestamp: 'date', + useExact: false, + useDeepPartial: false, + num64: 'bigint', + customTypes: { + useCosmosSDKDec: true, + }, }, }, aminoEncoding: { From babf2e1fbbcff7cd2fc71f3f05b2aa8d15a38fc0 Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Fri, 16 Feb 2024 12:50:21 -0800 Subject: [PATCH 14/47] chore: replace cosmic-proto with Telescope version --- .../.eslintignore | 0 packages/cosmic-proto/.gitignore | 11 +- .../.npmignore | 0 packages/cosmic-proto/README.md | 284 +++++++- .../dist/agoric/swingset/msgs.d.ts | 538 -------------- .../cosmic-proto/dist/agoric/swingset/msgs.js | 668 ------------------ .../dist/agoric/swingset/query.d.ts | 406 ----------- .../dist/agoric/swingset/query.js | 346 --------- .../dist/agoric/swingset/swingset.d.ts | 568 --------------- .../dist/agoric/swingset/swingset.js | 642 ----------------- .../dist/agoric/vstorage/query.d.ts | 515 -------------- .../dist/agoric/vstorage/query.js | 382 ---------- .../cosmos/base/query/v1beta1/pagination.d.ts | 322 --------- .../cosmos/base/query/v1beta1/pagination.js | 186 ----- .../dist/cosmos/base/v1beta1/coin.d.ts | 90 --- .../dist/cosmos/base/v1beta1/coin.js | 197 ------ .../dist/cosmos_proto/cosmos.d.ts | 99 --- .../cosmic-proto/dist/cosmos_proto/cosmos.js | 176 ----- packages/cosmic-proto/package.json | 69 +- packages/cosmic-proto/proto/agoric | 1 - packages/cosmic-proto/proto/cosmos | 1 - packages/cosmic-proto/proto/cosmos_proto | 1 - packages/cosmic-proto/proto/gogoproto | 1 - packages/cosmic-proto/proto/google | 1 - .../scripts/codegen.cjs | 3 +- .../src/index.ts | 0 packages/cosmic-proto/swingset/msgs.js | 2 - packages/cosmic-proto/swingset/query.js | 2 - packages/cosmic-proto/swingset/swingset.js | 2 - .../test/query-swingset-params.js | 43 -- packages/cosmic-proto/test/sanity-test.js | 4 - packages/cosmic-proto/test/test-agoric.ts | 23 + packages/cosmic-proto/tsconfig.json | 38 +- packages/cosmic-proto/vstorage/query.js | 2 - packages/new-cosmic-proto/.gitignore | 54 -- packages/new-cosmic-proto/LICENSE | 201 ------ packages/new-cosmic-proto/README.md | 295 -------- packages/new-cosmic-proto/package.json | 95 --- packages/new-cosmic-proto/test/test-dummy.ts | 20 - packages/new-cosmic-proto/tsconfig.json | 28 - yarn.lock | 268 ++----- 41 files changed, 460 insertions(+), 6124 deletions(-) rename packages/{new-cosmic-proto => cosmic-proto}/.eslintignore (100%) rename packages/{new-cosmic-proto => cosmic-proto}/.npmignore (100%) delete mode 100644 packages/cosmic-proto/dist/agoric/swingset/msgs.d.ts delete mode 100644 packages/cosmic-proto/dist/agoric/swingset/msgs.js delete mode 100644 packages/cosmic-proto/dist/agoric/swingset/query.d.ts delete mode 100644 packages/cosmic-proto/dist/agoric/swingset/query.js delete mode 100644 packages/cosmic-proto/dist/agoric/swingset/swingset.d.ts delete mode 100644 
packages/cosmic-proto/dist/agoric/swingset/swingset.js delete mode 100644 packages/cosmic-proto/dist/agoric/vstorage/query.d.ts delete mode 100644 packages/cosmic-proto/dist/agoric/vstorage/query.js delete mode 100644 packages/cosmic-proto/dist/cosmos/base/query/v1beta1/pagination.d.ts delete mode 100644 packages/cosmic-proto/dist/cosmos/base/query/v1beta1/pagination.js delete mode 100644 packages/cosmic-proto/dist/cosmos/base/v1beta1/coin.d.ts delete mode 100644 packages/cosmic-proto/dist/cosmos/base/v1beta1/coin.js delete mode 100644 packages/cosmic-proto/dist/cosmos_proto/cosmos.d.ts delete mode 100644 packages/cosmic-proto/dist/cosmos_proto/cosmos.js delete mode 120000 packages/cosmic-proto/proto/agoric delete mode 120000 packages/cosmic-proto/proto/cosmos delete mode 120000 packages/cosmic-proto/proto/cosmos_proto delete mode 120000 packages/cosmic-proto/proto/gogoproto delete mode 120000 packages/cosmic-proto/proto/google rename packages/{new-cosmic-proto => cosmic-proto}/scripts/codegen.cjs (97%) rename packages/{new-cosmic-proto => cosmic-proto}/src/index.ts (100%) delete mode 100644 packages/cosmic-proto/swingset/msgs.js delete mode 100644 packages/cosmic-proto/swingset/query.js delete mode 100644 packages/cosmic-proto/swingset/swingset.js delete mode 100644 packages/cosmic-proto/test/query-swingset-params.js delete mode 100644 packages/cosmic-proto/test/sanity-test.js create mode 100644 packages/cosmic-proto/test/test-agoric.ts delete mode 100644 packages/cosmic-proto/vstorage/query.js delete mode 100644 packages/new-cosmic-proto/.gitignore delete mode 100644 packages/new-cosmic-proto/LICENSE delete mode 100644 packages/new-cosmic-proto/README.md delete mode 100644 packages/new-cosmic-proto/package.json delete mode 100644 packages/new-cosmic-proto/test/test-dummy.ts delete mode 100644 packages/new-cosmic-proto/tsconfig.json diff --git a/packages/new-cosmic-proto/.eslintignore b/packages/cosmic-proto/.eslintignore similarity index 100% rename from packages/new-cosmic-proto/.eslintignore rename to packages/cosmic-proto/.eslintignore diff --git a/packages/cosmic-proto/.gitignore b/packages/cosmic-proto/.gitignore index 784280d10f7..266a097cb75 100644 --- a/packages/cosmic-proto/.gitignore +++ b/packages/cosmic-proto/.gitignore @@ -1,2 +1,9 @@ -dist/gogoproto -dist/google \ No newline at end of file +# TODO gitignore build output and generate it as needed +# We don't ignore the build output (dist, mjs) because our CI integration +# tests don't have hooks to build the package before consuming it. + +# cache +.tsimp + +# Copy as needed from golang tree +proto diff --git a/packages/new-cosmic-proto/.npmignore b/packages/cosmic-proto/.npmignore similarity index 100% rename from packages/new-cosmic-proto/.npmignore rename to packages/cosmic-proto/.npmignore diff --git a/packages/cosmic-proto/README.md b/packages/cosmic-proto/README.md index 38fbe6198ba..91a1aac5044 100644 --- a/packages/cosmic-proto/README.md +++ b/packages/cosmic-proto/README.md @@ -1,9 +1,281 @@ -# Protobuf Stubs for Cosmic Swingset +# @agoric/cosmic-proto -This package exports JavaScript and TypeScript generated from Cosmic Swingset -Protobuf IDL so that clients can communicate with the Agoric chain. +

+ Protobuf interfaces for Agoric on Cosmos
-Package exports are defined in `exports` and also made available in the -`swingset/` file path for backwards compatibility. -See also [MAINTAINERS.md](MAINTAINERS.md). +## install + +```sh +npm install @agoric/cosmic-proto +``` +## Table of contents + +- [Usage](#usage) + - [RPC Clients](#rpc-clients) + - [Composing Messages](#composing-messages) + - Cosmos, CosmWasm, and IBC + - [CosmWasm](#cosmwasm-messages) + - [IBC](#ibc-messages) + - [Cosmos](#cosmos-messages) +- [Wallets and Signers](#connecting-with-wallets-and-signing-messages) + - [Stargate Client](#initializing-the-stargate-client) + - [Creating Signers](#creating-signers) + - [Broadcasting Messages](#broadcasting-messages) +- [Advanced Usage](#advanced-usage) +- [Developing](#developing) +- [Credits](#credits) + +## Usage +### RPC Clients + +```js +import { agoric } from '@agoric/cosmic-proto'; + +const { createRPCQueryClient } = agoric.ClientFactory; +const client = await createRPCQueryClient({ rpcEndpoint: RPC_ENDPOINT }); + +// now you can query the cosmos modules +const balance = await client.cosmos.bank.v1beta1 + .allBalances({ address: 'agoric1addresshere' }); + +// you can also query the agoric modules +const swingsetParams = await client.agoric.swingset.params() +``` + +### Composing Messages + +Import the `agoric` object from `@agoric/cosmic-proto`. + +```js +import { agoric } from '@agoric/cosmic-proto'; + +const { + installBundle, +} = agoric.exchange.v1beta1.MessageComposer.withTypeUrl; +``` + +#### IBC Messages + +```js +import { ibc } from '@agoric/cosmic-proto'; + +const { + transfer +} = ibc.applications.transfer.v1.MessageComposer.withTypeUrl +``` + +#### Cosmos Messages + +```js +import { cosmos } from '@agoric/cosmic-proto'; + +const { + fundCommunityPool, + setWithdrawAddress, + withdrawDelegatorReward, + withdrawValidatorCommission +} = cosmos.distribution.v1beta1.MessageComposer.fromPartial; + +const { + multiSend, + send +} = cosmos.bank.v1beta1.MessageComposer.fromPartial; + +const { + beginRedelegate, + createValidator, + delegate, + editValidator, + undelegate +} = cosmos.staking.v1beta1.MessageComposer.fromPartial; + +const { + deposit, + submitProposal, + vote, + voteWeighted +} = cosmos.gov.v1beta1.MessageComposer.fromPartial; +``` + +## Connecting with Wallets and Signing Messages + +⚡️ For web interfaces, we recommend using [cosmos-kit](https://github.com/cosmology-tech/cosmos-kit). Continue below to see how to manually construct signers and clients. + +Here are the docs on [creating signers](https://github.com/cosmology-tech/cosmos-kit/tree/main/packages/react#signing-clients) in cosmos-kit that can be used with Keplr and other wallets. + +### Initializing the Stargate Client + +Use `getSigningAgoricClient` to get your `SigningStargateClient`, with the proto/amino messages full-loaded. 
No need to manually add amino types, just require and initialize the client: + +```js +import { getSigningAgoricClient } from '@agoric/cosmic-proto'; + +const stargateClient = await getSigningAgoricClient({ + rpcEndpoint, + signer // OfflineSigner +}); +``` +### Creating Signers + +To broadcast messages, you can create signers with a variety of options: + +* [cosmos-kit](https://github.com/cosmology-tech/cosmos-kit/tree/main/packages/react#signing-clients) (recommended) +* [keplr](https://docs.keplr.app/api/cosmjs.html) +* [cosmjs](https://gist.github.com/webmaster128/8444d42a7eceeda2544c8a59fbd7e1d9) +### Amino Signer + +Likely you'll want to use the Amino, so unless you need proto, you should use this one: + +```js +import { getOfflineSignerAmino as getOfflineSigner } from 'cosmjs-utils'; +``` +### Proto Signer + +```js +import { getOfflineSignerProto as getOfflineSigner } from 'cosmjs-utils'; +``` + +WARNING: NOT RECOMMENDED TO USE PLAIN-TEXT MNEMONICS. Please take care of your security and use best practices such as AES encryption and/or methods from 12factor applications. + +```js +import { chains } from 'chain-registry'; + +const mnemonic = + 'unfold client turtle either pilot stock floor glow toward bullet car science'; + const chain = chains.find(({ chain_name }) => chain_name === 'agoric'); + const signer = await getOfflineSigner({ + mnemonic, + chain + }); +``` +### Broadcasting Messages + +Now that you have your `stargateClient`, you can broadcast messages: + +```js +const { send } = cosmos.bank.v1beta1.MessageComposer.withTypeUrl; + +const msg = send({ + amount: [ + { + denom: 'coin', + amount: '1000' + } + ], + toAddress: address, + fromAddress: address +}); + +const fee: StdFee = { + amount: [ + { + denom: 'coin', + amount: '864' + } + ], + gas: '86364' +}; +const response = await stargateClient.signAndBroadcast(address, [msg], fee); +``` + +## Advanced Usage + + +If you want to manually construct a stargate client + +```js +import { OfflineSigner, GeneratedType, Registry } from "@cosmjs/proto-signing"; +import { AminoTypes, SigningStargateClient } from "@cosmjs/stargate"; + +import { + cosmosAminoConverters, + cosmosProtoRegistry, + cosmwasmAminoConverters, + cosmwasmProtoRegistry, + ibcProtoRegistry, + ibcAminoConverters, + agoricAminoConverters, + agoricProtoRegistry +} from '@agoric/cosmic-proto'; + +const signer: OfflineSigner = /* create your signer (see above) */ +const rpcEndpint = 'https://rpc.cosmos.directory/agoric'; // or another URL + +const protoRegistry: ReadonlyArray<[string, GeneratedType]> = [ + ...cosmosProtoRegistry, + ...cosmwasmProtoRegistry, + ...ibcProtoRegistry, + ...agoricProtoRegistry +]; + +const aminoConverters = { + ...cosmosAminoConverters, + ...cosmwasmAminoConverters, + ...ibcAminoConverters, + ...agoricAminoConverters +}; + +const registry = new Registry(protoRegistry); +const aminoTypes = new AminoTypes(aminoConverters); + +const stargateClient = await SigningStargateClient.connectWithSigner(rpcEndpoint, signer, { + registry, + aminoTypes +}); +``` + +## Developing + +When first cloning the repo: + +``` +yarn +yarn build +``` + +For very basic unit tests: +``` +yarn test +``` + +To test with a real network, +``` +yarn test:live +``` +Note that requires `tsx` in the global path (`npm install -g tsx`). + + + + +### Codegen + +Contract schemas live in `./contracts`, and protos in `./proto`. 
Look inside of `scripts/codegen.cjs` and configure the settings for bundling your SDK and contracts into `@agoric/cosmic-proto`: + +``` +yarn codegen +``` + +### Publishing + +Build the types and then publish: + +``` +yarn build +yarn publish +``` + +## Related + +Checkout these related projects: + +* [@cosmology/telescope](https://github.com/cosmology-tech/telescope) Your Frontend Companion for Building with TypeScript with Cosmos SDK Modules. +* [@cosmwasm/ts-codegen](https://github.com/CosmWasm/ts-codegen) Convert your CosmWasm smart contracts into dev-friendly TypeScript classes. +* [chain-registry](https://github.com/cosmology-tech/chain-registry) Everything from token symbols, logos, and IBC denominations for all assets you want to support in your application. +* [cosmos-kit](https://github.com/cosmology-tech/cosmos-kit) Experience the convenience of connecting with a variety of web3 wallets through a single, streamlined interface. +* [create-cosmos-app](https://github.com/cosmology-tech/create-cosmos-app) Set up a modern Cosmos app by running one command. +* [interchain-ui](https://github.com/cosmology-tech/interchain-ui) The Interchain Design System, empowering developers with a flexible, easy-to-use UI kit. +* [starship](https://github.com/cosmology-tech/starship) Unified Testing and Development for the Interchain. + diff --git a/packages/cosmic-proto/dist/agoric/swingset/msgs.d.ts b/packages/cosmic-proto/dist/agoric/swingset/msgs.d.ts deleted file mode 100644 index e71a311ba58..00000000000 --- a/packages/cosmic-proto/dist/agoric/swingset/msgs.d.ts +++ /dev/null @@ -1,538 +0,0 @@ -import Long from 'long'; -import _m0 from 'protobufjs/minimal.js'; -export declare const protobufPackage = 'agoric.swingset'; -/** MsgDeliverInbound defines an SDK message for delivering an eventual send */ -export interface MsgDeliverInbound { - messages: string[]; - nums: Long[]; - ack: Long; - submitter: Uint8Array; -} -/** MsgDeliverInboundResponse is an empty reply. */ -export interface MsgDeliverInboundResponse {} -/** - * MsgWalletAction defines an SDK message for the on-chain wallet to perform an - * action that *does not* spend any assets (other than gas fees/stamps). This - * message type is typically protected by feegrant budgets. - */ -export interface MsgWalletAction { - owner: Uint8Array; - /** The action to perform, as JSON-stringified marshalled data. */ - action: string; -} -/** MsgWalletActionResponse is an empty reply. */ -export interface MsgWalletActionResponse {} -/** - * MsgWalletSpendAction defines an SDK message for the on-chain wallet to - * perform an action that *does spend the owner's assets.* This message type is - * typically protected by explicit confirmation by the user. - */ -export interface MsgWalletSpendAction { - owner: Uint8Array; - /** The action to perform, as JSON-stringified marshalled data. */ - spendAction: string; -} -/** MsgWalletSpendActionResponse is an empty reply. */ -export interface MsgWalletSpendActionResponse {} -/** MsgProvision defines an SDK message for provisioning a client to the chain */ -export interface MsgProvision { - nickname: string; - address: Uint8Array; - powerFlags: string[]; - submitter: Uint8Array; -} -/** MsgProvisionResponse is an empty reply. */ -export interface MsgProvisionResponse {} -/** MsgInstallBundle carries a signed bundle to SwingSet. */ -export interface MsgInstallBundle { - bundle: string; - submitter: Uint8Array; - /** - * Either bundle or compressed_bundle will be set. - * Default compression algorithm is gzip. 
- */ - compressedBundle: Uint8Array; - /** Size in bytes of uncompression of compressed_bundle. */ - uncompressedSize: Long; -} -/** - * MsgInstallBundleResponse is an empty acknowledgement that an install bundle - * message has been queued for the SwingSet kernel's consideration. - */ -export interface MsgInstallBundleResponse {} -export declare const MsgDeliverInbound: { - encode(message: MsgDeliverInbound, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): MsgDeliverInbound; - fromJSON(object: any): MsgDeliverInbound; - toJSON(message: MsgDeliverInbound): unknown; - fromPartial< - I extends { - messages?: string[] | undefined; - nums?: (string | number | Long)[] | undefined; - ack?: string | number | Long | undefined; - submitter?: Uint8Array | undefined; - } & { - messages?: - | (string[] & - string[] & { - [K in Exclude]: never; - }) - | undefined; - nums?: - | ((string | number | Long)[] & - ( - | string - | number - | (Long & { - high: number; - low: number; - unsigned: boolean; - add: (addend: string | number | Long) => Long; - and: (other: string | number | Long) => Long; - compare: (other: string | number | Long) => number; - comp: (other: string | number | Long) => number; - divide: (divisor: string | number | Long) => Long; - div: (divisor: string | number | Long) => Long; - equals: (other: string | number | Long) => boolean; - eq: (other: string | number | Long) => boolean; - getHighBits: () => number; - getHighBitsUnsigned: () => number; - getLowBits: () => number; - getLowBitsUnsigned: () => number; - getNumBitsAbs: () => number; - greaterThan: (other: string | number | Long) => boolean; - gt: (other: string | number | Long) => boolean; - greaterThanOrEqual: ( - other: string | number | Long, - ) => boolean; - gte: (other: string | number | Long) => boolean; - ge: (other: string | number | Long) => boolean; - isEven: () => boolean; - isNegative: () => boolean; - isOdd: () => boolean; - isPositive: () => boolean; - isZero: () => boolean; - eqz: () => boolean; - lessThan: (other: string | number | Long) => boolean; - lt: (other: string | number | Long) => boolean; - lessThanOrEqual: (other: string | number | Long) => boolean; - lte: (other: string | number | Long) => boolean; - le: (other: string | number | Long) => boolean; - modulo: (other: string | number | Long) => Long; - mod: (other: string | number | Long) => Long; - rem: (other: string | number | Long) => Long; - multiply: (multiplier: string | number | Long) => Long; - mul: (multiplier: string | number | Long) => Long; - negate: () => Long; - neg: () => Long; - not: () => Long; - countLeadingZeros: () => number; - clz: () => number; - countTrailingZeros: () => number; - ctz: () => number; - notEquals: (other: string | number | Long) => boolean; - neq: (other: string | number | Long) => boolean; - ne: (other: string | number | Long) => boolean; - or: (other: string | number | Long) => Long; - shiftLeft: (numBits: number | Long) => Long; - shl: (numBits: number | Long) => Long; - shiftRight: (numBits: number | Long) => Long; - shr: (numBits: number | Long) => Long; - shiftRightUnsigned: (numBits: number | Long) => Long; - shru: (numBits: number | Long) => Long; - shr_u: (numBits: number | Long) => Long; - rotateLeft: (numBits: number | Long) => Long; - rotl: (numBits: number | Long) => Long; - rotateRight: (numBits: number | Long) => Long; - rotr: (numBits: number | Long) => Long; - subtract: (subtrahend: string | number | Long) => Long; - sub: (subtrahend: string | number | 
Long) => Long; - toInt: () => number; - toNumber: () => number; - toBytes: (le?: boolean | undefined) => number[]; - toBytesLE: () => number[]; - toBytesBE: () => number[]; - toSigned: () => Long; - toString: (radix?: number | undefined) => string; - toUnsigned: () => Long; - xor: (other: string | number | Long) => Long; - } & { - [K_1 in Exclude]: never; - }) - )[] & { - [K_2 in Exclude< - keyof I['nums'], - keyof (string | number | Long)[] - >]: never; - }) - | undefined; - ack?: - | string - | number - | (Long & { - high: number; - low: number; - unsigned: boolean; - add: (addend: string | number | Long) => Long; - and: (other: string | number | Long) => Long; - compare: (other: string | number | Long) => number; - comp: (other: string | number | Long) => number; - divide: (divisor: string | number | Long) => Long; - div: (divisor: string | number | Long) => Long; - equals: (other: string | number | Long) => boolean; - eq: (other: string | number | Long) => boolean; - getHighBits: () => number; - getHighBitsUnsigned: () => number; - getLowBits: () => number; - getLowBitsUnsigned: () => number; - getNumBitsAbs: () => number; - greaterThan: (other: string | number | Long) => boolean; - gt: (other: string | number | Long) => boolean; - greaterThanOrEqual: (other: string | number | Long) => boolean; - gte: (other: string | number | Long) => boolean; - ge: (other: string | number | Long) => boolean; - isEven: () => boolean; - isNegative: () => boolean; - isOdd: () => boolean; - isPositive: () => boolean; - isZero: () => boolean; - eqz: () => boolean; - lessThan: (other: string | number | Long) => boolean; - lt: (other: string | number | Long) => boolean; - lessThanOrEqual: (other: string | number | Long) => boolean; - lte: (other: string | number | Long) => boolean; - le: (other: string | number | Long) => boolean; - modulo: (other: string | number | Long) => Long; - mod: (other: string | number | Long) => Long; - rem: (other: string | number | Long) => Long; - multiply: (multiplier: string | number | Long) => Long; - mul: (multiplier: string | number | Long) => Long; - negate: () => Long; - neg: () => Long; - not: () => Long; - countLeadingZeros: () => number; - clz: () => number; - countTrailingZeros: () => number; - ctz: () => number; - notEquals: (other: string | number | Long) => boolean; - neq: (other: string | number | Long) => boolean; - ne: (other: string | number | Long) => boolean; - or: (other: string | number | Long) => Long; - shiftLeft: (numBits: number | Long) => Long; - shl: (numBits: number | Long) => Long; - shiftRight: (numBits: number | Long) => Long; - shr: (numBits: number | Long) => Long; - shiftRightUnsigned: (numBits: number | Long) => Long; - shru: (numBits: number | Long) => Long; - shr_u: (numBits: number | Long) => Long; - rotateLeft: (numBits: number | Long) => Long; - rotl: (numBits: number | Long) => Long; - rotateRight: (numBits: number | Long) => Long; - rotr: (numBits: number | Long) => Long; - subtract: (subtrahend: string | number | Long) => Long; - sub: (subtrahend: string | number | Long) => Long; - toInt: () => number; - toNumber: () => number; - toBytes: (le?: boolean | undefined) => number[]; - toBytesLE: () => number[]; - toBytesBE: () => number[]; - toSigned: () => Long; - toString: (radix?: number | undefined) => string; - toUnsigned: () => Long; - xor: (other: string | number | Long) => Long; - } & { [K_3 in Exclude]: never }) - | undefined; - submitter?: Uint8Array | undefined; - } & { [K_4 in Exclude]: never }, - >( - object: I, - ): 
MsgDeliverInbound; -}; -export declare const MsgDeliverInboundResponse: { - encode(_: MsgDeliverInboundResponse, writer?: _m0.Writer): _m0.Writer; - decode( - input: _m0.Reader | Uint8Array, - length?: number, - ): MsgDeliverInboundResponse; - fromJSON(_: any): MsgDeliverInboundResponse; - toJSON(_: MsgDeliverInboundResponse): unknown; - fromPartial]: never }>( - _: I, - ): MsgDeliverInboundResponse; -}; -export declare const MsgWalletAction: { - encode(message: MsgWalletAction, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): MsgWalletAction; - fromJSON(object: any): MsgWalletAction; - toJSON(message: MsgWalletAction): unknown; - fromPartial< - I extends { - owner?: Uint8Array | undefined; - action?: string | undefined; - } & { - owner?: Uint8Array | undefined; - action?: string | undefined; - } & { [K in Exclude]: never }, - >( - object: I, - ): MsgWalletAction; -}; -export declare const MsgWalletActionResponse: { - encode(_: MsgWalletActionResponse, writer?: _m0.Writer): _m0.Writer; - decode( - input: _m0.Reader | Uint8Array, - length?: number, - ): MsgWalletActionResponse; - fromJSON(_: any): MsgWalletActionResponse; - toJSON(_: MsgWalletActionResponse): unknown; - fromPartial]: never }>( - _: I, - ): MsgWalletActionResponse; -}; -export declare const MsgWalletSpendAction: { - encode(message: MsgWalletSpendAction, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): MsgWalletSpendAction; - fromJSON(object: any): MsgWalletSpendAction; - toJSON(message: MsgWalletSpendAction): unknown; - fromPartial< - I extends { - owner?: Uint8Array | undefined; - spendAction?: string | undefined; - } & { - owner?: Uint8Array | undefined; - spendAction?: string | undefined; - } & { [K in Exclude]: never }, - >( - object: I, - ): MsgWalletSpendAction; -}; -export declare const MsgWalletSpendActionResponse: { - encode(_: MsgWalletSpendActionResponse, writer?: _m0.Writer): _m0.Writer; - decode( - input: _m0.Reader | Uint8Array, - length?: number, - ): MsgWalletSpendActionResponse; - fromJSON(_: any): MsgWalletSpendActionResponse; - toJSON(_: MsgWalletSpendActionResponse): unknown; - fromPartial]: never }>( - _: I, - ): MsgWalletSpendActionResponse; -}; -export declare const MsgProvision: { - encode(message: MsgProvision, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): MsgProvision; - fromJSON(object: any): MsgProvision; - toJSON(message: MsgProvision): unknown; - fromPartial< - I extends { - nickname?: string | undefined; - address?: Uint8Array | undefined; - powerFlags?: string[] | undefined; - submitter?: Uint8Array | undefined; - } & { - nickname?: string | undefined; - address?: Uint8Array | undefined; - powerFlags?: - | (string[] & - string[] & { - [K in Exclude]: never; - }) - | undefined; - submitter?: Uint8Array | undefined; - } & { [K_1 in Exclude]: never }, - >( - object: I, - ): MsgProvision; -}; -export declare const MsgProvisionResponse: { - encode(_: MsgProvisionResponse, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): MsgProvisionResponse; - fromJSON(_: any): MsgProvisionResponse; - toJSON(_: MsgProvisionResponse): unknown; - fromPartial]: never }>( - _: I, - ): MsgProvisionResponse; -}; -export declare const MsgInstallBundle: { - encode(message: MsgInstallBundle, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): MsgInstallBundle; - fromJSON(object: any): MsgInstallBundle; - 
toJSON(message: MsgInstallBundle): unknown; - fromPartial< - I extends { - bundle?: string | undefined; - submitter?: Uint8Array | undefined; - compressedBundle?: Uint8Array | undefined; - uncompressedSize?: string | number | Long | undefined; - } & { - bundle?: string | undefined; - submitter?: Uint8Array | undefined; - compressedBundle?: Uint8Array | undefined; - uncompressedSize?: - | string - | number - | (Long & { - high: number; - low: number; - unsigned: boolean; - add: (addend: string | number | Long) => Long; - and: (other: string | number | Long) => Long; - compare: (other: string | number | Long) => number; - comp: (other: string | number | Long) => number; - divide: (divisor: string | number | Long) => Long; - div: (divisor: string | number | Long) => Long; - equals: (other: string | number | Long) => boolean; - eq: (other: string | number | Long) => boolean; - getHighBits: () => number; - getHighBitsUnsigned: () => number; - getLowBits: () => number; - getLowBitsUnsigned: () => number; - getNumBitsAbs: () => number; - greaterThan: (other: string | number | Long) => boolean; - gt: (other: string | number | Long) => boolean; - greaterThanOrEqual: (other: string | number | Long) => boolean; - gte: (other: string | number | Long) => boolean; - ge: (other: string | number | Long) => boolean; - isEven: () => boolean; - isNegative: () => boolean; - isOdd: () => boolean; - isPositive: () => boolean; - isZero: () => boolean; - eqz: () => boolean; - lessThan: (other: string | number | Long) => boolean; - lt: (other: string | number | Long) => boolean; - lessThanOrEqual: (other: string | number | Long) => boolean; - lte: (other: string | number | Long) => boolean; - le: (other: string | number | Long) => boolean; - modulo: (other: string | number | Long) => Long; - mod: (other: string | number | Long) => Long; - rem: (other: string | number | Long) => Long; - multiply: (multiplier: string | number | Long) => Long; - mul: (multiplier: string | number | Long) => Long; - negate: () => Long; - neg: () => Long; - not: () => Long; - countLeadingZeros: () => number; - clz: () => number; - countTrailingZeros: () => number; - ctz: () => number; - notEquals: (other: string | number | Long) => boolean; - neq: (other: string | number | Long) => boolean; - ne: (other: string | number | Long) => boolean; - or: (other: string | number | Long) => Long; - shiftLeft: (numBits: number | Long) => Long; - shl: (numBits: number | Long) => Long; - shiftRight: (numBits: number | Long) => Long; - shr: (numBits: number | Long) => Long; - shiftRightUnsigned: (numBits: number | Long) => Long; - shru: (numBits: number | Long) => Long; - shr_u: (numBits: number | Long) => Long; - rotateLeft: (numBits: number | Long) => Long; - rotl: (numBits: number | Long) => Long; - rotateRight: (numBits: number | Long) => Long; - rotr: (numBits: number | Long) => Long; - subtract: (subtrahend: string | number | Long) => Long; - sub: (subtrahend: string | number | Long) => Long; - toInt: () => number; - toNumber: () => number; - toBytes: (le?: boolean | undefined) => number[]; - toBytesLE: () => number[]; - toBytesBE: () => number[]; - toSigned: () => Long; - toString: (radix?: number | undefined) => string; - toUnsigned: () => Long; - xor: (other: string | number | Long) => Long; - } & { - [K in Exclude]: never; - }) - | undefined; - } & { [K_1 in Exclude]: never }, - >( - object: I, - ): MsgInstallBundle; -}; -export declare const MsgInstallBundleResponse: { - encode(_: MsgInstallBundleResponse, writer?: _m0.Writer): _m0.Writer; 
- decode( - input: _m0.Reader | Uint8Array, - length?: number, - ): MsgInstallBundleResponse; - fromJSON(_: any): MsgInstallBundleResponse; - toJSON(_: MsgInstallBundleResponse): unknown; - fromPartial]: never }>( - _: I, - ): MsgInstallBundleResponse; -}; -/** Transactions. */ -export interface Msg { - /** Install a JavaScript sources bundle on the chain's SwingSet controller. */ - InstallBundle(request: MsgInstallBundle): Promise; - /** Send inbound messages. */ - DeliverInbound( - request: MsgDeliverInbound, - ): Promise; - /** Perform a low-privilege wallet action. */ - WalletAction(request: MsgWalletAction): Promise; - /** Perform a wallet action that spends assets. */ - WalletSpendAction( - request: MsgWalletSpendAction, - ): Promise; - /** Provision a new endpoint. */ - Provision(request: MsgProvision): Promise; -} -export declare class MsgClientImpl implements Msg { - private readonly rpc; - private readonly service; - constructor( - rpc: Rpc, - opts?: { - service?: string; - }, - ); - InstallBundle(request: MsgInstallBundle): Promise; - DeliverInbound( - request: MsgDeliverInbound, - ): Promise; - WalletAction(request: MsgWalletAction): Promise; - WalletSpendAction( - request: MsgWalletSpendAction, - ): Promise; - Provision(request: MsgProvision): Promise; -} -interface Rpc { - request( - service: string, - method: string, - data: Uint8Array, - ): Promise; -} -type Builtin = - | Date - | Function - | Uint8Array - | string - | number - | boolean - | undefined; -export type DeepPartial = T extends Builtin - ? T - : T extends Long - ? string | number | Long - : T extends Array - ? Array> - : T extends ReadonlyArray - ? ReadonlyArray> - : T extends {} - ? { - [K in keyof T]?: DeepPartial; - } - : Partial; -type KeysOfUnion = T extends T ? keyof T : never; -export type Exact = P extends Builtin - ? P - : P & { - [K in keyof P]: Exact; - } & { - [K in Exclude>]: never; - }; -export {}; diff --git a/packages/cosmic-proto/dist/agoric/swingset/msgs.js b/packages/cosmic-proto/dist/agoric/swingset/msgs.js deleted file mode 100644 index 7f600954e89..00000000000 --- a/packages/cosmic-proto/dist/agoric/swingset/msgs.js +++ /dev/null @@ -1,668 +0,0 @@ -/* eslint-disable */ -import Long from 'long'; -import _m0 from 'protobufjs/minimal.js'; -export const protobufPackage = 'agoric.swingset'; -function createBaseMsgDeliverInbound() { - return { - messages: [], - nums: [], - ack: Long.UZERO, - submitter: new Uint8Array(), - }; -} -export const MsgDeliverInbound = { - encode(message, writer = _m0.Writer.create()) { - for (const v of message.messages) { - writer.uint32(10).string(v); - } - writer.uint32(18).fork(); - for (const v of message.nums) { - writer.uint64(v); - } - writer.ldelim(); - if (!message.ack.isZero()) { - writer.uint32(24).uint64(message.ack); - } - if (message.submitter.length !== 0) { - writer.uint32(34).bytes(message.submitter); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseMsgDeliverInbound(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.messages.push(reader.string()); - break; - case 2: - if ((tag & 7) === 2) { - const end2 = reader.uint32() + reader.pos; - while (reader.pos < end2) { - message.nums.push(reader.uint64()); - } - } else { - message.nums.push(reader.uint64()); - } - break; - case 3: - message.ack = reader.uint64(); - break; - case 4: - message.submitter = reader.bytes(); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - messages: Array.isArray(object?.messages) - ? object.messages.map(e => String(e)) - : [], - nums: Array.isArray(object?.nums) - ? object.nums.map(e => Long.fromValue(e)) - : [], - ack: isSet(object.ack) ? Long.fromValue(object.ack) : Long.UZERO, - submitter: isSet(object.submitter) - ? bytesFromBase64(object.submitter) - : new Uint8Array(), - }; - }, - toJSON(message) { - const obj = {}; - if (message.messages) { - obj.messages = message.messages.map(e => e); - } else { - obj.messages = []; - } - if (message.nums) { - obj.nums = message.nums.map(e => (e || Long.UZERO).toString()); - } else { - obj.nums = []; - } - message.ack !== undefined && - (obj.ack = (message.ack || Long.UZERO).toString()); - message.submitter !== undefined && - (obj.submitter = base64FromBytes( - message.submitter !== undefined ? message.submitter : new Uint8Array(), - )); - return obj; - }, - fromPartial(object) { - const message = createBaseMsgDeliverInbound(); - message.messages = object.messages?.map(e => e) || []; - message.nums = object.nums?.map(e => Long.fromValue(e)) || []; - message.ack = - object.ack !== undefined && object.ack !== null - ? Long.fromValue(object.ack) - : Long.UZERO; - message.submitter = object.submitter ?? new Uint8Array(); - return message; - }, -}; -function createBaseMsgDeliverInboundResponse() { - return {}; -} -export const MsgDeliverInboundResponse = { - encode(_, writer = _m0.Writer.create()) { - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseMsgDeliverInboundResponse(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(_) { - return {}; - }, - toJSON(_) { - const obj = {}; - return obj; - }, - fromPartial(_) { - const message = createBaseMsgDeliverInboundResponse(); - return message; - }, -}; -function createBaseMsgWalletAction() { - return { owner: new Uint8Array(), action: '' }; -} -export const MsgWalletAction = { - encode(message, writer = _m0.Writer.create()) { - if (message.owner.length !== 0) { - writer.uint32(10).bytes(message.owner); - } - if (message.action !== '') { - writer.uint32(18).string(message.action); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseMsgWalletAction(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.owner = reader.bytes(); - break; - case 2: - message.action = reader.string(); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - owner: isSet(object.owner) - ? bytesFromBase64(object.owner) - : new Uint8Array(), - action: isSet(object.action) ? String(object.action) : '', - }; - }, - toJSON(message) { - const obj = {}; - message.owner !== undefined && - (obj.owner = base64FromBytes( - message.owner !== undefined ? message.owner : new Uint8Array(), - )); - message.action !== undefined && (obj.action = message.action); - return obj; - }, - fromPartial(object) { - const message = createBaseMsgWalletAction(); - message.owner = object.owner ?? new Uint8Array(); - message.action = object.action ?? ''; - return message; - }, -}; -function createBaseMsgWalletActionResponse() { - return {}; -} -export const MsgWalletActionResponse = { - encode(_, writer = _m0.Writer.create()) { - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseMsgWalletActionResponse(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(_) { - return {}; - }, - toJSON(_) { - const obj = {}; - return obj; - }, - fromPartial(_) { - const message = createBaseMsgWalletActionResponse(); - return message; - }, -}; -function createBaseMsgWalletSpendAction() { - return { owner: new Uint8Array(), spendAction: '' }; -} -export const MsgWalletSpendAction = { - encode(message, writer = _m0.Writer.create()) { - if (message.owner.length !== 0) { - writer.uint32(10).bytes(message.owner); - } - if (message.spendAction !== '') { - writer.uint32(18).string(message.spendAction); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseMsgWalletSpendAction(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.owner = reader.bytes(); - break; - case 2: - message.spendAction = reader.string(); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - owner: isSet(object.owner) - ? bytesFromBase64(object.owner) - : new Uint8Array(), - spendAction: isSet(object.spendAction) ? String(object.spendAction) : '', - }; - }, - toJSON(message) { - const obj = {}; - message.owner !== undefined && - (obj.owner = base64FromBytes( - message.owner !== undefined ? message.owner : new Uint8Array(), - )); - message.spendAction !== undefined && - (obj.spendAction = message.spendAction); - return obj; - }, - fromPartial(object) { - const message = createBaseMsgWalletSpendAction(); - message.owner = object.owner ?? new Uint8Array(); - message.spendAction = object.spendAction ?? 
''; - return message; - }, -}; -function createBaseMsgWalletSpendActionResponse() { - return {}; -} -export const MsgWalletSpendActionResponse = { - encode(_, writer = _m0.Writer.create()) { - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseMsgWalletSpendActionResponse(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(_) { - return {}; - }, - toJSON(_) { - const obj = {}; - return obj; - }, - fromPartial(_) { - const message = createBaseMsgWalletSpendActionResponse(); - return message; - }, -}; -function createBaseMsgProvision() { - return { - nickname: '', - address: new Uint8Array(), - powerFlags: [], - submitter: new Uint8Array(), - }; -} -export const MsgProvision = { - encode(message, writer = _m0.Writer.create()) { - if (message.nickname !== '') { - writer.uint32(10).string(message.nickname); - } - if (message.address.length !== 0) { - writer.uint32(18).bytes(message.address); - } - for (const v of message.powerFlags) { - writer.uint32(26).string(v); - } - if (message.submitter.length !== 0) { - writer.uint32(34).bytes(message.submitter); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseMsgProvision(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.nickname = reader.string(); - break; - case 2: - message.address = reader.bytes(); - break; - case 3: - message.powerFlags.push(reader.string()); - break; - case 4: - message.submitter = reader.bytes(); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - nickname: isSet(object.nickname) ? String(object.nickname) : '', - address: isSet(object.address) - ? bytesFromBase64(object.address) - : new Uint8Array(), - powerFlags: Array.isArray(object?.powerFlags) - ? object.powerFlags.map(e => String(e)) - : [], - submitter: isSet(object.submitter) - ? bytesFromBase64(object.submitter) - : new Uint8Array(), - }; - }, - toJSON(message) { - const obj = {}; - message.nickname !== undefined && (obj.nickname = message.nickname); - message.address !== undefined && - (obj.address = base64FromBytes( - message.address !== undefined ? message.address : new Uint8Array(), - )); - if (message.powerFlags) { - obj.powerFlags = message.powerFlags.map(e => e); - } else { - obj.powerFlags = []; - } - message.submitter !== undefined && - (obj.submitter = base64FromBytes( - message.submitter !== undefined ? message.submitter : new Uint8Array(), - )); - return obj; - }, - fromPartial(object) { - const message = createBaseMsgProvision(); - message.nickname = object.nickname ?? ''; - message.address = object.address ?? new Uint8Array(); - message.powerFlags = object.powerFlags?.map(e => e) || []; - message.submitter = object.submitter ?? new Uint8Array(); - return message; - }, -}; -function createBaseMsgProvisionResponse() { - return {}; -} -export const MsgProvisionResponse = { - encode(_, writer = _m0.Writer.create()) { - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseMsgProvisionResponse(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(_) { - return {}; - }, - toJSON(_) { - const obj = {}; - return obj; - }, - fromPartial(_) { - const message = createBaseMsgProvisionResponse(); - return message; - }, -}; -function createBaseMsgInstallBundle() { - return { - bundle: '', - submitter: new Uint8Array(), - compressedBundle: new Uint8Array(), - uncompressedSize: Long.ZERO, - }; -} -export const MsgInstallBundle = { - encode(message, writer = _m0.Writer.create()) { - if (message.bundle !== '') { - writer.uint32(10).string(message.bundle); - } - if (message.submitter.length !== 0) { - writer.uint32(18).bytes(message.submitter); - } - if (message.compressedBundle.length !== 0) { - writer.uint32(26).bytes(message.compressedBundle); - } - if (!message.uncompressedSize.isZero()) { - writer.uint32(32).int64(message.uncompressedSize); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseMsgInstallBundle(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.bundle = reader.string(); - break; - case 2: - message.submitter = reader.bytes(); - break; - case 3: - message.compressedBundle = reader.bytes(); - break; - case 4: - message.uncompressedSize = reader.int64(); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - bundle: isSet(object.bundle) ? String(object.bundle) : '', - submitter: isSet(object.submitter) - ? bytesFromBase64(object.submitter) - : new Uint8Array(), - compressedBundle: isSet(object.compressedBundle) - ? bytesFromBase64(object.compressedBundle) - : new Uint8Array(), - uncompressedSize: isSet(object.uncompressedSize) - ? Long.fromValue(object.uncompressedSize) - : Long.ZERO, - }; - }, - toJSON(message) { - const obj = {}; - message.bundle !== undefined && (obj.bundle = message.bundle); - message.submitter !== undefined && - (obj.submitter = base64FromBytes( - message.submitter !== undefined ? message.submitter : new Uint8Array(), - )); - message.compressedBundle !== undefined && - (obj.compressedBundle = base64FromBytes( - message.compressedBundle !== undefined - ? message.compressedBundle - : new Uint8Array(), - )); - message.uncompressedSize !== undefined && - (obj.uncompressedSize = ( - message.uncompressedSize || Long.ZERO - ).toString()); - return obj; - }, - fromPartial(object) { - const message = createBaseMsgInstallBundle(); - message.bundle = object.bundle ?? ''; - message.submitter = object.submitter ?? new Uint8Array(); - message.compressedBundle = object.compressedBundle ?? new Uint8Array(); - message.uncompressedSize = - object.uncompressedSize !== undefined && object.uncompressedSize !== null - ? Long.fromValue(object.uncompressedSize) - : Long.ZERO; - return message; - }, -}; -function createBaseMsgInstallBundleResponse() { - return {}; -} -export const MsgInstallBundleResponse = { - encode(_, writer = _m0.Writer.create()) { - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseMsgInstallBundleResponse(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(_) { - return {}; - }, - toJSON(_) { - const obj = {}; - return obj; - }, - fromPartial(_) { - const message = createBaseMsgInstallBundleResponse(); - return message; - }, -}; -export class MsgClientImpl { - rpc; - service; - constructor(rpc, opts) { - this.service = opts?.service || 'agoric.swingset.Msg'; - this.rpc = rpc; - this.InstallBundle = this.InstallBundle.bind(this); - this.DeliverInbound = this.DeliverInbound.bind(this); - this.WalletAction = this.WalletAction.bind(this); - this.WalletSpendAction = this.WalletSpendAction.bind(this); - this.Provision = this.Provision.bind(this); - } - InstallBundle(request) { - const data = MsgInstallBundle.encode(request).finish(); - const promise = this.rpc.request(this.service, 'InstallBundle', data); - return promise.then(data => - MsgInstallBundleResponse.decode(new _m0.Reader(data)), - ); - } - DeliverInbound(request) { - const data = MsgDeliverInbound.encode(request).finish(); - const promise = this.rpc.request(this.service, 'DeliverInbound', data); - return promise.then(data => - MsgDeliverInboundResponse.decode(new _m0.Reader(data)), - ); - } - WalletAction(request) { - const data = MsgWalletAction.encode(request).finish(); - const promise = this.rpc.request(this.service, 'WalletAction', data); - return promise.then(data => - MsgWalletActionResponse.decode(new _m0.Reader(data)), - ); - } - WalletSpendAction(request) { - const data = MsgWalletSpendAction.encode(request).finish(); - const promise = this.rpc.request(this.service, 'WalletSpendAction', data); - return promise.then(data => - MsgWalletSpendActionResponse.decode(new _m0.Reader(data)), - ); - } - Provision(request) { - const data = MsgProvision.encode(request).finish(); - const promise = this.rpc.request(this.service, 'Provision', data); - return promise.then(data => - MsgProvisionResponse.decode(new _m0.Reader(data)), - ); - } -} -var globalThis = (() => { - if (typeof globalThis !== 'undefined') { - return globalThis; - } - if (typeof self !== 'undefined') { - return self; - } - if (typeof window !== 'undefined') { - return window; - } - if (typeof global !== 'undefined') { - return global; - } - throw 'Unable to locate global object'; -})(); -function bytesFromBase64(b64) { - if (globalThis.Buffer) { - return Uint8Array.from(globalThis.Buffer.from(b64, 'base64')); - } else { - const bin = globalThis.atob(b64); - const arr = new Uint8Array(bin.length); - for (let i = 0; i < bin.length; ++i) { - arr[i] = bin.charCodeAt(i); - } - return arr; - } -} -function base64FromBytes(arr) { - if (globalThis.Buffer) { - return globalThis.Buffer.from(arr).toString('base64'); - } else { - const bin = []; - arr.forEach(byte => { - bin.push(String.fromCharCode(byte)); - }); - return globalThis.btoa(bin.join('')); - } -} -if (_m0.util.Long !== Long) { - _m0.util.Long = Long; - _m0.configure(); -} -function isSet(value) { - return value !== null && value !== undefined; -} diff --git a/packages/cosmic-proto/dist/agoric/swingset/query.d.ts b/packages/cosmic-proto/dist/agoric/swingset/query.d.ts deleted file mode 100644 index b7738adb11a..00000000000 --- a/packages/cosmic-proto/dist/agoric/swingset/query.d.ts +++ /dev/null @@ -1,406 +0,0 @@ -import Long from 'long'; -import _m0 from 'protobufjs/minimal.js'; -import { Egress, Params } from 
'./swingset.js'; -export declare const protobufPackage = 'agoric.swingset'; -/** QueryParamsRequest is the request type for the Query/Params RPC method. */ -export interface QueryParamsRequest {} -/** QueryParamsResponse is the response type for the Query/Params RPC method. */ -export interface QueryParamsResponse { - /** params defines the parameters of the module. */ - params?: Params; -} -/** QueryEgressRequest is the request type for the Query/Egress RPC method */ -export interface QueryEgressRequest { - peer: Uint8Array; -} -/** QueryEgressResponse is the egress response. */ -export interface QueryEgressResponse { - egress?: Egress; -} -/** QueryMailboxRequest is the mailbox query. */ -export interface QueryMailboxRequest { - peer: Uint8Array; -} -/** QueryMailboxResponse is the mailbox response. */ -export interface QueryMailboxResponse { - value: string; -} -export declare const QueryParamsRequest: { - encode(_: QueryParamsRequest, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsRequest; - fromJSON(_: any): QueryParamsRequest; - toJSON(_: QueryParamsRequest): unknown; - fromPartial]: never }>( - _: I, - ): QueryParamsRequest; -}; -export declare const QueryParamsResponse: { - encode(message: QueryParamsResponse, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): QueryParamsResponse; - fromJSON(object: any): QueryParamsResponse; - toJSON(message: QueryParamsResponse): unknown; - fromPartial< - I extends { - params?: - | { - beansPerUnit?: - | { - key?: string | undefined; - beans?: string | undefined; - }[] - | undefined; - feeUnitPrice?: - | { - denom?: string | undefined; - amount?: string | undefined; - }[] - | undefined; - bootstrapVatConfig?: string | undefined; - powerFlagFees?: - | { - powerFlag?: string | undefined; - fee?: - | { - denom?: string | undefined; - amount?: string | undefined; - }[] - | undefined; - }[] - | undefined; - queueMax?: - | { - key?: string | undefined; - size?: number | undefined; - }[] - | undefined; - } - | undefined; - } & { - params?: - | ({ - beansPerUnit?: - | { - key?: string | undefined; - beans?: string | undefined; - }[] - | undefined; - feeUnitPrice?: - | { - denom?: string | undefined; - amount?: string | undefined; - }[] - | undefined; - bootstrapVatConfig?: string | undefined; - powerFlagFees?: - | { - powerFlag?: string | undefined; - fee?: - | { - denom?: string | undefined; - amount?: string | undefined; - }[] - | undefined; - }[] - | undefined; - queueMax?: - | { - key?: string | undefined; - size?: number | undefined; - }[] - | undefined; - } & { - beansPerUnit?: - | ({ - key?: string | undefined; - beans?: string | undefined; - }[] & - ({ - key?: string | undefined; - beans?: string | undefined; - } & { - key?: string | undefined; - beans?: string | undefined; - } & { - [K in Exclude< - keyof I['params']['beansPerUnit'][number], - keyof import('./swingset.js').StringBeans - >]: never; - })[] & { - [K_1 in Exclude< - keyof I['params']['beansPerUnit'], - keyof { - key?: string | undefined; - beans?: string | undefined; - }[] - >]: never; - }) - | undefined; - feeUnitPrice?: - | ({ - denom?: string | undefined; - amount?: string | undefined; - }[] & - ({ - denom?: string | undefined; - amount?: string | undefined; - } & { - denom?: string | undefined; - amount?: string | undefined; - } & { - [K_2 in Exclude< - keyof I['params']['feeUnitPrice'][number], - keyof import('../../cosmos/base/v1beta1/coin.js').Coin - >]: never; - })[] & { - [K_3 
in Exclude< - keyof I['params']['feeUnitPrice'], - keyof { - denom?: string | undefined; - amount?: string | undefined; - }[] - >]: never; - }) - | undefined; - bootstrapVatConfig?: string | undefined; - powerFlagFees?: - | ({ - powerFlag?: string | undefined; - fee?: - | { - denom?: string | undefined; - amount?: string | undefined; - }[] - | undefined; - }[] & - ({ - powerFlag?: string | undefined; - fee?: - | { - denom?: string | undefined; - amount?: string | undefined; - }[] - | undefined; - } & { - powerFlag?: string | undefined; - fee?: - | ({ - denom?: string | undefined; - amount?: string | undefined; - }[] & - ({ - denom?: string | undefined; - amount?: string | undefined; - } & { - denom?: string | undefined; - amount?: string | undefined; - } & { - [K_4 in Exclude< - keyof I['params']['powerFlagFees'][number]['fee'][number], - keyof import('../../cosmos/base/v1beta1/coin.js').Coin - >]: never; - })[] & { - [K_5 in Exclude< - keyof I['params']['powerFlagFees'][number]['fee'], - keyof { - denom?: string | undefined; - amount?: string | undefined; - }[] - >]: never; - }) - | undefined; - } & { - [K_6 in Exclude< - keyof I['params']['powerFlagFees'][number], - keyof import('./swingset.js').PowerFlagFee - >]: never; - })[] & { - [K_7 in Exclude< - keyof I['params']['powerFlagFees'], - keyof { - powerFlag?: string | undefined; - fee?: - | { - denom?: string | undefined; - amount?: string | undefined; - }[] - | undefined; - }[] - >]: never; - }) - | undefined; - queueMax?: - | ({ - key?: string | undefined; - size?: number | undefined; - }[] & - ({ - key?: string | undefined; - size?: number | undefined; - } & { - key?: string | undefined; - size?: number | undefined; - } & { - [K_8 in Exclude< - keyof I['params']['queueMax'][number], - keyof import('./swingset.js').QueueSize - >]: never; - })[] & { - [K_9 in Exclude< - keyof I['params']['queueMax'], - keyof { - key?: string | undefined; - size?: number | undefined; - }[] - >]: never; - }) - | undefined; - } & { [K_10 in Exclude]: never }) - | undefined; - } & { [K_11 in Exclude]: never }, - >( - object: I, - ): QueryParamsResponse; -}; -export declare const QueryEgressRequest: { - encode(message: QueryEgressRequest, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): QueryEgressRequest; - fromJSON(object: any): QueryEgressRequest; - toJSON(message: QueryEgressRequest): unknown; - fromPartial< - I extends { - peer?: Uint8Array | undefined; - } & { - peer?: Uint8Array | undefined; - } & { [K in Exclude]: never }, - >( - object: I, - ): QueryEgressRequest; -}; -export declare const QueryEgressResponse: { - encode(message: QueryEgressResponse, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): QueryEgressResponse; - fromJSON(object: any): QueryEgressResponse; - toJSON(message: QueryEgressResponse): unknown; - fromPartial< - I extends { - egress?: - | { - nickname?: string | undefined; - peer?: Uint8Array | undefined; - powerFlags?: string[] | undefined; - } - | undefined; - } & { - egress?: - | ({ - nickname?: string | undefined; - peer?: Uint8Array | undefined; - powerFlags?: string[] | undefined; - } & { - nickname?: string | undefined; - peer?: Uint8Array | undefined; - powerFlags?: - | (string[] & - string[] & { - [K in Exclude< - keyof I['egress']['powerFlags'], - keyof string[] - >]: never; - }) - | undefined; - } & { [K_1 in Exclude]: never }) - | undefined; - } & { [K_2 in Exclude]: never }, - >( - object: I, - ): QueryEgressResponse; -}; -export 
declare const QueryMailboxRequest: { - encode(message: QueryMailboxRequest, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): QueryMailboxRequest; - fromJSON(object: any): QueryMailboxRequest; - toJSON(message: QueryMailboxRequest): unknown; - fromPartial< - I extends { - peer?: Uint8Array | undefined; - } & { - peer?: Uint8Array | undefined; - } & { [K in Exclude]: never }, - >( - object: I, - ): QueryMailboxRequest; -}; -export declare const QueryMailboxResponse: { - encode(message: QueryMailboxResponse, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): QueryMailboxResponse; - fromJSON(object: any): QueryMailboxResponse; - toJSON(message: QueryMailboxResponse): unknown; - fromPartial< - I extends { - value?: string | undefined; - } & { - value?: string | undefined; - } & { [K in Exclude]: never }, - >( - object: I, - ): QueryMailboxResponse; -}; -/** Query provides defines the gRPC querier service */ -export interface Query { - /** Params queries params of the swingset module. */ - Params(request: QueryParamsRequest): Promise; - /** Egress queries a provisioned egress. */ - Egress(request: QueryEgressRequest): Promise; - /** Return the contents of a peer's outbound mailbox. */ - Mailbox(request: QueryMailboxRequest): Promise; -} -export declare class QueryClientImpl implements Query { - private readonly rpc; - private readonly service; - constructor( - rpc: Rpc, - opts?: { - service?: string; - }, - ); - Params(request: QueryParamsRequest): Promise; - Egress(request: QueryEgressRequest): Promise; - Mailbox(request: QueryMailboxRequest): Promise; -} -interface Rpc { - request( - service: string, - method: string, - data: Uint8Array, - ): Promise; -} -type Builtin = - | Date - | Function - | Uint8Array - | string - | number - | boolean - | undefined; -export type DeepPartial = T extends Builtin - ? T - : T extends Long - ? string | number | Long - : T extends Array - ? Array> - : T extends ReadonlyArray - ? ReadonlyArray> - : T extends {} - ? { - [K in keyof T]?: DeepPartial; - } - : Partial; -type KeysOfUnion = T extends T ? keyof T : never; -export type Exact = P extends Builtin - ? P - : P & { - [K in keyof P]: Exact; - } & { - [K in Exclude>]: never; - }; -export {}; diff --git a/packages/cosmic-proto/dist/agoric/swingset/query.js b/packages/cosmic-proto/dist/agoric/swingset/query.js deleted file mode 100644 index 2cc834cb1c3..00000000000 --- a/packages/cosmic-proto/dist/agoric/swingset/query.js +++ /dev/null @@ -1,346 +0,0 @@ -/* eslint-disable */ -import Long from 'long'; -import _m0 from 'protobufjs/minimal.js'; -import { Egress, Params } from './swingset.js'; -export const protobufPackage = 'agoric.swingset'; -function createBaseQueryParamsRequest() { - return {}; -} -export const QueryParamsRequest = { - encode(_, writer = _m0.Writer.create()) { - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseQueryParamsRequest(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(_) { - return {}; - }, - toJSON(_) { - const obj = {}; - return obj; - }, - fromPartial(_) { - const message = createBaseQueryParamsRequest(); - return message; - }, -}; -function createBaseQueryParamsResponse() { - return { params: undefined }; -} -export const QueryParamsResponse = { - encode(message, writer = _m0.Writer.create()) { - if (message.params !== undefined) { - Params.encode(message.params, writer.uint32(10).fork()).ldelim(); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseQueryParamsResponse(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.params = Params.decode(reader, reader.uint32()); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - params: isSet(object.params) ? Params.fromJSON(object.params) : undefined, - }; - }, - toJSON(message) { - const obj = {}; - message.params !== undefined && - (obj.params = message.params ? Params.toJSON(message.params) : undefined); - return obj; - }, - fromPartial(object) { - const message = createBaseQueryParamsResponse(); - message.params = - object.params !== undefined && object.params !== null - ? Params.fromPartial(object.params) - : undefined; - return message; - }, -}; -function createBaseQueryEgressRequest() { - return { peer: new Uint8Array() }; -} -export const QueryEgressRequest = { - encode(message, writer = _m0.Writer.create()) { - if (message.peer.length !== 0) { - writer.uint32(10).bytes(message.peer); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseQueryEgressRequest(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.peer = reader.bytes(); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - peer: isSet(object.peer) - ? bytesFromBase64(object.peer) - : new Uint8Array(), - }; - }, - toJSON(message) { - const obj = {}; - message.peer !== undefined && - (obj.peer = base64FromBytes( - message.peer !== undefined ? message.peer : new Uint8Array(), - )); - return obj; - }, - fromPartial(object) { - const message = createBaseQueryEgressRequest(); - message.peer = object.peer ?? new Uint8Array(); - return message; - }, -}; -function createBaseQueryEgressResponse() { - return { egress: undefined }; -} -export const QueryEgressResponse = { - encode(message, writer = _m0.Writer.create()) { - if (message.egress !== undefined) { - Egress.encode(message.egress, writer.uint32(10).fork()).ldelim(); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseQueryEgressResponse(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.egress = Egress.decode(reader, reader.uint32()); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - egress: isSet(object.egress) ? Egress.fromJSON(object.egress) : undefined, - }; - }, - toJSON(message) { - const obj = {}; - message.egress !== undefined && - (obj.egress = message.egress ? Egress.toJSON(message.egress) : undefined); - return obj; - }, - fromPartial(object) { - const message = createBaseQueryEgressResponse(); - message.egress = - object.egress !== undefined && object.egress !== null - ? Egress.fromPartial(object.egress) - : undefined; - return message; - }, -}; -function createBaseQueryMailboxRequest() { - return { peer: new Uint8Array() }; -} -export const QueryMailboxRequest = { - encode(message, writer = _m0.Writer.create()) { - if (message.peer.length !== 0) { - writer.uint32(10).bytes(message.peer); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseQueryMailboxRequest(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.peer = reader.bytes(); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - peer: isSet(object.peer) - ? bytesFromBase64(object.peer) - : new Uint8Array(), - }; - }, - toJSON(message) { - const obj = {}; - message.peer !== undefined && - (obj.peer = base64FromBytes( - message.peer !== undefined ? message.peer : new Uint8Array(), - )); - return obj; - }, - fromPartial(object) { - const message = createBaseQueryMailboxRequest(); - message.peer = object.peer ?? new Uint8Array(); - return message; - }, -}; -function createBaseQueryMailboxResponse() { - return { value: '' }; -} -export const QueryMailboxResponse = { - encode(message, writer = _m0.Writer.create()) { - if (message.value !== '') { - writer.uint32(10).string(message.value); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseQueryMailboxResponse(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.value = reader.string(); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { value: isSet(object.value) ? String(object.value) : '' }; - }, - toJSON(message) { - const obj = {}; - message.value !== undefined && (obj.value = message.value); - return obj; - }, - fromPartial(object) { - const message = createBaseQueryMailboxResponse(); - message.value = object.value ?? 
''; - return message; - }, -}; -export class QueryClientImpl { - rpc; - service; - constructor(rpc, opts) { - this.service = opts?.service || 'agoric.swingset.Query'; - this.rpc = rpc; - this.Params = this.Params.bind(this); - this.Egress = this.Egress.bind(this); - this.Mailbox = this.Mailbox.bind(this); - } - Params(request) { - const data = QueryParamsRequest.encode(request).finish(); - const promise = this.rpc.request(this.service, 'Params', data); - return promise.then(data => - QueryParamsResponse.decode(new _m0.Reader(data)), - ); - } - Egress(request) { - const data = QueryEgressRequest.encode(request).finish(); - const promise = this.rpc.request(this.service, 'Egress', data); - return promise.then(data => - QueryEgressResponse.decode(new _m0.Reader(data)), - ); - } - Mailbox(request) { - const data = QueryMailboxRequest.encode(request).finish(); - const promise = this.rpc.request(this.service, 'Mailbox', data); - return promise.then(data => - QueryMailboxResponse.decode(new _m0.Reader(data)), - ); - } -} -var globalThis = (() => { - if (typeof globalThis !== 'undefined') { - return globalThis; - } - if (typeof self !== 'undefined') { - return self; - } - if (typeof window !== 'undefined') { - return window; - } - if (typeof global !== 'undefined') { - return global; - } - throw 'Unable to locate global object'; -})(); -function bytesFromBase64(b64) { - if (globalThis.Buffer) { - return Uint8Array.from(globalThis.Buffer.from(b64, 'base64')); - } else { - const bin = globalThis.atob(b64); - const arr = new Uint8Array(bin.length); - for (let i = 0; i < bin.length; ++i) { - arr[i] = bin.charCodeAt(i); - } - return arr; - } -} -function base64FromBytes(arr) { - if (globalThis.Buffer) { - return globalThis.Buffer.from(arr).toString('base64'); - } else { - const bin = []; - arr.forEach(byte => { - bin.push(String.fromCharCode(byte)); - }); - return globalThis.btoa(bin.join('')); - } -} -if (_m0.util.Long !== Long) { - _m0.util.Long = Long; - _m0.configure(); -} -function isSet(value) { - return value !== null && value !== undefined; -} diff --git a/packages/cosmic-proto/dist/agoric/swingset/swingset.d.ts b/packages/cosmic-proto/dist/agoric/swingset/swingset.d.ts deleted file mode 100644 index 6aab530e35a..00000000000 --- a/packages/cosmic-proto/dist/agoric/swingset/swingset.d.ts +++ /dev/null @@ -1,568 +0,0 @@ -import Long from 'long'; -import _m0 from 'protobufjs/minimal.js'; -import { Coin } from '../../cosmos/base/v1beta1/coin.js'; -export declare const protobufPackage = 'agoric.swingset'; -/** - * CoreEvalProposal is a gov Content type for evaluating code in the SwingSet - * core. - * See `agoric-sdk/packages/vats/src/core/eval.js`. - */ -export interface CoreEvalProposal { - title: string; - description: string; - /** - * Although evals are sequential, they may run concurrently, since they each - * can return a Promise. - */ - evals: CoreEval[]; -} -/** - * CoreEval defines an individual SwingSet core evaluation, for use in - * CoreEvalProposal. - */ -export interface CoreEval { - /** - * Grant these JSON-stringified core bootstrap permits to the jsCode, as the - * `powers` endowment. - */ - jsonPermits: string; - /** - * Evaluate this JavaScript code in a Compartment endowed with `powers` as - * well as some powerless helpers. - */ - jsCode: string; -} -/** Params are the swingset configuration/governance parameters. */ -export interface Params { - /** - * Map from unit name to a value in SwingSet "beans". - * Must not be negative. 
- * - * These values are used by SwingSet to normalize named per-resource charges - * (maybe rent) in a single Nat usage unit, the "bean". - * - * There is no required order to this list of entries, but all the chain - * nodes must all serialize and deserialize the existing order without - * permuting it. - */ - beansPerUnit: StringBeans[]; - /** - * The price in Coins per the unit named "fee". This value is used by - * cosmic-swingset JS code to decide how many tokens to charge. - * - * cost = beans_used * fee_unit_price / beans_per_unit["fee"] - */ - feeUnitPrice: Coin[]; - /** - * The SwingSet bootstrap vat configuration file. Not usefully modifiable - * via governance as it is only referenced by the chain's initial - * construction. - */ - bootstrapVatConfig: string; - /** - * If the provision submitter doesn't hold a provisionpass, their requested - * power flags are looked up in this fee menu (first match wins) and the sum - * is charged. If any power flag is not found in this menu, the request is - * rejected. - */ - powerFlagFees: PowerFlagFee[]; - /** - * Maximum sizes for queues. - * These values are used by SwingSet to compute how many messages should be - * accepted in a block. - * - * There is no required order to this list of entries, but all the chain - * nodes must all serialize and deserialize the existing order without - * permuting it. - */ - queueMax: QueueSize[]; -} -/** The current state of the module. */ -export interface State { - /** - * The allowed number of items to add to queues, as determined by SwingSet. - * Transactions which attempt to enqueue more should be rejected. - */ - queueAllowed: QueueSize[]; -} -/** Map element of a string key to a Nat bean count. */ -export interface StringBeans { - /** What the beans are for. */ - key: string; - /** The actual bean value. */ - beans: string; -} -/** Map a provisioning power flag to its corresponding fee. */ -export interface PowerFlagFee { - powerFlag: string; - fee: Coin[]; -} -/** Map element of a string key to a size. */ -export interface QueueSize { - /** What the size is for. */ - key: string; - /** The actual size value. */ - size: number; -} -/** Egress is the format for a swingset egress. */ -export interface Egress { - nickname: string; - peer: Uint8Array; - /** TODO: Remove these power flags as they are deprecated and have no effect. */ - powerFlags: string[]; -} -/** - * SwingStoreArtifact encodes an artifact of a swing-store export. - * Artifacts may be stored or transmitted in any order. Most handlers do - * maintain the artifact order from their original source as an effect of how - * they handle the artifacts. 
- */ -export interface SwingStoreArtifact { - name: string; - data: Uint8Array; -} -export declare const CoreEvalProposal: { - encode(message: CoreEvalProposal, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): CoreEvalProposal; - fromJSON(object: any): CoreEvalProposal; - toJSON(message: CoreEvalProposal): unknown; - fromPartial< - I extends { - title?: string | undefined; - description?: string | undefined; - evals?: - | { - jsonPermits?: string | undefined; - jsCode?: string | undefined; - }[] - | undefined; - } & { - title?: string | undefined; - description?: string | undefined; - evals?: - | ({ - jsonPermits?: string | undefined; - jsCode?: string | undefined; - }[] & - ({ - jsonPermits?: string | undefined; - jsCode?: string | undefined; - } & { - jsonPermits?: string | undefined; - jsCode?: string | undefined; - } & { - [K in Exclude]: never; - })[] & { - [K_1 in Exclude< - keyof I['evals'], - keyof { - jsonPermits?: string | undefined; - jsCode?: string | undefined; - }[] - >]: never; - }) - | undefined; - } & { [K_2 in Exclude]: never }, - >( - object: I, - ): CoreEvalProposal; -}; -export declare const CoreEval: { - encode(message: CoreEval, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): CoreEval; - fromJSON(object: any): CoreEval; - toJSON(message: CoreEval): unknown; - fromPartial< - I extends { - jsonPermits?: string | undefined; - jsCode?: string | undefined; - } & { - jsonPermits?: string | undefined; - jsCode?: string | undefined; - } & { [K in Exclude]: never }, - >( - object: I, - ): CoreEval; -}; -export declare const Params: { - encode(message: Params, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): Params; - fromJSON(object: any): Params; - toJSON(message: Params): unknown; - fromPartial< - I extends { - beansPerUnit?: - | { - key?: string | undefined; - beans?: string | undefined; - }[] - | undefined; - feeUnitPrice?: - | { - denom?: string | undefined; - amount?: string | undefined; - }[] - | undefined; - bootstrapVatConfig?: string | undefined; - powerFlagFees?: - | { - powerFlag?: string | undefined; - fee?: - | { - denom?: string | undefined; - amount?: string | undefined; - }[] - | undefined; - }[] - | undefined; - queueMax?: - | { - key?: string | undefined; - size?: number | undefined; - }[] - | undefined; - } & { - beansPerUnit?: - | ({ - key?: string | undefined; - beans?: string | undefined; - }[] & - ({ - key?: string | undefined; - beans?: string | undefined; - } & { - key?: string | undefined; - beans?: string | undefined; - } & { - [K in Exclude< - keyof I['beansPerUnit'][number], - keyof StringBeans - >]: never; - })[] & { - [K_1 in Exclude< - keyof I['beansPerUnit'], - keyof { - key?: string | undefined; - beans?: string | undefined; - }[] - >]: never; - }) - | undefined; - feeUnitPrice?: - | ({ - denom?: string | undefined; - amount?: string | undefined; - }[] & - ({ - denom?: string | undefined; - amount?: string | undefined; - } & { - denom?: string | undefined; - amount?: string | undefined; - } & { - [K_2 in Exclude< - keyof I['feeUnitPrice'][number], - keyof Coin - >]: never; - })[] & { - [K_3 in Exclude< - keyof I['feeUnitPrice'], - keyof { - denom?: string | undefined; - amount?: string | undefined; - }[] - >]: never; - }) - | undefined; - bootstrapVatConfig?: string | undefined; - powerFlagFees?: - | ({ - powerFlag?: string | undefined; - fee?: - | { - denom?: string | undefined; - amount?: string | undefined; 
- }[] - | undefined; - }[] & - ({ - powerFlag?: string | undefined; - fee?: - | { - denom?: string | undefined; - amount?: string | undefined; - }[] - | undefined; - } & { - powerFlag?: string | undefined; - fee?: - | ({ - denom?: string | undefined; - amount?: string | undefined; - }[] & - ({ - denom?: string | undefined; - amount?: string | undefined; - } & { - denom?: string | undefined; - amount?: string | undefined; - } & { - [K_4 in Exclude< - keyof I['powerFlagFees'][number]['fee'][number], - keyof Coin - >]: never; - })[] & { - [K_5 in Exclude< - keyof I['powerFlagFees'][number]['fee'], - keyof { - denom?: string | undefined; - amount?: string | undefined; - }[] - >]: never; - }) - | undefined; - } & { - [K_6 in Exclude< - keyof I['powerFlagFees'][number], - keyof PowerFlagFee - >]: never; - })[] & { - [K_7 in Exclude< - keyof I['powerFlagFees'], - keyof { - powerFlag?: string | undefined; - fee?: - | { - denom?: string | undefined; - amount?: string | undefined; - }[] - | undefined; - }[] - >]: never; - }) - | undefined; - queueMax?: - | ({ - key?: string | undefined; - size?: number | undefined; - }[] & - ({ - key?: string | undefined; - size?: number | undefined; - } & { - key?: string | undefined; - size?: number | undefined; - } & { - [K_8 in Exclude< - keyof I['queueMax'][number], - keyof QueueSize - >]: never; - })[] & { - [K_9 in Exclude< - keyof I['queueMax'], - keyof { - key?: string | undefined; - size?: number | undefined; - }[] - >]: never; - }) - | undefined; - } & { [K_10 in Exclude]: never }, - >( - object: I, - ): Params; -}; -export declare const State: { - encode(message: State, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): State; - fromJSON(object: any): State; - toJSON(message: State): unknown; - fromPartial< - I extends { - queueAllowed?: - | { - key?: string | undefined; - size?: number | undefined; - }[] - | undefined; - } & { - queueAllowed?: - | ({ - key?: string | undefined; - size?: number | undefined; - }[] & - ({ - key?: string | undefined; - size?: number | undefined; - } & { - key?: string | undefined; - size?: number | undefined; - } & { - [K in Exclude< - keyof I['queueAllowed'][number], - keyof QueueSize - >]: never; - })[] & { - [K_1 in Exclude< - keyof I['queueAllowed'], - keyof { - key?: string | undefined; - size?: number | undefined; - }[] - >]: never; - }) - | undefined; - } & { [K_2 in Exclude]: never }, - >( - object: I, - ): State; -}; -export declare const StringBeans: { - encode(message: StringBeans, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): StringBeans; - fromJSON(object: any): StringBeans; - toJSON(message: StringBeans): unknown; - fromPartial< - I extends { - key?: string | undefined; - beans?: string | undefined; - } & { - key?: string | undefined; - beans?: string | undefined; - } & { [K in Exclude]: never }, - >( - object: I, - ): StringBeans; -}; -export declare const PowerFlagFee: { - encode(message: PowerFlagFee, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): PowerFlagFee; - fromJSON(object: any): PowerFlagFee; - toJSON(message: PowerFlagFee): unknown; - fromPartial< - I extends { - powerFlag?: string | undefined; - fee?: - | { - denom?: string | undefined; - amount?: string | undefined; - }[] - | undefined; - } & { - powerFlag?: string | undefined; - fee?: - | ({ - denom?: string | undefined; - amount?: string | undefined; - }[] & - ({ - denom?: string | undefined; - amount?: string | 
undefined; - } & { - denom?: string | undefined; - amount?: string | undefined; - } & { - [K in Exclude]: never; - })[] & { - [K_1 in Exclude< - keyof I['fee'], - keyof { - denom?: string | undefined; - amount?: string | undefined; - }[] - >]: never; - }) - | undefined; - } & { [K_2 in Exclude]: never }, - >( - object: I, - ): PowerFlagFee; -}; -export declare const QueueSize: { - encode(message: QueueSize, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): QueueSize; - fromJSON(object: any): QueueSize; - toJSON(message: QueueSize): unknown; - fromPartial< - I extends { - key?: string | undefined; - size?: number | undefined; - } & { - key?: string | undefined; - size?: number | undefined; - } & { [K in Exclude]: never }, - >( - object: I, - ): QueueSize; -}; -export declare const Egress: { - encode(message: Egress, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): Egress; - fromJSON(object: any): Egress; - toJSON(message: Egress): unknown; - fromPartial< - I extends { - nickname?: string | undefined; - peer?: Uint8Array | undefined; - powerFlags?: string[] | undefined; - } & { - nickname?: string | undefined; - peer?: Uint8Array | undefined; - powerFlags?: - | (string[] & - string[] & { - [K in Exclude]: never; - }) - | undefined; - } & { [K_1 in Exclude]: never }, - >( - object: I, - ): Egress; -}; -export declare const SwingStoreArtifact: { - encode(message: SwingStoreArtifact, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): SwingStoreArtifact; - fromJSON(object: any): SwingStoreArtifact; - toJSON(message: SwingStoreArtifact): unknown; - fromPartial< - I extends { - name?: string | undefined; - data?: Uint8Array | undefined; - } & { - name?: string | undefined; - data?: Uint8Array | undefined; - } & { [K in Exclude]: never }, - >( - object: I, - ): SwingStoreArtifact; -}; -type Builtin = - | Date - | Function - | Uint8Array - | string - | number - | boolean - | undefined; -export type DeepPartial = T extends Builtin - ? T - : T extends Long - ? string | number | Long - : T extends Array - ? Array> - : T extends ReadonlyArray - ? ReadonlyArray> - : T extends {} - ? { - [K in keyof T]?: DeepPartial; - } - : Partial; -type KeysOfUnion = T extends T ? keyof T : never; -export type Exact = P extends Builtin - ? P - : P & { - [K in keyof P]: Exact; - } & { - [K in Exclude>]: never; - }; -export {}; diff --git a/packages/cosmic-proto/dist/agoric/swingset/swingset.js b/packages/cosmic-proto/dist/agoric/swingset/swingset.js deleted file mode 100644 index 87caa3a5edf..00000000000 --- a/packages/cosmic-proto/dist/agoric/swingset/swingset.js +++ /dev/null @@ -1,642 +0,0 @@ -/* eslint-disable */ -import Long from 'long'; -import _m0 from 'protobufjs/minimal.js'; -import { Coin } from '../../cosmos/base/v1beta1/coin.js'; -export const protobufPackage = 'agoric.swingset'; -function createBaseCoreEvalProposal() { - return { title: '', description: '', evals: [] }; -} -export const CoreEvalProposal = { - encode(message, writer = _m0.Writer.create()) { - if (message.title !== '') { - writer.uint32(10).string(message.title); - } - if (message.description !== '') { - writer.uint32(18).string(message.description); - } - for (const v of message.evals) { - CoreEval.encode(v, writer.uint32(26).fork()).ldelim(); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseCoreEvalProposal(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.title = reader.string(); - break; - case 2: - message.description = reader.string(); - break; - case 3: - message.evals.push(CoreEval.decode(reader, reader.uint32())); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - title: isSet(object.title) ? String(object.title) : '', - description: isSet(object.description) ? String(object.description) : '', - evals: Array.isArray(object?.evals) - ? object.evals.map(e => CoreEval.fromJSON(e)) - : [], - }; - }, - toJSON(message) { - const obj = {}; - message.title !== undefined && (obj.title = message.title); - message.description !== undefined && - (obj.description = message.description); - if (message.evals) { - obj.evals = message.evals.map(e => (e ? CoreEval.toJSON(e) : undefined)); - } else { - obj.evals = []; - } - return obj; - }, - fromPartial(object) { - const message = createBaseCoreEvalProposal(); - message.title = object.title ?? ''; - message.description = object.description ?? ''; - message.evals = object.evals?.map(e => CoreEval.fromPartial(e)) || []; - return message; - }, -}; -function createBaseCoreEval() { - return { jsonPermits: '', jsCode: '' }; -} -export const CoreEval = { - encode(message, writer = _m0.Writer.create()) { - if (message.jsonPermits !== '') { - writer.uint32(10).string(message.jsonPermits); - } - if (message.jsCode !== '') { - writer.uint32(18).string(message.jsCode); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseCoreEval(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.jsonPermits = reader.string(); - break; - case 2: - message.jsCode = reader.string(); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - jsonPermits: isSet(object.jsonPermits) ? String(object.jsonPermits) : '', - jsCode: isSet(object.jsCode) ? String(object.jsCode) : '', - }; - }, - toJSON(message) { - const obj = {}; - message.jsonPermits !== undefined && - (obj.jsonPermits = message.jsonPermits); - message.jsCode !== undefined && (obj.jsCode = message.jsCode); - return obj; - }, - fromPartial(object) { - const message = createBaseCoreEval(); - message.jsonPermits = object.jsonPermits ?? ''; - message.jsCode = object.jsCode ?? ''; - return message; - }, -}; -function createBaseParams() { - return { - beansPerUnit: [], - feeUnitPrice: [], - bootstrapVatConfig: '', - powerFlagFees: [], - queueMax: [], - }; -} -export const Params = { - encode(message, writer = _m0.Writer.create()) { - for (const v of message.beansPerUnit) { - StringBeans.encode(v, writer.uint32(10).fork()).ldelim(); - } - for (const v of message.feeUnitPrice) { - Coin.encode(v, writer.uint32(18).fork()).ldelim(); - } - if (message.bootstrapVatConfig !== '') { - writer.uint32(26).string(message.bootstrapVatConfig); - } - for (const v of message.powerFlagFees) { - PowerFlagFee.encode(v, writer.uint32(34).fork()).ldelim(); - } - for (const v of message.queueMax) { - QueueSize.encode(v, writer.uint32(42).fork()).ldelim(); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? 
input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseParams(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.beansPerUnit.push( - StringBeans.decode(reader, reader.uint32()), - ); - break; - case 2: - message.feeUnitPrice.push(Coin.decode(reader, reader.uint32())); - break; - case 3: - message.bootstrapVatConfig = reader.string(); - break; - case 4: - message.powerFlagFees.push( - PowerFlagFee.decode(reader, reader.uint32()), - ); - break; - case 5: - message.queueMax.push(QueueSize.decode(reader, reader.uint32())); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - beansPerUnit: Array.isArray(object?.beansPerUnit) - ? object.beansPerUnit.map(e => StringBeans.fromJSON(e)) - : [], - feeUnitPrice: Array.isArray(object?.feeUnitPrice) - ? object.feeUnitPrice.map(e => Coin.fromJSON(e)) - : [], - bootstrapVatConfig: isSet(object.bootstrapVatConfig) - ? String(object.bootstrapVatConfig) - : '', - powerFlagFees: Array.isArray(object?.powerFlagFees) - ? object.powerFlagFees.map(e => PowerFlagFee.fromJSON(e)) - : [], - queueMax: Array.isArray(object?.queueMax) - ? object.queueMax.map(e => QueueSize.fromJSON(e)) - : [], - }; - }, - toJSON(message) { - const obj = {}; - if (message.beansPerUnit) { - obj.beansPerUnit = message.beansPerUnit.map(e => - e ? StringBeans.toJSON(e) : undefined, - ); - } else { - obj.beansPerUnit = []; - } - if (message.feeUnitPrice) { - obj.feeUnitPrice = message.feeUnitPrice.map(e => - e ? Coin.toJSON(e) : undefined, - ); - } else { - obj.feeUnitPrice = []; - } - message.bootstrapVatConfig !== undefined && - (obj.bootstrapVatConfig = message.bootstrapVatConfig); - if (message.powerFlagFees) { - obj.powerFlagFees = message.powerFlagFees.map(e => - e ? PowerFlagFee.toJSON(e) : undefined, - ); - } else { - obj.powerFlagFees = []; - } - if (message.queueMax) { - obj.queueMax = message.queueMax.map(e => - e ? QueueSize.toJSON(e) : undefined, - ); - } else { - obj.queueMax = []; - } - return obj; - }, - fromPartial(object) { - const message = createBaseParams(); - message.beansPerUnit = - object.beansPerUnit?.map(e => StringBeans.fromPartial(e)) || []; - message.feeUnitPrice = - object.feeUnitPrice?.map(e => Coin.fromPartial(e)) || []; - message.bootstrapVatConfig = object.bootstrapVatConfig ?? ''; - message.powerFlagFees = - object.powerFlagFees?.map(e => PowerFlagFee.fromPartial(e)) || []; - message.queueMax = - object.queueMax?.map(e => QueueSize.fromPartial(e)) || []; - return message; - }, -}; -function createBaseState() { - return { queueAllowed: [] }; -} -export const State = { - encode(message, writer = _m0.Writer.create()) { - for (const v of message.queueAllowed) { - QueueSize.encode(v, writer.uint32(10).fork()).ldelim(); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseState(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.queueAllowed.push(QueueSize.decode(reader, reader.uint32())); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - queueAllowed: Array.isArray(object?.queueAllowed) - ? 
object.queueAllowed.map(e => QueueSize.fromJSON(e)) - : [], - }; - }, - toJSON(message) { - const obj = {}; - if (message.queueAllowed) { - obj.queueAllowed = message.queueAllowed.map(e => - e ? QueueSize.toJSON(e) : undefined, - ); - } else { - obj.queueAllowed = []; - } - return obj; - }, - fromPartial(object) { - const message = createBaseState(); - message.queueAllowed = - object.queueAllowed?.map(e => QueueSize.fromPartial(e)) || []; - return message; - }, -}; -function createBaseStringBeans() { - return { key: '', beans: '' }; -} -export const StringBeans = { - encode(message, writer = _m0.Writer.create()) { - if (message.key !== '') { - writer.uint32(10).string(message.key); - } - if (message.beans !== '') { - writer.uint32(18).string(message.beans); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseStringBeans(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.key = reader.string(); - break; - case 2: - message.beans = reader.string(); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - key: isSet(object.key) ? String(object.key) : '', - beans: isSet(object.beans) ? String(object.beans) : '', - }; - }, - toJSON(message) { - const obj = {}; - message.key !== undefined && (obj.key = message.key); - message.beans !== undefined && (obj.beans = message.beans); - return obj; - }, - fromPartial(object) { - const message = createBaseStringBeans(); - message.key = object.key ?? ''; - message.beans = object.beans ?? ''; - return message; - }, -}; -function createBasePowerFlagFee() { - return { powerFlag: '', fee: [] }; -} -export const PowerFlagFee = { - encode(message, writer = _m0.Writer.create()) { - if (message.powerFlag !== '') { - writer.uint32(10).string(message.powerFlag); - } - for (const v of message.fee) { - Coin.encode(v, writer.uint32(18).fork()).ldelim(); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBasePowerFlagFee(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.powerFlag = reader.string(); - break; - case 2: - message.fee.push(Coin.decode(reader, reader.uint32())); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - powerFlag: isSet(object.powerFlag) ? String(object.powerFlag) : '', - fee: Array.isArray(object?.fee) - ? object.fee.map(e => Coin.fromJSON(e)) - : [], - }; - }, - toJSON(message) { - const obj = {}; - message.powerFlag !== undefined && (obj.powerFlag = message.powerFlag); - if (message.fee) { - obj.fee = message.fee.map(e => (e ? Coin.toJSON(e) : undefined)); - } else { - obj.fee = []; - } - return obj; - }, - fromPartial(object) { - const message = createBasePowerFlagFee(); - message.powerFlag = object.powerFlag ?? 
''; - message.fee = object.fee?.map(e => Coin.fromPartial(e)) || []; - return message; - }, -}; -function createBaseQueueSize() { - return { key: '', size: 0 }; -} -export const QueueSize = { - encode(message, writer = _m0.Writer.create()) { - if (message.key !== '') { - writer.uint32(10).string(message.key); - } - if (message.size !== 0) { - writer.uint32(16).int32(message.size); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseQueueSize(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.key = reader.string(); - break; - case 2: - message.size = reader.int32(); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - key: isSet(object.key) ? String(object.key) : '', - size: isSet(object.size) ? Number(object.size) : 0, - }; - }, - toJSON(message) { - const obj = {}; - message.key !== undefined && (obj.key = message.key); - message.size !== undefined && (obj.size = Math.round(message.size)); - return obj; - }, - fromPartial(object) { - const message = createBaseQueueSize(); - message.key = object.key ?? ''; - message.size = object.size ?? 0; - return message; - }, -}; -function createBaseEgress() { - return { nickname: '', peer: new Uint8Array(), powerFlags: [] }; -} -export const Egress = { - encode(message, writer = _m0.Writer.create()) { - if (message.nickname !== '') { - writer.uint32(10).string(message.nickname); - } - if (message.peer.length !== 0) { - writer.uint32(18).bytes(message.peer); - } - for (const v of message.powerFlags) { - writer.uint32(26).string(v); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseEgress(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.nickname = reader.string(); - break; - case 2: - message.peer = reader.bytes(); - break; - case 3: - message.powerFlags.push(reader.string()); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - nickname: isSet(object.nickname) ? String(object.nickname) : '', - peer: isSet(object.peer) - ? bytesFromBase64(object.peer) - : new Uint8Array(), - powerFlags: Array.isArray(object?.powerFlags) - ? object.powerFlags.map(e => String(e)) - : [], - }; - }, - toJSON(message) { - const obj = {}; - message.nickname !== undefined && (obj.nickname = message.nickname); - message.peer !== undefined && - (obj.peer = base64FromBytes( - message.peer !== undefined ? message.peer : new Uint8Array(), - )); - if (message.powerFlags) { - obj.powerFlags = message.powerFlags.map(e => e); - } else { - obj.powerFlags = []; - } - return obj; - }, - fromPartial(object) { - const message = createBaseEgress(); - message.nickname = object.nickname ?? ''; - message.peer = object.peer ?? 
new Uint8Array(); - message.powerFlags = object.powerFlags?.map(e => e) || []; - return message; - }, -}; -function createBaseSwingStoreArtifact() { - return { name: '', data: new Uint8Array() }; -} -export const SwingStoreArtifact = { - encode(message, writer = _m0.Writer.create()) { - if (message.name !== '') { - writer.uint32(10).string(message.name); - } - if (message.data.length !== 0) { - writer.uint32(18).bytes(message.data); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseSwingStoreArtifact(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.name = reader.string(); - break; - case 2: - message.data = reader.bytes(); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - name: isSet(object.name) ? String(object.name) : '', - data: isSet(object.data) - ? bytesFromBase64(object.data) - : new Uint8Array(), - }; - }, - toJSON(message) { - const obj = {}; - message.name !== undefined && (obj.name = message.name); - message.data !== undefined && - (obj.data = base64FromBytes( - message.data !== undefined ? message.data : new Uint8Array(), - )); - return obj; - }, - fromPartial(object) { - const message = createBaseSwingStoreArtifact(); - message.name = object.name ?? ''; - message.data = object.data ?? new Uint8Array(); - return message; - }, -}; -var globalThis = (() => { - if (typeof globalThis !== 'undefined') { - return globalThis; - } - if (typeof self !== 'undefined') { - return self; - } - if (typeof window !== 'undefined') { - return window; - } - if (typeof global !== 'undefined') { - return global; - } - throw 'Unable to locate global object'; -})(); -function bytesFromBase64(b64) { - if (globalThis.Buffer) { - return Uint8Array.from(globalThis.Buffer.from(b64, 'base64')); - } else { - const bin = globalThis.atob(b64); - const arr = new Uint8Array(bin.length); - for (let i = 0; i < bin.length; ++i) { - arr[i] = bin.charCodeAt(i); - } - return arr; - } -} -function base64FromBytes(arr) { - if (globalThis.Buffer) { - return globalThis.Buffer.from(arr).toString('base64'); - } else { - const bin = []; - arr.forEach(byte => { - bin.push(String.fromCharCode(byte)); - }); - return globalThis.btoa(bin.join('')); - } -} -if (_m0.util.Long !== Long) { - _m0.util.Long = Long; - _m0.configure(); -} -function isSet(value) { - return value !== null && value !== undefined; -} diff --git a/packages/cosmic-proto/dist/agoric/vstorage/query.d.ts b/packages/cosmic-proto/dist/agoric/vstorage/query.d.ts deleted file mode 100644 index 3283f878ea6..00000000000 --- a/packages/cosmic-proto/dist/agoric/vstorage/query.d.ts +++ /dev/null @@ -1,515 +0,0 @@ -import Long from 'long'; -import _m0 from 'protobufjs/minimal.js'; -import { - PageRequest, - PageResponse, -} from '../../cosmos/base/query/v1beta1/pagination.js'; -export declare const protobufPackage = 'agoric.vstorage'; -/** QueryDataRequest is the vstorage path data query. */ -export interface QueryDataRequest { - path: string; -} -/** QueryDataResponse is the vstorage path data response. */ -export interface QueryDataResponse { - value: string; -} -/** QueryCapDataRequest contains a path and formatting configuration. 
*/ -export interface QueryCapDataRequest { - path: string; - /** - * mediaType must be an actual media type in the registry at - * https://www.iana.org/assignments/media-types/media-types.xhtml - * or a special value that does not conflict with the media type syntax. - * The only valid value is "JSON Lines", which is also the default. - */ - mediaType: string; - /** - * itemFormat, if present, must be the special value "flat" to indicate that - * the deep structure of each item should be flattened into a single level - * with kebab-case keys (e.g., `{ "metrics": { "min": 0, "max": 88 } }` as - * `{ "metrics-min": 0, "metrics-max": 88 }`). - */ - itemFormat: string; - /** - * remotableValueFormat indicates how to transform references to opaque but - * distinguishable Remotables into readable embedded representations. - * * "object" represents each Remotable as an `{ id, allegedName }` object, e.g. `{ "id": "board007", "allegedName": "IST brand" }`. - * * "string" represents each Remotable as a string with bracket-wrapped contents including its alleged name and id, e.g. "[Alleged: IST brand ]". - */ - remotableValueFormat: string; -} -/** - * QueryCapDataResponse represents the result with the requested formatting, - * reserving space for future metadata such as media type. - */ -export interface QueryCapDataResponse { - blockHeight: string; - value: string; -} -/** QueryChildrenRequest is the vstorage path children query. */ -export interface QueryChildrenRequest { - path: string; - pagination?: PageRequest; -} -/** QueryChildrenResponse is the vstorage path children response. */ -export interface QueryChildrenResponse { - children: string[]; - pagination?: PageResponse; -} -export declare const QueryDataRequest: { - encode(message: QueryDataRequest, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): QueryDataRequest; - fromJSON(object: any): QueryDataRequest; - toJSON(message: QueryDataRequest): unknown; - fromPartial< - I extends { - path?: string | undefined; - } & { - path?: string | undefined; - } & { [K in Exclude]: never }, - >( - object: I, - ): QueryDataRequest; -}; -export declare const QueryDataResponse: { - encode(message: QueryDataResponse, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): QueryDataResponse; - fromJSON(object: any): QueryDataResponse; - toJSON(message: QueryDataResponse): unknown; - fromPartial< - I extends { - value?: string | undefined; - } & { - value?: string | undefined; - } & { [K in Exclude]: never }, - >( - object: I, - ): QueryDataResponse; -}; -export declare const QueryCapDataRequest: { - encode(message: QueryCapDataRequest, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): QueryCapDataRequest; - fromJSON(object: any): QueryCapDataRequest; - toJSON(message: QueryCapDataRequest): unknown; - fromPartial< - I extends { - path?: string | undefined; - mediaType?: string | undefined; - itemFormat?: string | undefined; - remotableValueFormat?: string | undefined; - } & { - path?: string | undefined; - mediaType?: string | undefined; - itemFormat?: string | undefined; - remotableValueFormat?: string | undefined; - } & { [K in Exclude]: never }, - >( - object: I, - ): QueryCapDataRequest; -}; -export declare const QueryCapDataResponse: { - encode(message: QueryCapDataResponse, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): QueryCapDataResponse; - fromJSON(object: any): 
QueryCapDataResponse; - toJSON(message: QueryCapDataResponse): unknown; - fromPartial< - I extends { - blockHeight?: string | undefined; - value?: string | undefined; - } & { - blockHeight?: string | undefined; - value?: string | undefined; - } & { [K in Exclude]: never }, - >( - object: I, - ): QueryCapDataResponse; -}; -export declare const QueryChildrenRequest: { - encode(message: QueryChildrenRequest, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): QueryChildrenRequest; - fromJSON(object: any): QueryChildrenRequest; - toJSON(message: QueryChildrenRequest): unknown; - fromPartial< - I extends { - path?: string | undefined; - pagination?: - | { - key?: Uint8Array | undefined; - offset?: string | number | Long | undefined; - limit?: string | number | Long | undefined; - countTotal?: boolean | undefined; - reverse?: boolean | undefined; - } - | undefined; - } & { - path?: string | undefined; - pagination?: - | ({ - key?: Uint8Array | undefined; - offset?: string | number | Long | undefined; - limit?: string | number | Long | undefined; - countTotal?: boolean | undefined; - reverse?: boolean | undefined; - } & { - key?: Uint8Array | undefined; - offset?: - | string - | number - | (Long & { - high: number; - low: number; - unsigned: boolean; - add: (addend: string | number | Long) => Long; - and: (other: string | number | Long) => Long; - compare: (other: string | number | Long) => number; - comp: (other: string | number | Long) => number; - divide: (divisor: string | number | Long) => Long; - div: (divisor: string | number | Long) => Long; - equals: (other: string | number | Long) => boolean; - eq: (other: string | number | Long) => boolean; - getHighBits: () => number; - getHighBitsUnsigned: () => number; - getLowBits: () => number; - getLowBitsUnsigned: () => number; - getNumBitsAbs: () => number; - greaterThan: (other: string | number | Long) => boolean; - gt: (other: string | number | Long) => boolean; - greaterThanOrEqual: ( - other: string | number | Long, - ) => boolean; - gte: (other: string | number | Long) => boolean; - ge: (other: string | number | Long) => boolean; - isEven: () => boolean; - isNegative: () => boolean; - isOdd: () => boolean; - isPositive: () => boolean; - isZero: () => boolean; - eqz: () => boolean; - lessThan: (other: string | number | Long) => boolean; - lt: (other: string | number | Long) => boolean; - lessThanOrEqual: (other: string | number | Long) => boolean; - lte: (other: string | number | Long) => boolean; - le: (other: string | number | Long) => boolean; - modulo: (other: string | number | Long) => Long; - mod: (other: string | number | Long) => Long; - rem: (other: string | number | Long) => Long; - multiply: (multiplier: string | number | Long) => Long; - mul: (multiplier: string | number | Long) => Long; - negate: () => Long; - neg: () => Long; - not: () => Long; - countLeadingZeros: () => number; - clz: () => number; - countTrailingZeros: () => number; - ctz: () => number; - notEquals: (other: string | number | Long) => boolean; - neq: (other: string | number | Long) => boolean; - ne: (other: string | number | Long) => boolean; - or: (other: string | number | Long) => Long; - shiftLeft: (numBits: number | Long) => Long; - shl: (numBits: number | Long) => Long; - shiftRight: (numBits: number | Long) => Long; - shr: (numBits: number | Long) => Long; - shiftRightUnsigned: (numBits: number | Long) => Long; - shru: (numBits: number | Long) => Long; - shr_u: (numBits: number | Long) => Long; - rotateLeft: 
(numBits: number | Long) => Long; - rotl: (numBits: number | Long) => Long; - rotateRight: (numBits: number | Long) => Long; - rotr: (numBits: number | Long) => Long; - subtract: (subtrahend: string | number | Long) => Long; - sub: (subtrahend: string | number | Long) => Long; - toInt: () => number; - toNumber: () => number; - toBytes: (le?: boolean | undefined) => number[]; - toBytesLE: () => number[]; - toBytesBE: () => number[]; - toSigned: () => Long; - toString: (radix?: number | undefined) => string; - toUnsigned: () => Long; - xor: (other: string | number | Long) => Long; - } & { - [K in Exclude< - keyof I['pagination']['offset'], - keyof Long - >]: never; - }) - | undefined; - limit?: - | string - | number - | (Long & { - high: number; - low: number; - unsigned: boolean; - add: (addend: string | number | Long) => Long; - and: (other: string | number | Long) => Long; - compare: (other: string | number | Long) => number; - comp: (other: string | number | Long) => number; - divide: (divisor: string | number | Long) => Long; - div: (divisor: string | number | Long) => Long; - equals: (other: string | number | Long) => boolean; - eq: (other: string | number | Long) => boolean; - getHighBits: () => number; - getHighBitsUnsigned: () => number; - getLowBits: () => number; - getLowBitsUnsigned: () => number; - getNumBitsAbs: () => number; - greaterThan: (other: string | number | Long) => boolean; - gt: (other: string | number | Long) => boolean; - greaterThanOrEqual: ( - other: string | number | Long, - ) => boolean; - gte: (other: string | number | Long) => boolean; - ge: (other: string | number | Long) => boolean; - isEven: () => boolean; - isNegative: () => boolean; - isOdd: () => boolean; - isPositive: () => boolean; - isZero: () => boolean; - eqz: () => boolean; - lessThan: (other: string | number | Long) => boolean; - lt: (other: string | number | Long) => boolean; - lessThanOrEqual: (other: string | number | Long) => boolean; - lte: (other: string | number | Long) => boolean; - le: (other: string | number | Long) => boolean; - modulo: (other: string | number | Long) => Long; - mod: (other: string | number | Long) => Long; - rem: (other: string | number | Long) => Long; - multiply: (multiplier: string | number | Long) => Long; - mul: (multiplier: string | number | Long) => Long; - negate: () => Long; - neg: () => Long; - not: () => Long; - countLeadingZeros: () => number; - clz: () => number; - countTrailingZeros: () => number; - ctz: () => number; - notEquals: (other: string | number | Long) => boolean; - neq: (other: string | number | Long) => boolean; - ne: (other: string | number | Long) => boolean; - or: (other: string | number | Long) => Long; - shiftLeft: (numBits: number | Long) => Long; - shl: (numBits: number | Long) => Long; - shiftRight: (numBits: number | Long) => Long; - shr: (numBits: number | Long) => Long; - shiftRightUnsigned: (numBits: number | Long) => Long; - shru: (numBits: number | Long) => Long; - shr_u: (numBits: number | Long) => Long; - rotateLeft: (numBits: number | Long) => Long; - rotl: (numBits: number | Long) => Long; - rotateRight: (numBits: number | Long) => Long; - rotr: (numBits: number | Long) => Long; - subtract: (subtrahend: string | number | Long) => Long; - sub: (subtrahend: string | number | Long) => Long; - toInt: () => number; - toNumber: () => number; - toBytes: (le?: boolean | undefined) => number[]; - toBytesLE: () => number[]; - toBytesBE: () => number[]; - toSigned: () => Long; - toString: (radix?: number | undefined) => string; - 
toUnsigned: () => Long; - xor: (other: string | number | Long) => Long; - } & { - [K_1 in Exclude< - keyof I['pagination']['limit'], - keyof Long - >]: never; - }) - | undefined; - countTotal?: boolean | undefined; - reverse?: boolean | undefined; - } & { - [K_2 in Exclude]: never; - }) - | undefined; - } & { [K_3 in Exclude]: never }, - >( - object: I, - ): QueryChildrenRequest; -}; -export declare const QueryChildrenResponse: { - encode(message: QueryChildrenResponse, writer?: _m0.Writer): _m0.Writer; - decode( - input: _m0.Reader | Uint8Array, - length?: number, - ): QueryChildrenResponse; - fromJSON(object: any): QueryChildrenResponse; - toJSON(message: QueryChildrenResponse): unknown; - fromPartial< - I extends { - children?: string[] | undefined; - pagination?: - | { - nextKey?: Uint8Array | undefined; - total?: string | number | Long | undefined; - } - | undefined; - } & { - children?: - | (string[] & - string[] & { - [K in Exclude]: never; - }) - | undefined; - pagination?: - | ({ - nextKey?: Uint8Array | undefined; - total?: string | number | Long | undefined; - } & { - nextKey?: Uint8Array | undefined; - total?: - | string - | number - | (Long & { - high: number; - low: number; - unsigned: boolean; - add: (addend: string | number | Long) => Long; - and: (other: string | number | Long) => Long; - compare: (other: string | number | Long) => number; - comp: (other: string | number | Long) => number; - divide: (divisor: string | number | Long) => Long; - div: (divisor: string | number | Long) => Long; - equals: (other: string | number | Long) => boolean; - eq: (other: string | number | Long) => boolean; - getHighBits: () => number; - getHighBitsUnsigned: () => number; - getLowBits: () => number; - getLowBitsUnsigned: () => number; - getNumBitsAbs: () => number; - greaterThan: (other: string | number | Long) => boolean; - gt: (other: string | number | Long) => boolean; - greaterThanOrEqual: ( - other: string | number | Long, - ) => boolean; - gte: (other: string | number | Long) => boolean; - ge: (other: string | number | Long) => boolean; - isEven: () => boolean; - isNegative: () => boolean; - isOdd: () => boolean; - isPositive: () => boolean; - isZero: () => boolean; - eqz: () => boolean; - lessThan: (other: string | number | Long) => boolean; - lt: (other: string | number | Long) => boolean; - lessThanOrEqual: (other: string | number | Long) => boolean; - lte: (other: string | number | Long) => boolean; - le: (other: string | number | Long) => boolean; - modulo: (other: string | number | Long) => Long; - mod: (other: string | number | Long) => Long; - rem: (other: string | number | Long) => Long; - multiply: (multiplier: string | number | Long) => Long; - mul: (multiplier: string | number | Long) => Long; - negate: () => Long; - neg: () => Long; - not: () => Long; - countLeadingZeros: () => number; - clz: () => number; - countTrailingZeros: () => number; - ctz: () => number; - notEquals: (other: string | number | Long) => boolean; - neq: (other: string | number | Long) => boolean; - ne: (other: string | number | Long) => boolean; - or: (other: string | number | Long) => Long; - shiftLeft: (numBits: number | Long) => Long; - shl: (numBits: number | Long) => Long; - shiftRight: (numBits: number | Long) => Long; - shr: (numBits: number | Long) => Long; - shiftRightUnsigned: (numBits: number | Long) => Long; - shru: (numBits: number | Long) => Long; - shr_u: (numBits: number | Long) => Long; - rotateLeft: (numBits: number | Long) => Long; - rotl: (numBits: number | Long) => Long; - 
rotateRight: (numBits: number | Long) => Long; - rotr: (numBits: number | Long) => Long; - subtract: (subtrahend: string | number | Long) => Long; - sub: (subtrahend: string | number | Long) => Long; - toInt: () => number; - toNumber: () => number; - toBytes: (le?: boolean | undefined) => number[]; - toBytesLE: () => number[]; - toBytesBE: () => number[]; - toSigned: () => Long; - toString: (radix?: number | undefined) => string; - toUnsigned: () => Long; - xor: (other: string | number | Long) => Long; - } & { - [K_1 in Exclude< - keyof I['pagination']['total'], - keyof Long - >]: never; - }) - | undefined; - } & { - [K_2 in Exclude]: never; - }) - | undefined; - } & { [K_3 in Exclude]: never }, - >( - object: I, - ): QueryChildrenResponse; -}; -/** Query defines the gRPC querier service */ -export interface Query { - /** Return the raw string value of an arbitrary vstorage datum. */ - Data(request: QueryDataRequest): Promise; - /** - * Return a formatted representation of a vstorage datum that must be - * a valid StreamCell with CapData values, or standalone CapData. - */ - CapData(request: QueryCapDataRequest): Promise; - /** Return the children of a given vstorage path. */ - Children(request: QueryChildrenRequest): Promise; -} -export declare class QueryClientImpl implements Query { - private readonly rpc; - private readonly service; - constructor( - rpc: Rpc, - opts?: { - service?: string; - }, - ); - Data(request: QueryDataRequest): Promise; - CapData(request: QueryCapDataRequest): Promise; - Children(request: QueryChildrenRequest): Promise; -} -interface Rpc { - request( - service: string, - method: string, - data: Uint8Array, - ): Promise; -} -type Builtin = - | Date - | Function - | Uint8Array - | string - | number - | boolean - | undefined; -export type DeepPartial = T extends Builtin - ? T - : T extends Long - ? string | number | Long - : T extends Array - ? Array> - : T extends ReadonlyArray - ? ReadonlyArray> - : T extends {} - ? { - [K in keyof T]?: DeepPartial; - } - : Partial; -type KeysOfUnion = T extends T ? keyof T : never; -export type Exact = P extends Builtin - ? P - : P & { - [K in keyof P]: Exact; - } & { - [K in Exclude>]: never; - }; -export {}; diff --git a/packages/cosmic-proto/dist/agoric/vstorage/query.js b/packages/cosmic-proto/dist/agoric/vstorage/query.js deleted file mode 100644 index f156baa3fe2..00000000000 --- a/packages/cosmic-proto/dist/agoric/vstorage/query.js +++ /dev/null @@ -1,382 +0,0 @@ -/* eslint-disable */ -import Long from 'long'; -import _m0 from 'protobufjs/minimal.js'; -import { - PageRequest, - PageResponse, -} from '../../cosmos/base/query/v1beta1/pagination.js'; -export const protobufPackage = 'agoric.vstorage'; -function createBaseQueryDataRequest() { - return { path: '' }; -} -export const QueryDataRequest = { - encode(message, writer = _m0.Writer.create()) { - if (message.path !== '') { - writer.uint32(10).string(message.path); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseQueryDataRequest(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.path = reader.string(); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { path: isSet(object.path) ? 
String(object.path) : '' }; - }, - toJSON(message) { - const obj = {}; - message.path !== undefined && (obj.path = message.path); - return obj; - }, - fromPartial(object) { - const message = createBaseQueryDataRequest(); - message.path = object.path ?? ''; - return message; - }, -}; -function createBaseQueryDataResponse() { - return { value: '' }; -} -export const QueryDataResponse = { - encode(message, writer = _m0.Writer.create()) { - if (message.value !== '') { - writer.uint32(10).string(message.value); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseQueryDataResponse(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.value = reader.string(); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { value: isSet(object.value) ? String(object.value) : '' }; - }, - toJSON(message) { - const obj = {}; - message.value !== undefined && (obj.value = message.value); - return obj; - }, - fromPartial(object) { - const message = createBaseQueryDataResponse(); - message.value = object.value ?? ''; - return message; - }, -}; -function createBaseQueryCapDataRequest() { - return { path: '', mediaType: '', itemFormat: '', remotableValueFormat: '' }; -} -export const QueryCapDataRequest = { - encode(message, writer = _m0.Writer.create()) { - if (message.path !== '') { - writer.uint32(10).string(message.path); - } - if (message.mediaType !== '') { - writer.uint32(18).string(message.mediaType); - } - if (message.itemFormat !== '') { - writer.uint32(26).string(message.itemFormat); - } - if (message.remotableValueFormat !== '') { - writer.uint32(82).string(message.remotableValueFormat); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseQueryCapDataRequest(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.path = reader.string(); - break; - case 2: - message.mediaType = reader.string(); - break; - case 3: - message.itemFormat = reader.string(); - break; - case 10: - message.remotableValueFormat = reader.string(); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - path: isSet(object.path) ? String(object.path) : '', - mediaType: isSet(object.mediaType) ? String(object.mediaType) : '', - itemFormat: isSet(object.itemFormat) ? String(object.itemFormat) : '', - remotableValueFormat: isSet(object.remotableValueFormat) - ? String(object.remotableValueFormat) - : '', - }; - }, - toJSON(message) { - const obj = {}; - message.path !== undefined && (obj.path = message.path); - message.mediaType !== undefined && (obj.mediaType = message.mediaType); - message.itemFormat !== undefined && (obj.itemFormat = message.itemFormat); - message.remotableValueFormat !== undefined && - (obj.remotableValueFormat = message.remotableValueFormat); - return obj; - }, - fromPartial(object) { - const message = createBaseQueryCapDataRequest(); - message.path = object.path ?? ''; - message.mediaType = object.mediaType ?? ''; - message.itemFormat = object.itemFormat ?? ''; - message.remotableValueFormat = object.remotableValueFormat ?? 
''; - return message; - }, -}; -function createBaseQueryCapDataResponse() { - return { blockHeight: '', value: '' }; -} -export const QueryCapDataResponse = { - encode(message, writer = _m0.Writer.create()) { - if (message.blockHeight !== '') { - writer.uint32(10).string(message.blockHeight); - } - if (message.value !== '') { - writer.uint32(82).string(message.value); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseQueryCapDataResponse(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.blockHeight = reader.string(); - break; - case 10: - message.value = reader.string(); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - blockHeight: isSet(object.blockHeight) ? String(object.blockHeight) : '', - value: isSet(object.value) ? String(object.value) : '', - }; - }, - toJSON(message) { - const obj = {}; - message.blockHeight !== undefined && - (obj.blockHeight = message.blockHeight); - message.value !== undefined && (obj.value = message.value); - return obj; - }, - fromPartial(object) { - const message = createBaseQueryCapDataResponse(); - message.blockHeight = object.blockHeight ?? ''; - message.value = object.value ?? ''; - return message; - }, -}; -function createBaseQueryChildrenRequest() { - return { path: '', pagination: undefined }; -} -export const QueryChildrenRequest = { - encode(message, writer = _m0.Writer.create()) { - if (message.path !== '') { - writer.uint32(10).string(message.path); - } - if (message.pagination !== undefined) { - PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseQueryChildrenRequest(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.path = reader.string(); - break; - case 2: - message.pagination = PageRequest.decode(reader, reader.uint32()); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - path: isSet(object.path) ? String(object.path) : '', - pagination: isSet(object.pagination) - ? PageRequest.fromJSON(object.pagination) - : undefined, - }; - }, - toJSON(message) { - const obj = {}; - message.path !== undefined && (obj.path = message.path); - message.pagination !== undefined && - (obj.pagination = message.pagination - ? PageRequest.toJSON(message.pagination) - : undefined); - return obj; - }, - fromPartial(object) { - const message = createBaseQueryChildrenRequest(); - message.path = object.path ?? ''; - message.pagination = - object.pagination !== undefined && object.pagination !== null - ? 
PageRequest.fromPartial(object.pagination) - : undefined; - return message; - }, -}; -function createBaseQueryChildrenResponse() { - return { children: [], pagination: undefined }; -} -export const QueryChildrenResponse = { - encode(message, writer = _m0.Writer.create()) { - for (const v of message.children) { - writer.uint32(10).string(v); - } - if (message.pagination !== undefined) { - PageResponse.encode( - message.pagination, - writer.uint32(18).fork(), - ).ldelim(); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseQueryChildrenResponse(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.children.push(reader.string()); - break; - case 2: - message.pagination = PageResponse.decode(reader, reader.uint32()); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - children: Array.isArray(object?.children) - ? object.children.map(e => String(e)) - : [], - pagination: isSet(object.pagination) - ? PageResponse.fromJSON(object.pagination) - : undefined, - }; - }, - toJSON(message) { - const obj = {}; - if (message.children) { - obj.children = message.children.map(e => e); - } else { - obj.children = []; - } - message.pagination !== undefined && - (obj.pagination = message.pagination - ? PageResponse.toJSON(message.pagination) - : undefined); - return obj; - }, - fromPartial(object) { - const message = createBaseQueryChildrenResponse(); - message.children = object.children?.map(e => e) || []; - message.pagination = - object.pagination !== undefined && object.pagination !== null - ? 
PageResponse.fromPartial(object.pagination) - : undefined; - return message; - }, -}; -export class QueryClientImpl { - rpc; - service; - constructor(rpc, opts) { - this.service = opts?.service || 'agoric.vstorage.Query'; - this.rpc = rpc; - this.Data = this.Data.bind(this); - this.CapData = this.CapData.bind(this); - this.Children = this.Children.bind(this); - } - Data(request) { - const data = QueryDataRequest.encode(request).finish(); - const promise = this.rpc.request(this.service, 'Data', data); - return promise.then(data => QueryDataResponse.decode(new _m0.Reader(data))); - } - CapData(request) { - const data = QueryCapDataRequest.encode(request).finish(); - const promise = this.rpc.request(this.service, 'CapData', data); - return promise.then(data => - QueryCapDataResponse.decode(new _m0.Reader(data)), - ); - } - Children(request) { - const data = QueryChildrenRequest.encode(request).finish(); - const promise = this.rpc.request(this.service, 'Children', data); - return promise.then(data => - QueryChildrenResponse.decode(new _m0.Reader(data)), - ); - } -} -if (_m0.util.Long !== Long) { - _m0.util.Long = Long; - _m0.configure(); -} -function isSet(value) { - return value !== null && value !== undefined; -} diff --git a/packages/cosmic-proto/dist/cosmos/base/query/v1beta1/pagination.d.ts b/packages/cosmic-proto/dist/cosmos/base/query/v1beta1/pagination.d.ts deleted file mode 100644 index 3f3571dd673..00000000000 --- a/packages/cosmic-proto/dist/cosmos/base/query/v1beta1/pagination.d.ts +++ /dev/null @@ -1,322 +0,0 @@ -import Long from "long"; -import _m0 from "protobufjs/minimal.js"; -export declare const protobufPackage = "cosmos.base.query.v1beta1"; -/** - * PageRequest is to be embedded in gRPC request messages for efficient - * pagination. Ex: - * - * message SomeRequest { - * Foo some_parameter = 1; - * PageRequest pagination = 2; - * } - */ -export interface PageRequest { - /** - * key is a value returned in PageResponse.next_key to begin - * querying the next page most efficiently. Only one of offset or key - * should be set. - */ - key: Uint8Array; - /** - * offset is a numeric offset that can be used when key is unavailable. - * It is less efficient than using key. Only one of offset or key should - * be set. - */ - offset: Long; - /** - * limit is the total number of results to be returned in the result page. - * If left empty it will default to a value to be set by each app. - */ - limit: Long; - /** - * count_total is set to true to indicate that the result set should include - * a count of the total number of items available for pagination in UIs. - * count_total is only respected when offset is used. It is ignored when key - * is set. - */ - countTotal: boolean; - /** - * reverse is set to true if results are to be returned in the descending order. - * - * Since: cosmos-sdk 0.43 - */ - reverse: boolean; -} -/** - * PageResponse is to be embedded in gRPC response messages where the - * corresponding request message has used PageRequest. - * - * message SomeResponse { - * repeated Bar results = 1; - * PageResponse page = 2; - * } - */ -export interface PageResponse { - /** - * next_key is the key to be passed to PageRequest.key to - * query the next page most efficiently. It will be empty if - * there are no more results. 
- */ - nextKey: Uint8Array; - /** - * total is total number of results available if PageRequest.count_total - * was set, its value is undefined otherwise - */ - total: Long; -} -export declare const PageRequest: { - encode(message: PageRequest, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): PageRequest; - fromJSON(object: any): PageRequest; - toJSON(message: PageRequest): unknown; - fromPartial Long; - and: (other: string | number | Long) => Long; - compare: (other: string | number | Long) => number; - comp: (other: string | number | Long) => number; - divide: (divisor: string | number | Long) => Long; - div: (divisor: string | number | Long) => Long; - equals: (other: string | number | Long) => boolean; - eq: (other: string | number | Long) => boolean; - getHighBits: () => number; - getHighBitsUnsigned: () => number; - getLowBits: () => number; - getLowBitsUnsigned: () => number; - getNumBitsAbs: () => number; - greaterThan: (other: string | number | Long) => boolean; - gt: (other: string | number | Long) => boolean; - greaterThanOrEqual: (other: string | number | Long) => boolean; - gte: (other: string | number | Long) => boolean; - ge: (other: string | number | Long) => boolean; - isEven: () => boolean; - isNegative: () => boolean; - isOdd: () => boolean; - isPositive: () => boolean; - isZero: () => boolean; - eqz: () => boolean; - lessThan: (other: string | number | Long) => boolean; - lt: (other: string | number | Long) => boolean; - lessThanOrEqual: (other: string | number | Long) => boolean; - lte: (other: string | number | Long) => boolean; - le: (other: string | number | Long) => boolean; - modulo: (other: string | number | Long) => Long; - mod: (other: string | number | Long) => Long; - rem: (other: string | number | Long) => Long; - multiply: (multiplier: string | number | Long) => Long; - mul: (multiplier: string | number | Long) => Long; - negate: () => Long; - neg: () => Long; - not: () => Long; - countLeadingZeros: () => number; - clz: () => number; - countTrailingZeros: () => number; - ctz: () => number; - notEquals: (other: string | number | Long) => boolean; - neq: (other: string | number | Long) => boolean; - ne: (other: string | number | Long) => boolean; - or: (other: string | number | Long) => Long; - shiftLeft: (numBits: number | Long) => Long; - shl: (numBits: number | Long) => Long; - shiftRight: (numBits: number | Long) => Long; - shr: (numBits: number | Long) => Long; - shiftRightUnsigned: (numBits: number | Long) => Long; - shru: (numBits: number | Long) => Long; - shr_u: (numBits: number | Long) => Long; - rotateLeft: (numBits: number | Long) => Long; - rotl: (numBits: number | Long) => Long; - rotateRight: (numBits: number | Long) => Long; - rotr: (numBits: number | Long) => Long; - subtract: (subtrahend: string | number | Long) => Long; - sub: (subtrahend: string | number | Long) => Long; - toInt: () => number; - toNumber: () => number; - toBytes: (le?: boolean | undefined) => number[]; - toBytesLE: () => number[]; - toBytesBE: () => number[]; - toSigned: () => Long; - toString: (radix?: number | undefined) => string; - toUnsigned: () => Long; - xor: (other: string | number | Long) => Long; - } & { [K in Exclude]: never; }) | undefined; - limit?: string | number | (Long & { - high: number; - low: number; - unsigned: boolean; - add: (addend: string | number | Long) => Long; - and: (other: string | number | Long) => Long; - compare: (other: string | number | Long) => number; - comp: (other: string | number | Long) 
=> number; - divide: (divisor: string | number | Long) => Long; - div: (divisor: string | number | Long) => Long; - equals: (other: string | number | Long) => boolean; - eq: (other: string | number | Long) => boolean; - getHighBits: () => number; - getHighBitsUnsigned: () => number; - getLowBits: () => number; - getLowBitsUnsigned: () => number; - getNumBitsAbs: () => number; - greaterThan: (other: string | number | Long) => boolean; - gt: (other: string | number | Long) => boolean; - greaterThanOrEqual: (other: string | number | Long) => boolean; - gte: (other: string | number | Long) => boolean; - ge: (other: string | number | Long) => boolean; - isEven: () => boolean; - isNegative: () => boolean; - isOdd: () => boolean; - isPositive: () => boolean; - isZero: () => boolean; - eqz: () => boolean; - lessThan: (other: string | number | Long) => boolean; - lt: (other: string | number | Long) => boolean; - lessThanOrEqual: (other: string | number | Long) => boolean; - lte: (other: string | number | Long) => boolean; - le: (other: string | number | Long) => boolean; - modulo: (other: string | number | Long) => Long; - mod: (other: string | number | Long) => Long; - rem: (other: string | number | Long) => Long; - multiply: (multiplier: string | number | Long) => Long; - mul: (multiplier: string | number | Long) => Long; - negate: () => Long; - neg: () => Long; - not: () => Long; - countLeadingZeros: () => number; - clz: () => number; - countTrailingZeros: () => number; - ctz: () => number; - notEquals: (other: string | number | Long) => boolean; - neq: (other: string | number | Long) => boolean; - ne: (other: string | number | Long) => boolean; - or: (other: string | number | Long) => Long; - shiftLeft: (numBits: number | Long) => Long; - shl: (numBits: number | Long) => Long; - shiftRight: (numBits: number | Long) => Long; - shr: (numBits: number | Long) => Long; - shiftRightUnsigned: (numBits: number | Long) => Long; - shru: (numBits: number | Long) => Long; - shr_u: (numBits: number | Long) => Long; - rotateLeft: (numBits: number | Long) => Long; - rotl: (numBits: number | Long) => Long; - rotateRight: (numBits: number | Long) => Long; - rotr: (numBits: number | Long) => Long; - subtract: (subtrahend: string | number | Long) => Long; - sub: (subtrahend: string | number | Long) => Long; - toInt: () => number; - toNumber: () => number; - toBytes: (le?: boolean | undefined) => number[]; - toBytesLE: () => number[]; - toBytesBE: () => number[]; - toSigned: () => Long; - toString: (radix?: number | undefined) => string; - toUnsigned: () => Long; - xor: (other: string | number | Long) => Long; - } & { [K_1 in Exclude]: never; }) | undefined; - countTotal?: boolean | undefined; - reverse?: boolean | undefined; - } & { [K_2 in Exclude]: never; }>(object: I): PageRequest; -}; -export declare const PageResponse: { - encode(message: PageResponse, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): PageResponse; - fromJSON(object: any): PageResponse; - toJSON(message: PageResponse): unknown; - fromPartial Long; - and: (other: string | number | Long) => Long; - compare: (other: string | number | Long) => number; - comp: (other: string | number | Long) => number; - divide: (divisor: string | number | Long) => Long; - div: (divisor: string | number | Long) => Long; - equals: (other: string | number | Long) => boolean; - eq: (other: string | number | Long) => boolean; - getHighBits: () => number; - getHighBitsUnsigned: () => number; - getLowBits: () => number; - 
getLowBitsUnsigned: () => number; - getNumBitsAbs: () => number; - greaterThan: (other: string | number | Long) => boolean; - gt: (other: string | number | Long) => boolean; - greaterThanOrEqual: (other: string | number | Long) => boolean; - gte: (other: string | number | Long) => boolean; - ge: (other: string | number | Long) => boolean; - isEven: () => boolean; - isNegative: () => boolean; - isOdd: () => boolean; - isPositive: () => boolean; - isZero: () => boolean; - eqz: () => boolean; - lessThan: (other: string | number | Long) => boolean; - lt: (other: string | number | Long) => boolean; - lessThanOrEqual: (other: string | number | Long) => boolean; - lte: (other: string | number | Long) => boolean; - le: (other: string | number | Long) => boolean; - modulo: (other: string | number | Long) => Long; - mod: (other: string | number | Long) => Long; - rem: (other: string | number | Long) => Long; - multiply: (multiplier: string | number | Long) => Long; - mul: (multiplier: string | number | Long) => Long; - negate: () => Long; - neg: () => Long; - not: () => Long; - countLeadingZeros: () => number; - clz: () => number; - countTrailingZeros: () => number; - ctz: () => number; - notEquals: (other: string | number | Long) => boolean; - neq: (other: string | number | Long) => boolean; - ne: (other: string | number | Long) => boolean; - or: (other: string | number | Long) => Long; - shiftLeft: (numBits: number | Long) => Long; - shl: (numBits: number | Long) => Long; - shiftRight: (numBits: number | Long) => Long; - shr: (numBits: number | Long) => Long; - shiftRightUnsigned: (numBits: number | Long) => Long; - shru: (numBits: number | Long) => Long; - shr_u: (numBits: number | Long) => Long; - rotateLeft: (numBits: number | Long) => Long; - rotl: (numBits: number | Long) => Long; - rotateRight: (numBits: number | Long) => Long; - rotr: (numBits: number | Long) => Long; - subtract: (subtrahend: string | number | Long) => Long; - sub: (subtrahend: string | number | Long) => Long; - toInt: () => number; - toNumber: () => number; - toBytes: (le?: boolean | undefined) => number[]; - toBytesLE: () => number[]; - toBytesBE: () => number[]; - toSigned: () => Long; - toString: (radix?: number | undefined) => string; - toUnsigned: () => Long; - xor: (other: string | number | Long) => Long; - } & { [K in Exclude]: never; }) | undefined; - } & { [K_1 in Exclude]: never; }>(object: I): PageResponse; -}; -type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; -export type DeepPartial = T extends Builtin ? T : T extends Long ? string | number | Long : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> : T extends {} ? { - [K in keyof T]?: DeepPartial; -} : Partial; -type KeysOfUnion = T extends T ? keyof T : never; -export type Exact = P extends Builtin ? 
P : P & { - [K in keyof P]: Exact; -} & { - [K in Exclude>]: never; -}; -export {}; diff --git a/packages/cosmic-proto/dist/cosmos/base/query/v1beta1/pagination.js b/packages/cosmic-proto/dist/cosmos/base/query/v1beta1/pagination.js deleted file mode 100644 index 8ce3adb3fab..00000000000 --- a/packages/cosmic-proto/dist/cosmos/base/query/v1beta1/pagination.js +++ /dev/null @@ -1,186 +0,0 @@ -/* eslint-disable */ -import Long from "long"; -import _m0 from "protobufjs/minimal.js"; -export const protobufPackage = "cosmos.base.query.v1beta1"; -function createBasePageRequest() { - return { key: new Uint8Array(), offset: Long.UZERO, limit: Long.UZERO, countTotal: false, reverse: false }; -} -export const PageRequest = { - encode(message, writer = _m0.Writer.create()) { - if (message.key.length !== 0) { - writer.uint32(10).bytes(message.key); - } - if (!message.offset.isZero()) { - writer.uint32(16).uint64(message.offset); - } - if (!message.limit.isZero()) { - writer.uint32(24).uint64(message.limit); - } - if (message.countTotal === true) { - writer.uint32(32).bool(message.countTotal); - } - if (message.reverse === true) { - writer.uint32(40).bool(message.reverse); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBasePageRequest(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.key = reader.bytes(); - break; - case 2: - message.offset = reader.uint64(); - break; - case 3: - message.limit = reader.uint64(); - break; - case 4: - message.countTotal = reader.bool(); - break; - case 5: - message.reverse = reader.bool(); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - key: isSet(object.key) ? bytesFromBase64(object.key) : new Uint8Array(), - offset: isSet(object.offset) ? Long.fromValue(object.offset) : Long.UZERO, - limit: isSet(object.limit) ? Long.fromValue(object.limit) : Long.UZERO, - countTotal: isSet(object.countTotal) ? Boolean(object.countTotal) : false, - reverse: isSet(object.reverse) ? Boolean(object.reverse) : false, - }; - }, - toJSON(message) { - const obj = {}; - message.key !== undefined && - (obj.key = base64FromBytes(message.key !== undefined ? message.key : new Uint8Array())); - message.offset !== undefined && (obj.offset = (message.offset || Long.UZERO).toString()); - message.limit !== undefined && (obj.limit = (message.limit || Long.UZERO).toString()); - message.countTotal !== undefined && (obj.countTotal = message.countTotal); - message.reverse !== undefined && (obj.reverse = message.reverse); - return obj; - }, - fromPartial(object) { - const message = createBasePageRequest(); - message.key = object.key ?? new Uint8Array(); - message.offset = (object.offset !== undefined && object.offset !== null) - ? Long.fromValue(object.offset) - : Long.UZERO; - message.limit = (object.limit !== undefined && object.limit !== null) ? Long.fromValue(object.limit) : Long.UZERO; - message.countTotal = object.countTotal ?? false; - message.reverse = object.reverse ?? 
false; - return message; - }, -}; -function createBasePageResponse() { - return { nextKey: new Uint8Array(), total: Long.UZERO }; -} -export const PageResponse = { - encode(message, writer = _m0.Writer.create()) { - if (message.nextKey.length !== 0) { - writer.uint32(10).bytes(message.nextKey); - } - if (!message.total.isZero()) { - writer.uint32(16).uint64(message.total); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBasePageResponse(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.nextKey = reader.bytes(); - break; - case 2: - message.total = reader.uint64(); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - nextKey: isSet(object.nextKey) ? bytesFromBase64(object.nextKey) : new Uint8Array(), - total: isSet(object.total) ? Long.fromValue(object.total) : Long.UZERO, - }; - }, - toJSON(message) { - const obj = {}; - message.nextKey !== undefined && - (obj.nextKey = base64FromBytes(message.nextKey !== undefined ? message.nextKey : new Uint8Array())); - message.total !== undefined && (obj.total = (message.total || Long.UZERO).toString()); - return obj; - }, - fromPartial(object) { - const message = createBasePageResponse(); - message.nextKey = object.nextKey ?? new Uint8Array(); - message.total = (object.total !== undefined && object.total !== null) ? Long.fromValue(object.total) : Long.UZERO; - return message; - }, -}; -var globalThis = (() => { - if (typeof globalThis !== "undefined") { - return globalThis; - } - if (typeof self !== "undefined") { - return self; - } - if (typeof window !== "undefined") { - return window; - } - if (typeof global !== "undefined") { - return global; - } - throw "Unable to locate global object"; -})(); -function bytesFromBase64(b64) { - if (globalThis.Buffer) { - return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); - } - else { - const bin = globalThis.atob(b64); - const arr = new Uint8Array(bin.length); - for (let i = 0; i < bin.length; ++i) { - arr[i] = bin.charCodeAt(i); - } - return arr; - } -} -function base64FromBytes(arr) { - if (globalThis.Buffer) { - return globalThis.Buffer.from(arr).toString("base64"); - } - else { - const bin = []; - arr.forEach((byte) => { - bin.push(String.fromCharCode(byte)); - }); - return globalThis.btoa(bin.join("")); - } -} -if (_m0.util.Long !== Long) { - _m0.util.Long = Long; - _m0.configure(); -} -function isSet(value) { - return value !== null && value !== undefined; -} diff --git a/packages/cosmic-proto/dist/cosmos/base/v1beta1/coin.d.ts b/packages/cosmic-proto/dist/cosmos/base/v1beta1/coin.d.ts deleted file mode 100644 index 8eedcbad2c8..00000000000 --- a/packages/cosmic-proto/dist/cosmos/base/v1beta1/coin.d.ts +++ /dev/null @@ -1,90 +0,0 @@ -import Long from "long"; -import _m0 from "protobufjs/minimal.js"; -export declare const protobufPackage = "cosmos.base.v1beta1"; -/** - * Coin defines a token with a denomination and an amount. - * - * NOTE: The amount field is an Int which implements the custom method - * signatures required by gogoproto. - */ -export interface Coin { - denom: string; - amount: string; -} -/** - * DecCoin defines a token with a denomination and a decimal amount. - * - * NOTE: The amount field is an Dec which implements the custom method - * signatures required by gogoproto. 
- */ -export interface DecCoin { - denom: string; - amount: string; -} -/** IntProto defines a Protobuf wrapper around an Int object. */ -export interface IntProto { - int: string; -} -/** DecProto defines a Protobuf wrapper around a Dec object. */ -export interface DecProto { - dec: string; -} -export declare const Coin: { - encode(message: Coin, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): Coin; - fromJSON(object: any): Coin; - toJSON(message: Coin): unknown; - fromPartial]: never; }>(object: I): Coin; -}; -export declare const DecCoin: { - encode(message: DecCoin, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): DecCoin; - fromJSON(object: any): DecCoin; - toJSON(message: DecCoin): unknown; - fromPartial]: never; }>(object: I): DecCoin; -}; -export declare const IntProto: { - encode(message: IntProto, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): IntProto; - fromJSON(object: any): IntProto; - toJSON(message: IntProto): unknown; - fromPartial]: never; }>(object: I): IntProto; -}; -export declare const DecProto: { - encode(message: DecProto, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): DecProto; - fromJSON(object: any): DecProto; - toJSON(message: DecProto): unknown; - fromPartial]: never; }>(object: I): DecProto; -}; -type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; -export type DeepPartial = T extends Builtin ? T : T extends Long ? string | number | Long : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> : T extends {} ? { - [K in keyof T]?: DeepPartial; -} : Partial; -type KeysOfUnion = T extends T ? keyof T : never; -export type Exact = P extends Builtin ? P : P & { - [K in keyof P]: Exact; -} & { - [K in Exclude>]: never; -}; -export {}; diff --git a/packages/cosmic-proto/dist/cosmos/base/v1beta1/coin.js b/packages/cosmic-proto/dist/cosmos/base/v1beta1/coin.js deleted file mode 100644 index 5a68175dc23..00000000000 --- a/packages/cosmic-proto/dist/cosmos/base/v1beta1/coin.js +++ /dev/null @@ -1,197 +0,0 @@ -/* eslint-disable */ -import Long from "long"; -import _m0 from "protobufjs/minimal.js"; -export const protobufPackage = "cosmos.base.v1beta1"; -function createBaseCoin() { - return { denom: "", amount: "" }; -} -export const Coin = { - encode(message, writer = _m0.Writer.create()) { - if (message.denom !== "") { - writer.uint32(10).string(message.denom); - } - if (message.amount !== "") { - writer.uint32(18).string(message.amount); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseCoin(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.denom = reader.string(); - break; - case 2: - message.amount = reader.string(); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - denom: isSet(object.denom) ? String(object.denom) : "", - amount: isSet(object.amount) ? 
String(object.amount) : "", - }; - }, - toJSON(message) { - const obj = {}; - message.denom !== undefined && (obj.denom = message.denom); - message.amount !== undefined && (obj.amount = message.amount); - return obj; - }, - fromPartial(object) { - const message = createBaseCoin(); - message.denom = object.denom ?? ""; - message.amount = object.amount ?? ""; - return message; - }, -}; -function createBaseDecCoin() { - return { denom: "", amount: "" }; -} -export const DecCoin = { - encode(message, writer = _m0.Writer.create()) { - if (message.denom !== "") { - writer.uint32(10).string(message.denom); - } - if (message.amount !== "") { - writer.uint32(18).string(message.amount); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseDecCoin(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.denom = reader.string(); - break; - case 2: - message.amount = reader.string(); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - denom: isSet(object.denom) ? String(object.denom) : "", - amount: isSet(object.amount) ? String(object.amount) : "", - }; - }, - toJSON(message) { - const obj = {}; - message.denom !== undefined && (obj.denom = message.denom); - message.amount !== undefined && (obj.amount = message.amount); - return obj; - }, - fromPartial(object) { - const message = createBaseDecCoin(); - message.denom = object.denom ?? ""; - message.amount = object.amount ?? ""; - return message; - }, -}; -function createBaseIntProto() { - return { int: "" }; -} -export const IntProto = { - encode(message, writer = _m0.Writer.create()) { - if (message.int !== "") { - writer.uint32(10).string(message.int); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseIntProto(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.int = reader.string(); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { int: isSet(object.int) ? String(object.int) : "" }; - }, - toJSON(message) { - const obj = {}; - message.int !== undefined && (obj.int = message.int); - return obj; - }, - fromPartial(object) { - const message = createBaseIntProto(); - message.int = object.int ?? ""; - return message; - }, -}; -function createBaseDecProto() { - return { dec: "" }; -} -export const DecProto = { - encode(message, writer = _m0.Writer.create()) { - if (message.dec !== "") { - writer.uint32(10).string(message.dec); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseDecProto(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.dec = reader.string(); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { dec: isSet(object.dec) ? 
String(object.dec) : "" }; - }, - toJSON(message) { - const obj = {}; - message.dec !== undefined && (obj.dec = message.dec); - return obj; - }, - fromPartial(object) { - const message = createBaseDecProto(); - message.dec = object.dec ?? ""; - return message; - }, -}; -if (_m0.util.Long !== Long) { - _m0.util.Long = Long; - _m0.configure(); -} -function isSet(value) { - return value !== null && value !== undefined; -} diff --git a/packages/cosmic-proto/dist/cosmos_proto/cosmos.d.ts b/packages/cosmic-proto/dist/cosmos_proto/cosmos.d.ts deleted file mode 100644 index 599ab7a0713..00000000000 --- a/packages/cosmic-proto/dist/cosmos_proto/cosmos.d.ts +++ /dev/null @@ -1,99 +0,0 @@ -import Long from "long"; -import _m0 from "protobufjs/minimal.js"; -export declare const protobufPackage = "cosmos_proto"; -export declare enum ScalarType { - SCALAR_TYPE_UNSPECIFIED = 0, - SCALAR_TYPE_STRING = 1, - SCALAR_TYPE_BYTES = 2, - UNRECOGNIZED = -1 -} -export declare function scalarTypeFromJSON(object: any): ScalarType; -export declare function scalarTypeToJSON(object: ScalarType): string; -/** - * InterfaceDescriptor describes an interface type to be used with - * accepts_interface and implements_interface and declared by declare_interface. - */ -export interface InterfaceDescriptor { - /** - * name is the name of the interface. It should be a short-name (without - * a period) such that the fully qualified name of the interface will be - * package.name, ex. for the package a.b and interface named C, the - * fully-qualified name will be a.b.C. - */ - name: string; - /** - * description is a human-readable description of the interface and its - * purpose. - */ - description: string; -} -/** - * ScalarDescriptor describes an scalar type to be used with - * the scalar field option and declared by declare_scalar. - * Scalars extend simple protobuf built-in types with additional - * syntax and semantics, for instance to represent big integers. - * Scalars should ideally define an encoding such that there is only one - * valid syntactical representation for a given semantic meaning, - * i.e. the encoding should be deterministic. - */ -export interface ScalarDescriptor { - /** - * name is the name of the scalar. It should be a short-name (without - * a period) such that the fully qualified name of the scalar will be - * package.name, ex. for the package a.b and scalar named C, the - * fully-qualified name will be a.b.C. - */ - name: string; - /** - * description is a human-readable description of the scalar and its - * encoding format. For instance a big integer or decimal scalar should - * specify precisely the expected encoding format. - */ - description: string; - /** - * field_type is the type of field with which this scalar can be used. - * Scalars can be used with one and only one type of field so that - * encoding standards and simple and clear. Currently only string and - * bytes fields are supported for scalars. 
- */ - fieldType: ScalarType[]; -} -export declare const InterfaceDescriptor: { - encode(message: InterfaceDescriptor, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): InterfaceDescriptor; - fromJSON(object: any): InterfaceDescriptor; - toJSON(message: InterfaceDescriptor): unknown; - fromPartial]: never; }>(object: I): InterfaceDescriptor; -}; -export declare const ScalarDescriptor: { - encode(message: ScalarDescriptor, writer?: _m0.Writer): _m0.Writer; - decode(input: _m0.Reader | Uint8Array, length?: number): ScalarDescriptor; - fromJSON(object: any): ScalarDescriptor; - toJSON(message: ScalarDescriptor): unknown; - fromPartial]: never; }) | undefined; - } & { [K_1 in Exclude]: never; }>(object: I): ScalarDescriptor; -}; -type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; -export type DeepPartial = T extends Builtin ? T : T extends Long ? string | number | Long : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> : T extends {} ? { - [K in keyof T]?: DeepPartial; -} : Partial; -type KeysOfUnion = T extends T ? keyof T : never; -export type Exact = P extends Builtin ? P : P & { - [K in keyof P]: Exact; -} & { - [K in Exclude>]: never; -}; -export {}; diff --git a/packages/cosmic-proto/dist/cosmos_proto/cosmos.js b/packages/cosmic-proto/dist/cosmos_proto/cosmos.js deleted file mode 100644 index 9d1a08f3f20..00000000000 --- a/packages/cosmic-proto/dist/cosmos_proto/cosmos.js +++ /dev/null @@ -1,176 +0,0 @@ -/* eslint-disable */ -import Long from "long"; -import _m0 from "protobufjs/minimal.js"; -export const protobufPackage = "cosmos_proto"; -export var ScalarType; -(function (ScalarType) { - ScalarType[ScalarType["SCALAR_TYPE_UNSPECIFIED"] = 0] = "SCALAR_TYPE_UNSPECIFIED"; - ScalarType[ScalarType["SCALAR_TYPE_STRING"] = 1] = "SCALAR_TYPE_STRING"; - ScalarType[ScalarType["SCALAR_TYPE_BYTES"] = 2] = "SCALAR_TYPE_BYTES"; - ScalarType[ScalarType["UNRECOGNIZED"] = -1] = "UNRECOGNIZED"; -})(ScalarType || (ScalarType = {})); -export function scalarTypeFromJSON(object) { - switch (object) { - case 0: - case "SCALAR_TYPE_UNSPECIFIED": - return ScalarType.SCALAR_TYPE_UNSPECIFIED; - case 1: - case "SCALAR_TYPE_STRING": - return ScalarType.SCALAR_TYPE_STRING; - case 2: - case "SCALAR_TYPE_BYTES": - return ScalarType.SCALAR_TYPE_BYTES; - case -1: - case "UNRECOGNIZED": - default: - return ScalarType.UNRECOGNIZED; - } -} -export function scalarTypeToJSON(object) { - switch (object) { - case ScalarType.SCALAR_TYPE_UNSPECIFIED: - return "SCALAR_TYPE_UNSPECIFIED"; - case ScalarType.SCALAR_TYPE_STRING: - return "SCALAR_TYPE_STRING"; - case ScalarType.SCALAR_TYPE_BYTES: - return "SCALAR_TYPE_BYTES"; - case ScalarType.UNRECOGNIZED: - default: - return "UNRECOGNIZED"; - } -} -function createBaseInterfaceDescriptor() { - return { name: "", description: "" }; -} -export const InterfaceDescriptor = { - encode(message, writer = _m0.Writer.create()) { - if (message.name !== "") { - writer.uint32(10).string(message.name); - } - if (message.description !== "") { - writer.uint32(18).string(message.description); - } - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? 
reader.len : reader.pos + length; - const message = createBaseInterfaceDescriptor(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.name = reader.string(); - break; - case 2: - message.description = reader.string(); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - name: isSet(object.name) ? String(object.name) : "", - description: isSet(object.description) ? String(object.description) : "", - }; - }, - toJSON(message) { - const obj = {}; - message.name !== undefined && (obj.name = message.name); - message.description !== undefined && (obj.description = message.description); - return obj; - }, - fromPartial(object) { - const message = createBaseInterfaceDescriptor(); - message.name = object.name ?? ""; - message.description = object.description ?? ""; - return message; - }, -}; -function createBaseScalarDescriptor() { - return { name: "", description: "", fieldType: [] }; -} -export const ScalarDescriptor = { - encode(message, writer = _m0.Writer.create()) { - if (message.name !== "") { - writer.uint32(10).string(message.name); - } - if (message.description !== "") { - writer.uint32(18).string(message.description); - } - writer.uint32(26).fork(); - for (const v of message.fieldType) { - writer.int32(v); - } - writer.ldelim(); - return writer; - }, - decode(input, length) { - const reader = input instanceof _m0.Reader ? input : new _m0.Reader(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseScalarDescriptor(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.name = reader.string(); - break; - case 2: - message.description = reader.string(); - break; - case 3: - if ((tag & 7) === 2) { - const end2 = reader.uint32() + reader.pos; - while (reader.pos < end2) { - message.fieldType.push(reader.int32()); - } - } - else { - message.fieldType.push(reader.int32()); - } - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }, - fromJSON(object) { - return { - name: isSet(object.name) ? String(object.name) : "", - description: isSet(object.description) ? String(object.description) : "", - fieldType: Array.isArray(object?.fieldType) ? object.fieldType.map((e) => scalarTypeFromJSON(e)) : [], - }; - }, - toJSON(message) { - const obj = {}; - message.name !== undefined && (obj.name = message.name); - message.description !== undefined && (obj.description = message.description); - if (message.fieldType) { - obj.fieldType = message.fieldType.map((e) => scalarTypeToJSON(e)); - } - else { - obj.fieldType = []; - } - return obj; - }, - fromPartial(object) { - const message = createBaseScalarDescriptor(); - message.name = object.name ?? ""; - message.description = object.description ?? 
""; - message.fieldType = object.fieldType?.map((e) => e) || []; - return message; - }, -}; -if (_m0.util.Long !== Long) { - _m0.util.Long = Long; - _m0.configure(); -} -function isSet(value) { - return value !== null && value !== undefined; -} diff --git a/packages/cosmic-proto/package.json b/packages/cosmic-proto/package.json index 50da4d65979..a0b042917a8 100644 --- a/packages/cosmic-proto/package.json +++ b/packages/cosmic-proto/package.json @@ -1,6 +1,6 @@ { "name": "@agoric/cosmic-proto", - "version": "0.3.0", + "version": "0.4.0", "description": "Protobuf stubs for the Agoric cosmos-sdk module", "keywords": [], "author": "Agoric", @@ -32,30 +32,61 @@ "default": "./dist/codegen/agoric/vstorage/query.js" } }, - "scripts": { - "build": "echo Use yarn rebuild to update dist output", - "rebuild": "rm -rf gen && mkdir -p gen && yarn generate && tsc --build && yarn prettier -w dist/agoric && rm -rf gen", - "generate": "protoc --plugin=node_modules/.bin/protoc-gen-ts_proto --ts_proto_opt='esModuleInterop=true,forceLong=long,useOptionals=messages' --ts_proto_out=./gen --ts_proto_opt=importSuffix=.js ./proto/agoric/swingset/msgs.proto ./proto/agoric/swingset/query.proto ./proto/agoric/vstorage/query.proto -I./proto", - "test": "node test/sanity-test.js", - "test:xs": "exit 0", - "lint": "exit 0", - "lint-fix": "exit 0" - }, - "dependencies": { - "protobufjs": "^7.0.0" - }, - "devDependencies": { - "ts-proto": "^1.131.0" + "main": "dist/index.js", + "module": "dist/index.mjs", + "typings": "dist/index.d.ts", + "directories": { + "lib": "src" }, "files": [ "dist", - "LICENSE*", - "swingset" + "!CHANGELOG.md" ], + "scripts": { + "build:cjs": "yarn tsc --outDir dist --module commonjs || true", + "build:mjs": "yarn tsc --outDir mjs --module es2022 --declaration false || true", + "clean:mjs": "rimraf mjs", + "clean:dist": "rimraf dist", + "clean": "npm run clean:mjs && npm run clean:dist", + "build:rename": "publish-scripts --cmd rename --srcDir mjs --outDir dist --findExt js --replaceExt mjs --no-rmDir", + "build": "npm run clean && npm run build:cjs && npm run build:mjs && npm run build:rename", + "codegen": "yarn protos-update && node scripts/codegen.cjs", + "prepare": "npm run build", + "protos-update": "cp -rf ../../golang/cosmos/third_party/proto . 
&& cp -rf ../../golang/cosmos/proto/agoric proto", + "lint-fix": "yarn lint:eslint --fix", + "lint": "tsc", + "test": "ava", + "test:xs": "exit 0" + }, "publishConfig": { "access": "public" }, - "typeCoverage": { - "atLeast": null + "devDependencies": { + "@agoric/cosmos": "^0.34.1", + "@cosmology/telescope": "^1.0.1", + "ava": "^5.3.1", + "publish-scripts": "0.1.0", + "rimraf": "^5.0.0", + "tsimp": "^2.0.11", + "typescript": "^5.0.4" + }, + "dependencies": { + "@cosmjs/amino": "0.29.4", + "@cosmjs/proto-signing": "^0.30.1", + "@cosmjs/stargate": "^0.30.1", + "@cosmjs/tendermint-rpc": "^0.30.1", + "@endo/init": "^1.0.3" + }, + "ava": { + "extensions": { + "ts": "module" + }, + "files": [ + "test/**/test-*.ts" + ], + "nodeArguments": [ + "--import=tsimp", + "--no-warnings" + ] } } diff --git a/packages/cosmic-proto/proto/agoric b/packages/cosmic-proto/proto/agoric deleted file mode 120000 index b256674d526..00000000000 --- a/packages/cosmic-proto/proto/agoric +++ /dev/null @@ -1 +0,0 @@ -../../../golang/cosmos/proto/agoric \ No newline at end of file diff --git a/packages/cosmic-proto/proto/cosmos b/packages/cosmic-proto/proto/cosmos deleted file mode 120000 index 8089f8a2702..00000000000 --- a/packages/cosmic-proto/proto/cosmos +++ /dev/null @@ -1 +0,0 @@ -../../../golang/cosmos/third_party/proto/cosmos \ No newline at end of file diff --git a/packages/cosmic-proto/proto/cosmos_proto b/packages/cosmic-proto/proto/cosmos_proto deleted file mode 120000 index 5f57b79469a..00000000000 --- a/packages/cosmic-proto/proto/cosmos_proto +++ /dev/null @@ -1 +0,0 @@ -../../../golang/cosmos/third_party/proto/cosmos_proto \ No newline at end of file diff --git a/packages/cosmic-proto/proto/gogoproto b/packages/cosmic-proto/proto/gogoproto deleted file mode 120000 index ced32ad138f..00000000000 --- a/packages/cosmic-proto/proto/gogoproto +++ /dev/null @@ -1 +0,0 @@ -../../../golang/cosmos/third_party/proto/gogoproto \ No newline at end of file diff --git a/packages/cosmic-proto/proto/google b/packages/cosmic-proto/proto/google deleted file mode 120000 index db6e2bf53c8..00000000000 --- a/packages/cosmic-proto/proto/google +++ /dev/null @@ -1 +0,0 @@ -../node_modules/protobufjs/google \ No newline at end of file diff --git a/packages/new-cosmic-proto/scripts/codegen.cjs b/packages/cosmic-proto/scripts/codegen.cjs similarity index 97% rename from packages/new-cosmic-proto/scripts/codegen.cjs rename to packages/cosmic-proto/scripts/codegen.cjs index 3ce7945b9d5..49481de57b2 100644 --- a/packages/new-cosmic-proto/scripts/codegen.cjs +++ b/packages/cosmic-proto/scripts/codegen.cjs @@ -80,7 +80,8 @@ telescope({ }, }, aminoEncoding: { - enabled: false, + // Necessary for getSigningAgoricClient + enabled: true, }, lcdClients: { enabled: false, diff --git a/packages/new-cosmic-proto/src/index.ts b/packages/cosmic-proto/src/index.ts similarity index 100% rename from packages/new-cosmic-proto/src/index.ts rename to packages/cosmic-proto/src/index.ts diff --git a/packages/cosmic-proto/swingset/msgs.js b/packages/cosmic-proto/swingset/msgs.js deleted file mode 100644 index 406e26231de..00000000000 --- a/packages/cosmic-proto/swingset/msgs.js +++ /dev/null @@ -1,2 +0,0 @@ -/** @file for backwards compatibility */ -export * from '../dist/agoric/swingset/msgs.js'; diff --git a/packages/cosmic-proto/swingset/query.js b/packages/cosmic-proto/swingset/query.js deleted file mode 100644 index 1ead26ce258..00000000000 --- a/packages/cosmic-proto/swingset/query.js +++ /dev/null @@ -1,2 +0,0 @@ -/** @file for backwards 
compatibility */ -export * from '../dist/agoric/swingset/query.js'; diff --git a/packages/cosmic-proto/swingset/swingset.js b/packages/cosmic-proto/swingset/swingset.js deleted file mode 100644 index 924fd6cedc4..00000000000 --- a/packages/cosmic-proto/swingset/swingset.js +++ /dev/null @@ -1,2 +0,0 @@ -/** @file for backwards compatibility */ -export * from '../dist/agoric/swingset/swingset.js'; diff --git a/packages/cosmic-proto/test/query-swingset-params.js b/packages/cosmic-proto/test/query-swingset-params.js deleted file mode 100644 index 7522f9417b1..00000000000 --- a/packages/cosmic-proto/test/query-swingset-params.js +++ /dev/null @@ -1,43 +0,0 @@ -// @ts-check -import { QueryClient, createProtobufRpcClient } from '@cosmjs/stargate'; -import { QueryClientImpl } from '@agoric/cosmic-proto/swingset/query.js'; - -import { HttpClient, Tendermint34Client } from '@cosmjs/tendermint-rpc'; -import process from 'process'; - -/** - * Query swingset params - * - * For example, minFeeThingy@@@ - * - * @param {import('@cosmjs/tendermint-rpc').Tendermint34Client} rpcClient - * @returns {Promise} - */ -const querySwingsetParams = async rpcClient => { - const base = QueryClient.withExtensions(rpcClient); - const rpc = createProtobufRpcClient(base); - const queryService = new QueryClientImpl(rpc); - console.log('query swingset params'); - const result = await queryService.Params({}); - - return result; -}; - -const testMain = async () => { - const endPoint = 'https://emerynet.rpc.agoric.net:443'; - const rpc = new HttpClient(endPoint); - const trpc = await Tendermint34Client.create(rpc); - const params = await querySwingsetParams(trpc); - console.log(JSON.stringify(params, null, 2)); -}; - -process.exitCode = 1; -testMain().then( - () => { - process.exitCode = 0; - }, - err => { - console.error('Failed with', err); - process.exit(process.exitCode || 1); - }, -); diff --git a/packages/cosmic-proto/test/sanity-test.js b/packages/cosmic-proto/test/sanity-test.js deleted file mode 100644 index 96487d96409..00000000000 --- a/packages/cosmic-proto/test/sanity-test.js +++ /dev/null @@ -1,4 +0,0 @@ -/* eslint-disable import/no-extraneous-dependencies -- bug in rule thinks this package depends on itself */ -import '@agoric/cosmic-proto/swingset/msgs.js'; -import '@agoric/cosmic-proto/swingset/query.js'; -import '@agoric/cosmic-proto/vstorage/query.js'; diff --git a/packages/cosmic-proto/test/test-agoric.ts b/packages/cosmic-proto/test/test-agoric.ts new file mode 100644 index 00000000000..d2ca8ab4836 --- /dev/null +++ b/packages/cosmic-proto/test/test-agoric.ts @@ -0,0 +1,23 @@ +/* eslint-disable @typescript-eslint/prefer-ts-expect-error */ +import test from 'ava'; +import { coins } from '@cosmjs/amino'; + +// FIXME does not work after SES init (add this back to package.json: +// "require": [ +// "@endo/init/debug.js" +// ]) +// import * as index from '../src/index.js'; + +// @ts-ignore tsc thinks Module '"../src/index.js"' has no exported member 'agoric'. 
+import { agoric } from '../src/index.js'; + +test('it loads', t => { + t.deepEqual(Object.keys(agoric), [ + 'lien', + 'swingset', + 'vbank', + 'vibc', + 'vstorage', + 'ClientFactory', + ]); +}); diff --git a/packages/cosmic-proto/tsconfig.json b/packages/cosmic-proto/tsconfig.json index f112528b179..38eee4ccf88 100644 --- a/packages/cosmic-proto/tsconfig.json +++ b/packages/cosmic-proto/tsconfig.json @@ -1,12 +1,28 @@ { - "extends": "../../tsconfig.json", - "compilerOptions": { - "noEmit": false, - "declaration": true, - "allowSyntheticDefaultImports": true, - "outDir": "dist" - }, - "include": [ - "gen/**/*.ts" - ] -} + "compilerOptions": { + "baseUrl": ".", + "rootDir": "src", + "skipLibCheck": true, + "emitDeclarationOnly": false, + "declaration": true, + "esModuleInterop": true, + "target": "es2022", + "module": "es2022", + "lib": [ + "es2022", + "DOM" + ], + "sourceMap": true, + "isolatedModules": true, + "allowJs": true, + "downlevelIteration": true, + "moduleResolution": "node", + "resolveJsonModule": true + }, + "include": [ + "src/**/*" + ], + "exclude": [ + "node_modules" + ] +} \ No newline at end of file diff --git a/packages/cosmic-proto/vstorage/query.js b/packages/cosmic-proto/vstorage/query.js deleted file mode 100644 index 30ccbd27d81..00000000000 --- a/packages/cosmic-proto/vstorage/query.js +++ /dev/null @@ -1,2 +0,0 @@ -/** @file for backwards compatibility */ -export * from '../dist/agoric/vstorage/query.js'; diff --git a/packages/new-cosmic-proto/.gitignore b/packages/new-cosmic-proto/.gitignore deleted file mode 100644 index fe1ba73a67c..00000000000 --- a/packages/new-cosmic-proto/.gitignore +++ /dev/null @@ -1,54 +0,0 @@ -# Logs -logs -*.log -npm-debug.log* - -# Runtime data -pids -*.pid -*.seed -.tsimp - -# out -dist -mjs -main -module - -# Directory for instrumented libs generated by jscoverage/JSCover -lib-cov - -# Coverage directory used by tools like istanbul -coverage - -# nyc test coverage -.nyc_output - -# Compiled binary addons (http://nodejs.org/api/addons.html) -build/Release - -# Dependency directories -node_modules -jspm_packages - -# Optional npm cache directory -.npm - -# Optional REPL history -.node_repl_history - -# Editors -.idea - -# Lib -lib - -# npm package lock -package-lock.json -yarn.lock - -# others -.DS_Store - -# Copy as needed from golang tree -proto diff --git a/packages/new-cosmic-proto/LICENSE b/packages/new-cosmic-proto/LICENSE deleted file mode 100644 index 261eeb9e9f8..00000000000 --- a/packages/new-cosmic-proto/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. 
- - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/packages/new-cosmic-proto/README.md b/packages/new-cosmic-proto/README.md deleted file mode 100644 index 0be2c29b131..00000000000 --- a/packages/new-cosmic-proto/README.md +++ /dev/null @@ -1,295 +0,0 @@ -# @agoric/cosmic-proto - -

-
- Protobuf interfaces for Agoric on Cosmos
- - -## install - -```sh -npm install @agoric/cosmic-proto -``` -## Table of contents - -- [Usage](#usage) - - [RPC Clients](#rpc-clients) - - [Composing Messages](#composing-messages) - - Cosmos, CosmWasm, and IBC - - [CosmWasm](#cosmwasm-messages) - - [IBC](#ibc-messages) - - [Cosmos](#cosmos-messages) -- [Wallets and Signers](#connecting-with-wallets-and-signing-messages) - - [Stargate Client](#initializing-the-stargate-client) - - [Creating Signers](#creating-signers) - - [Broadcasting Messages](#broadcasting-messages) -- [Advanced Usage](#advanced-usage) -- [Developing](#developing) -- [Credits](#credits) - -## Usage -### RPC Clients - -```js -import { agoric } from '@agoric/cosmic-proto'; - -const { createRPCQueryClient } = agoric.ClientFactory; -const client = await createRPCQueryClient({ rpcEndpoint: RPC_ENDPOINT }); - -// now you can query the cosmos modules -const balance = await client.cosmos.bank.v1beta1 - .allBalances({ address: 'agoric1addresshere' }); - -// you can also query the agoric modules -const balances = await client.agoric.exchange.v1beta1 - .exchangeBalances() -``` - -### Composing Messages - -Import the `agoric` object from `@agoric/cosmic-proto`. - -```js -import { agoric } from '@agoric/cosmic-proto'; - -const { - createSpotLimitOrder, - createSpotMarketOrder, - deposit -} = agoric.exchange.v1beta1.MessageComposer.withTypeUrl; -``` - -#### CosmWasm Messages - -```js -import { cosmwasm } from "@agoric/cosmic-proto"; - -const { - clearAdmin, - executeContract, - instantiateContract, - migrateContract, - storeCode, - updateAdmin -} = cosmwasm.wasm.v1.MessageComposer.withTypeUrl; -``` - -#### IBC Messages - -```js -import { ibc } from '@agoric/cosmic-proto'; - -const { - transfer -} = ibc.applications.transfer.v1.MessageComposer.withTypeUrl -``` - -#### Cosmos Messages - -```js -import { cosmos } from '@agoric/cosmic-proto'; - -const { - fundCommunityPool, - setWithdrawAddress, - withdrawDelegatorReward, - withdrawValidatorCommission -} = cosmos.distribution.v1beta1.MessageComposer.fromPartial; - -const { - multiSend, - send -} = cosmos.bank.v1beta1.MessageComposer.fromPartial; - -const { - beginRedelegate, - createValidator, - delegate, - editValidator, - undelegate -} = cosmos.staking.v1beta1.MessageComposer.fromPartial; - -const { - deposit, - submitProposal, - vote, - voteWeighted -} = cosmos.gov.v1beta1.MessageComposer.fromPartial; -``` - -## Connecting with Wallets and Signing Messages - -⚡️ For web interfaces, we recommend using [cosmos-kit](https://github.com/cosmology-tech/cosmos-kit). Continue below to see how to manually construct signers and clients. - -Here are the docs on [creating signers](https://github.com/cosmology-tech/cosmos-kit/tree/main/packages/react#signing-clients) in cosmos-kit that can be used with Keplr and other wallets. - -### Initializing the Stargate Client - -Use `getSigningagoricClient` to get your `SigningStargateClient`, with the proto/amino messages full-loaded. 
No need to manually add amino types; just import and initialize the client: - -```js -import { getSigningagoricClient } from '@agoric/cosmic-proto'; - -const stargateClient = await getSigningagoricClient({ - rpcEndpoint, - signer // OfflineSigner -}); -``` -### Creating Signers - -To broadcast messages, you can create signers with a variety of options: - -* [cosmos-kit](https://github.com/cosmology-tech/cosmos-kit/tree/main/packages/react#signing-clients) (recommended) -* [keplr](https://docs.keplr.app/api/cosmjs.html) -* [cosmjs](https://gist.github.com/webmaster128/8444d42a7eceeda2544c8a59fbd7e1d9) -### Amino Signer - -You will most likely want the Amino signer, so unless you specifically need proto signing, use this one: - -```js -import { getOfflineSignerAmino as getOfflineSigner } from 'cosmjs-utils'; -``` -### Proto Signer - -```js -import { getOfflineSignerProto as getOfflineSigner } from 'cosmjs-utils'; -``` - -WARNING: using plain-text mnemonics is not recommended. Take care of your security and follow best practices such as AES encryption and/or the twelve-factor app methodology. - -```js -import { chains } from 'chain-registry'; - -const mnemonic = - 'unfold client turtle either pilot stock floor glow toward bullet car science'; - const chain = chains.find(({ chain_name }) => chain_name === 'agoric'); - const signer = await getOfflineSigner({ - mnemonic, - chain - }); -``` -### Broadcasting Messages - -Now that you have your `stargateClient`, you can broadcast messages: - -```js -const { send } = cosmos.bank.v1beta1.MessageComposer.withTypeUrl; - -const msg = send({ - amount: [ - { - denom: 'coin', - amount: '1000' - } - ], - toAddress: address, - fromAddress: address -}); - -const fee: StdFee = { - amount: [ - { - denom: 'coin', - amount: '864' - } - ], - gas: '86364' -}; -const response = await stargateClient.signAndBroadcast(address, [msg], fee); -``` - -## Advanced Usage - - -If you want to construct a Stargate client manually: - -```js -import { OfflineSigner, GeneratedType, Registry } from "@cosmjs/proto-signing"; -import { AminoTypes, SigningStargateClient } from "@cosmjs/stargate"; - -import { - cosmosAminoConverters, - cosmosProtoRegistry, - cosmwasmAminoConverters, - cosmwasmProtoRegistry, - ibcProtoRegistry, - ibcAminoConverters, - agoricAminoConverters, - agoricProtoRegistry -} from '@agoric/cosmic-proto'; - -const signer: OfflineSigner = /* create your signer (see above) */ -const rpcEndpoint = 'https://rpc.cosmos.directory/agoric'; // or another URL - -const protoRegistry: ReadonlyArray<[string, GeneratedType]> = [ - ...cosmosProtoRegistry, - ...cosmwasmProtoRegistry, - ...ibcProtoRegistry, - ...agoricProtoRegistry -]; - -const aminoConverters = { - ...cosmosAminoConverters, - ...cosmwasmAminoConverters, - ...ibcAminoConverters, - ...agoricAminoConverters -}; - -const registry = new Registry(protoRegistry); -const aminoTypes = new AminoTypes(aminoConverters); - -const stargateClient = await SigningStargateClient.connectWithSigner(rpcEndpoint, signer, { - registry, - aminoTypes -}); -``` - -## Developing - -When first cloning the repo: - -``` -yarn -yarn build -``` - -### Codegen - -Contract schemas live in `./contracts`, and protos in `./proto`.
Look inside of `scripts/codegen.cjs` and configure the settings for bundling your SDK and contracts into `@agoric/cosmic-proto`: - -``` -yarn codegen -``` - -### Publishing - -Build the types and then publish: - -``` -yarn build -yarn publish -``` - -## Related - -Checkout these related projects: - -* [@cosmology/telescope](https://github.com/cosmology-tech/telescope) Your Frontend Companion for Building with TypeScript with Cosmos SDK Modules. -* [@cosmwasm/ts-codegen](https://github.com/CosmWasm/ts-codegen) Convert your CosmWasm smart contracts into dev-friendly TypeScript classes. -* [chain-registry](https://github.com/cosmology-tech/chain-registry) Everything from token symbols, logos, and IBC denominations for all assets you want to support in your application. -* [cosmos-kit](https://github.com/cosmology-tech/cosmos-kit) Experience the convenience of connecting with a variety of web3 wallets through a single, streamlined interface. -* [create-cosmos-app](https://github.com/cosmology-tech/create-cosmos-app) Set up a modern Cosmos app by running one command. -* [interchain-ui](https://github.com/cosmology-tech/interchain-ui) The Interchain Design System, empowering developers with a flexible, easy-to-use UI kit. -* [starship](https://github.com/cosmology-tech/starship) Unified Testing and Development for the Interchain. - -## Credits - -🛠 Built by Cosmology — if you like our tools, please consider delegating to [our validator ⚛️](https://cosmology.zone/validator) - - -## Disclaimer - -AS DESCRIBED IN THE LICENSES, THE SOFTWARE IS PROVIDED “AS IS”, AT YOUR OWN RISK, AND WITHOUT WARRANTIES OF ANY KIND. - -No developer or entity involved in creating this software will be liable for any claims or damages whatsoever associated with your use, inability to use, or your interaction with other users of the code, including any direct, indirect, incidental, special, exemplary, punitive or consequential damages, or loss of profits, cryptocurrencies, tokens, or anything else of value. 
diff --git a/packages/new-cosmic-proto/package.json b/packages/new-cosmic-proto/package.json deleted file mode 100644 index 5c9328aa1ca..00000000000 --- a/packages/new-cosmic-proto/package.json +++ /dev/null @@ -1,95 +0,0 @@ -{ - "name": "@agoric/new-cosmic-proto", - "version": "0.4.0", - "description": "Protobuf stubs for the Agoric cosmos-sdk module", - "keywords": [], - "author": "Agoric", - "license": "Apache-2.0", - "homepage": "https://github.com/Agoric/agoric-sdk/tree/HEAD/packages/new-cosmic-proto#readme", - "repository": { - "type": "git", - "url": "git+https://github.com/Agoric/agoric-sdk.git" - }, - "bugs": { - "url": "https://github.com/Agoric/agoric-sdk/issues" - }, - "exports": { - ".": { - "types": "./dist/codegen/index.d.ts", - "default": "./dist/codegen/index.js" - }, - "./package.json": "./package.json", - "./swingset/msgs.js": { - "types": "./dist/codegen/agoric/swingset/msgs.d.ts", - "default": "./dist/codegen/agoric/swingset/msgs.js" - }, - "./swingset/query.js": { - "types": "./dist/codegen/agoric/swingset/query.d.ts", - "default": "./dist/codegen/agoric/swingset/query.js" - }, - "./vstorage/query.js": { - "types": "./dist/codegen/agoric/vstorage/query.d.ts", - "default": "./dist/codegen/agoric/vstorage/query.js" - } - }, - "main": "dist/index.js", - "module": "dist/index.mjs", - "typings": "dist/index.d.ts", - "directories": { - "lib": "src" - }, - "files": [ - "dist", - "!CHANGELOG.md" - ], - "scripts": { - "build:cjs": "yarn tsc --outDir dist --module commonjs || true", - "build:mjs": "yarn tsc --outDir mjs --module es2022 --declaration false || true", - "clean:mjs": "rimraf mjs", - "clean:dist": "rimraf dist", - "clean": "npm run clean:mjs && npm run clean:dist", - "build:rename": "publish-scripts --cmd rename --srcDir mjs --outDir dist --findExt js --replaceExt mjs --no-rmDir", - "build": "npm run clean && npm run build:cjs && npm run build:mjs && npm run build:rename", - "codegen": "yarn protos-update && node scripts/codegen.cjs", - "prepare": "npm run build", - "protos-update": "cp -rf ../../golang/cosmos/third_party/proto . 
&& cp -rf ../../golang/cosmos/proto/agoric proto", - "lint-fix": "yarn lint:eslint --fix", - "lint": "tsc", - "test": "ava", - "test:xs": "exit 0" - }, - "publishConfig": { - "access": "restricted" - }, - "devDependencies": { - "@agoric/cosmos": "^0.34.1", - "@cosmology/telescope": "^1.0.1", - "ava": "^5.3.1", - "eslint": "8.45.0", - "eslint-config-prettier": "^8.8.0", - "eslint-plugin-prettier": "^4.2.1", - "prettier": "^2.8.7", - "publish-scripts": "0.1.0", - "rimraf": "^5.0.0", - "tsimp": "^2.0.11", - "typescript": "^5.0.4" - }, - "dependencies": { - "@cosmjs/amino": "0.29.4", - "@cosmjs/proto-signing": "^0.30.1", - "@cosmjs/stargate": "^0.30.1", - "@cosmjs/tendermint-rpc": "^0.30.1" - }, - "ava": { - "extensions": { - "ts": "module" - }, - "files": [ - "test/**/test-*.ts" - ], - "nodeArguments": [ - "--import=tsimp", - "--no-warnings" - ] - } -} diff --git a/packages/new-cosmic-proto/test/test-dummy.ts b/packages/new-cosmic-proto/test/test-dummy.ts deleted file mode 100644 index a52996e91bc..00000000000 --- a/packages/new-cosmic-proto/test/test-dummy.ts +++ /dev/null @@ -1,20 +0,0 @@ -import test from 'ava'; -import { coins } from '@cosmjs/amino'; - -// FIXME does not work after SES init (add this back to package.json: -// "require": [ -// "@endo/init/debug.js" -// ]) -// import * as index from '../src/index.js'; - -import { agoric } from '../src/index.js'; - -console.log(agoric); - -test('it works', async t => { - const fee = { - amount: coins(0, 'uosmo'), - gas: '250000', - }; - t.pass(); -}); diff --git a/packages/new-cosmic-proto/tsconfig.json b/packages/new-cosmic-proto/tsconfig.json deleted file mode 100644 index 38eee4ccf88..00000000000 --- a/packages/new-cosmic-proto/tsconfig.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "compilerOptions": { - "baseUrl": ".", - "rootDir": "src", - "skipLibCheck": true, - "emitDeclarationOnly": false, - "declaration": true, - "esModuleInterop": true, - "target": "es2022", - "module": "es2022", - "lib": [ - "es2022", - "DOM" - ], - "sourceMap": true, - "isolatedModules": true, - "allowJs": true, - "downlevelIteration": true, - "moduleResolution": "node", - "resolveJsonModule": true - }, - "include": [ - "src/**/*" - ], - "exclude": [ - "node_modules" - ] -} \ No newline at end of file diff --git a/yarn.lock b/yarn.lock index 40ea5f3264d..6520cb636df 100644 --- a/yarn.lock +++ b/yarn.lock @@ -16,6 +16,13 @@ jsesc "^2.5.1" source-map "^0.5.0" +"@agoric/cosmic-proto@^0.3.0": + version "0.3.0" + resolved "https://registry.yarnpkg.com/@agoric/cosmic-proto/-/cosmic-proto-0.3.0.tgz#c9d31d3946c91fbb1630f89d8ba63a662bcdacc5" + integrity sha512-cIunby6gs53sGkHx3ALraREbfVQXvsIcObMjQQ0/tZt5HVqwoS7Y1Qj1Xl0ZZvqE8B1Zyk7QMDj829mbTII+9g== + dependencies: + protobufjs "^7.0.0" + "@agoric/wallet-ui@0.1.3-solo.0": version "0.1.3-solo.0" resolved "https://registry.yarnpkg.com/@agoric/wallet-ui/-/wallet-ui-0.1.3-solo.0.tgz#5f05c3dd2820d4f1efcbccbd2dc1292847ecbd2b" @@ -1620,6 +1627,13 @@ dependencies: "@endo/env-options" "^0.1.4" +"@endo/eventual-send@^1.1.1": + version "1.1.1" + resolved "https://registry.yarnpkg.com/@endo/eventual-send/-/eventual-send-1.1.1.tgz#64f0d8e8e8c4f10e663bc875b6a69d7d3677d940" + integrity sha512-3o8JxHSkd8q+CMMhbvB6oAZX0g6UQbc2/0Cso1KUiDvYIjknVNEqnnVoNGFW4LdyiQdQTRoRcL7GDT8KE6ebAw== + dependencies: + "@endo/env-options" "^1.1.1" + "@endo/eventual-send@^1.1.2": version "1.1.2" resolved "https://registry.yarnpkg.com/@endo/eventual-send/-/eventual-send-1.1.2.tgz#496e97c572462d2552a114810ace61af548bdb1c" @@ -1668,6 +1682,16 @@ "@endo/where" "^1.0.2" 
ses "^1.3.0" +"@endo/init@^1.0.3": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@endo/init/-/init-1.0.3.tgz#5d2275de87797aa1d4f28e6063f7194ac502ea22" + integrity sha512-eLy0K5kF07i+uPZtGCq8zXYdP69GImdOIe3SYPKU3tdWaeEO2C3Hfq6d2wGRUIG1C9lpY6qPIh5f4H0Jj7NJWw== + dependencies: + "@endo/base64" "^1.0.2" + "@endo/eventual-send" "^1.1.1" + "@endo/lockdown" "^1.0.3" + "@endo/promise-kit" "^1.0.3" + "@endo/init@^1.0.4": version "1.0.4" resolved "https://registry.yarnpkg.com/@endo/init/-/init-1.0.4.tgz#09df92dea145acbaa0fd0bea7a497076ba305337" @@ -1678,6 +1702,13 @@ "@endo/lockdown" "^1.0.4" "@endo/promise-kit" "^1.0.4" +"@endo/lockdown@^1.0.3": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@endo/lockdown/-/lockdown-1.0.3.tgz#810b4b732242568ec189ff1dd290b7d53498e44b" + integrity sha512-HNRDzD4+svYwoLg91XBdUI7DGyJJle6b263xfBV4Chl49Na40LJCuwC1G+x+WZwywgkfM6ERctNw3j4wOzL9cA== + dependencies: + ses "^1.2.0" + "@endo/lockdown@^1.0.4": version "1.0.4" resolved "https://registry.yarnpkg.com/@endo/lockdown/-/lockdown-1.0.4.tgz#eefe6df51ed56bc1c5b0269ce8766f467bf5d25b" @@ -1748,6 +1779,13 @@ dependencies: ses "^0.18.8" +"@endo/promise-kit@^1.0.3": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@endo/promise-kit/-/promise-kit-1.0.3.tgz#d86c09b7446124a0ba4874f5b0b22dae7ed43ae8" + integrity sha512-LnjNDbiYPuN69LxgLLQxunpO6OcRZtxiDJi4gvmaoi3w4GrMs28DcLb54h9haNRk+itkwvijgwX+5yDJZ/7iYQ== + dependencies: + ses "^1.2.0" + "@endo/promise-kit@^1.0.4": version "1.0.4" resolved "https://registry.yarnpkg.com/@endo/promise-kit/-/promise-kit-1.0.4.tgz#809569fe23af9a065a311aa11747e5f00c6e481c" @@ -1952,31 +1990,11 @@ dependencies: eslint-visitor-keys "^3.3.0" -"@eslint-community/regexpp@^4.4.0": - version "4.10.0" - resolved "https://registry.yarnpkg.com/@eslint-community/regexpp/-/regexpp-4.10.0.tgz#548f6de556857c8bb73bbee70c35dc82a2e74d63" - integrity sha512-Cu96Sd2By9mCNTx2iyKOmq10v22jUVQv0lQnlGNy16oE9589yE+QADPbrMGCkA51cKZSg3Pu/aTJVTGfL/qjUA== - "@eslint-community/regexpp@^4.5.1", "@eslint-community/regexpp@^4.6.1": version "4.6.2" resolved "https://registry.yarnpkg.com/@eslint-community/regexpp/-/regexpp-4.6.2.tgz#1816b5f6948029c5eaacb0703b850ee0cb37d8f8" integrity sha512-pPTNuaAG3QMH+buKyBIGJs3g/S5y0caxw0ygM3YyE6yJFySwiGGSzA+mM3KJ8QQvzeLh3blwgSonkFjgQdxzMw== -"@eslint/eslintrc@^2.1.0": - version "2.1.4" - resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-2.1.4.tgz#388a269f0f25c1b6adc317b5a2c55714894c70ad" - integrity sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ== - dependencies: - ajv "^6.12.4" - debug "^4.3.2" - espree "^9.6.0" - globals "^13.19.0" - ignore "^5.2.0" - import-fresh "^3.2.1" - js-yaml "^4.1.0" - minimatch "^3.1.2" - strip-json-comments "^3.1.1" - "@eslint/eslintrc@^2.1.2": version "2.1.2" resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-2.1.2.tgz#c6936b4b328c64496692f76944e755738be62396" @@ -1992,11 +2010,6 @@ minimatch "^3.1.2" strip-json-comments "^3.1.1" -"@eslint/js@8.44.0": - version "8.44.0" - resolved "https://registry.yarnpkg.com/@eslint/js/-/js-8.44.0.tgz#961a5903c74139390478bdc808bcde3fc45ab7af" - integrity sha512-Ag+9YM4ocKQx9AarydN0KY2j0ErMHNIocPDrVo8zAE44xLTjEtz81OdR68/cydGtk6m6jDb5Za3r2useMzYmSw== - "@eslint/js@^8.47.0": version "8.47.0" resolved "https://registry.yarnpkg.com/@eslint/js/-/js-8.47.0.tgz#5478fdf443ff8158f9de171c704ae45308696c7d" @@ -3416,68 +3429,6 @@ resolved 
"https://registry.yarnpkg.com/@protobufjs/utf8/-/utf8-1.1.0.tgz#a777360b5b39a1a2e5106f8e858f2fd2d060c570" integrity sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw== -"@protobufs/amino@^0.0.11": - version "0.0.11" - resolved "https://registry.yarnpkg.com/@protobufs/amino/-/amino-0.0.11.tgz#5eb6b8193da8ea818484818dff2800a981cb7a61" - integrity sha512-JRIkW6/YGIUfbdDuASt3wsuxzC0Xj3U2sV0Arqa9iNwCvv4HtOpbqdWVVVgvQBnR0/ZkwQeXnt+GH7yT/DvsYQ== - dependencies: - "@protobufs/google" "^0.0.10" - -"@protobufs/confio@^0.0.6": - version "0.0.6" - resolved "https://registry.yarnpkg.com/@protobufs/confio/-/confio-0.0.6.tgz#a6ddf44eca2cbe535384228312ae7ef5dff29644" - integrity sha512-abZ0ntTJBuB8q2aMBvOerAFk8CSzafB09YdttKFEqwxokZsLFZ3+o7YaH3RIk863oeM//8sonwTaxRV8r4rmSA== - -"@protobufs/cosmos@^0.1.0": - version "0.1.0" - resolved "https://registry.yarnpkg.com/@protobufs/cosmos/-/cosmos-0.1.0.tgz#492251de16be3e0a89820f48637cd3f42114f24c" - integrity sha512-L3NZ+z0kI6GMTiD2HASNe3WbopPhQlaQaKZNRue+8LiGEv/vbbxD1lox8cwOqes3AN5dHiT0i3+gvzIbKBb7gw== - dependencies: - "@protobufs/amino" "^0.0.11" - "@protobufs/cosmos_proto" "^0.0.10" - "@protobufs/gogoproto" "^0.0.10" - "@protobufs/google" "^0.0.10" - "@protobufs/tendermint" "^0.0.10" - -"@protobufs/cosmos_proto@^0.0.10": - version "0.0.10" - resolved "https://registry.yarnpkg.com/@protobufs/cosmos_proto/-/cosmos_proto-0.0.10.tgz#622726ee227f220f608df180f938e5d8ebb1534a" - integrity sha512-4nMopXxN23udy1HEe+vS49zD9dxrA7i0E3n15QUz1x0tbrowYLHzJKeyCUNlsh5PKpEIXGxHXpPZWXs7vVCwUw== - dependencies: - "@protobufs/google" "^0.0.10" - -"@protobufs/gogoproto@^0.0.10": - version "0.0.10" - resolved "https://registry.yarnpkg.com/@protobufs/gogoproto/-/gogoproto-0.0.10.tgz#0181e17142c800b60c7ca5f92c76a614d86c5c54" - integrity sha512-u3eK1aSO3KOuX4RVFpqKPTaT/WLV50GFLuIC3slVGfD7Z1CfZ5ivHbFYUib96gihu1Mq2OZpNVj3dNws9YsVoQ== - dependencies: - "@protobufs/google" "^0.0.10" - -"@protobufs/google@^0.0.10": - version "0.0.10" - resolved "https://registry.yarnpkg.com/@protobufs/google/-/google-0.0.10.tgz#820f741b0c53f688550c74c7ddb25a5ee131a6bf" - integrity sha512-3yo+liabFM1519smwwfzh1C535CntXVsS7zT98xmo21tZUX7vxeFpQDMx38EzMGYSy/Reo8wEMWJUHqZzYsCUw== - -"@protobufs/ibc@^0.1.0": - version "0.1.0" - resolved "https://registry.yarnpkg.com/@protobufs/ibc/-/ibc-0.1.0.tgz#36aeadc9f09d185d683f66a650dad9dc40437875" - integrity sha512-GmGkX81yyd55Tm34SCOmcOiB0QRwFBHGmZpDRAsks33TBx4efAtT9rKAdtn/oPujx9sha1TqU2s3trnMPVvKyg== - dependencies: - "@protobufs/amino" "^0.0.11" - "@protobufs/confio" "^0.0.6" - "@protobufs/cosmos" "^0.1.0" - "@protobufs/gogoproto" "^0.0.10" - "@protobufs/google" "^0.0.10" - "@protobufs/tendermint" "^0.0.10" - -"@protobufs/tendermint@^0.0.10": - version "0.0.10" - resolved "https://registry.yarnpkg.com/@protobufs/tendermint/-/tendermint-0.0.10.tgz#816b27410afcecd8b6d403df149f3c2b9b80655e" - integrity sha512-hAAMLFhKdAovslKeWnLTp2gGn5bxSTDVcQLKs4C4cC91R/KfHOh+Klt4PqSGUv/APINAmREzsX2LDUbIQ2dCpg== - dependencies: - "@protobufs/gogoproto" "^0.0.10" - "@protobufs/google" "^0.0.10" - "@pyramation/json-schema-ref-parser@9.0.6": version "9.0.6" resolved "https://registry.yarnpkg.com/@pyramation/json-schema-ref-parser/-/json-schema-ref-parser-9.0.6.tgz#556e416ce7dcc15a3c1afd04d6a059e03ed09aeb" @@ -4136,7 +4087,7 @@ ajv@7.1.1: require-from-string "^2.0.2" uri-js "^4.2.2" -ajv@^6.10.0, ajv@^6.12.4: +ajv@^6.12.4: version "6.12.6" resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" 
integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== @@ -4556,9 +4507,9 @@ axios@0.21.4, axios@^0.21.2: follow-redirects "^1.14.0" axios@^1.0.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/axios/-/axios-1.4.0.tgz#38a7bf1224cd308de271146038b551d725f0be1f" - integrity sha512-S4XCWMEmzvo64T9GfvQDOXgYRDJ/wsSZc7Jvdgx5u1sd0JwsuPLqb3SYmusag+edF6ziyMensPVqLTSc1PiSEA== + version "1.6.0" + resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.0.tgz#f1e5292f26b2fd5c2e66876adc5b06cdbd7d2102" + integrity sha512-EZ1DYihju9pwVB+jg67ogm+Tmqc6JmhamRN6I4Zt8DfZu5lbcQGw3ozH9lFejSJgs/ibaef3A9PMXPLeefFGJg== dependencies: follow-redirects "^1.15.0" form-data "^4.0.0" @@ -4941,11 +4892,6 @@ caniuse-lite@^1.0.30001587: resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001587.tgz#a0bce920155fa56a1885a69c74e1163fc34b4881" integrity sha512-HMFNotUmLXn71BQxg8cijvqxnIAofforZOwGsxyXJ0qugTdspUF4sPSJ2vhgprHCB996tIDzEq1ubumPDV8ULA== -case-anything@^2.1.13: - version "2.1.13" - resolved "https://registry.yarnpkg.com/case-anything/-/case-anything-2.1.13.tgz#0cdc16278cb29a7fcdeb072400da3f342ba329e9" - integrity sha512-zlOQ80VrQ2Ue+ymH5OuM/DlDq64mEm+B9UTdHULv5osUMD6HalNTblf2b1u/m6QecjsnOkBpqVZ+XPwIVsy7Ng== - case@1.6.3: version "1.6.3" resolved "https://registry.yarnpkg.com/case/-/case-1.6.3.tgz#0a4386e3e9825351ca2e6216c60467ff5f1ea1c9" @@ -5754,11 +5700,6 @@ detect-indent@^6.0.0: resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-6.1.0.tgz#592485ebbbf6b3b1ab2be175c8393d04ca0d57e6" integrity sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA== -detect-libc@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b" - integrity sha1-+hN8S9aY7fVc1c0CrFWfkaTEups= - detect-libc@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-2.0.1.tgz#e1897aa88fa6ad197862937fbc0441ef352ee0cd" @@ -5839,13 +5780,6 @@ dotty@0.1.2: resolved "https://registry.yarnpkg.com/dotty/-/dotty-0.1.2.tgz#512d44cc4111a724931226259297f235e8484f6f" integrity sha512-V0EWmKeH3DEhMwAZ+8ZB2Ao4OK6p++Z0hsDtZq3N0+0ZMVqkzrcEGROvOnZpLnvBg5PTNG23JEDLAm64gPaotQ== -dprint-node@^1.0.8: - version "1.0.8" - resolved "https://registry.yarnpkg.com/dprint-node/-/dprint-node-1.0.8.tgz#a02470722d8208a7d7eb3704328afda1d6758625" - integrity sha512-iVKnUtYfGrYcW1ZAlfR/F59cUVL8QIhWoBJoSjkkdua/dkWIgjZfiLMeTjiB06X0ZLkQ0M2C1VbUj/CxkIf1zg== - dependencies: - detect-libc "^1.0.3" - duplexer@^0.1.1: version "0.1.2" resolved "https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6" @@ -6213,11 +6147,6 @@ eslint-config-prettier@>=8.0.0, eslint-config-prettier@^9.0.0: resolved "https://registry.yarnpkg.com/eslint-config-prettier/-/eslint-config-prettier-9.0.0.tgz#eb25485946dd0c66cd216a46232dc05451518d1f" integrity sha512-IcJsTkJae2S35pRsRAwoCE+925rJJStOdkKnLVgtE+tEpqU0EVVM7OqrwxqgptKdX29NUwC82I5pXsGFIgSevw== -eslint-config-prettier@^8.8.0: - version "8.10.0" - resolved "https://registry.yarnpkg.com/eslint-config-prettier/-/eslint-config-prettier-8.10.0.tgz#3a06a662130807e2502fc3ff8b4143d8a0658e11" - integrity sha512-SM8AMJdeQqRYT9O9zguiruQZaN7+z+E4eAP9oiLNGKMtomwaB1E9dcgUD6ZAn/eQAb52USbvezbiljfZUhbJcg== - eslint-formatter-pretty@^4.1.0: version "4.1.0" resolved 
"https://registry.yarnpkg.com/eslint-formatter-pretty/-/eslint-formatter-pretty-4.1.0.tgz#7a6877c14ffe2672066c853587d89603e97c7708" @@ -6378,13 +6307,6 @@ eslint-plugin-no-only-tests@^3.0.0: resolved "https://registry.yarnpkg.com/eslint-plugin-no-only-tests/-/eslint-plugin-no-only-tests-3.1.0.tgz#f38e4935c6c6c4842bf158b64aaa20c366fe171b" integrity sha512-Lf4YW/bL6Un1R6A76pRZyE1dl1vr31G/ev8UzIc/geCgFWyrKil8hVjYqWVKGB/UIGmb6Slzs9T0wNezdSVegw== -eslint-plugin-prettier@^4.2.1: - version "4.2.1" - resolved "https://registry.yarnpkg.com/eslint-plugin-prettier/-/eslint-plugin-prettier-4.2.1.tgz#651cbb88b1dab98bfd42f017a12fa6b2d993f94b" - integrity sha512-f/0rXLXUt0oFYs8ra4w49wYZBG5GKZpAYsJSm6rnYL5uVDjd+zowwMwVZHnAjf4edNrKpCDYfXDgmRE/Ak7QyQ== - dependencies: - prettier-linter-helpers "^1.0.0" - eslint-plugin-prettier@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/eslint-plugin-prettier/-/eslint-plugin-prettier-5.0.0.tgz#6887780ed95f7708340ec79acfdf60c35b9be57a" @@ -6403,7 +6325,7 @@ eslint-rule-documentation@>=1.0.0: resolved "https://registry.yarnpkg.com/eslint-rule-documentation/-/eslint-rule-documentation-1.0.23.tgz#4e0886145597a78d24524ec7e0cf18c6fedc23a8" integrity sha512-pWReu3fkohwyvztx/oQWWgld2iad25TfUdi6wvhhaDPIQjHU/pyvlKgXFw1kX31SQK2Nq9MH+vRDWB0ZLy8fYw== -eslint-scope@^7.2.0, eslint-scope@^7.2.2: +eslint-scope@^7.2.2: version "7.2.2" resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.2.2.tgz#deb4f92563390f32006894af62a22dba1c46423f" integrity sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg== @@ -6433,49 +6355,6 @@ eslint-visitor-keys@^3.3.0, eslint-visitor-keys@^3.4.1, eslint-visitor-keys@^3.4 resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz#0cd72fe8550e3c2eae156a96a4dddcd1c8ac5800" integrity sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag== -eslint@8.45.0: - version "8.45.0" - resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.45.0.tgz#bab660f90d18e1364352c0a6b7c6db8edb458b78" - integrity sha512-pd8KSxiQpdYRfYa9Wufvdoct3ZPQQuVuU5O6scNgMuOMYuxvH0IGaYK0wUFjo4UYYQQCUndlXiMbnxopwvvTiw== - dependencies: - "@eslint-community/eslint-utils" "^4.2.0" - "@eslint-community/regexpp" "^4.4.0" - "@eslint/eslintrc" "^2.1.0" - "@eslint/js" "8.44.0" - "@humanwhocodes/config-array" "^0.11.10" - "@humanwhocodes/module-importer" "^1.0.1" - "@nodelib/fs.walk" "^1.2.8" - ajv "^6.10.0" - chalk "^4.0.0" - cross-spawn "^7.0.2" - debug "^4.3.2" - doctrine "^3.0.0" - escape-string-regexp "^4.0.0" - eslint-scope "^7.2.0" - eslint-visitor-keys "^3.4.1" - espree "^9.6.0" - esquery "^1.4.2" - esutils "^2.0.2" - fast-deep-equal "^3.1.3" - file-entry-cache "^6.0.1" - find-up "^5.0.0" - glob-parent "^6.0.2" - globals "^13.19.0" - graphemer "^1.4.0" - ignore "^5.2.0" - imurmurhash "^0.1.4" - is-glob "^4.0.0" - is-path-inside "^3.0.3" - js-yaml "^4.1.0" - json-stable-stringify-without-jsonify "^1.0.1" - levn "^0.4.1" - lodash.merge "^4.6.2" - minimatch "^3.1.2" - natural-compare "^1.4.0" - optionator "^0.9.3" - strip-ansi "^6.0.1" - text-table "^0.2.0" - eslint@^8.47.0: version "8.47.0" resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.47.0.tgz#c95f9b935463fb4fad7005e626c7621052e90806" @@ -8686,12 +8565,7 @@ long@^4.0.0: resolved "https://registry.yarnpkg.com/long/-/long-4.0.0.tgz#9a7b71cfb7d361a194ea555241c92f7468d5bf28" integrity sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA== -long@^5.0.0: - 
version "5.2.0" - resolved "https://registry.yarnpkg.com/long/-/long-5.2.0.tgz#2696dadf4b4da2ce3f6f6b89186085d94d52fd61" - integrity sha512-9RTUNjK60eJbx3uz+TEGF7fUr29ZDxR5QzXcyDpeSfeH28S9ycINflOgOlppit5U+4kNTe83KQnMEerw7GmE8w== - -long@^5.2.0, long@^5.2.1, long@^5.2.3: +long@^5.0.0, long@^5.2.0, long@^5.2.1: version "5.2.3" resolved "https://registry.yarnpkg.com/long/-/long-5.2.3.tgz#a3ba97f3877cf1d778eccbcb048525ebb77499e1" integrity sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q== @@ -10426,7 +10300,7 @@ prettier-plugin-jsdoc@^1.0.0: comment-parser "^1.3.1" mdast-util-from-markdown "^1.2.0" -prettier@^2.6.2, prettier@^2.8.7: +prettier@^2.6.2: version "2.8.8" resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.8.8.tgz#e8c5d7e98a4305ffe3de2e1fc4aca1a71c28b1da" integrity sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q== @@ -10532,7 +10406,25 @@ protobufjs@^6.10.3, protobufjs@^6.8.8, protobufjs@~6.11.2, protobufjs@~6.11.3: "@types/node" ">=13.7.0" long "^4.0.0" -protobufjs@^7.0.0, protobufjs@^7.2.4: +protobufjs@^7.0.0: + version "7.2.6" + resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-7.2.6.tgz#4a0ccd79eb292717aacf07530a07e0ed20278215" + integrity sha512-dgJaEDDL6x8ASUZ1YqWciTRrdOuYNzoOf27oHNfdyvKqHr5i0FV7FSLU+aIeFjyFgVxrpTOtQUi0BLLBymZaBw== + dependencies: + "@protobufjs/aspromise" "^1.1.2" + "@protobufjs/base64" "^1.1.2" + "@protobufjs/codegen" "^2.0.4" + "@protobufjs/eventemitter" "^1.1.0" + "@protobufjs/fetch" "^1.1.0" + "@protobufjs/float" "^1.0.2" + "@protobufjs/inquire" "^1.1.0" + "@protobufjs/path" "^1.1.2" + "@protobufjs/pool" "^1.1.0" + "@protobufjs/utf8" "^1.1.0" + "@types/node" ">=13.7.0" + long "^5.0.0" + +protobufjs@^7.2.4: version "7.2.4" resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-7.2.4.tgz#3fc1ec0cdc89dd91aef9ba6037ba07408485c3ae" integrity sha512-AT+RJgD2sH8phPmCf7OUZR8xGdcJRga4+1cOaXJ64hvcSkVhNcRHOwIxUatPH15+nj59WAGTDv3LSGZPEQbJaQ== @@ -11153,6 +11045,13 @@ ses@^0.18.8: dependencies: "@endo/env-options" "^0.1.4" +ses@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/ses/-/ses-1.2.0.tgz#88cea67bec9c6538bcc740baa939711622457c52" + integrity sha512-+klZ2zjFPbHAkf9owGeih+ZB1lqUqvVoCeL5IpNXmIZPhI5rGE1tR729EZmfBzmPh8j+CrsLr/+cTZUmHGFr9g== + dependencies: + "@endo/env-options" "^1.1.1" + ses@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/ses/-/ses-1.3.0.tgz#4de8a2e740e5ff9e3cdbc4fd4a3574075c493f40" @@ -11907,31 +11806,6 @@ ts-api-utils@~1.0.1: resolved "https://registry.yarnpkg.com/ts-api-utils/-/ts-api-utils-1.0.3.tgz#f12c1c781d04427313dbac808f453f050e54a331" integrity sha512-wNMeqtMz5NtwpT/UZGY5alT+VoKdSsOOP/kqHFcUW1P/VRhH2wJ48+DN2WwUliNbQ976ETwDL0Ifd2VVvgonvg== -ts-poet@^6.7.0: - version "6.7.0" - resolved "https://registry.yarnpkg.com/ts-poet/-/ts-poet-6.7.0.tgz#6b2ff3b7b0c70ea650d6d570dfe6899f73fb3c38" - integrity sha512-A0wvFtpkTCWPw7ftTIwbEH+L+7ul4CU0x3jXKQ+kCnmEQIAOwhpUaBmcAYKxZCxHae9/MUl4LbyTqw25BpzW5Q== - dependencies: - dprint-node "^1.0.8" - -ts-proto-descriptors@1.15.0: - version "1.15.0" - resolved "https://registry.yarnpkg.com/ts-proto-descriptors/-/ts-proto-descriptors-1.15.0.tgz#e859e3a2887da2d954c552524719b80bdb6ee355" - integrity sha512-TYyJ7+H+7Jsqawdv+mfsEpZPTIj9siDHS6EMCzG/z3b/PZiphsX+mWtqFfFVe5/N0Th6V3elK9lQqjnrgTOfrg== - dependencies: - long "^5.2.3" - protobufjs "^7.2.4" - -ts-proto@^1.131.0: - version "1.167.5" - resolved 
"https://registry.yarnpkg.com/ts-proto/-/ts-proto-1.167.5.tgz#98f1a26c211b2c167ce05b4a1a2b9c998ee23d80" - integrity sha512-46ci2eWiLk+rA9rwYya98eUivYP3VRi+978yzt15I34BvPlQlfplMxW2bWJa3yJgdGtNWdT1WLkGn53uhSpjCw== - dependencies: - case-anything "^2.1.13" - protobufjs "^7.2.4" - ts-poet "^6.7.0" - ts-proto-descriptors "1.15.0" - tsconfig-paths@^3.14.1: version "3.15.0" resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz#5299ec605e55b1abb23ec939ef15edaf483070d4" From f122002e38c5ebc97824c84f6e3f12426a7c353d Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Wed, 28 Feb 2024 13:05:37 -0800 Subject: [PATCH 15/47] chore: drop CJS --- packages/cosmic-proto/package.json | 14 +- patches/@cosmology+telescope+1.4.12.patch | 183 ++++++++++++++++++++++ 2 files changed, 188 insertions(+), 9 deletions(-) create mode 100644 patches/@cosmology+telescope+1.4.12.patch diff --git a/packages/cosmic-proto/package.json b/packages/cosmic-proto/package.json index a0b042917a8..53c0927134d 100644 --- a/packages/cosmic-proto/package.json +++ b/packages/cosmic-proto/package.json @@ -13,6 +13,7 @@ "bugs": { "url": "https://github.com/Agoric/agoric-sdk/issues" }, + "type": "module", "exports": { ".": { "types": "./dist/codegen/index.d.ts", @@ -33,7 +34,7 @@ } }, "main": "dist/index.js", - "module": "dist/index.mjs", + "module": "dist/index.js", "typings": "dist/index.d.ts", "directories": { "lib": "src" @@ -43,14 +44,9 @@ "!CHANGELOG.md" ], "scripts": { - "build:cjs": "yarn tsc --outDir dist --module commonjs || true", - "build:mjs": "yarn tsc --outDir mjs --module es2022 --declaration false || true", - "clean:mjs": "rimraf mjs", - "clean:dist": "rimraf dist", - "clean": "npm run clean:mjs && npm run clean:dist", - "build:rename": "publish-scripts --cmd rename --srcDir mjs --outDir dist --findExt js --replaceExt mjs --no-rmDir", - "build": "npm run clean && npm run build:cjs && npm run build:mjs && npm run build:rename", - "codegen": "yarn protos-update && node scripts/codegen.cjs", + "build": "yarn tsc --project tsconfig.build.json", + "clean": "rimraf dist", + "codegen": "yarn protos-update && node scripts/codegen.cjs && yarn prettier --write src", "prepare": "npm run build", "protos-update": "cp -rf ../../golang/cosmos/third_party/proto . 
&& cp -rf ../../golang/cosmos/proto/agoric proto", "lint-fix": "yarn lint:eslint --fix", diff --git a/patches/@cosmology+telescope+1.4.12.patch b/patches/@cosmology+telescope+1.4.12.patch new file mode 100644 index 00000000000..766e1fe60f7 --- /dev/null +++ b/patches/@cosmology+telescope+1.4.12.patch @@ -0,0 +1,183 @@ +diff --git a/node_modules/@cosmology/telescope/main/generators/create-bundle.js b/node_modules/@cosmology/telescope/main/generators/create-bundle.js +index 22a4f0b..47b5ad9 100644 +--- a/node_modules/@cosmology/telescope/main/generators/create-bundle.js ++++ b/node_modules/@cosmology/telescope/main/generators/create-bundle.js +@@ -6,6 +6,13 @@ const plugin = (builder, bundler) => { + if (!builder.options.bundle.enabled) { + return; + } ++ const importStmts = bundler.bundle.importPaths; ++ for (const stmt of importStmts) { ++ if (stmt.source.value.startsWith('.') && !stmt.source.value.endsWith('.js')) { ++ stmt.source.value += '.js'; ++ } ++ } ++ + // [x] bundle + const body = (0, ast_1.recursiveModuleBundle)(builder.options, bundler.bundle.bundleVariables); + const prog = [] +diff --git a/node_modules/@cosmology/telescope/main/generators/create-rpc-msg-clients.js b/node_modules/@cosmology/telescope/main/generators/create-rpc-msg-clients.js +index f37a528..2f84d84 100644 +--- a/node_modules/@cosmology/telescope/main/generators/create-rpc-msg-clients.js ++++ b/node_modules/@cosmology/telescope/main/generators/create-rpc-msg-clients.js +@@ -92,6 +92,7 @@ const plugin = (builder, bundler) => { + const serviceImports = (0, imports_1.getDepsFromQueries)(ctx.mutations, localname); + // TODO we do NOT need all imports... + const imports = (0, imports_1.buildAllImports)(ctx, serviceImports, localname); ++ + const prog = [] + .concat(imports) + .concat(ctx.body) +diff --git a/node_modules/@cosmology/telescope/main/generators/create-stargate-clients.js b/node_modules/@cosmology/telescope/main/generators/create-stargate-clients.js +index 7e75d9e..b593a81 100644 +--- a/node_modules/@cosmology/telescope/main/generators/create-stargate-clients.js ++++ b/node_modules/@cosmology/telescope/main/generators/create-stargate-clients.js +@@ -30,6 +30,9 @@ const plugin = (builder, bundler) => { + let rel = (0, path_1.relative)((0, path_1.dirname)(clientFile), registry.localname); + if (!rel.startsWith('.')) + rel = `./${rel}`; ++ // if (!rel.endsWith('.js')) ++ // rel = `./${rel}.js`; ++ + const variable = (0, utils_1.variableSlug)(registry.localname); + registryVariables.push(variable); + registryImports.push((0, ast_1.importNamespace)(variable, rel)); +@@ -71,7 +74,13 @@ const plugin = (builder, bundler) => { + getTxRpc = (0, ast_1.createGetTxRpc)(ctx, txRpcName, name); + } + const imports = (0, imports_1.buildAllImportsFromGenericContext)(ctx, clientFile); +- let cProg = [...imports, ...registryImports, ...converterImports] ++ const importStmts = [...imports, ...registryImports, ...converterImports]; ++ for (const stmt of importStmts) { ++ if (stmt.source.value.startsWith('.') && !stmt.source.value.endsWith('.js')) { ++ stmt.source.value += '.js'; ++ } ++ } ++ let cProg = importStmts + .concat(aminos) + .concat(protos) + .concat(clientOptions) +diff --git a/node_modules/@cosmology/telescope/main/helpers/binary-coder.js b/node_modules/@cosmology/telescope/main/helpers/binary-coder.js +index 768b9c5..a456537 100644 +--- a/node_modules/@cosmology/telescope/main/helpers/binary-coder.js ++++ b/node_modules/@cosmology/telescope/main/helpers/binary-coder.js +@@ -36,7 +36,7 @@ exports.binary = ` + // 
standalone and requires a support library to be linked with it. This + // support library is itself covered by the above license. + +-import { utf8Length, utf8Read, utf8Write } from "./utf8"; ++import { utf8Length, utf8Read, utf8Write } from "./utf8.js"; + import { + int64ToString, + readInt32, +@@ -52,7 +52,7 @@ import { + writeByte, + zzDecode, + zzEncode, +-} from "./varint"; ++} from "./varint.js"; + + export enum WireType { + Varint = 0, +diff --git a/node_modules/@cosmology/telescope/main/utils/common-create-bundle.js b/node_modules/@cosmology/telescope/main/utils/common-create-bundle.js +index 87c1dbe..66bd4fc 100644 +--- a/node_modules/@cosmology/telescope/main/utils/common-create-bundle.js ++++ b/node_modules/@cosmology/telescope/main/utils/common-create-bundle.js +@@ -8,6 +8,7 @@ const files_1 = require("../utils/files"); + const utils_1 = require("../utils"); + const proto_parser_1 = require("@cosmology/proto-parser"); + const commonBundlePlugin = (builder, bundleFilename, packageMappings, astFn) => { ++ console.log('commonBundlePlugin'); + const localname = bundleFilename; + // create proto ref for context + const pkg = '@root'; +@@ -18,6 +19,7 @@ const commonBundlePlugin = (builder, bundleFilename, packageMappings, astFn) => + const ast = astFn(pCtx.proto, packageMappings); + // generate imports added by context.addUtil + const imports = (0, utils_1.fixlocalpaths)((0, imports_1.aggregateImports)(pCtx, {}, localname)); ++ + const importStmts = (0, imports_1.getImportStatements)(localname, imports); + // construct the AST + const prog = [].concat(importStmts).concat(ast); +diff --git a/node_modules/@cosmology/telescope/main/utils/index.js b/node_modules/@cosmology/telescope/main/utils/index.js +index 5527e77..20bb1ce 100644 +--- a/node_modules/@cosmology/telescope/main/utils/index.js ++++ b/node_modules/@cosmology/telescope/main/utils/index.js +@@ -130,7 +130,7 @@ const getRelativePath = (f1, f2) => { + let importPath = rel.replace((0, path_1.extname)(rel), ''); + if (!/^\./.test(importPath)) + importPath = `./${importPath}`; +- return importPath; ++ return `${importPath}.js`; + }; + exports.getRelativePath = getRelativePath; + __exportStar(require("./common-create-bundle"), exports); +diff --git a/node_modules/@cosmology/telescope/module/helpers/binary-coder.js b/node_modules/@cosmology/telescope/module/helpers/binary-coder.js +index d61377a..76fb6b0 100644 +--- a/node_modules/@cosmology/telescope/module/helpers/binary-coder.js ++++ b/node_modules/@cosmology/telescope/module/helpers/binary-coder.js +@@ -33,7 +33,7 @@ export const binary = ` + // standalone and requires a support library to be linked with it. This + // support library is itself covered by the above license. 
+ +-import { utf8Length, utf8Read, utf8Write } from "./utf8"; ++import { utf8Length, utf8Read, utf8Write } from "./utf8.js"; + import { + int64ToString, + readInt32, +@@ -49,7 +49,7 @@ import { + writeByte, + zzDecode, + zzEncode, +-} from "./varint"; ++} from "./varint.js"; + + export enum WireType { + Varint = 0, +diff --git a/node_modules/@cosmology/telescope/module/utils/index.js b/node_modules/@cosmology/telescope/module/utils/index.js +index 2348081..b7621eb 100644 +--- a/node_modules/@cosmology/telescope/module/utils/index.js ++++ b/node_modules/@cosmology/telescope/module/utils/index.js +@@ -111,6 +111,6 @@ export const getRelativePath = (f1, f2) => { + let importPath = rel.replace(extname(rel), ''); + if (!/^\./.test(importPath)) + importPath = `./${importPath}`; +- return importPath; ++ return `${importPath}.js`; + }; + export * from './common-create-bundle'; +diff --git a/node_modules/@cosmology/telescope/src/helpers/binary-coder.ts b/node_modules/@cosmology/telescope/src/helpers/binary-coder.ts +index d61377a..76fb6b0 100644 +--- a/node_modules/@cosmology/telescope/src/helpers/binary-coder.ts ++++ b/node_modules/@cosmology/telescope/src/helpers/binary-coder.ts +@@ -33,7 +33,7 @@ export const binary = ` + // standalone and requires a support library to be linked with it. This + // support library is itself covered by the above license. + +-import { utf8Length, utf8Read, utf8Write } from "./utf8"; ++import { utf8Length, utf8Read, utf8Write } from "./utf8.js"; + import { + int64ToString, + readInt32, +@@ -49,7 +49,7 @@ import { + writeByte, + zzDecode, + zzEncode, +-} from "./varint"; ++} from "./varint.js"; + + export enum WireType { + Varint = 0, +diff --git a/node_modules/@cosmology/telescope/src/utils/index.ts b/node_modules/@cosmology/telescope/src/utils/index.ts +index ca1c186..cb60a50 100644 +--- a/node_modules/@cosmology/telescope/src/utils/index.ts ++++ b/node_modules/@cosmology/telescope/src/utils/index.ts +@@ -117,7 +117,7 @@ export const getRelativePath = (f1: string, f2: string) => { + const rel = relative(dirname(f1), f2); + let importPath = rel.replace(extname(rel), ''); + if (!/^\./.test(importPath)) importPath = `./${importPath}`; +- return importPath; ++ return `${importPath}.js`; + } + + export * from './common-create-bundle'; From 446262b36eb4247c7730393a156d62c26cdecaf2 Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Mon, 26 Feb 2024 10:11:01 -0800 Subject: [PATCH 16/47] chore(deps): bump @cosmjs/* to get https://github.com/confio/cosmjs-types/issues/78 --- packages/agoric-cli/package.json | 10 +- packages/casting/package.json | 8 +- packages/cosmic-proto/package.json | 8 +- patches/protobufjs+6.11.3.patch | 50 -------- yarn.lock | 196 ++++++++++++++++++++++------- 5 files changed, 161 insertions(+), 111 deletions(-) delete mode 100644 patches/protobufjs+6.11.3.patch diff --git a/packages/agoric-cli/package.json b/packages/agoric-cli/package.json index 8e3a0e7caea..a6ef5d24f25 100644 --- a/packages/agoric-cli/package.json +++ b/packages/agoric-cli/package.json @@ -53,11 +53,11 @@ "@agoric/zoe": "^0.26.2", "@agoric/zone": "^0.2.2", "@confio/relayer": "^0.9.0", - "@cosmjs/crypto": "^0.30.1", - "@cosmjs/encoding": "^0.30.1", - "@cosmjs/math": "^0.30.1", - "@cosmjs/proto-signing": "^0.30.1", - "@cosmjs/stargate": "^0.30.1", + "@cosmjs/crypto": "^0.32.2", + "@cosmjs/encoding": "^0.32.2", + "@cosmjs/math": "^0.32.2", + "@cosmjs/proto-signing": "^0.32.2", + "@cosmjs/stargate": "^0.32.2", "@endo/bundle-source": "^3.1.0", "@endo/captp": "^4.0.4", 
"@endo/compartment-mapper": "^1.1.2", diff --git a/packages/casting/package.json b/packages/casting/package.json index 1329b9576c4..c9406f44f79 100644 --- a/packages/casting/package.json +++ b/packages/casting/package.json @@ -26,10 +26,10 @@ "@agoric/notifier": "^0.6.2", "@agoric/spawner": "^0.6.8", "@agoric/store": "^0.9.2", - "@cosmjs/encoding": "^0.30.1", - "@cosmjs/proto-signing": "^0.30.1", - "@cosmjs/stargate": "^0.30.1", - "@cosmjs/tendermint-rpc": "^0.30.1", + "@cosmjs/encoding": "^0.32.2", + "@cosmjs/proto-signing": "^0.32.2", + "@cosmjs/stargate": "^0.32.2", + "@cosmjs/tendermint-rpc": "^0.32.2", "@endo/far": "^1.0.4", "@endo/init": "^1.0.4", "@endo/lockdown": "^1.0.4", diff --git a/packages/cosmic-proto/package.json b/packages/cosmic-proto/package.json index 53c0927134d..08375ef070d 100644 --- a/packages/cosmic-proto/package.json +++ b/packages/cosmic-proto/package.json @@ -67,10 +67,10 @@ "typescript": "^5.0.4" }, "dependencies": { - "@cosmjs/amino": "0.29.4", - "@cosmjs/proto-signing": "^0.30.1", - "@cosmjs/stargate": "^0.30.1", - "@cosmjs/tendermint-rpc": "^0.30.1", + "@cosmjs/amino": "^0.32.2", + "@cosmjs/proto-signing": "^0.32.2", + "@cosmjs/stargate": "^0.32.2", + "@cosmjs/tendermint-rpc": "^0.32.2", "@endo/init": "^1.0.3" }, "ava": { diff --git a/patches/protobufjs+6.11.3.patch b/patches/protobufjs+6.11.3.patch deleted file mode 100644 index 87c85b8b8b8..00000000000 --- a/patches/protobufjs+6.11.3.patch +++ /dev/null @@ -1,50 +0,0 @@ -diff --git a/node_modules/protobufjs/src/util/minimal.js b/node_modules/protobufjs/src/util/minimal.js -index 3c406de..083f71c 100644 ---- a/node_modules/protobufjs/src/util/minimal.js -+++ b/node_modules/protobufjs/src/util/minimal.js -@@ -280,13 +280,38 @@ function newError(name) { - merge(this, properties); - } - -- (CustomError.prototype = Object.create(Error.prototype)).constructor = CustomError; -- -- Object.defineProperty(CustomError.prototype, "name", { get: function() { return name; } }); -- -- CustomError.prototype.toString = function toString() { -- return this.name + ": " + this.message; -- }; -+ CustomError.prototype = Object.create(Error.prototype, { -+ constructor: { -+ value: CustomError, -+ writable: true, -+ // enumerable: true would accurately preserve the behavior of the -+ // original assignment, but I'm guessing that was not intentional. -+ // For an actual error subclass, this property would not -+ // be enumerable. -+ enumerable: false, -+ configurable: true, -+ }, -+ name: { -+ get() { return name; }, -+ set: undefined, -+ enumerable: false, -+ // configurable: false would accurately preserve the behavior of -+ // the original, but I'm guessing that was not intentional. -+ // For an actual error subclass, this property would -+ // be configurable. -+ configurable: true, -+ }, -+ toString: { -+ value() { return this.name + ": " + this.message; }, -+ writable: true, -+ // enumerable: true would accurately preserve the behavior of the -+ // original assignment, but I'm guessing that was not intentional. -+ // For an actual error subclass, this property would not -+ // be enumerable. 
-+ enumerable: false, -+ configurable: true, -+ }, -+ }); - - return CustomError; - } diff --git a/yarn.lock b/yarn.lock index 6520cb636df..2f426c11d9e 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1157,16 +1157,6 @@ triple-beam "1.3.0" winston "3.3.3" -"@cosmjs/amino@0.29.4": - version "0.29.4" - resolved "https://registry.yarnpkg.com/@cosmjs/amino/-/amino-0.29.4.tgz#93d5f90033cb2af1573627582cd2cf8a515c3ef4" - integrity sha512-FBjaJ4oUKFtH34O7XjUk370x8sF7EbXD29miXrm0Rl5GEtEORJgQwutXQllHo5gBkpOxC+ZQ40CibXhPzH7G7A== - dependencies: - "@cosmjs/crypto" "^0.29.4" - "@cosmjs/encoding" "^0.29.4" - "@cosmjs/math" "^0.29.4" - "@cosmjs/utils" "^0.29.4" - "@cosmjs/amino@^0.30.1": version "0.30.1" resolved "https://registry.yarnpkg.com/@cosmjs/amino/-/amino-0.30.1.tgz#7c18c14627361ba6c88e3495700ceea1f76baace" @@ -1177,6 +1167,16 @@ "@cosmjs/math" "^0.30.1" "@cosmjs/utils" "^0.30.1" +"@cosmjs/amino@^0.32.2": + version "0.32.2" + resolved "https://registry.yarnpkg.com/@cosmjs/amino/-/amino-0.32.2.tgz#ba3cf255e4e6b1ba67461f1ef7b0b8ad3f895da7" + integrity sha512-lcK5RCVm4OfdAooxKcF2+NwaDVVpghOq6o/A40c2mHXDUzUoRZ33VAHjVJ9Me6vOFxshrw/XEFn1f4KObntjYA== + dependencies: + "@cosmjs/crypto" "^0.32.2" + "@cosmjs/encoding" "^0.32.2" + "@cosmjs/math" "^0.32.2" + "@cosmjs/utils" "^0.32.2" + "@cosmjs/cosmwasm-stargate@^0.30.0": version "0.30.1" resolved "https://registry.yarnpkg.com/@cosmjs/cosmwasm-stargate/-/cosmwasm-stargate-0.30.1.tgz#6f9ca310f75433a3e30d683bc6aa24eadb345d79" @@ -1194,19 +1194,6 @@ long "^4.0.0" pako "^2.0.2" -"@cosmjs/crypto@^0.29.4": - version "0.29.5" - resolved "https://registry.yarnpkg.com/@cosmjs/crypto/-/crypto-0.29.5.tgz#ab99fc382b93d8a8db075780cf07487a0f9519fd" - integrity sha512-2bKkaLGictaNL0UipQCL6C1afaisv6k8Wr/GCLx9FqiyFkh9ZgRHDyetD64ZsjnWV/N/D44s/esI+k6oPREaiQ== - dependencies: - "@cosmjs/encoding" "^0.29.5" - "@cosmjs/math" "^0.29.5" - "@cosmjs/utils" "^0.29.5" - "@noble/hashes" "^1" - bn.js "^5.2.0" - elliptic "^6.5.4" - libsodium-wrappers "^0.7.6" - "@cosmjs/crypto@^0.30.0", "@cosmjs/crypto@^0.30.1": version "0.30.1" resolved "https://registry.yarnpkg.com/@cosmjs/crypto/-/crypto-0.30.1.tgz#21e94d5ca8f8ded16eee1389d2639cb5c43c3eb5" @@ -1220,14 +1207,18 @@ elliptic "^6.5.4" libsodium-wrappers "^0.7.6" -"@cosmjs/encoding@^0.29.4", "@cosmjs/encoding@^0.29.5": - version "0.29.5" - resolved "https://registry.yarnpkg.com/@cosmjs/encoding/-/encoding-0.29.5.tgz#009a4b1c596cdfd326f30ccfa79f5e56daa264f2" - integrity sha512-G4rGl/Jg4dMCw5u6PEZHZcoHnUBlukZODHbm/wcL4Uu91fkn5jVo5cXXZcvs4VCkArVGrEj/52eUgTZCmOBGWQ== +"@cosmjs/crypto@^0.32.2": + version "0.32.2" + resolved "https://registry.yarnpkg.com/@cosmjs/crypto/-/crypto-0.32.2.tgz#8ed255d3d1c1c4d916a1586f8cbc33eaab82f511" + integrity sha512-RuxrYKzhrPF9g6NmU7VEq++Hn1vZJjqqJpZ9Tmw9lOYOV8BUsv+j/0BE86kmWi7xVJ7EwxiuxYsKuM8IR18CIA== dependencies: - base64-js "^1.3.0" - bech32 "^1.1.4" - readonly-date "^1.0.0" + "@cosmjs/encoding" "^0.32.2" + "@cosmjs/math" "^0.32.2" + "@cosmjs/utils" "^0.32.2" + "@noble/hashes" "^1" + bn.js "^5.2.0" + elliptic "^6.5.4" + libsodium-wrappers-sumo "^0.7.11" "@cosmjs/encoding@^0.30.0", "@cosmjs/encoding@^0.30.1": version "0.30.1" @@ -1238,6 +1229,15 @@ bech32 "^1.1.4" readonly-date "^1.0.0" +"@cosmjs/encoding@^0.32.2": + version "0.32.2" + resolved "https://registry.yarnpkg.com/@cosmjs/encoding/-/encoding-0.32.2.tgz#8c5c64481a85cd570740c34dccce69d024a29805" + integrity sha512-WX7m1wLpA9V/zH0zRcz4EmgZdAv1F44g4dbXOgNj1eXZw1PIGR12p58OEkLN51Ha3S4DKRtCv5CkhK1KHEvQtg== + dependencies: + base64-js "^1.3.0" + bech32 "^1.1.4" + 
readonly-date "^1.0.0" + "@cosmjs/faucet-client@^0.30.0": version "0.30.1" resolved "https://registry.yarnpkg.com/@cosmjs/faucet-client/-/faucet-client-0.30.1.tgz#81406128830ba47ae824a912144a28a7fe70300d" @@ -1253,12 +1253,13 @@ "@cosmjs/stream" "^0.30.1" xstream "^11.14.0" -"@cosmjs/math@^0.29.4", "@cosmjs/math@^0.29.5": - version "0.29.5" - resolved "https://registry.yarnpkg.com/@cosmjs/math/-/math-0.29.5.tgz#722c96e080d6c2b62215ce9f4c70da7625b241b6" - integrity sha512-2GjKcv+A9f86MAWYLUkjhw1/WpRl2R1BTb3m9qPG7lzMA7ioYff9jY5SPCfafKdxM4TIQGxXQlYGewQL16O68Q== +"@cosmjs/json-rpc@^0.32.2": + version "0.32.2" + resolved "https://registry.yarnpkg.com/@cosmjs/json-rpc/-/json-rpc-0.32.2.tgz#f87fab0d6975ed1d1c7daafcf6f1f81e5e296912" + integrity sha512-lan2lOgmz4yVE/HR8eCOSiII/1OudIulk8836koyIDCsPEpt6eKBuctnAD168vABGArKccLAo7Mr2gy9nrKrOQ== dependencies: - bn.js "^5.2.0" + "@cosmjs/stream" "^0.32.2" + xstream "^11.14.0" "@cosmjs/math@^0.30.0", "@cosmjs/math@^0.30.1": version "0.30.1" @@ -1267,6 +1268,13 @@ dependencies: bn.js "^5.2.0" +"@cosmjs/math@^0.32.2": + version "0.32.2" + resolved "https://registry.yarnpkg.com/@cosmjs/math/-/math-0.32.2.tgz#4522312769197e132679e4960862bcec0eed4cb8" + integrity sha512-b8+ruAAY8aKtVKWSft2IvtCVCUH1LigIlf9ALIiY8n9jtM4kMASiaRbQ/27etnSAInV88IaezKK9rQZrtxTjcw== + dependencies: + bn.js "^5.2.0" + "@cosmjs/proto-signing@^0.30.0", "@cosmjs/proto-signing@^0.30.1": version "0.30.1" resolved "https://registry.yarnpkg.com/@cosmjs/proto-signing/-/proto-signing-0.30.1.tgz#f0dda372488df9cd2677150b89b3e9c72b3cb713" @@ -1280,6 +1288,18 @@ cosmjs-types "^0.7.1" long "^4.0.0" +"@cosmjs/proto-signing@^0.32.2": + version "0.32.2" + resolved "https://registry.yarnpkg.com/@cosmjs/proto-signing/-/proto-signing-0.32.2.tgz#26ed2675978ce24078981f4c15a06c5d6b808f44" + integrity sha512-UV4WwkE3W3G3s7wwU9rizNcUEz2g0W8jQZS5J6/3fiN0mRPwtPKQ6EinPN9ASqcAJ7/VQH4/9EPOw7d6XQGnqw== + dependencies: + "@cosmjs/amino" "^0.32.2" + "@cosmjs/crypto" "^0.32.2" + "@cosmjs/encoding" "^0.32.2" + "@cosmjs/math" "^0.32.2" + "@cosmjs/utils" "^0.32.2" + cosmjs-types "^0.9.0" + "@cosmjs/socket@^0.30.1": version "0.30.1" resolved "https://registry.yarnpkg.com/@cosmjs/socket/-/socket-0.30.1.tgz#00b22f4b5e2ab01f4d82ccdb7b2e59536bfe5ce0" @@ -1290,6 +1310,16 @@ ws "^7" xstream "^11.14.0" +"@cosmjs/socket@^0.32.2": + version "0.32.2" + resolved "https://registry.yarnpkg.com/@cosmjs/socket/-/socket-0.32.2.tgz#a66be3863d03bf2d8df0433af476df010ff10e8c" + integrity sha512-Qc8jaw4uSBJm09UwPgkqe3g9TBFx4ZR9HkXpwT6Z9I+6kbLerXPR0Gy3NSJFSUgxIfTpO8O1yqoWAyf0Ay17Mw== + dependencies: + "@cosmjs/stream" "^0.32.2" + isomorphic-ws "^4.0.1" + ws "^7" + xstream "^11.14.0" + "@cosmjs/stargate@^0.30.0", "@cosmjs/stargate@^0.30.1": version "0.30.1" resolved "https://registry.yarnpkg.com/@cosmjs/stargate/-/stargate-0.30.1.tgz#e1b22e1226cffc6e93914a410755f1f61057ba04" @@ -1308,6 +1338,22 @@ protobufjs "~6.11.3" xstream "^11.14.0" +"@cosmjs/stargate@^0.32.2": + version "0.32.2" + resolved "https://registry.yarnpkg.com/@cosmjs/stargate/-/stargate-0.32.2.tgz#73718c5c6a3ae138682ee9987333d911eca22a13" + integrity sha512-AsJa29fT7Jd4xt9Ai+HMqhyj7UQu7fyYKdXj/8+/9PD74xe6lZSYhQPcitUmMLJ1ckKPgXSk5Dd2LbsQT0IhZg== + dependencies: + "@confio/ics23" "^0.6.8" + "@cosmjs/amino" "^0.32.2" + "@cosmjs/encoding" "^0.32.2" + "@cosmjs/math" "^0.32.2" + "@cosmjs/proto-signing" "^0.32.2" + "@cosmjs/stream" "^0.32.2" + "@cosmjs/tendermint-rpc" "^0.32.2" + "@cosmjs/utils" "^0.32.2" + cosmjs-types "^0.9.0" + xstream "^11.14.0" + "@cosmjs/stream@^0.30.0", 
"@cosmjs/stream@^0.30.1": version "0.30.1" resolved "https://registry.yarnpkg.com/@cosmjs/stream/-/stream-0.30.1.tgz#ba038a2aaf41343696b1e6e759d8e03a9516ec1a" @@ -1315,6 +1361,13 @@ dependencies: xstream "^11.14.0" +"@cosmjs/stream@^0.32.2": + version "0.32.2" + resolved "https://registry.yarnpkg.com/@cosmjs/stream/-/stream-0.32.2.tgz#b1e8f977d25313d659f1aa89ad21614b5391cd93" + integrity sha512-gpCufLfHAD8Zp1ZKge7AHbDf4RA0TZp66wZY6JaQR5bSiEF2Drjtp4mwXZPGejtaUMnaAgff3LrUzPJfKYdQwg== + dependencies: + xstream "^11.14.0" + "@cosmjs/tendermint-rpc@^0.30.0", "@cosmjs/tendermint-rpc@^0.30.1": version "0.30.1" resolved "https://registry.yarnpkg.com/@cosmjs/tendermint-rpc/-/tendermint-rpc-0.30.1.tgz#c16378892ba1ac63f72803fdf7567eab9d4f0aa0" @@ -1331,16 +1384,32 @@ readonly-date "^1.0.0" xstream "^11.14.0" -"@cosmjs/utils@^0.29.4", "@cosmjs/utils@^0.29.5": - version "0.29.5" - resolved "https://registry.yarnpkg.com/@cosmjs/utils/-/utils-0.29.5.tgz#3fed1b3528ae8c5f1eb5d29b68755bebfd3294ee" - integrity sha512-m7h+RXDUxOzEOGt4P+3OVPX7PuakZT3GBmaM/Y2u+abN3xZkziykD/NvedYFvvCCdQo714XcGl33bwifS9FZPQ== +"@cosmjs/tendermint-rpc@^0.32.2": + version "0.32.2" + resolved "https://registry.yarnpkg.com/@cosmjs/tendermint-rpc/-/tendermint-rpc-0.32.2.tgz#c5607b8d472e5bf9fd58d5453db7194f500ccc62" + integrity sha512-DXyJHDmcAfCix4H/7/dKR0UMdshP01KxJOXHdHxBCbLIpck94BsWD3B2ZTXwfA6sv98so9wOzhp7qGQa5malxg== + dependencies: + "@cosmjs/crypto" "^0.32.2" + "@cosmjs/encoding" "^0.32.2" + "@cosmjs/json-rpc" "^0.32.2" + "@cosmjs/math" "^0.32.2" + "@cosmjs/socket" "^0.32.2" + "@cosmjs/stream" "^0.32.2" + "@cosmjs/utils" "^0.32.2" + axios "^1.6.0" + readonly-date "^1.0.0" + xstream "^11.14.0" "@cosmjs/utils@^0.30.0", "@cosmjs/utils@^0.30.1": version "0.30.1" resolved "https://registry.yarnpkg.com/@cosmjs/utils/-/utils-0.30.1.tgz#6d92582341be3c2ec8d82090253cfa4b7f959edb" integrity sha512-KvvX58MGMWh7xA+N+deCfunkA/ZNDvFLw4YbOmX3f/XBIkqrVY7qlotfy2aNb1kgp6h4B6Yc8YawJPDTfvWX7g== +"@cosmjs/utils@^0.32.2": + version "0.32.2" + resolved "https://registry.yarnpkg.com/@cosmjs/utils/-/utils-0.32.2.tgz#324304aa85bfa6f10561cc17781d824d02130897" + integrity sha512-Gg5t+eR7vPJMAmhkFt6CZrzPd0EKpAslWwk5rFVYZpJsM8JG5KT9XQ99hgNM3Ov6ScNoIWbXkpX27F6A9cXR4Q== + "@cosmology/ast@^1.4.8": version "1.4.8" resolved "https://registry.yarnpkg.com/@cosmology/ast/-/ast-1.4.8.tgz#9c6312de9fc4177d861a8723e4121cfdd4988533" @@ -4515,6 +4584,15 @@ axios@^1.0.0: form-data "^4.0.0" proxy-from-env "^1.1.0" +axios@^1.6.0: + version "1.6.7" + resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.7.tgz#7b48c2e27c96f9c68a2f8f31e2ab19f59b06b0a7" + integrity sha512-/hDJGff6/c7u0hDkvkGxR/oy6CbCs8ziCsC7SqmhjfozqiJGc8Z11wrv9z9lYfY4K8l+H9TpjcMDX0xOZmx+RA== + dependencies: + follow-redirects "^1.15.4" + form-data "^4.0.0" + proxy-from-env "^1.1.0" + axobject-query@^3.1.1: version "3.2.1" resolved "https://registry.yarnpkg.com/axobject-query/-/axobject-query-3.2.1.tgz#39c378a6e3b06ca679f29138151e45b2b32da62a" @@ -5435,6 +5513,11 @@ cosmjs-types@^0.7.1: long "^4.0.0" protobufjs "~6.11.2" +cosmjs-types@^0.9.0: + version "0.9.0" + resolved "https://registry.yarnpkg.com/cosmjs-types/-/cosmjs-types-0.9.0.tgz#c3bc482d28c7dfa25d1445093fdb2d9da1f6cfcc" + integrity sha512-MN/yUe6mkJwHnCFfsNPeCfXVhyxHYW6c/xDUzrSbBycYzw++XvWDMJArXp2pLdgD6FQ8DW79vkPjeNKVrXaHeQ== + cross-spawn@^6.0.5: version "6.0.5" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" @@ -6770,11 +6853,16 @@ fn.name@1.x.x: resolved 
"https://registry.yarnpkg.com/fn.name/-/fn.name-1.1.0.tgz#26cad8017967aea8731bc42961d04a3d5988accc" integrity sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw== -follow-redirects@^1.0.0, follow-redirects@^1.14.0, follow-redirects@^1.15.0: +follow-redirects@^1.0.0, follow-redirects@^1.15.0: version "1.15.2" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13" integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA== +follow-redirects@^1.14.0, follow-redirects@^1.15.4: + version "1.15.5" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.5.tgz#54d4d6d062c0fa7d9d17feb008461550e3ba8020" + integrity sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw== + for-each@^0.3.3: version "0.3.3" resolved "https://registry.yarnpkg.com/for-each/-/for-each-0.3.3.tgz#69b447e88a0a5d32c3e7084f3f1710034b21376e" @@ -8399,17 +8487,29 @@ libnpmpublish@^6.0.4: semver "^7.3.7" ssri "^9.0.0" +libsodium-sumo@^0.7.13: + version "0.7.13" + resolved "https://registry.yarnpkg.com/libsodium-sumo/-/libsodium-sumo-0.7.13.tgz#533b97d2be44b1277e59c1f9f60805978ac5542d" + integrity sha512-zTGdLu4b9zSNLfovImpBCbdAA4xkpkZbMnSQjP8HShyOutnGjRHmSOKlsylh1okao6QhLiz7nG98EGn+04cZjQ== + +libsodium-wrappers-sumo@^0.7.11: + version "0.7.13" + resolved "https://registry.yarnpkg.com/libsodium-wrappers-sumo/-/libsodium-wrappers-sumo-0.7.13.tgz#a33aea845a0bb56db067548f04feba28c730ab8e" + integrity sha512-lz4YdplzDRh6AhnLGF2Dj2IUj94xRN6Bh8T0HLNwzYGwPehQJX6c7iYVrFUPZ3QqxE0bqC+K0IIqqZJYWumwSQ== + dependencies: + libsodium-sumo "^0.7.13" + libsodium-wrappers@^0.7.6: - version "0.7.10" - resolved "https://registry.yarnpkg.com/libsodium-wrappers/-/libsodium-wrappers-0.7.10.tgz#13ced44cacb0fc44d6ac9ce67d725956089ce733" - integrity sha512-pO3F1Q9NPLB/MWIhehim42b/Fwb30JNScCNh8TcQ/kIc+qGLQch8ag8wb0keK3EP5kbGakk1H8Wwo7v+36rNQg== + version "0.7.13" + resolved "https://registry.yarnpkg.com/libsodium-wrappers/-/libsodium-wrappers-0.7.13.tgz#83299e06ee1466057ba0e64e532777d2929b90d3" + integrity sha512-kasvDsEi/r1fMzKouIDv7B8I6vNmknXwGiYodErGuESoFTohGSKZplFtVxZqHaoQ217AynyIFgnOVRitpHs0Qw== dependencies: - libsodium "^0.7.0" + libsodium "^0.7.13" -libsodium@^0.7.0: - version "0.7.10" - resolved "https://registry.yarnpkg.com/libsodium/-/libsodium-0.7.10.tgz#c2429a7e4c0836f879d701fec2c8a208af024159" - integrity sha512-eY+z7hDrDKxkAK+QKZVNv92A5KYkxfvIshtBJkmg5TSiCnYqZP3i9OO9whE79Pwgm4jGaoHgkM4ao/b9Cyu4zQ== +libsodium@^0.7.13: + version "0.7.13" + resolved "https://registry.yarnpkg.com/libsodium/-/libsodium-0.7.13.tgz#230712ec0b7447c57b39489c48a4af01985fb393" + integrity sha512-mK8ju0fnrKXXfleL53vtp9xiPq5hKM0zbDQtcxQIsSmxNgSxqCj6R7Hl9PkrNe2j29T4yoDaF7DJLK9/i5iWUw== limiter@^1.1.4: version "1.1.5" From ca28da0e2cb5f54cd44a541e25dba963e0d096d1 Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Mon, 26 Feb 2024 10:21:01 -0800 Subject: [PATCH 17/47] chore(deps): bump @confio/relayer https://github.com/confio/ts-relayer/blob/main/CHANGELOG.md --- packages/agoric-cli/package.json | 4 +- yarn.lock | 239 ++++++------------------------- 2 files changed, 49 insertions(+), 194 deletions(-) diff --git a/packages/agoric-cli/package.json b/packages/agoric-cli/package.json index a6ef5d24f25..0e78d869d0a 100644 --- a/packages/agoric-cli/package.json +++ b/packages/agoric-cli/package.json @@ -29,8 +29,8 @@ "lint:eslint": "eslint ." 
}, "devDependencies": { - "@agoric/deploy-script-support": "^0.10.3", "@agoric/cosmic-swingset": "^0.41.3", + "@agoric/deploy-script-support": "^0.10.3", "ava": "^5.3.0", "c8": "^7.13.0", "dd-trace": "^4.11.1" @@ -52,7 +52,7 @@ "@agoric/vats": "^0.15.1", "@agoric/zoe": "^0.26.2", "@agoric/zone": "^0.2.2", - "@confio/relayer": "^0.9.0", + "@confio/relayer": "^0.11.3", "@cosmjs/crypto": "^0.32.2", "@cosmjs/encoding": "^0.32.2", "@cosmjs/math": "^0.32.2", diff --git a/yarn.lock b/yarn.lock index 2f426c11d9e..648495d2ec8 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1129,44 +1129,33 @@ "@noble/hashes" "^1.0.0" protobufjs "^6.8.8" -"@confio/relayer@^0.9.0": - version "0.9.0" - resolved "https://registry.yarnpkg.com/@confio/relayer/-/relayer-0.9.0.tgz#d0f9917f2a1b02e15d33dacb3e044d358022a9a9" - integrity sha512-YfoPMCH72BM/bYQ58+F75zYsZ2vEPocY0CaQLE6PPDtBOKrdXM3LTj5zUdZSbHDQSrDDmRrKcuZVufpdrRimQw== - dependencies: - "@cosmjs/cosmwasm-stargate" "^0.30.0" - "@cosmjs/crypto" "^0.30.0" - "@cosmjs/encoding" "^0.30.0" - "@cosmjs/faucet-client" "^0.30.0" - "@cosmjs/math" "^0.30.0" - "@cosmjs/proto-signing" "^0.30.0" - "@cosmjs/stargate" "^0.30.0" - "@cosmjs/stream" "^0.30.0" - "@cosmjs/tendermint-rpc" "^0.30.0" - "@cosmjs/utils" "^0.30.0" +"@confio/relayer@^0.11.3": + version "0.11.3" + resolved "https://registry.yarnpkg.com/@confio/relayer/-/relayer-0.11.3.tgz#549488757872e4176fddd4e4e0be36a059151f3b" + integrity sha512-dhARxp3SDcBlwcYZD2keZ2QCa9jmDqM6BdqM8Pu1STfOENXnii853/DcTJ9vrea9KCkU/eAiATl8FjlENwyUZA== + dependencies: + "@cosmjs/cosmwasm-stargate" "^0.32.1" + "@cosmjs/crypto" "^0.32.1" + "@cosmjs/encoding" "^0.32.1" + "@cosmjs/faucet-client" "^0.32.1" + "@cosmjs/math" "^0.32.1" + "@cosmjs/proto-signing" "^0.32.1" + "@cosmjs/stargate" "^0.32.1" + "@cosmjs/stream" "^0.32.1" + "@cosmjs/tendermint-rpc" "^0.32.1" + "@cosmjs/utils" "^0.32.1" ajv "7.1.1" - axios "0.21.4" + axios "^1.6.7" commander "7.1.0" - cosmjs-types "^0.7.1" + cosmjs-types "^0.9.0" fast-safe-stringify "2.0.4" js-yaml "4.0.0" lodash "4.17.21" prom-client "13.1.0" - protobufjs "^6.10.3" table "^6.7.1" triple-beam "1.3.0" winston "3.3.3" -"@cosmjs/amino@^0.30.1": - version "0.30.1" - resolved "https://registry.yarnpkg.com/@cosmjs/amino/-/amino-0.30.1.tgz#7c18c14627361ba6c88e3495700ceea1f76baace" - integrity sha512-yNHnzmvAlkETDYIpeCTdVqgvrdt1qgkOXwuRVi8s27UKI5hfqyE9fJ/fuunXE6ZZPnKkjIecDznmuUOMrMvw4w== - dependencies: - "@cosmjs/crypto" "^0.30.1" - "@cosmjs/encoding" "^0.30.1" - "@cosmjs/math" "^0.30.1" - "@cosmjs/utils" "^0.30.1" - "@cosmjs/amino@^0.32.2": version "0.32.2" resolved "https://registry.yarnpkg.com/@cosmjs/amino/-/amino-0.32.2.tgz#ba3cf255e4e6b1ba67461f1ef7b0b8ad3f895da7" @@ -1177,37 +1166,23 @@ "@cosmjs/math" "^0.32.2" "@cosmjs/utils" "^0.32.2" -"@cosmjs/cosmwasm-stargate@^0.30.0": - version "0.30.1" - resolved "https://registry.yarnpkg.com/@cosmjs/cosmwasm-stargate/-/cosmwasm-stargate-0.30.1.tgz#6f9ca310f75433a3e30d683bc6aa24eadb345d79" - integrity sha512-W/6SLUCJAJGBN+sJLXouLZikVgmqDd9LCdlMzQaxczcCHTWeJAmRvOiZGSZaSy3shw/JN1qc6g6PKpvTVgj10A== - dependencies: - "@cosmjs/amino" "^0.30.1" - "@cosmjs/crypto" "^0.30.1" - "@cosmjs/encoding" "^0.30.1" - "@cosmjs/math" "^0.30.1" - "@cosmjs/proto-signing" "^0.30.1" - "@cosmjs/stargate" "^0.30.1" - "@cosmjs/tendermint-rpc" "^0.30.1" - "@cosmjs/utils" "^0.30.1" - cosmjs-types "^0.7.1" - long "^4.0.0" - pako "^2.0.2" - -"@cosmjs/crypto@^0.30.0", "@cosmjs/crypto@^0.30.1": - version "0.30.1" - resolved 
"https://registry.yarnpkg.com/@cosmjs/crypto/-/crypto-0.30.1.tgz#21e94d5ca8f8ded16eee1389d2639cb5c43c3eb5" - integrity sha512-rAljUlake3MSXs9xAm87mu34GfBLN0h/1uPPV6jEwClWjNkAMotzjC0ab9MARy5FFAvYHL3lWb57bhkbt2GtzQ== +"@cosmjs/cosmwasm-stargate@^0.32.1": + version "0.32.2" + resolved "https://registry.yarnpkg.com/@cosmjs/cosmwasm-stargate/-/cosmwasm-stargate-0.32.2.tgz#32aca8b4c2043cd1bc91cf4d0225b268c166e421" + integrity sha512-OwJHzIx2CoJS6AULxOpNR6m+CI0GXxy8z9svHA1ZawzNM3ZGlL0GvHdhmF0WkpX4E7UdrYlJSLpKcgg5Fo6i7Q== dependencies: - "@cosmjs/encoding" "^0.30.1" - "@cosmjs/math" "^0.30.1" - "@cosmjs/utils" "^0.30.1" - "@noble/hashes" "^1" - bn.js "^5.2.0" - elliptic "^6.5.4" - libsodium-wrappers "^0.7.6" + "@cosmjs/amino" "^0.32.2" + "@cosmjs/crypto" "^0.32.2" + "@cosmjs/encoding" "^0.32.2" + "@cosmjs/math" "^0.32.2" + "@cosmjs/proto-signing" "^0.32.2" + "@cosmjs/stargate" "^0.32.2" + "@cosmjs/tendermint-rpc" "^0.32.2" + "@cosmjs/utils" "^0.32.2" + cosmjs-types "^0.9.0" + pako "^2.0.2" -"@cosmjs/crypto@^0.32.2": +"@cosmjs/crypto@^0.32.1", "@cosmjs/crypto@^0.32.2": version "0.32.2" resolved "https://registry.yarnpkg.com/@cosmjs/crypto/-/crypto-0.32.2.tgz#8ed255d3d1c1c4d916a1586f8cbc33eaab82f511" integrity sha512-RuxrYKzhrPF9g6NmU7VEq++Hn1vZJjqqJpZ9Tmw9lOYOV8BUsv+j/0BE86kmWi7xVJ7EwxiuxYsKuM8IR18CIA== @@ -1220,16 +1195,7 @@ elliptic "^6.5.4" libsodium-wrappers-sumo "^0.7.11" -"@cosmjs/encoding@^0.30.0", "@cosmjs/encoding@^0.30.1": - version "0.30.1" - resolved "https://registry.yarnpkg.com/@cosmjs/encoding/-/encoding-0.30.1.tgz#b5c4e0ef7ceb1f2753688eb96400ed70f35c6058" - integrity sha512-rXmrTbgqwihORwJ3xYhIgQFfMSrwLu1s43RIK9I8EBudPx3KmnmyAKzMOVsRDo9edLFNuZ9GIvysUCwQfq3WlQ== - dependencies: - base64-js "^1.3.0" - bech32 "^1.1.4" - readonly-date "^1.0.0" - -"@cosmjs/encoding@^0.32.2": +"@cosmjs/encoding@^0.32.1", "@cosmjs/encoding@^0.32.2": version "0.32.2" resolved "https://registry.yarnpkg.com/@cosmjs/encoding/-/encoding-0.32.2.tgz#8c5c64481a85cd570740c34dccce69d024a29805" integrity sha512-WX7m1wLpA9V/zH0zRcz4EmgZdAv1F44g4dbXOgNj1eXZw1PIGR12p58OEkLN51Ha3S4DKRtCv5CkhK1KHEvQtg== @@ -1238,20 +1204,12 @@ bech32 "^1.1.4" readonly-date "^1.0.0" -"@cosmjs/faucet-client@^0.30.0": - version "0.30.1" - resolved "https://registry.yarnpkg.com/@cosmjs/faucet-client/-/faucet-client-0.30.1.tgz#81406128830ba47ae824a912144a28a7fe70300d" - integrity sha512-uOYIjouOCjncfthDzeygcWtEpShiNN0u9tORSingufOAO/kmlHwM53M5I1a8ak1EktkEEa+Mxdj3kRfV87DOcw== - dependencies: - axios "^0.21.2" - -"@cosmjs/json-rpc@^0.30.1": - version "0.30.1" - resolved "https://registry.yarnpkg.com/@cosmjs/json-rpc/-/json-rpc-0.30.1.tgz#16f21305fc167598c8a23a45549b85106b2372bc" - integrity sha512-pitfC/2YN9t+kXZCbNuyrZ6M8abnCC2n62m+JtU9vQUfaEtVsgy+1Fk4TRQ175+pIWSdBMFi2wT8FWVEE4RhxQ== +"@cosmjs/faucet-client@^0.32.1": + version "0.32.2" + resolved "https://registry.yarnpkg.com/@cosmjs/faucet-client/-/faucet-client-0.32.2.tgz#c48a44102dd7332d377529cdcca661d4ad1bd2cb" + integrity sha512-tRumHGAURhCV7xvxPltwqiMX2bwJYR6M8Im586hnwVC+Ww2+TwH9c0QjZiUmWzZgCDUPkaSwVswUgucB7H59xA== dependencies: - "@cosmjs/stream" "^0.30.1" - xstream "^11.14.0" + axios "^1.6.0" "@cosmjs/json-rpc@^0.32.2": version "0.32.2" @@ -1261,34 +1219,14 @@ "@cosmjs/stream" "^0.32.2" xstream "^11.14.0" -"@cosmjs/math@^0.30.0", "@cosmjs/math@^0.30.1": - version "0.30.1" - resolved "https://registry.yarnpkg.com/@cosmjs/math/-/math-0.30.1.tgz#8b816ef4de5d3afa66cb9fdfb5df2357a7845b8a" - integrity 
sha512-yaoeI23pin9ZiPHIisa6qqLngfnBR/25tSaWpkTm8Cy10MX70UF5oN4+/t1heLaM6SSmRrhk3psRkV4+7mH51Q== - dependencies: - bn.js "^5.2.0" - -"@cosmjs/math@^0.32.2": +"@cosmjs/math@^0.32.1", "@cosmjs/math@^0.32.2": version "0.32.2" resolved "https://registry.yarnpkg.com/@cosmjs/math/-/math-0.32.2.tgz#4522312769197e132679e4960862bcec0eed4cb8" integrity sha512-b8+ruAAY8aKtVKWSft2IvtCVCUH1LigIlf9ALIiY8n9jtM4kMASiaRbQ/27etnSAInV88IaezKK9rQZrtxTjcw== dependencies: bn.js "^5.2.0" -"@cosmjs/proto-signing@^0.30.0", "@cosmjs/proto-signing@^0.30.1": - version "0.30.1" - resolved "https://registry.yarnpkg.com/@cosmjs/proto-signing/-/proto-signing-0.30.1.tgz#f0dda372488df9cd2677150b89b3e9c72b3cb713" - integrity sha512-tXh8pPYXV4aiJVhTKHGyeZekjj+K9s2KKojMB93Gcob2DxUjfKapFYBMJSgfKPuWUPEmyr8Q9km2hplI38ILgQ== - dependencies: - "@cosmjs/amino" "^0.30.1" - "@cosmjs/crypto" "^0.30.1" - "@cosmjs/encoding" "^0.30.1" - "@cosmjs/math" "^0.30.1" - "@cosmjs/utils" "^0.30.1" - cosmjs-types "^0.7.1" - long "^4.0.0" - -"@cosmjs/proto-signing@^0.32.2": +"@cosmjs/proto-signing@^0.32.1", "@cosmjs/proto-signing@^0.32.2": version "0.32.2" resolved "https://registry.yarnpkg.com/@cosmjs/proto-signing/-/proto-signing-0.32.2.tgz#26ed2675978ce24078981f4c15a06c5d6b808f44" integrity sha512-UV4WwkE3W3G3s7wwU9rizNcUEz2g0W8jQZS5J6/3fiN0mRPwtPKQ6EinPN9ASqcAJ7/VQH4/9EPOw7d6XQGnqw== @@ -1300,16 +1238,6 @@ "@cosmjs/utils" "^0.32.2" cosmjs-types "^0.9.0" -"@cosmjs/socket@^0.30.1": - version "0.30.1" - resolved "https://registry.yarnpkg.com/@cosmjs/socket/-/socket-0.30.1.tgz#00b22f4b5e2ab01f4d82ccdb7b2e59536bfe5ce0" - integrity sha512-r6MpDL+9N+qOS/D5VaxnPaMJ3flwQ36G+vPvYJsXArj93BjgyFB7BwWwXCQDzZ+23cfChPUfhbINOenr8N2Kow== - dependencies: - "@cosmjs/stream" "^0.30.1" - isomorphic-ws "^4.0.1" - ws "^7" - xstream "^11.14.0" - "@cosmjs/socket@^0.32.2": version "0.32.2" resolved "https://registry.yarnpkg.com/@cosmjs/socket/-/socket-0.32.2.tgz#a66be3863d03bf2d8df0433af476df010ff10e8c" @@ -1320,25 +1248,7 @@ ws "^7" xstream "^11.14.0" -"@cosmjs/stargate@^0.30.0", "@cosmjs/stargate@^0.30.1": - version "0.30.1" - resolved "https://registry.yarnpkg.com/@cosmjs/stargate/-/stargate-0.30.1.tgz#e1b22e1226cffc6e93914a410755f1f61057ba04" - integrity sha512-RdbYKZCGOH8gWebO7r6WvNnQMxHrNXInY/gPHPzMjbQF6UatA6fNM2G2tdgS5j5u7FTqlCI10stNXrknaNdzog== - dependencies: - "@confio/ics23" "^0.6.8" - "@cosmjs/amino" "^0.30.1" - "@cosmjs/encoding" "^0.30.1" - "@cosmjs/math" "^0.30.1" - "@cosmjs/proto-signing" "^0.30.1" - "@cosmjs/stream" "^0.30.1" - "@cosmjs/tendermint-rpc" "^0.30.1" - "@cosmjs/utils" "^0.30.1" - cosmjs-types "^0.7.1" - long "^4.0.0" - protobufjs "~6.11.3" - xstream "^11.14.0" - -"@cosmjs/stargate@^0.32.2": +"@cosmjs/stargate@^0.32.1", "@cosmjs/stargate@^0.32.2": version "0.32.2" resolved "https://registry.yarnpkg.com/@cosmjs/stargate/-/stargate-0.32.2.tgz#73718c5c6a3ae138682ee9987333d911eca22a13" integrity sha512-AsJa29fT7Jd4xt9Ai+HMqhyj7UQu7fyYKdXj/8+/9PD74xe6lZSYhQPcitUmMLJ1ckKPgXSk5Dd2LbsQT0IhZg== @@ -1354,37 +1264,14 @@ cosmjs-types "^0.9.0" xstream "^11.14.0" -"@cosmjs/stream@^0.30.0", "@cosmjs/stream@^0.30.1": - version "0.30.1" - resolved "https://registry.yarnpkg.com/@cosmjs/stream/-/stream-0.30.1.tgz#ba038a2aaf41343696b1e6e759d8e03a9516ec1a" - integrity sha512-Fg0pWz1zXQdoxQZpdHRMGvUH5RqS6tPv+j9Eh7Q953UjMlrwZVo0YFLC8OTf/HKVf10E4i0u6aM8D69Q6cNkgQ== - dependencies: - xstream "^11.14.0" - -"@cosmjs/stream@^0.32.2": +"@cosmjs/stream@^0.32.1", "@cosmjs/stream@^0.32.2": version "0.32.2" resolved 
"https://registry.yarnpkg.com/@cosmjs/stream/-/stream-0.32.2.tgz#b1e8f977d25313d659f1aa89ad21614b5391cd93" integrity sha512-gpCufLfHAD8Zp1ZKge7AHbDf4RA0TZp66wZY6JaQR5bSiEF2Drjtp4mwXZPGejtaUMnaAgff3LrUzPJfKYdQwg== dependencies: xstream "^11.14.0" -"@cosmjs/tendermint-rpc@^0.30.0", "@cosmjs/tendermint-rpc@^0.30.1": - version "0.30.1" - resolved "https://registry.yarnpkg.com/@cosmjs/tendermint-rpc/-/tendermint-rpc-0.30.1.tgz#c16378892ba1ac63f72803fdf7567eab9d4f0aa0" - integrity sha512-Z3nCwhXSbPZJ++v85zHObeUggrEHVfm1u18ZRwXxFE9ZMl5mXTybnwYhczuYOl7KRskgwlB+rID0WYACxj4wdQ== - dependencies: - "@cosmjs/crypto" "^0.30.1" - "@cosmjs/encoding" "^0.30.1" - "@cosmjs/json-rpc" "^0.30.1" - "@cosmjs/math" "^0.30.1" - "@cosmjs/socket" "^0.30.1" - "@cosmjs/stream" "^0.30.1" - "@cosmjs/utils" "^0.30.1" - axios "^0.21.2" - readonly-date "^1.0.0" - xstream "^11.14.0" - -"@cosmjs/tendermint-rpc@^0.32.2": +"@cosmjs/tendermint-rpc@^0.32.1", "@cosmjs/tendermint-rpc@^0.32.2": version "0.32.2" resolved "https://registry.yarnpkg.com/@cosmjs/tendermint-rpc/-/tendermint-rpc-0.32.2.tgz#c5607b8d472e5bf9fd58d5453db7194f500ccc62" integrity sha512-DXyJHDmcAfCix4H/7/dKR0UMdshP01KxJOXHdHxBCbLIpck94BsWD3B2ZTXwfA6sv98so9wOzhp7qGQa5malxg== @@ -1400,12 +1287,7 @@ readonly-date "^1.0.0" xstream "^11.14.0" -"@cosmjs/utils@^0.30.0", "@cosmjs/utils@^0.30.1": - version "0.30.1" - resolved "https://registry.yarnpkg.com/@cosmjs/utils/-/utils-0.30.1.tgz#6d92582341be3c2ec8d82090253cfa4b7f959edb" - integrity sha512-KvvX58MGMWh7xA+N+deCfunkA/ZNDvFLw4YbOmX3f/XBIkqrVY7qlotfy2aNb1kgp6h4B6Yc8YawJPDTfvWX7g== - -"@cosmjs/utils@^0.32.2": +"@cosmjs/utils@^0.32.1", "@cosmjs/utils@^0.32.2": version "0.32.2" resolved "https://registry.yarnpkg.com/@cosmjs/utils/-/utils-0.32.2.tgz#324304aa85bfa6f10561cc17781d824d02130897" integrity sha512-Gg5t+eR7vPJMAmhkFt6CZrzPd0EKpAslWwk5rFVYZpJsM8JG5KT9XQ99hgNM3Ov6ScNoIWbXkpX27F6A9cXR4Q== @@ -4568,13 +4450,6 @@ axe-core@^4.6.2: resolved "https://registry.yarnpkg.com/axe-core/-/axe-core-4.7.2.tgz#040a7342b20765cb18bb50b628394c21bccc17a0" integrity sha512-zIURGIS1E1Q4pcrMjp+nnEh+16G56eG/MUllJH8yEvw7asDo7Ac9uhC9KIH5jzpITueEZolfYglnCGIuSBz39g== -axios@0.21.4, axios@^0.21.2: - version "0.21.4" - resolved "https://registry.yarnpkg.com/axios/-/axios-0.21.4.tgz#c67b90dc0568e5c1cf2b0b858c43ba28e2eda575" - integrity sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg== - dependencies: - follow-redirects "^1.14.0" - axios@^1.0.0: version "1.6.0" resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.0.tgz#f1e5292f26b2fd5c2e66876adc5b06cdbd7d2102" @@ -4584,7 +4459,7 @@ axios@^1.0.0: form-data "^4.0.0" proxy-from-env "^1.1.0" -axios@^1.6.0: +axios@^1.6.0, axios@^1.6.7: version "1.6.7" resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.7.tgz#7b48c2e27c96f9c68a2f8f31e2ab19f59b06b0a7" integrity sha512-/hDJGff6/c7u0hDkvkGxR/oy6CbCs8ziCsC7SqmhjfozqiJGc8Z11wrv9z9lYfY4K8l+H9TpjcMDX0xOZmx+RA== @@ -5505,14 +5380,6 @@ cosmiconfig@^7.0.0: path-type "^4.0.0" yaml "^1.10.0" -cosmjs-types@^0.7.1: - version "0.7.2" - resolved "https://registry.yarnpkg.com/cosmjs-types/-/cosmjs-types-0.7.2.tgz#a757371abd340949c5bd5d49c6f8379ae1ffd7e2" - integrity sha512-vf2uLyktjr/XVAgEq0DjMxeAWh1yYREe7AMHDKd7EiHVqxBPCaBS+qEEQUkXbR9ndnckqr1sUG8BQhazh4X5lA== - dependencies: - long "^4.0.0" - protobufjs "~6.11.2" - cosmjs-types@^0.9.0: version "0.9.0" resolved "https://registry.yarnpkg.com/cosmjs-types/-/cosmjs-types-0.9.0.tgz#c3bc482d28c7dfa25d1445093fdb2d9da1f6cfcc" @@ -6858,7 +6725,7 @@ 
follow-redirects@^1.0.0, follow-redirects@^1.15.0: resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13" integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA== -follow-redirects@^1.14.0, follow-redirects@^1.15.4: +follow-redirects@^1.15.4: version "1.15.5" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.5.tgz#54d4d6d062c0fa7d9d17feb008461550e3ba8020" integrity sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw== @@ -8499,18 +8366,6 @@ libsodium-wrappers-sumo@^0.7.11: dependencies: libsodium-sumo "^0.7.13" -libsodium-wrappers@^0.7.6: - version "0.7.13" - resolved "https://registry.yarnpkg.com/libsodium-wrappers/-/libsodium-wrappers-0.7.13.tgz#83299e06ee1466057ba0e64e532777d2929b90d3" - integrity sha512-kasvDsEi/r1fMzKouIDv7B8I6vNmknXwGiYodErGuESoFTohGSKZplFtVxZqHaoQ217AynyIFgnOVRitpHs0Qw== - dependencies: - libsodium "^0.7.13" - -libsodium@^0.7.13: - version "0.7.13" - resolved "https://registry.yarnpkg.com/libsodium/-/libsodium-0.7.13.tgz#230712ec0b7447c57b39489c48a4af01985fb393" - integrity sha512-mK8ju0fnrKXXfleL53vtp9xiPq5hKM0zbDQtcxQIsSmxNgSxqCj6R7Hl9PkrNe2j29T4yoDaF7DJLK9/i5iWUw== - limiter@^1.1.4: version "1.1.5" resolved "https://registry.yarnpkg.com/limiter/-/limiter-1.1.5.tgz#8f92a25b3b16c6131293a0cc834b4a838a2aa7c2" @@ -10487,7 +10342,7 @@ proto-list@~1.2.1: resolved "https://registry.yarnpkg.com/proto-list/-/proto-list-1.2.4.tgz#212d5bfe1318306a420f6402b8e26ff39647a849" integrity sha1-IS1b/hMYMGpCD2QCuOJv85ZHqEk= -protobufjs@^6.10.3, protobufjs@^6.8.8, protobufjs@~6.11.2, protobufjs@~6.11.3: +protobufjs@^6.8.8: version "6.11.3" resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-6.11.3.tgz#637a527205a35caa4f3e2a9a4a13ddffe0e7af74" integrity sha512-xL96WDdCZYdU7Slin569tFX712BxsxslWwAfAhCYjQKGTq7dAU91Lomy6nLLhh/dyGhk/YH4TwTSRxTzhuHyZg== From 3a550258b919bc10698ae6ef1cb60e8bd36c013a Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Fri, 23 Feb 2024 15:53:38 -0800 Subject: [PATCH 18/47] build: separate tsconfig so lint doesn't build --- packages/cosmic-proto/tsconfig.build.json | 22 ++++++++++++++++++++++ packages/cosmic-proto/tsconfig.json | 16 ++-------------- 2 files changed, 24 insertions(+), 14 deletions(-) create mode 100644 packages/cosmic-proto/tsconfig.build.json diff --git a/packages/cosmic-proto/tsconfig.build.json b/packages/cosmic-proto/tsconfig.build.json new file mode 100644 index 00000000000..6ec7f697de9 --- /dev/null +++ b/packages/cosmic-proto/tsconfig.build.json @@ -0,0 +1,22 @@ +{ + "extends": [ + "./tsconfig.json", + // Different build options from other SDK packages because this generates the JavaScript, not just types + // "../../tsconfig-build-options.json" + ], + "compilerOptions": { + "noEmit": false, + "outDir": "dist", + "emitDeclarationOnly": false, + "declaration": true, + "target": "es2022", + "module": "es2022", + "lib": [ + "es2022", + "DOM" + ], + "sourceMap": true, + "isolatedModules": true, + "downlevelIteration": true, + } +} diff --git a/packages/cosmic-proto/tsconfig.json b/packages/cosmic-proto/tsconfig.json index 38eee4ccf88..bd076a2943f 100644 --- a/packages/cosmic-proto/tsconfig.json +++ b/packages/cosmic-proto/tsconfig.json @@ -1,21 +1,9 @@ { + "extends": "../../tsconfig.json", "compilerOptions": { "baseUrl": ".", - "rootDir": "src", "skipLibCheck": true, - "emitDeclarationOnly": false, - "declaration": true, "esModuleInterop": 
true, - "target": "es2022", - "module": "es2022", - "lib": [ - "es2022", - "DOM" - ], - "sourceMap": true, - "isolatedModules": true, - "allowJs": true, - "downlevelIteration": true, "moduleResolution": "node", "resolveJsonModule": true }, @@ -25,4 +13,4 @@ "exclude": [ "node_modules" ] -} \ No newline at end of file +} From 6c3b57135b76b927c38a61933d8aac3c24de120d Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Fri, 16 Feb 2024 16:57:54 -0800 Subject: [PATCH 19/47] chore: yarn codegen --- .../proto/agoric/lien/genesis.proto | 25 + .../cosmic-proto/proto/agoric/lien/lien.proto | 25 + .../proto/agoric/swingset/genesis.proto | 26 + .../proto/agoric/swingset/msgs.proto | 139 + .../proto/agoric/swingset/query.proto | 66 + .../proto/agoric/swingset/swingset.proto | 168 + .../proto/agoric/vbank/genesis.proto | 18 + .../proto/agoric/vbank/msgs.proto | 8 + .../proto/agoric/vbank/query.proto | 39 + .../proto/agoric/vbank/vbank.proto | 67 + .../cosmic-proto/proto/agoric/vibc/msgs.proto | 32 + .../proto/agoric/vstorage/genesis.proto | 25 + .../proto/agoric/vstorage/query.proto | 110 + .../proto/agoric/vstorage/vstorage.proto | 27 + .../cosmic-proto/src/codegen/agoric/bundle.ts | 69 + .../cosmic-proto/src/codegen/agoric/client.ts | 59 + .../src/codegen/agoric/lien/genesis.ts | 235 + .../src/codegen/agoric/lien/lien.ts | 153 + .../src/codegen/agoric/rpc.query.ts | 24 + .../cosmic-proto/src/codegen/agoric/rpc.tx.ts | 10 + .../src/codegen/agoric/swingset/genesis.ts | 297 + .../src/codegen/agoric/swingset/msgs.amino.ts | 35 + .../codegen/agoric/swingset/msgs.registry.ts | 183 + .../codegen/agoric/swingset/msgs.rpc.msg.ts | 98 + .../src/codegen/agoric/swingset/msgs.ts | 1246 +++ .../agoric/swingset/query.rpc.Query.ts | 66 + .../src/codegen/agoric/swingset/query.ts | 609 ++ .../src/codegen/agoric/swingset/swingset.ts | 1372 ++++ .../src/codegen/agoric/vbank/genesis.ts | 137 + .../src/codegen/agoric/vbank/msgs.ts | 1 + .../codegen/agoric/vbank/query.rpc.Query.ts | 51 + .../src/codegen/agoric/vbank/query.ts | 376 + .../src/codegen/agoric/vbank/vbank.ts | 431 ++ .../src/codegen/agoric/vibc/msgs.amino.ts | 9 + .../src/codegen/agoric/vibc/msgs.registry.ts | 53 + .../src/codegen/agoric/vibc/msgs.rpc.msg.ts | 23 + .../src/codegen/agoric/vibc/msgs.ts | 209 + .../codegen/agoric/vlocalchain/vlocalchain.ts | 507 ++ .../src/codegen/agoric/vstorage/genesis.ts | 239 + .../agoric/vstorage/query.rpc.Query.ts | 69 + .../src/codegen/agoric/vstorage/query.ts | 790 ++ .../src/codegen/agoric/vstorage/vstorage.ts | 214 + packages/cosmic-proto/src/codegen/binary.ts | 534 ++ .../cosmos/base/query/v1beta1/pagination.ts | 438 ++ .../src/codegen/cosmos/base/v1beta1/coin.ts | 479 ++ .../cosmic-proto/src/codegen/cosmos/bundle.ts | 21 + .../codegen/cosmos/upgrade/v1beta1/upgrade.ts | 748 ++ .../src/codegen/cosmos_proto/bundle.ts | 5 + .../src/codegen/cosmos_proto/cosmos.ts | 396 + .../src/codegen/gogoproto/bundle.ts | 5 + .../src/codegen/gogoproto/gogo.ts | 1 + .../src/codegen/google/api/annotations.ts | 1 + .../src/codegen/google/api/http.ts | 1472 ++++ .../cosmic-proto/src/codegen/google/bundle.ts | 11 + .../src/codegen/google/protobuf/any.ts | 421 + .../src/codegen/google/protobuf/descriptor.ts | 6741 +++++++++++++++++ .../src/codegen/google/protobuf/timestamp.ts | 372 + packages/cosmic-proto/src/codegen/helpers.ts | 250 + .../cosmic-proto/src/codegen/ibc/bundle.ts | 17 + .../codegen/ibc/core/channel/v1/channel.ts | 1511 ++++ .../src/codegen/ibc/core/client/v1/client.ts | 1164 +++ packages/cosmic-proto/src/codegen/index.ts | 17 
+ packages/cosmic-proto/src/codegen/utf8.ts | 148 + packages/cosmic-proto/src/codegen/varint.ts | 488 ++ 64 files changed, 23580 insertions(+) create mode 100644 packages/cosmic-proto/proto/agoric/lien/genesis.proto create mode 100644 packages/cosmic-proto/proto/agoric/lien/lien.proto create mode 100644 packages/cosmic-proto/proto/agoric/swingset/genesis.proto create mode 100644 packages/cosmic-proto/proto/agoric/swingset/msgs.proto create mode 100644 packages/cosmic-proto/proto/agoric/swingset/query.proto create mode 100644 packages/cosmic-proto/proto/agoric/swingset/swingset.proto create mode 100644 packages/cosmic-proto/proto/agoric/vbank/genesis.proto create mode 100644 packages/cosmic-proto/proto/agoric/vbank/msgs.proto create mode 100644 packages/cosmic-proto/proto/agoric/vbank/query.proto create mode 100644 packages/cosmic-proto/proto/agoric/vbank/vbank.proto create mode 100644 packages/cosmic-proto/proto/agoric/vibc/msgs.proto create mode 100644 packages/cosmic-proto/proto/agoric/vstorage/genesis.proto create mode 100644 packages/cosmic-proto/proto/agoric/vstorage/query.proto create mode 100644 packages/cosmic-proto/proto/agoric/vstorage/vstorage.proto create mode 100644 packages/cosmic-proto/src/codegen/agoric/bundle.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/client.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/lien/genesis.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/lien/lien.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/rpc.query.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/rpc.tx.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/swingset/genesis.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/swingset/msgs.amino.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/swingset/msgs.registry.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/swingset/msgs.rpc.msg.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/swingset/msgs.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/swingset/query.rpc.Query.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/swingset/query.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/swingset/swingset.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/vbank/genesis.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/vbank/msgs.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/vbank/query.rpc.Query.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/vbank/query.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/vbank/vbank.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/vibc/msgs.amino.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/vibc/msgs.registry.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/vibc/msgs.rpc.msg.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/vibc/msgs.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/vlocalchain/vlocalchain.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/vstorage/genesis.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/vstorage/query.rpc.Query.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/vstorage/query.ts create mode 100644 packages/cosmic-proto/src/codegen/agoric/vstorage/vstorage.ts create mode 100644 packages/cosmic-proto/src/codegen/binary.ts create mode 100644 packages/cosmic-proto/src/codegen/cosmos/base/query/v1beta1/pagination.ts create mode 
100644 packages/cosmic-proto/src/codegen/cosmos/base/v1beta1/coin.ts create mode 100644 packages/cosmic-proto/src/codegen/cosmos/bundle.ts create mode 100644 packages/cosmic-proto/src/codegen/cosmos/upgrade/v1beta1/upgrade.ts create mode 100644 packages/cosmic-proto/src/codegen/cosmos_proto/bundle.ts create mode 100644 packages/cosmic-proto/src/codegen/cosmos_proto/cosmos.ts create mode 100644 packages/cosmic-proto/src/codegen/gogoproto/bundle.ts create mode 100644 packages/cosmic-proto/src/codegen/gogoproto/gogo.ts create mode 100644 packages/cosmic-proto/src/codegen/google/api/annotations.ts create mode 100644 packages/cosmic-proto/src/codegen/google/api/http.ts create mode 100644 packages/cosmic-proto/src/codegen/google/bundle.ts create mode 100644 packages/cosmic-proto/src/codegen/google/protobuf/any.ts create mode 100644 packages/cosmic-proto/src/codegen/google/protobuf/descriptor.ts create mode 100644 packages/cosmic-proto/src/codegen/google/protobuf/timestamp.ts create mode 100644 packages/cosmic-proto/src/codegen/helpers.ts create mode 100644 packages/cosmic-proto/src/codegen/ibc/bundle.ts create mode 100644 packages/cosmic-proto/src/codegen/ibc/core/channel/v1/channel.ts create mode 100644 packages/cosmic-proto/src/codegen/ibc/core/client/v1/client.ts create mode 100644 packages/cosmic-proto/src/codegen/index.ts create mode 100644 packages/cosmic-proto/src/codegen/utf8.ts create mode 100644 packages/cosmic-proto/src/codegen/varint.ts diff --git a/packages/cosmic-proto/proto/agoric/lien/genesis.proto b/packages/cosmic-proto/proto/agoric/lien/genesis.proto new file mode 100644 index 00000000000..b5786957fd7 --- /dev/null +++ b/packages/cosmic-proto/proto/agoric/lien/genesis.proto @@ -0,0 +1,25 @@ +syntax = "proto3"; +package agoric.lien; + +import "gogoproto/gogo.proto"; +import "agoric/lien/lien.proto"; + +option go_package = "github.com/Agoric/agoric-sdk/golang/cosmos/x/lien/types"; + +// The initial or exported state. +message GenesisState { + option (gogoproto.equal) = false; + + repeated AccountLien liens = 1 [ + (gogoproto.nullable) = false + ]; +} + +// The lien on a particular account +message AccountLien { + // Account address, bech32-encoded. + string address = 1; + + // The liened amount. Should be nonzero. + Lien lien = 2; +} diff --git a/packages/cosmic-proto/proto/agoric/lien/lien.proto b/packages/cosmic-proto/proto/agoric/lien/lien.proto new file mode 100644 index 00000000000..d66e9c2955a --- /dev/null +++ b/packages/cosmic-proto/proto/agoric/lien/lien.proto @@ -0,0 +1,25 @@ +syntax = "proto3"; +package agoric.lien; + +import "gogoproto/gogo.proto"; +import "cosmos/base/v1beta1/coin.proto"; + +option go_package = "github.com/Agoric/agoric-sdk/golang/cosmos/x/lien/types"; + +// Lien contains the lien state of a particular account. +message Lien { + // coins holds the amount liened + repeated cosmos.base.v1beta1.Coin coins = 1 [ + (gogoproto.nullable) = false, + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins", + (gogoproto.moretags) = "yaml:\"coins\"" + ]; + // delegated tracks the net amount delegated for non-vesting accounts, + // or zero coins for vesting accounts. + // (Vesting accounts have their own fields to track delegation.) 
+ repeated cosmos.base.v1beta1.Coin delegated = 2 [ + (gogoproto.nullable) = false, + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins", + (gogoproto.moretags) = "yaml:\"delegated\"" + ]; +} diff --git a/packages/cosmic-proto/proto/agoric/swingset/genesis.proto b/packages/cosmic-proto/proto/agoric/swingset/genesis.proto new file mode 100644 index 00000000000..8a178e4e12e --- /dev/null +++ b/packages/cosmic-proto/proto/agoric/swingset/genesis.proto @@ -0,0 +1,26 @@ +syntax = "proto3"; +package agoric.swingset; + +import "gogoproto/gogo.proto"; +import "agoric/swingset/swingset.proto"; + +option go_package = "github.com/Agoric/agoric-sdk/golang/cosmos/x/swingset/types"; + +// The initial or exported state. +message GenesisState { + option (gogoproto.equal) = false; + + Params params = 2 [(gogoproto.nullable) = false]; + + State state = 3 [(gogoproto.nullable) = false]; + + repeated SwingStoreExportDataEntry swing_store_export_data = 4 [ + (gogoproto.jsontag) = "swingStoreExportData" + ]; +} + +// A SwingStore "export data" entry. +message SwingStoreExportDataEntry { + string key = 1; + string value = 2; +} diff --git a/packages/cosmic-proto/proto/agoric/swingset/msgs.proto b/packages/cosmic-proto/proto/agoric/swingset/msgs.proto new file mode 100644 index 00000000000..fcd96c1ea1b --- /dev/null +++ b/packages/cosmic-proto/proto/agoric/swingset/msgs.proto @@ -0,0 +1,139 @@ +syntax = "proto3"; +package agoric.swingset; + +import "gogoproto/gogo.proto"; + +option go_package = "github.com/Agoric/agoric-sdk/golang/cosmos/x/swingset/types"; + +// Transactions. +service Msg { + // Install a JavaScript sources bundle on the chain's SwingSet controller. + rpc InstallBundle(MsgInstallBundle) returns (MsgInstallBundleResponse); + // Send inbound messages. + rpc DeliverInbound(MsgDeliverInbound) returns (MsgDeliverInboundResponse); + // Perform a low-privilege wallet action. + rpc WalletAction(MsgWalletAction) returns (MsgWalletActionResponse); + // Perform a wallet action that spends assets. + rpc WalletSpendAction(MsgWalletSpendAction) returns (MsgWalletSpendActionResponse); + // Provision a new endpoint. + rpc Provision(MsgProvision) returns (MsgProvisionResponse); +} + +// MsgDeliverInbound defines an SDK message for delivering an eventual send +message MsgDeliverInbound { + option (gogoproto.equal) = false; + + repeated string messages = 1 [ + (gogoproto.jsontag) = "messages", + (gogoproto.moretags) = "yaml:\"messages\"" + ]; + repeated uint64 nums = 2 [ + (gogoproto.jsontag) = "nums", + (gogoproto.moretags) = "yaml:\"nums\"" + ]; + uint64 ack = 3 [ + (gogoproto.jsontag) = "ack", + (gogoproto.moretags) = "yaml:\"ack\"" + ]; + bytes submitter = 4 [ + (gogoproto.casttype) = "github.com/cosmos/cosmos-sdk/types.AccAddress", + (gogoproto.jsontag) = "submitter", + (gogoproto.moretags) = "yaml:\"submitter\"" + ]; +} + +// MsgDeliverInboundResponse is an empty reply. +message MsgDeliverInboundResponse {} + +// MsgWalletAction defines an SDK message for the on-chain wallet to perform an +// action that *does not* spend any assets (other than gas fees/stamps). This +// message type is typically protected by feegrant budgets. +message MsgWalletAction { + option (gogoproto.equal) = false; + + bytes owner = 1 [ + (gogoproto.casttype) = "github.com/cosmos/cosmos-sdk/types.AccAddress", + (gogoproto.jsontag) = "owner", + (gogoproto.moretags) = "yaml:\"owner\"" + ]; + + // The action to perform, as JSON-stringified marshalled data. 
+ string action = 2; +} + +// MsgWalletActionResponse is an empty reply. +message MsgWalletActionResponse {} + +// MsgWalletSpendAction defines an SDK message for the on-chain wallet to +// perform an action that *does spend the owner's assets.* This message type is +// typically protected by explicit confirmation by the user. +message MsgWalletSpendAction { + option (gogoproto.equal) = false; + + bytes owner = 1 [ + (gogoproto.casttype) = "github.com/cosmos/cosmos-sdk/types.AccAddress", + (gogoproto.jsontag) = "owner", + (gogoproto.moretags) = "yaml:\"owner\"" + ]; + + // The action to perform, as JSON-stringified marshalled data. + string spend_action = 2; +} + +// MsgWalletSpendActionResponse is an empty reply. +message MsgWalletSpendActionResponse {} + +// MsgProvision defines an SDK message for provisioning a client to the chain +message MsgProvision { + option (gogoproto.equal) = false; + + string nickname = 1 [ + (gogoproto.jsontag) = "nickname", + (gogoproto.moretags) = "yaml:\"nickname\"" + ]; + bytes address = 2 [ + (gogoproto.casttype) = "github.com/cosmos/cosmos-sdk/types.AccAddress", + (gogoproto.jsontag) = "address", + (gogoproto.moretags) = "yaml:\"address\"" + ]; + repeated string power_flags = 3 [ + (gogoproto.customname) = "PowerFlags", + (gogoproto.jsontag) = "powerFlags", + (gogoproto.moretags) = "yaml:\"powerFlags\"" + ]; + bytes submitter = 4 [ + (gogoproto.casttype) = "github.com/cosmos/cosmos-sdk/types.AccAddress", + (gogoproto.jsontag) = "submitter", + (gogoproto.moretags) = "yaml:\"submitter\"" + ]; +} + +// MsgProvisionResponse is an empty reply. +message MsgProvisionResponse {} + +// MsgInstallBundle carries a signed bundle to SwingSet. +message MsgInstallBundle { + string bundle = 1 [ + (gogoproto.jsontag) = "bundle", + (gogoproto.moretags) = "yaml:\"bundle\"" + ]; + bytes submitter = 2 [ + (gogoproto.casttype) = "github.com/cosmos/cosmos-sdk/types.AccAddress", + (gogoproto.jsontag) = "submitter", + (gogoproto.moretags) = "yaml:\"submitter\"" + ]; + // Either bundle or compressed_bundle will be set. + // Default compression algorithm is gzip. + bytes compressed_bundle = 3 [ + (gogoproto.jsontag) = "compressedBundle", + (gogoproto.moretags) = "yaml:\"compressedBundle\"" + ]; + // Size in bytes of uncompression of compressed_bundle. + int64 uncompressed_size = 4 [ + (gogoproto.jsontag) = "uncompressedSize" + ]; +} + +// MsgInstallBundleResponse is an empty acknowledgement that an install bundle +// message has been queued for the SwingSet kernel's consideration. +message MsgInstallBundleResponse {} diff --git a/packages/cosmic-proto/proto/agoric/swingset/query.proto b/packages/cosmic-proto/proto/agoric/swingset/query.proto new file mode 100644 index 00000000000..12cc4396d45 --- /dev/null +++ b/packages/cosmic-proto/proto/agoric/swingset/query.proto @@ -0,0 +1,66 @@ +syntax = "proto3"; +package agoric.swingset; + +import "gogoproto/gogo.proto"; +import "agoric/swingset/swingset.proto"; +import "google/api/annotations.proto"; + +option go_package = "github.com/Agoric/agoric-sdk/golang/cosmos/x/swingset/types"; + +// Query provides defines the gRPC querier service +service Query { + // Params queries params of the swingset module. + rpc Params(QueryParamsRequest) returns (QueryParamsResponse) { + option (google.api.http).get = "/agoric/swingset/params"; + } + + // Egress queries a provisioned egress. 
+ rpc Egress(QueryEgressRequest) returns (QueryEgressResponse) { + option (google.api.http).get = "/agoric/swingset/egress/{peer}"; + } + + // Return the contents of a peer's outbound mailbox. + rpc Mailbox(QueryMailboxRequest) returns (QueryMailboxResponse) { + option (google.api.http).get = "/agoric/swingset/mailbox/{peer}"; + } +} + +// QueryParamsRequest is the request type for the Query/Params RPC method. +message QueryParamsRequest {} + +// QueryParamsResponse is the response type for the Query/Params RPC method. +message QueryParamsResponse { + // params defines the parameters of the module. + Params params = 1 [(gogoproto.nullable) = false]; +} + +// QueryEgressRequest is the request type for the Query/Egress RPC method +message QueryEgressRequest { + bytes peer = 1 [ + (gogoproto.casttype) = "github.com/cosmos/cosmos-sdk/types.AccAddress", + (gogoproto.jsontag) = "peer", + (gogoproto.moretags) = "yaml:\"peer\"" + ]; +} + +// QueryEgressResponse is the egress response. +message QueryEgressResponse { + agoric.swingset.Egress egress = 1; +} + +// QueryMailboxRequest is the mailbox query. +message QueryMailboxRequest { + bytes peer = 1 [ + (gogoproto.casttype) = "github.com/cosmos/cosmos-sdk/types.AccAddress", + (gogoproto.jsontag) = "peer", + (gogoproto.moretags) = "yaml:\"peer\"" + ]; +} + +// QueryMailboxResponse is the mailbox response. +message QueryMailboxResponse { + string value = 1 [ + (gogoproto.jsontag) = "value", + (gogoproto.moretags) = "yaml:\"value\"" + ]; +} diff --git a/packages/cosmic-proto/proto/agoric/swingset/swingset.proto b/packages/cosmic-proto/proto/agoric/swingset/swingset.proto new file mode 100644 index 00000000000..c1a238edfea --- /dev/null +++ b/packages/cosmic-proto/proto/agoric/swingset/swingset.proto @@ -0,0 +1,168 @@ +syntax = "proto3"; +package agoric.swingset; + +import "gogoproto/gogo.proto"; +import "cosmos/base/v1beta1/coin.proto"; + +option go_package = "github.com/Agoric/agoric-sdk/golang/cosmos/x/swingset/types"; + +// CoreEvalProposal is a gov Content type for evaluating code in the SwingSet +// core. +// See `agoric-sdk/packages/vats/src/core/eval.js`. +message CoreEvalProposal { + option (gogoproto.goproto_getters) = false; + + string title = 1; + string description = 2; + + // Although evals are sequential, they may run concurrently, since they each + // can return a Promise. + repeated CoreEval evals = 3 [(gogoproto.nullable) = false]; +} + +// CoreEval defines an individual SwingSet core evaluation, for use in +// CoreEvalProposal. +message CoreEval { + // Grant these JSON-stringified core bootstrap permits to the jsCode, as the + // `powers` endowment. + string json_permits = 1 [(gogoproto.moretags) = "yaml:\"json_permits\""]; + + // Evaluate this JavaScript code in a Compartment endowed with `powers` as + // well as some powerless helpers. + string js_code = 2 [(gogoproto.moretags) = "yaml:\"js_code\""]; +} + +// Params are the swingset configuration/governance parameters. +message Params { + option (gogoproto.equal) = true; + option (gogoproto.goproto_stringer) = false; + + // Map from unit name to a value in SwingSet "beans". + // Must not be negative. + // + // These values are used by SwingSet to normalize named per-resource charges + // (maybe rent) in a single Nat usage unit, the "bean". + // + // There is no required order to this list of entries, but all the chain + // nodes must all serialize and deserialize the existing order without + // permuting it. 
+ repeated StringBeans beans_per_unit = 1 [ + (gogoproto.nullable) = false + ]; + + // The price in Coins per the unit named "fee". This value is used by + // cosmic-swingset JS code to decide how many tokens to charge. + // + // cost = beans_used * fee_unit_price / beans_per_unit["fee"] + repeated cosmos.base.v1beta1.Coin fee_unit_price = 2 [ + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins", + (gogoproto.nullable) = false + ]; + + // The SwingSet bootstrap vat configuration file. Not usefully modifiable + // via governance as it is only referenced by the chain's initial + // construction. + string bootstrap_vat_config = 3; + + // If the provision submitter doesn't hold a provisionpass, their requested + // power flags are looked up in this fee menu (first match wins) and the sum + // is charged. If any power flag is not found in this menu, the request is + // rejected. + repeated PowerFlagFee power_flag_fees = 4 [ + (gogoproto.nullable) = false + ]; + + // Maximum sizes for queues. + // These values are used by SwingSet to compute how many messages should be + // accepted in a block. + // + // There is no required order to this list of entries, but all the chain + // nodes must all serialize and deserialize the existing order without + // permuting it. + repeated QueueSize queue_max = 5 [ + (gogoproto.nullable) = false + ]; +} + +// The current state of the module. +message State { + // The allowed number of items to add to queues, as determined by SwingSet. + // Transactions which attempt to enqueue more should be rejected. + repeated QueueSize queue_allowed = 1 [ + (gogoproto.nullable) = false + ]; +} + +// Map element of a string key to a Nat bean count. +message StringBeans { + option (gogoproto.equal) = true; + + // What the beans are for. + string key = 1; + + // The actual bean value. + string beans = 2 [ + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Uint", + (gogoproto.nullable) = false + ]; +} + +// Map a provisioning power flag to its corresponding fee. +message PowerFlagFee { + option (gogoproto.equal) = true; + + string power_flag = 1; + repeated cosmos.base.v1beta1.Coin fee = 2 [ + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins", + (gogoproto.nullable) = false + ]; +} + +// Map element of a string key to a size. +message QueueSize { + option (gogoproto.equal) = true; + + // What the size is for. + string key = 1; + + // The actual size value. + int32 size = 2; +} + +// Egress is the format for a swingset egress. +message Egress { + option (gogoproto.equal) = false; + + string nickname = 1 [ + (gogoproto.jsontag) = "nickname", + (gogoproto.moretags) = "yaml:\"nickname\"" + ]; + bytes peer = 2 [ + (gogoproto.casttype) = "github.com/cosmos/cosmos-sdk/types.AccAddress", + (gogoproto.jsontag) = "peer", + (gogoproto.moretags) = "yaml:\"peer\"" + ]; + // TODO: Remove these power flags as they are deprecated and have no effect. + repeated string power_flags = 3 [ + (gogoproto.customname) = "PowerFlags", + (gogoproto.jsontag) = "powerFlags", + (gogoproto.moretags) = "yaml:\"powerFlags\"" + ]; +} + +// SwingStoreArtifact encodes an artifact of a swing-store export. +// Artifacts may be stored or transmitted in any order. Most handlers do +// maintain the artifact order from their original source as an effect of how +// they handle the artifacts. 
+message SwingStoreArtifact { + option (gogoproto.equal) = false; + string name = 1 [ + (gogoproto.jsontag) = "name", + (gogoproto.moretags) = "yaml:\"name\"" + ]; + + bytes data = 2 [ + (gogoproto.jsontag) = "data", + (gogoproto.moretags) = "yaml:\"data\"" + ]; +} diff --git a/packages/cosmic-proto/proto/agoric/vbank/genesis.proto b/packages/cosmic-proto/proto/agoric/vbank/genesis.proto new file mode 100644 index 00000000000..b28b6b4ee2c --- /dev/null +++ b/packages/cosmic-proto/proto/agoric/vbank/genesis.proto @@ -0,0 +1,18 @@ +syntax = "proto3"; +package agoric.vbank; + +import "gogoproto/gogo.proto"; +import "agoric/vbank/vbank.proto"; + +option go_package = "github.com/Agoric/agoric-sdk/golang/cosmos/x/vbank/types"; + +// The initial and exported module state. +message GenesisState { + option (gogoproto.equal) = false; + + // parms defines all the parameters of the module. + Params params = 1 [(gogoproto.nullable) = false]; + + // state is the current operation state. + State state = 2 [(gogoproto.nullable) = false]; +} diff --git a/packages/cosmic-proto/proto/agoric/vbank/msgs.proto b/packages/cosmic-proto/proto/agoric/vbank/msgs.proto new file mode 100644 index 00000000000..fc814019728 --- /dev/null +++ b/packages/cosmic-proto/proto/agoric/vbank/msgs.proto @@ -0,0 +1,8 @@ +syntax = "proto3"; +package agoric.vbank; + +option go_package = "github.com/Agoric/agoric-sdk/golang/cosmos/x/vbank/types"; + +// No transactions. +service Msg { +} diff --git a/packages/cosmic-proto/proto/agoric/vbank/query.proto b/packages/cosmic-proto/proto/agoric/vbank/query.proto new file mode 100644 index 00000000000..3363c952293 --- /dev/null +++ b/packages/cosmic-proto/proto/agoric/vbank/query.proto @@ -0,0 +1,39 @@ +syntax = "proto3"; +package agoric.vbank; + +import "gogoproto/gogo.proto"; +import "google/api/annotations.proto"; +import "agoric/vbank/vbank.proto"; + +option go_package = "github.com/Agoric/agoric-sdk/golang/cosmos/x/vbank/types"; + +// Query defines the gRPC querier service for vbank module. +service Query { + // Params queries params of the vbank module. + rpc Params(QueryParamsRequest) returns (QueryParamsResponse) { + option (google.api.http).get = "/agoric/vbank/params"; + } + + // State queries current state of the vbank module. + rpc State(QueryStateRequest) returns (QueryStateResponse) { + option (google.api.http).get = "/agoric/vbank/state"; + } +} + +// QueryParamsRequest is the request type for the Query/Params RPC method. +message QueryParamsRequest {} + +// QueryParamsResponse is the response type for the Query/Params RPC method. +message QueryParamsResponse { + // params defines the parameters of the module. + Params params = 1 [(gogoproto.nullable) = false]; +} + +// QueryStateRequest is the request type for the Query/State RPC method. +message QueryStateRequest {} + +// QueryStateResponse is the response type for the Query/State RPC method. +message QueryStateResponse { + // state defines the parameters of the module. 
+ State state = 1 [(gogoproto.nullable) = false]; +} diff --git a/packages/cosmic-proto/proto/agoric/vbank/vbank.proto b/packages/cosmic-proto/proto/agoric/vbank/vbank.proto new file mode 100644 index 00000000000..7884abecc9a --- /dev/null +++ b/packages/cosmic-proto/proto/agoric/vbank/vbank.proto @@ -0,0 +1,67 @@ +syntax = "proto3"; +package agoric.vbank; + +import "gogoproto/gogo.proto"; +import "cosmos/base/v1beta1/coin.proto"; + +option go_package = "github.com/Agoric/agoric-sdk/golang/cosmos/x/vbank/types"; + +// The module governance/configuration parameters. +message Params { + option (gogoproto.equal) = true; + option (gogoproto.goproto_stringer) = false; + + // reward_epoch_duration_blocks is the length of a reward epoch, in blocks. + // A value of zero has the same meaning as a value of one: + // the full reward buffer should be distributed immediately. + int64 reward_epoch_duration_blocks = 1 [ + (gogoproto.moretags) = "yaml:\"reward_epoch_duration_blocks\"" + ]; + + // per_epoch_reward_fraction is a fraction of the reward pool to distrubute + // once every reward epoch. If less than zero, use approximately continuous + // per-block distribution. + string per_epoch_reward_fraction = 2 [ + (gogoproto.moretags) = "yaml:\"discrete_epoch_reward_fraction\"", + (gogoproto.customtype) = "github.com/cosmos/cosmos-sdk/types.Dec", + (gogoproto.nullable) = false + ]; + + // reward_smoothing_blocks is the number of blocks over which to distribute + // an epoch's rewards. If zero, use the same value as + // reward_epoch_duration_blocks. + int64 reward_smoothing_blocks = 3 [ + (gogoproto.moretags) = "yaml:\"reward_smoothing_blocks\"" + ]; +} + +// The current state of the module. +message State { + option (gogoproto.equal) = true; + + // rewardPool is the current balance of rewards in the module account. + // NOTE: Tracking manually since there is no bank call for getting a + // module account balance by name. + repeated cosmos.base.v1beta1.Coin reward_pool = 1 [ + (gogoproto.nullable) = false, + (gogoproto.moretags) = "yaml:\"reward_pool\"", + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins" + ]; + + // reward_block_amount is the amount of reward, if available, to send to the + // fee collector module on every block. + repeated cosmos.base.v1beta1.Coin reward_block_amount = 2 [ + (gogoproto.nullable) = false, + (gogoproto.moretags) = "yaml:\"reward_block_amount\"", + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins" + ]; + + // last_sequence is a sequence number for communicating with the VM. + uint64 last_sequence = 3 [ + (gogoproto.moretags) = "yaml:\"last_sequence\"" + ]; + + int64 last_reward_distribution_block = 4 [ + (gogoproto.moretags) = "yaml:\"last_reward_distribution_block\"" + ]; +} diff --git a/packages/cosmic-proto/proto/agoric/vibc/msgs.proto b/packages/cosmic-proto/proto/agoric/vibc/msgs.proto new file mode 100644 index 00000000000..9f415a5356c --- /dev/null +++ b/packages/cosmic-proto/proto/agoric/vibc/msgs.proto @@ -0,0 +1,32 @@ +syntax = "proto3"; +package agoric.vibc; + +import "gogoproto/gogo.proto"; +import "ibc/core/channel/v1/channel.proto"; + +option go_package = "github.com/Agoric/agoric-sdk/golang/cosmos/x/vibc/types"; + +// The module transactions. +service Msg { + // Force sending an arbitrary packet on a channel. 
+ rpc SendPacket(MsgSendPacket) returns (MsgSendPacketResponse); +} + +// MsgSendPacket is an SDK message for sending an outgoing IBC packet +message MsgSendPacket { + option (gogoproto.equal) = false; + + ibc.core.channel.v1.Packet packet = 1 [ + (gogoproto.nullable) = false, + (gogoproto.jsontag) = "packet", + (gogoproto.moretags) = "yaml:\"packet\"" + ]; + bytes sender = 2 [ + (gogoproto.casttype) = "github.com/cosmos/cosmos-sdk/types.AccAddress", + (gogoproto.jsontag) = "submitter", + (gogoproto.moretags) = "yaml:\"submitter\"" + ]; +} + +// Empty response for SendPacket. +message MsgSendPacketResponse {} diff --git a/packages/cosmic-proto/proto/agoric/vstorage/genesis.proto b/packages/cosmic-proto/proto/agoric/vstorage/genesis.proto new file mode 100644 index 00000000000..da621b1af64 --- /dev/null +++ b/packages/cosmic-proto/proto/agoric/vstorage/genesis.proto @@ -0,0 +1,25 @@ +syntax = "proto3"; +package agoric.vstorage; + +import "gogoproto/gogo.proto"; + +option go_package = "github.com/Agoric/agoric-sdk/golang/cosmos/x/vstorage/types"; + +// The initial or exported state. +message GenesisState { + option (gogoproto.equal) = false; + + repeated DataEntry data = 1 [ + (gogoproto.jsontag) = "data", + (gogoproto.moretags) = "yaml:\"data\"" + ]; +} + +// A vstorage entry. The only necessary entries are those with data, as the +// ancestor nodes are reconstructed on import. +message DataEntry { + // A "."-separated path with individual path elements matching + // `[-_A-Za-z0-9]+` + string path = 1; + string value = 2; +} diff --git a/packages/cosmic-proto/proto/agoric/vstorage/query.proto b/packages/cosmic-proto/proto/agoric/vstorage/query.proto new file mode 100644 index 00000000000..462b7a8a60f --- /dev/null +++ b/packages/cosmic-proto/proto/agoric/vstorage/query.proto @@ -0,0 +1,110 @@ +syntax = "proto3"; +package agoric.vstorage; + +import "gogoproto/gogo.proto"; +import "cosmos/base/query/v1beta1/pagination.proto"; +import "google/api/annotations.proto"; + +option go_package = "github.com/Agoric/agoric-sdk/golang/cosmos/x/vstorage/types"; + +// Query defines the gRPC querier service +service Query { + // Return the raw string value of an arbitrary vstorage datum. + rpc Data(QueryDataRequest) returns (QueryDataResponse) { + option (google.api.http).get = "/agoric/vstorage/data/{path}"; + } + + // Return a formatted representation of a vstorage datum that must be + // a valid StreamCell with CapData values, or standalone CapData. + rpc CapData(QueryCapDataRequest) + returns (QueryCapDataResponse) { + option (google.api.http).get = "/agoric/vstorage/capdata/{path}"; + } + + // Return the children of a given vstorage path. + rpc Children(QueryChildrenRequest) + returns (QueryChildrenResponse) { + option (google.api.http).get = "/agoric/vstorage/children/{path}"; + } +} + +// QueryDataRequest is the vstorage path data query. +message QueryDataRequest { + string path = 1 [ + (gogoproto.jsontag) = "path", + (gogoproto.moretags) = "yaml:\"path\"" + ]; +} + +// QueryDataResponse is the vstorage path data response. +message QueryDataResponse { + string value = 1 [ + (gogoproto.jsontag) = "value", + (gogoproto.moretags) = "yaml:\"value\"" + ]; +} + +// QueryCapDataRequest contains a path and formatting configuration. 
+message QueryCapDataRequest { + string path = 1 [ + (gogoproto.jsontag) = "path", + (gogoproto.moretags) = "yaml:\"path\"" + ]; + // mediaType must be an actual media type in the registry at + // https://www.iana.org/assignments/media-types/media-types.xhtml + // or a special value that does not conflict with the media type syntax. + // The only valid value is "JSON Lines", which is also the default. + string media_type = 2 [ + (gogoproto.jsontag) = "mediaType", + (gogoproto.moretags) = "yaml:\"mediaType\"" + ]; + // itemFormat, if present, must be the special value "flat" to indicate that + // the deep structure of each item should be flattened into a single level + // with kebab-case keys (e.g., `{ "metrics": { "min": 0, "max": 88 } }` as + // `{ "metrics-min": 0, "metrics-max": 88 }`). + string item_format = 3 [ + (gogoproto.jsontag) = "itemFormat", + (gogoproto.moretags) = "yaml:\"itemFormat\"" + ]; + // remotableValueFormat indicates how to transform references to opaque but + // distinguishable Remotables into readable embedded representations. + // * "object" represents each Remotable as an `{ id, allegedName }` object, e.g. `{ "id": "board007", "allegedName": "IST brand" }`. + // * "string" represents each Remotable as a string with bracket-wrapped contents including its alleged name and id, e.g. "[Alleged: IST brand ]". + string remotable_value_format = 10 [ + (gogoproto.jsontag) = "remotableValueFormat", + (gogoproto.moretags) = "yaml:\"remotableValueFormat\"" + ]; +} + +// QueryCapDataResponse represents the result with the requested formatting, +// reserving space for future metadata such as media type. +message QueryCapDataResponse { + string block_height = 1 [ + (gogoproto.jsontag) = "blockHeight", + (gogoproto.moretags) = "yaml:\"blockHeight\"" + ]; + string value = 10 [ + (gogoproto.jsontag) = "value", + (gogoproto.moretags) = "yaml:\"value\"" + ]; +} + +// QueryChildrenRequest is the vstorage path children query. +message QueryChildrenRequest { + string path = 1 [ + (gogoproto.jsontag) = "path", + (gogoproto.moretags) = "yaml:\"path\"" + ]; + + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} + +// QueryChildrenResponse is the vstorage path children response. +message QueryChildrenResponse { + repeated string children = 1 [ + (gogoproto.jsontag) = "children", + (gogoproto.moretags) = "yaml:\"children\"" + ]; + + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} diff --git a/packages/cosmic-proto/proto/agoric/vstorage/vstorage.proto b/packages/cosmic-proto/proto/agoric/vstorage/vstorage.proto new file mode 100644 index 00000000000..6270aec4435 --- /dev/null +++ b/packages/cosmic-proto/proto/agoric/vstorage/vstorage.proto @@ -0,0 +1,27 @@ +syntax = "proto3"; +package agoric.vstorage; + +import "gogoproto/gogo.proto"; + +option go_package = "github.com/Agoric/agoric-sdk/golang/cosmos/x/vstorage/types"; + +// Data is the vstorage node data. +message Data { + option (gogoproto.equal) = false; + + string value = 1 [ + (gogoproto.jsontag) = "value", + (gogoproto.moretags) = "yaml:\"value\"" + ]; +} + +// Children are the immediate names (just one level deep) of subnodes leading to +// more data from a given vstorage node. 
+message Children { + option (gogoproto.equal) = false; + + repeated string children = 1 [ + (gogoproto.jsontag) = "children", + (gogoproto.moretags) = "yaml:\"children\"" + ]; +} diff --git a/packages/cosmic-proto/src/codegen/agoric/bundle.ts b/packages/cosmic-proto/src/codegen/agoric/bundle.ts new file mode 100644 index 00000000000..d091d362a2b --- /dev/null +++ b/packages/cosmic-proto/src/codegen/agoric/bundle.ts @@ -0,0 +1,69 @@ +//@ts-nocheck +import * as _0 from './lien/genesis.js'; +import * as _1 from './lien/lien.js'; +import * as _2 from './swingset/genesis.js'; +import * as _3 from './swingset/msgs.js'; +import * as _4 from './swingset/query.js'; +import * as _5 from './swingset/swingset.js'; +import * as _6 from './vbank/genesis.js'; +import * as _7 from './vbank/msgs.js'; +import * as _8 from './vbank/query.js'; +import * as _9 from './vbank/vbank.js'; +import * as _10 from './vibc/msgs.js'; +import * as _11 from './vlocalchain/vlocalchain.js'; +import * as _12 from './vstorage/genesis.js'; +import * as _13 from './vstorage/query.js'; +import * as _14 from './vstorage/vstorage.js'; +import * as _25 from './swingset/msgs.amino.js'; +import * as _26 from './vibc/msgs.amino.js'; +import * as _27 from './swingset/msgs.registry.js'; +import * as _28 from './vibc/msgs.registry.js'; +import * as _29 from './swingset/query.rpc.Query.js'; +import * as _30 from './vbank/query.rpc.Query.js'; +import * as _31 from './vstorage/query.rpc.Query.js'; +import * as _32 from './swingset/msgs.rpc.msg.js'; +import * as _33 from './vibc/msgs.rpc.msg.js'; +import * as _34 from './rpc.query.js'; +import * as _35 from './rpc.tx.js'; +export namespace agoric { + export const lien = { + ..._0, + ..._1, + }; + export const swingset = { + ..._2, + ..._3, + ..._4, + ..._5, + ..._25, + ..._27, + ..._29, + ..._32, + }; + export const vbank = { + ..._6, + ..._7, + ..._8, + ..._9, + ..._30, + }; + export const vibc = { + ..._10, + ..._26, + ..._28, + ..._33, + }; + export const vlocalchain = { + ..._11, + }; + export const vstorage = { + ..._12, + ..._13, + ..._14, + ..._31, + }; + export const ClientFactory = { + ..._34, + ..._35, + }; +} diff --git a/packages/cosmic-proto/src/codegen/agoric/client.ts b/packages/cosmic-proto/src/codegen/agoric/client.ts new file mode 100644 index 00000000000..a639a96ad60 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/agoric/client.ts @@ -0,0 +1,59 @@ +//@ts-nocheck +import { GeneratedType, Registry, OfflineSigner } from '@cosmjs/proto-signing'; +import { + defaultRegistryTypes, + AminoTypes, + SigningStargateClient, +} from '@cosmjs/stargate'; +import { HttpEndpoint } from '@cosmjs/tendermint-rpc'; +import * as agoricSwingsetMsgsRegistry from './swingset/msgs.registry.js'; +import * as agoricVibcMsgsRegistry from './vibc/msgs.registry.js'; +import * as agoricSwingsetMsgsAmino from './swingset/msgs.amino.js'; +import * as agoricVibcMsgsAmino from './vibc/msgs.amino.js'; +export const agoricAminoConverters = { + ...agoricSwingsetMsgsAmino.AminoConverter, + ...agoricVibcMsgsAmino.AminoConverter, +}; +export const agoricProtoRegistry: ReadonlyArray<[string, GeneratedType]> = [ + ...agoricSwingsetMsgsRegistry.registry, + ...agoricVibcMsgsRegistry.registry, +]; +export const getSigningAgoricClientOptions = ({ + defaultTypes = defaultRegistryTypes, +}: { + defaultTypes?: ReadonlyArray<[string, GeneratedType]>; +} = {}): { + registry: Registry; + aminoTypes: AminoTypes; +} => { + const registry = new Registry([...defaultTypes, ...agoricProtoRegistry]); + const aminoTypes = new 
AminoTypes({ + ...agoricAminoConverters, + }); + return { + registry, + aminoTypes, + }; +}; +export const getSigningAgoricClient = async ({ + rpcEndpoint, + signer, + defaultTypes = defaultRegistryTypes, +}: { + rpcEndpoint: string | HttpEndpoint; + signer: OfflineSigner; + defaultTypes?: ReadonlyArray<[string, GeneratedType]>; +}) => { + const { registry, aminoTypes } = getSigningAgoricClientOptions({ + defaultTypes, + }); + const client = await SigningStargateClient.connectWithSigner( + rpcEndpoint, + signer, + { + registry: registry as any, + aminoTypes, + }, + ); + return client; +}; diff --git a/packages/cosmic-proto/src/codegen/agoric/lien/genesis.ts b/packages/cosmic-proto/src/codegen/agoric/lien/genesis.ts new file mode 100644 index 00000000000..0ff9cdeffc1 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/agoric/lien/genesis.ts @@ -0,0 +1,235 @@ +//@ts-nocheck +import { Lien, LienAmino, LienSDKType } from './lien.js'; +import { BinaryReader, BinaryWriter } from '../../binary.js'; +import { isSet } from '../../helpers.js'; +/** The initial or exported state. */ +export interface GenesisState { + liens: AccountLien[]; +} +export interface GenesisStateProtoMsg { + typeUrl: '/agoric.lien.GenesisState'; + value: Uint8Array; +} +/** The initial or exported state. */ +export interface GenesisStateAmino { + liens?: AccountLienAmino[]; +} +export interface GenesisStateAminoMsg { + type: '/agoric.lien.GenesisState'; + value: GenesisStateAmino; +} +/** The initial or exported state. */ +export interface GenesisStateSDKType { + liens: AccountLienSDKType[]; +} +/** The lien on a particular account */ +export interface AccountLien { + /** Account address, bech32-encoded. */ + address: string; + /** The liened amount. Should be nonzero. */ + lien?: Lien; +} +export interface AccountLienProtoMsg { + typeUrl: '/agoric.lien.AccountLien'; + value: Uint8Array; +} +/** The lien on a particular account */ +export interface AccountLienAmino { + /** Account address, bech32-encoded. */ + address?: string; + /** The liened amount. Should be nonzero. */ + lien?: LienAmino; +} +export interface AccountLienAminoMsg { + type: '/agoric.lien.AccountLien'; + value: AccountLienAmino; +} +/** The lien on a particular account */ +export interface AccountLienSDKType { + address: string; + lien?: LienSDKType; +} +function createBaseGenesisState(): GenesisState { + return { + liens: [], + }; +} +export const GenesisState = { + typeUrl: '/agoric.lien.GenesisState', + encode( + message: GenesisState, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + for (const v of message.liens) { + AccountLien.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): GenesisState { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGenesisState(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.liens.push(AccountLien.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): GenesisState { + return { + liens: Array.isArray(object?.liens) + ? object.liens.map((e: any) => AccountLien.fromJSON(e)) + : [], + }; + }, + toJSON(message: GenesisState): unknown { + const obj: any = {}; + if (message.liens) { + obj.liens = message.liens.map(e => + e ? 
AccountLien.toJSON(e) : undefined, + ); + } else { + obj.liens = []; + } + return obj; + }, + fromPartial(object: Partial): GenesisState { + const message = createBaseGenesisState(); + message.liens = object.liens?.map(e => AccountLien.fromPartial(e)) || []; + return message; + }, + fromAmino(object: GenesisStateAmino): GenesisState { + const message = createBaseGenesisState(); + message.liens = object.liens?.map(e => AccountLien.fromAmino(e)) || []; + return message; + }, + toAmino(message: GenesisState): GenesisStateAmino { + const obj: any = {}; + if (message.liens) { + obj.liens = message.liens.map(e => + e ? AccountLien.toAmino(e) : undefined, + ); + } else { + obj.liens = []; + } + return obj; + }, + fromAminoMsg(object: GenesisStateAminoMsg): GenesisState { + return GenesisState.fromAmino(object.value); + }, + fromProtoMsg(message: GenesisStateProtoMsg): GenesisState { + return GenesisState.decode(message.value); + }, + toProto(message: GenesisState): Uint8Array { + return GenesisState.encode(message).finish(); + }, + toProtoMsg(message: GenesisState): GenesisStateProtoMsg { + return { + typeUrl: '/agoric.lien.GenesisState', + value: GenesisState.encode(message).finish(), + }; + }, +}; +function createBaseAccountLien(): AccountLien { + return { + address: '', + lien: undefined, + }; +} +export const AccountLien = { + typeUrl: '/agoric.lien.AccountLien', + encode( + message: AccountLien, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.address !== '') { + writer.uint32(10).string(message.address); + } + if (message.lien !== undefined) { + Lien.encode(message.lien, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): AccountLien { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAccountLien(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.address = reader.string(); + break; + case 2: + message.lien = Lien.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): AccountLien { + return { + address: isSet(object.address) ? String(object.address) : '', + lien: isSet(object.lien) ? Lien.fromJSON(object.lien) : undefined, + }; + }, + toJSON(message: AccountLien): unknown { + const obj: any = {}; + message.address !== undefined && (obj.address = message.address); + message.lien !== undefined && + (obj.lien = message.lien ? Lien.toJSON(message.lien) : undefined); + return obj; + }, + fromPartial(object: Partial): AccountLien { + const message = createBaseAccountLien(); + message.address = object.address ?? ''; + message.lien = + object.lien !== undefined && object.lien !== null + ? Lien.fromPartial(object.lien) + : undefined; + return message; + }, + fromAmino(object: AccountLienAmino): AccountLien { + const message = createBaseAccountLien(); + if (object.address !== undefined && object.address !== null) { + message.address = object.address; + } + if (object.lien !== undefined && object.lien !== null) { + message.lien = Lien.fromAmino(object.lien); + } + return message; + }, + toAmino(message: AccountLien): AccountLienAmino { + const obj: any = {}; + obj.address = message.address; + obj.lien = message.lien ? 
Lien.toAmino(message.lien) : undefined; + return obj; + }, + fromAminoMsg(object: AccountLienAminoMsg): AccountLien { + return AccountLien.fromAmino(object.value); + }, + fromProtoMsg(message: AccountLienProtoMsg): AccountLien { + return AccountLien.decode(message.value); + }, + toProto(message: AccountLien): Uint8Array { + return AccountLien.encode(message).finish(); + }, + toProtoMsg(message: AccountLien): AccountLienProtoMsg { + return { + typeUrl: '/agoric.lien.AccountLien', + value: AccountLien.encode(message).finish(), + }; + }, +}; diff --git a/packages/cosmic-proto/src/codegen/agoric/lien/lien.ts b/packages/cosmic-proto/src/codegen/agoric/lien/lien.ts new file mode 100644 index 00000000000..3cb3ea6b929 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/agoric/lien/lien.ts @@ -0,0 +1,153 @@ +//@ts-nocheck +import { + Coin, + CoinAmino, + CoinSDKType, +} from '../../cosmos/base/v1beta1/coin.js'; +import { BinaryReader, BinaryWriter } from '../../binary.js'; +/** Lien contains the lien state of a particular account. */ +export interface Lien { + /** coins holds the amount liened */ + coins: Coin[]; + /** + * delegated tracks the net amount delegated for non-vesting accounts, + * or zero coins for vesting accounts. + * (Vesting accounts have their own fields to track delegation.) + */ + delegated: Coin[]; +} +export interface LienProtoMsg { + typeUrl: '/agoric.lien.Lien'; + value: Uint8Array; +} +/** Lien contains the lien state of a particular account. */ +export interface LienAmino { + /** coins holds the amount liened */ + coins?: CoinAmino[]; + /** + * delegated tracks the net amount delegated for non-vesting accounts, + * or zero coins for vesting accounts. + * (Vesting accounts have their own fields to track delegation.) + */ + delegated?: CoinAmino[]; +} +export interface LienAminoMsg { + type: '/agoric.lien.Lien'; + value: LienAmino; +} +/** Lien contains the lien state of a particular account. */ +export interface LienSDKType { + coins: CoinSDKType[]; + delegated: CoinSDKType[]; +} +function createBaseLien(): Lien { + return { + coins: [], + delegated: [], + }; +} +export const Lien = { + typeUrl: '/agoric.lien.Lien', + encode( + message: Lien, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + for (const v of message.coins) { + Coin.encode(v!, writer.uint32(10).fork()).ldelim(); + } + for (const v of message.delegated) { + Coin.encode(v!, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): Lien { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseLien(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.coins.push(Coin.decode(reader, reader.uint32())); + break; + case 2: + message.delegated.push(Coin.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): Lien { + return { + coins: Array.isArray(object?.coins) + ? object.coins.map((e: any) => Coin.fromJSON(e)) + : [], + delegated: Array.isArray(object?.delegated) + ? object.delegated.map((e: any) => Coin.fromJSON(e)) + : [], + }; + }, + toJSON(message: Lien): unknown { + const obj: any = {}; + if (message.coins) { + obj.coins = message.coins.map(e => (e ? 
Coin.toJSON(e) : undefined)); + } else { + obj.coins = []; + } + if (message.delegated) { + obj.delegated = message.delegated.map(e => + e ? Coin.toJSON(e) : undefined, + ); + } else { + obj.delegated = []; + } + return obj; + }, + fromPartial(object: Partial): Lien { + const message = createBaseLien(); + message.coins = object.coins?.map(e => Coin.fromPartial(e)) || []; + message.delegated = object.delegated?.map(e => Coin.fromPartial(e)) || []; + return message; + }, + fromAmino(object: LienAmino): Lien { + const message = createBaseLien(); + message.coins = object.coins?.map(e => Coin.fromAmino(e)) || []; + message.delegated = object.delegated?.map(e => Coin.fromAmino(e)) || []; + return message; + }, + toAmino(message: Lien): LienAmino { + const obj: any = {}; + if (message.coins) { + obj.coins = message.coins.map(e => (e ? Coin.toAmino(e) : undefined)); + } else { + obj.coins = []; + } + if (message.delegated) { + obj.delegated = message.delegated.map(e => + e ? Coin.toAmino(e) : undefined, + ); + } else { + obj.delegated = []; + } + return obj; + }, + fromAminoMsg(object: LienAminoMsg): Lien { + return Lien.fromAmino(object.value); + }, + fromProtoMsg(message: LienProtoMsg): Lien { + return Lien.decode(message.value); + }, + toProto(message: Lien): Uint8Array { + return Lien.encode(message).finish(); + }, + toProtoMsg(message: Lien): LienProtoMsg { + return { + typeUrl: '/agoric.lien.Lien', + value: Lien.encode(message).finish(), + }; + }, +}; diff --git a/packages/cosmic-proto/src/codegen/agoric/rpc.query.ts b/packages/cosmic-proto/src/codegen/agoric/rpc.query.ts new file mode 100644 index 00000000000..e1ae79458f7 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/agoric/rpc.query.ts @@ -0,0 +1,24 @@ +//@ts-nocheck +import { Tendermint34Client, HttpEndpoint } from '@cosmjs/tendermint-rpc'; +import { QueryClient } from '@cosmjs/stargate'; +export const createRPCQueryClient = async ({ + rpcEndpoint, +}: { + rpcEndpoint: string | HttpEndpoint; +}) => { + const tmClient = await Tendermint34Client.connect(rpcEndpoint); + const client = new QueryClient(tmClient); + return { + agoric: { + swingset: ( + await import('./swingset/query.rpc.Query.js') + ).createRpcQueryExtension(client), + vbank: ( + await import('./vbank/query.rpc.Query.js') + ).createRpcQueryExtension(client), + vstorage: ( + await import('./vstorage/query.rpc.Query.js') + ).createRpcQueryExtension(client), + }, + }; +}; diff --git a/packages/cosmic-proto/src/codegen/agoric/rpc.tx.ts b/packages/cosmic-proto/src/codegen/agoric/rpc.tx.ts new file mode 100644 index 00000000000..278acc1d18c --- /dev/null +++ b/packages/cosmic-proto/src/codegen/agoric/rpc.tx.ts @@ -0,0 +1,10 @@ +//@ts-nocheck +import { Rpc } from '../helpers.js'; +export const createRPCMsgClient = async ({ rpc }: { rpc: Rpc }) => ({ + agoric: { + swingset: new (await import('./swingset/msgs.rpc.msg.js')).MsgClientImpl( + rpc, + ), + vibc: new (await import('./vibc/msgs.rpc.msg.js')).MsgClientImpl(rpc), + }, +}); diff --git a/packages/cosmic-proto/src/codegen/agoric/swingset/genesis.ts b/packages/cosmic-proto/src/codegen/agoric/swingset/genesis.ts new file mode 100644 index 00000000000..4d2081ed98f --- /dev/null +++ b/packages/cosmic-proto/src/codegen/agoric/swingset/genesis.ts @@ -0,0 +1,297 @@ +//@ts-nocheck +import { + Params, + ParamsAmino, + ParamsSDKType, + State, + StateAmino, + StateSDKType, +} from './swingset.js'; +import { BinaryReader, BinaryWriter } from '../../binary.js'; +import { isSet } from '../../helpers.js'; +/** The initial or exported 
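+// Editorial note: a minimal usage sketch for the RPC helpers defined above
+// (createRPCQueryClient in rpc.query.ts, createRPCMsgClient in rpc.tx.ts). The
+// vstorage method names are assumed from the generated query.rpc.Query.js
+// module, and the endpoint is a placeholder.
+//
+//   import { createRPCQueryClient } from './rpc.query.js';
+//
+//   const { agoric } = await createRPCQueryClient({
+//     rpcEndpoint: 'http://localhost:26657',
+//   });
+//   // Assumed shape: the vstorage extension exposes children({ path }) and
+//   // data({ path }) query methods.
+//   const { children } = await agoric.vstorage.children({ path: 'published' });
+//   console.log(children);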
state. */ +export interface GenesisState { + params: Params; + state: State; + swingStoreExportData: SwingStoreExportDataEntry[]; +} +export interface GenesisStateProtoMsg { + typeUrl: '/agoric.swingset.GenesisState'; + value: Uint8Array; +} +/** The initial or exported state. */ +export interface GenesisStateAmino { + params?: ParamsAmino; + state?: StateAmino; + swing_store_export_data?: SwingStoreExportDataEntryAmino[]; +} +export interface GenesisStateAminoMsg { + type: '/agoric.swingset.GenesisState'; + value: GenesisStateAmino; +} +/** The initial or exported state. */ +export interface GenesisStateSDKType { + params: ParamsSDKType; + state: StateSDKType; + swing_store_export_data: SwingStoreExportDataEntrySDKType[]; +} +/** A SwingStore "export data" entry. */ +export interface SwingStoreExportDataEntry { + key: string; + value: string; +} +export interface SwingStoreExportDataEntryProtoMsg { + typeUrl: '/agoric.swingset.SwingStoreExportDataEntry'; + value: Uint8Array; +} +/** A SwingStore "export data" entry. */ +export interface SwingStoreExportDataEntryAmino { + key?: string; + value?: string; +} +export interface SwingStoreExportDataEntryAminoMsg { + type: '/agoric.swingset.SwingStoreExportDataEntry'; + value: SwingStoreExportDataEntryAmino; +} +/** A SwingStore "export data" entry. */ +export interface SwingStoreExportDataEntrySDKType { + key: string; + value: string; +} +function createBaseGenesisState(): GenesisState { + return { + params: Params.fromPartial({}), + state: State.fromPartial({}), + swingStoreExportData: [], + }; +} +export const GenesisState = { + typeUrl: '/agoric.swingset.GenesisState', + encode( + message: GenesisState, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(18).fork()).ldelim(); + } + if (message.state !== undefined) { + State.encode(message.state, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.swingStoreExportData) { + SwingStoreExportDataEntry.encode(v!, writer.uint32(34).fork()).ldelim(); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): GenesisState { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGenesisState(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.params = Params.decode(reader, reader.uint32()); + break; + case 3: + message.state = State.decode(reader, reader.uint32()); + break; + case 4: + message.swingStoreExportData.push( + SwingStoreExportDataEntry.decode(reader, reader.uint32()), + ); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): GenesisState { + return { + params: isSet(object.params) ? Params.fromJSON(object.params) : undefined, + state: isSet(object.state) ? State.fromJSON(object.state) : undefined, + swingStoreExportData: Array.isArray(object?.swingStoreExportData) + ? object.swingStoreExportData.map((e: any) => + SwingStoreExportDataEntry.fromJSON(e), + ) + : [], + }; + }, + toJSON(message: GenesisState): unknown { + const obj: any = {}; + message.params !== undefined && + (obj.params = message.params ? Params.toJSON(message.params) : undefined); + message.state !== undefined && + (obj.state = message.state ? 
State.toJSON(message.state) : undefined); + if (message.swingStoreExportData) { + obj.swingStoreExportData = message.swingStoreExportData.map(e => + e ? SwingStoreExportDataEntry.toJSON(e) : undefined, + ); + } else { + obj.swingStoreExportData = []; + } + return obj; + }, + fromPartial(object: Partial): GenesisState { + const message = createBaseGenesisState(); + message.params = + object.params !== undefined && object.params !== null + ? Params.fromPartial(object.params) + : undefined; + message.state = + object.state !== undefined && object.state !== null + ? State.fromPartial(object.state) + : undefined; + message.swingStoreExportData = + object.swingStoreExportData?.map(e => + SwingStoreExportDataEntry.fromPartial(e), + ) || []; + return message; + }, + fromAmino(object: GenesisStateAmino): GenesisState { + const message = createBaseGenesisState(); + if (object.params !== undefined && object.params !== null) { + message.params = Params.fromAmino(object.params); + } + if (object.state !== undefined && object.state !== null) { + message.state = State.fromAmino(object.state); + } + message.swingStoreExportData = + object.swing_store_export_data?.map(e => + SwingStoreExportDataEntry.fromAmino(e), + ) || []; + return message; + }, + toAmino(message: GenesisState): GenesisStateAmino { + const obj: any = {}; + obj.params = message.params ? Params.toAmino(message.params) : undefined; + obj.state = message.state ? State.toAmino(message.state) : undefined; + if (message.swingStoreExportData) { + obj.swing_store_export_data = message.swingStoreExportData.map(e => + e ? SwingStoreExportDataEntry.toAmino(e) : undefined, + ); + } else { + obj.swing_store_export_data = []; + } + return obj; + }, + fromAminoMsg(object: GenesisStateAminoMsg): GenesisState { + return GenesisState.fromAmino(object.value); + }, + fromProtoMsg(message: GenesisStateProtoMsg): GenesisState { + return GenesisState.decode(message.value); + }, + toProto(message: GenesisState): Uint8Array { + return GenesisState.encode(message).finish(); + }, + toProtoMsg(message: GenesisState): GenesisStateProtoMsg { + return { + typeUrl: '/agoric.swingset.GenesisState', + value: GenesisState.encode(message).finish(), + }; + }, +}; +function createBaseSwingStoreExportDataEntry(): SwingStoreExportDataEntry { + return { + key: '', + value: '', + }; +} +export const SwingStoreExportDataEntry = { + typeUrl: '/agoric.swingset.SwingStoreExportDataEntry', + encode( + message: SwingStoreExportDataEntry, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.key !== '') { + writer.uint32(10).string(message.key); + } + if (message.value !== '') { + writer.uint32(18).string(message.value); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): SwingStoreExportDataEntry { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSwingStoreExportDataEntry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.key = reader.string(); + break; + case 2: + message.value = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): SwingStoreExportDataEntry { + return { + key: isSet(object.key) ? String(object.key) : '', + value: isSet(object.value) ? 
String(object.value) : '', + }; + }, + toJSON(message: SwingStoreExportDataEntry): unknown { + const obj: any = {}; + message.key !== undefined && (obj.key = message.key); + message.value !== undefined && (obj.value = message.value); + return obj; + }, + fromPartial( + object: Partial, + ): SwingStoreExportDataEntry { + const message = createBaseSwingStoreExportDataEntry(); + message.key = object.key ?? ''; + message.value = object.value ?? ''; + return message; + }, + fromAmino(object: SwingStoreExportDataEntryAmino): SwingStoreExportDataEntry { + const message = createBaseSwingStoreExportDataEntry(); + if (object.key !== undefined && object.key !== null) { + message.key = object.key; + } + if (object.value !== undefined && object.value !== null) { + message.value = object.value; + } + return message; + }, + toAmino(message: SwingStoreExportDataEntry): SwingStoreExportDataEntryAmino { + const obj: any = {}; + obj.key = message.key; + obj.value = message.value; + return obj; + }, + fromAminoMsg( + object: SwingStoreExportDataEntryAminoMsg, + ): SwingStoreExportDataEntry { + return SwingStoreExportDataEntry.fromAmino(object.value); + }, + fromProtoMsg( + message: SwingStoreExportDataEntryProtoMsg, + ): SwingStoreExportDataEntry { + return SwingStoreExportDataEntry.decode(message.value); + }, + toProto(message: SwingStoreExportDataEntry): Uint8Array { + return SwingStoreExportDataEntry.encode(message).finish(); + }, + toProtoMsg( + message: SwingStoreExportDataEntry, + ): SwingStoreExportDataEntryProtoMsg { + return { + typeUrl: '/agoric.swingset.SwingStoreExportDataEntry', + value: SwingStoreExportDataEntry.encode(message).finish(), + }; + }, +}; diff --git a/packages/cosmic-proto/src/codegen/agoric/swingset/msgs.amino.ts b/packages/cosmic-proto/src/codegen/agoric/swingset/msgs.amino.ts new file mode 100644 index 00000000000..3d1f6d57d01 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/agoric/swingset/msgs.amino.ts @@ -0,0 +1,35 @@ +//@ts-nocheck +import { + MsgInstallBundle, + MsgDeliverInbound, + MsgWalletAction, + MsgWalletSpendAction, + MsgProvision, +} from './msgs.js'; +export const AminoConverter = { + '/agoric.swingset.MsgInstallBundle': { + aminoType: '/agoric.swingset.MsgInstallBundle', + toAmino: MsgInstallBundle.toAmino, + fromAmino: MsgInstallBundle.fromAmino, + }, + '/agoric.swingset.MsgDeliverInbound': { + aminoType: '/agoric.swingset.MsgDeliverInbound', + toAmino: MsgDeliverInbound.toAmino, + fromAmino: MsgDeliverInbound.fromAmino, + }, + '/agoric.swingset.MsgWalletAction': { + aminoType: '/agoric.swingset.MsgWalletAction', + toAmino: MsgWalletAction.toAmino, + fromAmino: MsgWalletAction.fromAmino, + }, + '/agoric.swingset.MsgWalletSpendAction': { + aminoType: '/agoric.swingset.MsgWalletSpendAction', + toAmino: MsgWalletSpendAction.toAmino, + fromAmino: MsgWalletSpendAction.fromAmino, + }, + '/agoric.swingset.MsgProvision': { + aminoType: '/agoric.swingset.MsgProvision', + toAmino: MsgProvision.toAmino, + fromAmino: MsgProvision.fromAmino, + }, +}; diff --git a/packages/cosmic-proto/src/codegen/agoric/swingset/msgs.registry.ts b/packages/cosmic-proto/src/codegen/agoric/swingset/msgs.registry.ts new file mode 100644 index 00000000000..919ba12f8a2 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/agoric/swingset/msgs.registry.ts @@ -0,0 +1,183 @@ +//@ts-nocheck +import { GeneratedType, Registry } from '@cosmjs/proto-signing'; +import { + MsgInstallBundle, + MsgDeliverInbound, + MsgWalletAction, + MsgWalletSpendAction, + MsgProvision, +} from './msgs.js'; +export const 
registry: ReadonlyArray<[string, GeneratedType]> = [ + ['/agoric.swingset.MsgInstallBundle', MsgInstallBundle], + ['/agoric.swingset.MsgDeliverInbound', MsgDeliverInbound], + ['/agoric.swingset.MsgWalletAction', MsgWalletAction], + ['/agoric.swingset.MsgWalletSpendAction', MsgWalletSpendAction], + ['/agoric.swingset.MsgProvision', MsgProvision], +]; +export const load = (protoRegistry: Registry) => { + registry.forEach(([typeUrl, mod]) => { + protoRegistry.register(typeUrl, mod); + }); +}; +export const MessageComposer = { + encoded: { + installBundle(value: MsgInstallBundle) { + return { + typeUrl: '/agoric.swingset.MsgInstallBundle', + value: MsgInstallBundle.encode(value).finish(), + }; + }, + deliverInbound(value: MsgDeliverInbound) { + return { + typeUrl: '/agoric.swingset.MsgDeliverInbound', + value: MsgDeliverInbound.encode(value).finish(), + }; + }, + walletAction(value: MsgWalletAction) { + return { + typeUrl: '/agoric.swingset.MsgWalletAction', + value: MsgWalletAction.encode(value).finish(), + }; + }, + walletSpendAction(value: MsgWalletSpendAction) { + return { + typeUrl: '/agoric.swingset.MsgWalletSpendAction', + value: MsgWalletSpendAction.encode(value).finish(), + }; + }, + provision(value: MsgProvision) { + return { + typeUrl: '/agoric.swingset.MsgProvision', + value: MsgProvision.encode(value).finish(), + }; + }, + }, + withTypeUrl: { + installBundle(value: MsgInstallBundle) { + return { + typeUrl: '/agoric.swingset.MsgInstallBundle', + value, + }; + }, + deliverInbound(value: MsgDeliverInbound) { + return { + typeUrl: '/agoric.swingset.MsgDeliverInbound', + value, + }; + }, + walletAction(value: MsgWalletAction) { + return { + typeUrl: '/agoric.swingset.MsgWalletAction', + value, + }; + }, + walletSpendAction(value: MsgWalletSpendAction) { + return { + typeUrl: '/agoric.swingset.MsgWalletSpendAction', + value, + }; + }, + provision(value: MsgProvision) { + return { + typeUrl: '/agoric.swingset.MsgProvision', + value, + }; + }, + }, + toJSON: { + installBundle(value: MsgInstallBundle) { + return { + typeUrl: '/agoric.swingset.MsgInstallBundle', + value: MsgInstallBundle.toJSON(value), + }; + }, + deliverInbound(value: MsgDeliverInbound) { + return { + typeUrl: '/agoric.swingset.MsgDeliverInbound', + value: MsgDeliverInbound.toJSON(value), + }; + }, + walletAction(value: MsgWalletAction) { + return { + typeUrl: '/agoric.swingset.MsgWalletAction', + value: MsgWalletAction.toJSON(value), + }; + }, + walletSpendAction(value: MsgWalletSpendAction) { + return { + typeUrl: '/agoric.swingset.MsgWalletSpendAction', + value: MsgWalletSpendAction.toJSON(value), + }; + }, + provision(value: MsgProvision) { + return { + typeUrl: '/agoric.swingset.MsgProvision', + value: MsgProvision.toJSON(value), + }; + }, + }, + fromJSON: { + installBundle(value: any) { + return { + typeUrl: '/agoric.swingset.MsgInstallBundle', + value: MsgInstallBundle.fromJSON(value), + }; + }, + deliverInbound(value: any) { + return { + typeUrl: '/agoric.swingset.MsgDeliverInbound', + value: MsgDeliverInbound.fromJSON(value), + }; + }, + walletAction(value: any) { + return { + typeUrl: '/agoric.swingset.MsgWalletAction', + value: MsgWalletAction.fromJSON(value), + }; + }, + walletSpendAction(value: any) { + return { + typeUrl: '/agoric.swingset.MsgWalletSpendAction', + value: MsgWalletSpendAction.fromJSON(value), + }; + }, + provision(value: any) { + return { + typeUrl: '/agoric.swingset.MsgProvision', + value: MsgProvision.fromJSON(value), + }; + }, + }, + fromPartial: { + installBundle(value: MsgInstallBundle) { 
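+      // Editorial note: a sketch (not part of the generated output) showing how
+      // MessageComposer pairs with the signing client produced by
+      // getSigningAgoricClient (client.ts). The address, fee, and spend-action
+      // payload are placeholders; fromBech32 is from @cosmjs/encoding, and
+      // `client` is the SigningStargateClient returned by getSigningAgoricClient.
+      //
+      //   import { fromBech32 } from '@cosmjs/encoding';
+      //   import { MessageComposer } from './msgs.registry.js';
+      //
+      //   const msg = MessageComposer.fromPartial.walletSpendAction({
+      //     owner: fromBech32('agoric1...').data,
+      //     spendAction: JSON.stringify({ method: 'executeOffer' }),
+      //   });
+      //   const fee = { amount: [{ denom: 'ubld', amount: '5000' }], gas: '300000' };
+      //   await client.signAndBroadcast('agoric1...', [msg], fee);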
+      return {
+        typeUrl: '/agoric.swingset.MsgInstallBundle',
+        value: MsgInstallBundle.fromPartial(value),
+      };
+    },
+    deliverInbound(value: MsgDeliverInbound) {
+      return {
+        typeUrl: '/agoric.swingset.MsgDeliverInbound',
+        value: MsgDeliverInbound.fromPartial(value),
+      };
+    },
+    walletAction(value: MsgWalletAction) {
+      return {
+        typeUrl: '/agoric.swingset.MsgWalletAction',
+        value: MsgWalletAction.fromPartial(value),
+      };
+    },
+    walletSpendAction(value: MsgWalletSpendAction) {
+      return {
+        typeUrl: '/agoric.swingset.MsgWalletSpendAction',
+        value: MsgWalletSpendAction.fromPartial(value),
+      };
+    },
+    provision(value: MsgProvision) {
+      return {
+        typeUrl: '/agoric.swingset.MsgProvision',
+        value: MsgProvision.fromPartial(value),
+      };
+    },
+  },
+};
diff --git a/packages/cosmic-proto/src/codegen/agoric/swingset/msgs.rpc.msg.ts b/packages/cosmic-proto/src/codegen/agoric/swingset/msgs.rpc.msg.ts
new file mode 100644
index 00000000000..c94ce2c582d
--- /dev/null
+++ b/packages/cosmic-proto/src/codegen/agoric/swingset/msgs.rpc.msg.ts
@@ -0,0 +1,98 @@
+//@ts-nocheck
+import { Rpc } from '../../helpers.js';
+import { BinaryReader } from '../../binary.js';
+import {
+  MsgInstallBundle,
+  MsgInstallBundleResponse,
+  MsgDeliverInbound,
+  MsgDeliverInboundResponse,
+  MsgWalletAction,
+  MsgWalletActionResponse,
+  MsgWalletSpendAction,
+  MsgWalletSpendActionResponse,
+  MsgProvision,
+  MsgProvisionResponse,
+} from './msgs.js';
+/** Transactions. */
+export interface Msg {
+  /** Install a JavaScript sources bundle on the chain's SwingSet controller. */
+  installBundle(request: MsgInstallBundle): Promise<MsgInstallBundleResponse>;
+  /** Send inbound messages. */
+  deliverInbound(
+    request: MsgDeliverInbound,
+  ): Promise<MsgDeliverInboundResponse>;
+  /** Perform a low-privilege wallet action. */
+  walletAction(request: MsgWalletAction): Promise<MsgWalletActionResponse>;
+  /** Perform a wallet action that spends assets. */
+  walletSpendAction(
+    request: MsgWalletSpendAction,
+  ): Promise<MsgWalletSpendActionResponse>;
+  /** Provision a new endpoint. */
+  provision(request: MsgProvision): Promise<MsgProvisionResponse>;
+}
+export class MsgClientImpl implements Msg {
+  private readonly rpc: Rpc;
+  constructor(rpc: Rpc) {
+    this.rpc = rpc;
+    this.installBundle = this.installBundle.bind(this);
+    this.deliverInbound = this.deliverInbound.bind(this);
+    this.walletAction = this.walletAction.bind(this);
+    this.walletSpendAction = this.walletSpendAction.bind(this);
+    this.provision = this.provision.bind(this);
+  }
+  installBundle(request: MsgInstallBundle): Promise<MsgInstallBundleResponse> {
+    const data = MsgInstallBundle.encode(request).finish();
+    const promise = this.rpc.request(
+      'agoric.swingset.Msg',
+      'InstallBundle',
+      data,
+    );
+    return promise.then(data =>
+      MsgInstallBundleResponse.decode(new BinaryReader(data)),
+    );
+  }
+  deliverInbound(
+    request: MsgDeliverInbound,
+  ): Promise<MsgDeliverInboundResponse> {
+    const data = MsgDeliverInbound.encode(request).finish();
+    const promise = this.rpc.request(
+      'agoric.swingset.Msg',
+      'DeliverInbound',
+      data,
+    );
+    return promise.then(data =>
+      MsgDeliverInboundResponse.decode(new BinaryReader(data)),
+    );
+  }
+  walletAction(request: MsgWalletAction): Promise<MsgWalletActionResponse> {
+    const data = MsgWalletAction.encode(request).finish();
+    const promise = this.rpc.request(
+      'agoric.swingset.Msg',
+      'WalletAction',
+      data,
+    );
+    return promise.then(data =>
+      MsgWalletActionResponse.decode(new BinaryReader(data)),
+    );
+  }
+  walletSpendAction(
+    request: MsgWalletSpendAction,
+  ): Promise<MsgWalletSpendActionResponse> {
+    const data = MsgWalletSpendAction.encode(request).finish();
+    const promise = this.rpc.request(
+      'agoric.swingset.Msg',
+      'WalletSpendAction',
+      data,
+    );
+    return promise.then(data =>
+      MsgWalletSpendActionResponse.decode(new BinaryReader(data)),
+    );
+  }
+  provision(request: MsgProvision): Promise<MsgProvisionResponse> {
+    const data = MsgProvision.encode(request).finish();
+    const promise = this.rpc.request('agoric.swingset.Msg', 'Provision', data);
+    return promise.then(data =>
+      MsgProvisionResponse.decode(new BinaryReader(data)),
+    );
+  }
+}
diff --git a/packages/cosmic-proto/src/codegen/agoric/swingset/msgs.ts b/packages/cosmic-proto/src/codegen/agoric/swingset/msgs.ts
new file mode 100644
index 00000000000..0d913076428
--- /dev/null
+++ b/packages/cosmic-proto/src/codegen/agoric/swingset/msgs.ts
@@ -0,0 +1,1246 @@
+//@ts-nocheck
+import { BinaryReader, BinaryWriter } from '../../binary.js';
+import { isSet, bytesFromBase64, base64FromBytes } from '../../helpers.js';
+/** MsgDeliverInbound defines an SDK message for delivering an eventual send */
+export interface MsgDeliverInbound {
+  messages: string[];
+  nums: bigint[];
+  ack: bigint;
+  submitter: Uint8Array;
+}
+export interface MsgDeliverInboundProtoMsg {
+  typeUrl: '/agoric.swingset.MsgDeliverInbound';
+  value: Uint8Array;
+}
+/** MsgDeliverInbound defines an SDK message for delivering an eventual send */
+export interface MsgDeliverInboundAmino {
+  messages?: string[];
+  nums?: string[];
+  ack?: string;
+  submitter?: string;
+}
+export interface MsgDeliverInboundAminoMsg {
+  type: '/agoric.swingset.MsgDeliverInbound';
+  value: MsgDeliverInboundAmino;
+}
+/** MsgDeliverInbound defines an SDK message for delivering an eventual send */
+export interface MsgDeliverInboundSDKType {
+  messages: string[];
+  nums: bigint[];
+  ack: bigint;
+  submitter: Uint8Array;
+}
+/** MsgDeliverInboundResponse is an empty reply. */
+export interface MsgDeliverInboundResponse {}
+export interface MsgDeliverInboundResponseProtoMsg {
+  typeUrl: '/agoric.swingset.MsgDeliverInboundResponse';
+  value: Uint8Array;
+}
+/** MsgDeliverInboundResponse is an empty reply.
*/ +export interface MsgDeliverInboundResponseAmino {} +export interface MsgDeliverInboundResponseAminoMsg { + type: '/agoric.swingset.MsgDeliverInboundResponse'; + value: MsgDeliverInboundResponseAmino; +} +/** MsgDeliverInboundResponse is an empty reply. */ +export interface MsgDeliverInboundResponseSDKType {} +/** + * MsgWalletAction defines an SDK message for the on-chain wallet to perform an + * action that *does not* spend any assets (other than gas fees/stamps). This + * message type is typically protected by feegrant budgets. + */ +export interface MsgWalletAction { + owner: Uint8Array; + /** The action to perform, as JSON-stringified marshalled data. */ + action: string; +} +export interface MsgWalletActionProtoMsg { + typeUrl: '/agoric.swingset.MsgWalletAction'; + value: Uint8Array; +} +/** + * MsgWalletAction defines an SDK message for the on-chain wallet to perform an + * action that *does not* spend any assets (other than gas fees/stamps). This + * message type is typically protected by feegrant budgets. + */ +export interface MsgWalletActionAmino { + owner?: string; + /** The action to perform, as JSON-stringified marshalled data. */ + action?: string; +} +export interface MsgWalletActionAminoMsg { + type: '/agoric.swingset.MsgWalletAction'; + value: MsgWalletActionAmino; +} +/** + * MsgWalletAction defines an SDK message for the on-chain wallet to perform an + * action that *does not* spend any assets (other than gas fees/stamps). This + * message type is typically protected by feegrant budgets. + */ +export interface MsgWalletActionSDKType { + owner: Uint8Array; + action: string; +} +/** MsgWalletActionResponse is an empty reply. */ +export interface MsgWalletActionResponse {} +export interface MsgWalletActionResponseProtoMsg { + typeUrl: '/agoric.swingset.MsgWalletActionResponse'; + value: Uint8Array; +} +/** MsgWalletActionResponse is an empty reply. */ +export interface MsgWalletActionResponseAmino {} +export interface MsgWalletActionResponseAminoMsg { + type: '/agoric.swingset.MsgWalletActionResponse'; + value: MsgWalletActionResponseAmino; +} +/** MsgWalletActionResponse is an empty reply. */ +export interface MsgWalletActionResponseSDKType {} +/** + * MsgWalletSpendAction defines an SDK message for the on-chain wallet to + * perform an action that *does spend the owner's assets.* This message type is + * typically protected by explicit confirmation by the user. + */ +export interface MsgWalletSpendAction { + owner: Uint8Array; + /** The action to perform, as JSON-stringified marshalled data. */ + spendAction: string; +} +export interface MsgWalletSpendActionProtoMsg { + typeUrl: '/agoric.swingset.MsgWalletSpendAction'; + value: Uint8Array; +} +/** + * MsgWalletSpendAction defines an SDK message for the on-chain wallet to + * perform an action that *does spend the owner's assets.* This message type is + * typically protected by explicit confirmation by the user. + */ +export interface MsgWalletSpendActionAmino { + owner?: string; + /** The action to perform, as JSON-stringified marshalled data. */ + spend_action?: string; +} +export interface MsgWalletSpendActionAminoMsg { + type: '/agoric.swingset.MsgWalletSpendAction'; + value: MsgWalletSpendActionAmino; +} +/** + * MsgWalletSpendAction defines an SDK message for the on-chain wallet to + * perform an action that *does spend the owner's assets.* This message type is + * typically protected by explicit confirmation by the user. 
+ */ +export interface MsgWalletSpendActionSDKType { + owner: Uint8Array; + spend_action: string; +} +/** MsgWalletSpendActionResponse is an empty reply. */ +export interface MsgWalletSpendActionResponse {} +export interface MsgWalletSpendActionResponseProtoMsg { + typeUrl: '/agoric.swingset.MsgWalletSpendActionResponse'; + value: Uint8Array; +} +/** MsgWalletSpendActionResponse is an empty reply. */ +export interface MsgWalletSpendActionResponseAmino {} +export interface MsgWalletSpendActionResponseAminoMsg { + type: '/agoric.swingset.MsgWalletSpendActionResponse'; + value: MsgWalletSpendActionResponseAmino; +} +/** MsgWalletSpendActionResponse is an empty reply. */ +export interface MsgWalletSpendActionResponseSDKType {} +/** MsgProvision defines an SDK message for provisioning a client to the chain */ +export interface MsgProvision { + nickname: string; + address: Uint8Array; + powerFlags: string[]; + submitter: Uint8Array; +} +export interface MsgProvisionProtoMsg { + typeUrl: '/agoric.swingset.MsgProvision'; + value: Uint8Array; +} +/** MsgProvision defines an SDK message for provisioning a client to the chain */ +export interface MsgProvisionAmino { + nickname?: string; + address?: string; + power_flags?: string[]; + submitter?: string; +} +export interface MsgProvisionAminoMsg { + type: '/agoric.swingset.MsgProvision'; + value: MsgProvisionAmino; +} +/** MsgProvision defines an SDK message for provisioning a client to the chain */ +export interface MsgProvisionSDKType { + nickname: string; + address: Uint8Array; + power_flags: string[]; + submitter: Uint8Array; +} +/** MsgProvisionResponse is an empty reply. */ +export interface MsgProvisionResponse {} +export interface MsgProvisionResponseProtoMsg { + typeUrl: '/agoric.swingset.MsgProvisionResponse'; + value: Uint8Array; +} +/** MsgProvisionResponse is an empty reply. */ +export interface MsgProvisionResponseAmino {} +export interface MsgProvisionResponseAminoMsg { + type: '/agoric.swingset.MsgProvisionResponse'; + value: MsgProvisionResponseAmino; +} +/** MsgProvisionResponse is an empty reply. */ +export interface MsgProvisionResponseSDKType {} +/** MsgInstallBundle carries a signed bundle to SwingSet. */ +export interface MsgInstallBundle { + bundle: string; + submitter: Uint8Array; + /** + * Either bundle or compressed_bundle will be set. + * Default compression algorithm is gzip. + */ + compressedBundle: Uint8Array; + /** Size in bytes of uncompression of compressed_bundle. */ + uncompressedSize: bigint; +} +export interface MsgInstallBundleProtoMsg { + typeUrl: '/agoric.swingset.MsgInstallBundle'; + value: Uint8Array; +} +/** MsgInstallBundle carries a signed bundle to SwingSet. */ +export interface MsgInstallBundleAmino { + bundle?: string; + submitter?: string; + /** + * Either bundle or compressed_bundle will be set. + * Default compression algorithm is gzip. + */ + compressed_bundle?: string; + /** Size in bytes of uncompression of compressed_bundle. */ + uncompressed_size?: string; +} +export interface MsgInstallBundleAminoMsg { + type: '/agoric.swingset.MsgInstallBundle'; + value: MsgInstallBundleAmino; +} +/** MsgInstallBundle carries a signed bundle to SwingSet. */ +export interface MsgInstallBundleSDKType { + bundle: string; + submitter: Uint8Array; + compressed_bundle: Uint8Array; + uncompressed_size: bigint; +} +/** + * MsgInstallBundleResponse is an empty acknowledgement that an install bundle + * message has been queued for the SwingSet kernel's consideration. 
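+// Editorial note: a sketch (not generated code) of filling MsgInstallBundle per
+// the field comments above: either `bundle` or `compressedBundle` is set, and
+// gzip is the default compression. Assumes `bundleJson` holds the stringified
+// bundle and `submitter` holds the submitter's address bytes.
+//
+//   import { gzipSync } from 'node:zlib';
+//   import { MsgInstallBundle } from './msgs.js';
+//
+//   const uncompressed = new TextEncoder().encode(bundleJson);
+//   const msg = MsgInstallBundle.fromPartial({
+//     compressedBundle: gzipSync(uncompressed),
+//     uncompressedSize: BigInt(uncompressed.length),
+//     submitter,
+//   });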
+ */ +export interface MsgInstallBundleResponse {} +export interface MsgInstallBundleResponseProtoMsg { + typeUrl: '/agoric.swingset.MsgInstallBundleResponse'; + value: Uint8Array; +} +/** + * MsgInstallBundleResponse is an empty acknowledgement that an install bundle + * message has been queued for the SwingSet kernel's consideration. + */ +export interface MsgInstallBundleResponseAmino {} +export interface MsgInstallBundleResponseAminoMsg { + type: '/agoric.swingset.MsgInstallBundleResponse'; + value: MsgInstallBundleResponseAmino; +} +/** + * MsgInstallBundleResponse is an empty acknowledgement that an install bundle + * message has been queued for the SwingSet kernel's consideration. + */ +export interface MsgInstallBundleResponseSDKType {} +function createBaseMsgDeliverInbound(): MsgDeliverInbound { + return { + messages: [], + nums: [], + ack: BigInt(0), + submitter: new Uint8Array(), + }; +} +export const MsgDeliverInbound = { + typeUrl: '/agoric.swingset.MsgDeliverInbound', + encode( + message: MsgDeliverInbound, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + for (const v of message.messages) { + writer.uint32(10).string(v!); + } + writer.uint32(18).fork(); + for (const v of message.nums) { + writer.uint64(v); + } + writer.ldelim(); + if (message.ack !== BigInt(0)) { + writer.uint32(24).uint64(message.ack); + } + if (message.submitter.length !== 0) { + writer.uint32(34).bytes(message.submitter); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): MsgDeliverInbound { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgDeliverInbound(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.messages.push(reader.string()); + break; + case 2: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.nums.push(reader.uint64()); + } + } else { + message.nums.push(reader.uint64()); + } + break; + case 3: + message.ack = reader.uint64(); + break; + case 4: + message.submitter = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): MsgDeliverInbound { + return { + messages: Array.isArray(object?.messages) + ? object.messages.map((e: any) => String(e)) + : [], + nums: Array.isArray(object?.nums) + ? object.nums.map((e: any) => BigInt(e.toString())) + : [], + ack: isSet(object.ack) ? BigInt(object.ack.toString()) : BigInt(0), + submitter: isSet(object.submitter) + ? bytesFromBase64(object.submitter) + : new Uint8Array(), + }; + }, + toJSON(message: MsgDeliverInbound): unknown { + const obj: any = {}; + if (message.messages) { + obj.messages = message.messages.map(e => e); + } else { + obj.messages = []; + } + if (message.nums) { + obj.nums = message.nums.map(e => (e || BigInt(0)).toString()); + } else { + obj.nums = []; + } + message.ack !== undefined && + (obj.ack = (message.ack || BigInt(0)).toString()); + message.submitter !== undefined && + (obj.submitter = base64FromBytes( + message.submitter !== undefined ? 
message.submitter : new Uint8Array(), + )); + return obj; + }, + fromPartial(object: Partial): MsgDeliverInbound { + const message = createBaseMsgDeliverInbound(); + message.messages = object.messages?.map(e => e) || []; + message.nums = object.nums?.map(e => BigInt(e.toString())) || []; + message.ack = + object.ack !== undefined && object.ack !== null + ? BigInt(object.ack.toString()) + : BigInt(0); + message.submitter = object.submitter ?? new Uint8Array(); + return message; + }, + fromAmino(object: MsgDeliverInboundAmino): MsgDeliverInbound { + const message = createBaseMsgDeliverInbound(); + message.messages = object.messages?.map(e => e) || []; + message.nums = object.nums?.map(e => BigInt(e)) || []; + if (object.ack !== undefined && object.ack !== null) { + message.ack = BigInt(object.ack); + } + if (object.submitter !== undefined && object.submitter !== null) { + message.submitter = bytesFromBase64(object.submitter); + } + return message; + }, + toAmino(message: MsgDeliverInbound): MsgDeliverInboundAmino { + const obj: any = {}; + if (message.messages) { + obj.messages = message.messages.map(e => e); + } else { + obj.messages = []; + } + if (message.nums) { + obj.nums = message.nums.map(e => e.toString()); + } else { + obj.nums = []; + } + obj.ack = message.ack ? message.ack.toString() : undefined; + obj.submitter = message.submitter + ? base64FromBytes(message.submitter) + : undefined; + return obj; + }, + fromAminoMsg(object: MsgDeliverInboundAminoMsg): MsgDeliverInbound { + return MsgDeliverInbound.fromAmino(object.value); + }, + fromProtoMsg(message: MsgDeliverInboundProtoMsg): MsgDeliverInbound { + return MsgDeliverInbound.decode(message.value); + }, + toProto(message: MsgDeliverInbound): Uint8Array { + return MsgDeliverInbound.encode(message).finish(); + }, + toProtoMsg(message: MsgDeliverInbound): MsgDeliverInboundProtoMsg { + return { + typeUrl: '/agoric.swingset.MsgDeliverInbound', + value: MsgDeliverInbound.encode(message).finish(), + }; + }, +}; +function createBaseMsgDeliverInboundResponse(): MsgDeliverInboundResponse { + return {}; +} +export const MsgDeliverInboundResponse = { + typeUrl: '/agoric.swingset.MsgDeliverInboundResponse', + encode( + _: MsgDeliverInboundResponse, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): MsgDeliverInboundResponse { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgDeliverInboundResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(_: any): MsgDeliverInboundResponse { + return {}; + }, + toJSON(_: MsgDeliverInboundResponse): unknown { + const obj: any = {}; + return obj; + }, + fromPartial( + _: Partial, + ): MsgDeliverInboundResponse { + const message = createBaseMsgDeliverInboundResponse(); + return message; + }, + fromAmino(_: MsgDeliverInboundResponseAmino): MsgDeliverInboundResponse { + const message = createBaseMsgDeliverInboundResponse(); + return message; + }, + toAmino(_: MsgDeliverInboundResponse): MsgDeliverInboundResponseAmino { + const obj: any = {}; + return obj; + }, + fromAminoMsg( + object: MsgDeliverInboundResponseAminoMsg, + ): MsgDeliverInboundResponse { + return MsgDeliverInboundResponse.fromAmino(object.value); + }, + fromProtoMsg( + message: MsgDeliverInboundResponseProtoMsg, + ): MsgDeliverInboundResponse { + return MsgDeliverInboundResponse.decode(message.value); + }, + toProto(message: MsgDeliverInboundResponse): Uint8Array { + return MsgDeliverInboundResponse.encode(message).finish(); + }, + toProtoMsg( + message: MsgDeliverInboundResponse, + ): MsgDeliverInboundResponseProtoMsg { + return { + typeUrl: '/agoric.swingset.MsgDeliverInboundResponse', + value: MsgDeliverInboundResponse.encode(message).finish(), + }; + }, +}; +function createBaseMsgWalletAction(): MsgWalletAction { + return { + owner: new Uint8Array(), + action: '', + }; +} +export const MsgWalletAction = { + typeUrl: '/agoric.swingset.MsgWalletAction', + encode( + message: MsgWalletAction, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.owner.length !== 0) { + writer.uint32(10).bytes(message.owner); + } + if (message.action !== '') { + writer.uint32(18).string(message.action); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): MsgWalletAction { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgWalletAction(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.owner = reader.bytes(); + break; + case 2: + message.action = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): MsgWalletAction { + return { + owner: isSet(object.owner) + ? bytesFromBase64(object.owner) + : new Uint8Array(), + action: isSet(object.action) ? String(object.action) : '', + }; + }, + toJSON(message: MsgWalletAction): unknown { + const obj: any = {}; + message.owner !== undefined && + (obj.owner = base64FromBytes( + message.owner !== undefined ? message.owner : new Uint8Array(), + )); + message.action !== undefined && (obj.action = message.action); + return obj; + }, + fromPartial(object: Partial): MsgWalletAction { + const message = createBaseMsgWalletAction(); + message.owner = object.owner ?? new Uint8Array(); + message.action = object.action ?? 
''; + return message; + }, + fromAmino(object: MsgWalletActionAmino): MsgWalletAction { + const message = createBaseMsgWalletAction(); + if (object.owner !== undefined && object.owner !== null) { + message.owner = bytesFromBase64(object.owner); + } + if (object.action !== undefined && object.action !== null) { + message.action = object.action; + } + return message; + }, + toAmino(message: MsgWalletAction): MsgWalletActionAmino { + const obj: any = {}; + obj.owner = message.owner ? base64FromBytes(message.owner) : undefined; + obj.action = message.action; + return obj; + }, + fromAminoMsg(object: MsgWalletActionAminoMsg): MsgWalletAction { + return MsgWalletAction.fromAmino(object.value); + }, + fromProtoMsg(message: MsgWalletActionProtoMsg): MsgWalletAction { + return MsgWalletAction.decode(message.value); + }, + toProto(message: MsgWalletAction): Uint8Array { + return MsgWalletAction.encode(message).finish(); + }, + toProtoMsg(message: MsgWalletAction): MsgWalletActionProtoMsg { + return { + typeUrl: '/agoric.swingset.MsgWalletAction', + value: MsgWalletAction.encode(message).finish(), + }; + }, +}; +function createBaseMsgWalletActionResponse(): MsgWalletActionResponse { + return {}; +} +export const MsgWalletActionResponse = { + typeUrl: '/agoric.swingset.MsgWalletActionResponse', + encode( + _: MsgWalletActionResponse, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): MsgWalletActionResponse { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgWalletActionResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(_: any): MsgWalletActionResponse { + return {}; + }, + toJSON(_: MsgWalletActionResponse): unknown { + const obj: any = {}; + return obj; + }, + fromPartial(_: Partial): MsgWalletActionResponse { + const message = createBaseMsgWalletActionResponse(); + return message; + }, + fromAmino(_: MsgWalletActionResponseAmino): MsgWalletActionResponse { + const message = createBaseMsgWalletActionResponse(); + return message; + }, + toAmino(_: MsgWalletActionResponse): MsgWalletActionResponseAmino { + const obj: any = {}; + return obj; + }, + fromAminoMsg( + object: MsgWalletActionResponseAminoMsg, + ): MsgWalletActionResponse { + return MsgWalletActionResponse.fromAmino(object.value); + }, + fromProtoMsg( + message: MsgWalletActionResponseProtoMsg, + ): MsgWalletActionResponse { + return MsgWalletActionResponse.decode(message.value); + }, + toProto(message: MsgWalletActionResponse): Uint8Array { + return MsgWalletActionResponse.encode(message).finish(); + }, + toProtoMsg( + message: MsgWalletActionResponse, + ): MsgWalletActionResponseProtoMsg { + return { + typeUrl: '/agoric.swingset.MsgWalletActionResponse', + value: MsgWalletActionResponse.encode(message).finish(), + }; + }, +}; +function createBaseMsgWalletSpendAction(): MsgWalletSpendAction { + return { + owner: new Uint8Array(), + spendAction: '', + }; +} +export const MsgWalletSpendAction = { + typeUrl: '/agoric.swingset.MsgWalletSpendAction', + encode( + message: MsgWalletSpendAction, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.owner.length !== 0) { + writer.uint32(10).bytes(message.owner); + } + if (message.spendAction !== '') { 
+ writer.uint32(18).string(message.spendAction); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): MsgWalletSpendAction { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgWalletSpendAction(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.owner = reader.bytes(); + break; + case 2: + message.spendAction = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): MsgWalletSpendAction { + return { + owner: isSet(object.owner) + ? bytesFromBase64(object.owner) + : new Uint8Array(), + spendAction: isSet(object.spendAction) ? String(object.spendAction) : '', + }; + }, + toJSON(message: MsgWalletSpendAction): unknown { + const obj: any = {}; + message.owner !== undefined && + (obj.owner = base64FromBytes( + message.owner !== undefined ? message.owner : new Uint8Array(), + )); + message.spendAction !== undefined && + (obj.spendAction = message.spendAction); + return obj; + }, + fromPartial(object: Partial): MsgWalletSpendAction { + const message = createBaseMsgWalletSpendAction(); + message.owner = object.owner ?? new Uint8Array(); + message.spendAction = object.spendAction ?? ''; + return message; + }, + fromAmino(object: MsgWalletSpendActionAmino): MsgWalletSpendAction { + const message = createBaseMsgWalletSpendAction(); + if (object.owner !== undefined && object.owner !== null) { + message.owner = bytesFromBase64(object.owner); + } + if (object.spend_action !== undefined && object.spend_action !== null) { + message.spendAction = object.spend_action; + } + return message; + }, + toAmino(message: MsgWalletSpendAction): MsgWalletSpendActionAmino { + const obj: any = {}; + obj.owner = message.owner ? base64FromBytes(message.owner) : undefined; + obj.spend_action = message.spendAction; + return obj; + }, + fromAminoMsg(object: MsgWalletSpendActionAminoMsg): MsgWalletSpendAction { + return MsgWalletSpendAction.fromAmino(object.value); + }, + fromProtoMsg(message: MsgWalletSpendActionProtoMsg): MsgWalletSpendAction { + return MsgWalletSpendAction.decode(message.value); + }, + toProto(message: MsgWalletSpendAction): Uint8Array { + return MsgWalletSpendAction.encode(message).finish(); + }, + toProtoMsg(message: MsgWalletSpendAction): MsgWalletSpendActionProtoMsg { + return { + typeUrl: '/agoric.swingset.MsgWalletSpendAction', + value: MsgWalletSpendAction.encode(message).finish(), + }; + }, +}; +function createBaseMsgWalletSpendActionResponse(): MsgWalletSpendActionResponse { + return {}; +} +export const MsgWalletSpendActionResponse = { + typeUrl: '/agoric.swingset.MsgWalletSpendActionResponse', + encode( + _: MsgWalletSpendActionResponse, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): MsgWalletSpendActionResponse { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgWalletSpendActionResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(_: any): MsgWalletSpendActionResponse { + return {}; + }, + toJSON(_: MsgWalletSpendActionResponse): unknown { + const obj: any = {}; + return obj; + }, + fromPartial( + _: Partial, + ): MsgWalletSpendActionResponse { + const message = createBaseMsgWalletSpendActionResponse(); + return message; + }, + fromAmino( + _: MsgWalletSpendActionResponseAmino, + ): MsgWalletSpendActionResponse { + const message = createBaseMsgWalletSpendActionResponse(); + return message; + }, + toAmino(_: MsgWalletSpendActionResponse): MsgWalletSpendActionResponseAmino { + const obj: any = {}; + return obj; + }, + fromAminoMsg( + object: MsgWalletSpendActionResponseAminoMsg, + ): MsgWalletSpendActionResponse { + return MsgWalletSpendActionResponse.fromAmino(object.value); + }, + fromProtoMsg( + message: MsgWalletSpendActionResponseProtoMsg, + ): MsgWalletSpendActionResponse { + return MsgWalletSpendActionResponse.decode(message.value); + }, + toProto(message: MsgWalletSpendActionResponse): Uint8Array { + return MsgWalletSpendActionResponse.encode(message).finish(); + }, + toProtoMsg( + message: MsgWalletSpendActionResponse, + ): MsgWalletSpendActionResponseProtoMsg { + return { + typeUrl: '/agoric.swingset.MsgWalletSpendActionResponse', + value: MsgWalletSpendActionResponse.encode(message).finish(), + }; + }, +}; +function createBaseMsgProvision(): MsgProvision { + return { + nickname: '', + address: new Uint8Array(), + powerFlags: [], + submitter: new Uint8Array(), + }; +} +export const MsgProvision = { + typeUrl: '/agoric.swingset.MsgProvision', + encode( + message: MsgProvision, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.nickname !== '') { + writer.uint32(10).string(message.nickname); + } + if (message.address.length !== 0) { + writer.uint32(18).bytes(message.address); + } + for (const v of message.powerFlags) { + writer.uint32(26).string(v!); + } + if (message.submitter.length !== 0) { + writer.uint32(34).bytes(message.submitter); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): MsgProvision { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgProvision(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.nickname = reader.string(); + break; + case 2: + message.address = reader.bytes(); + break; + case 3: + message.powerFlags.push(reader.string()); + break; + case 4: + message.submitter = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): MsgProvision { + return { + nickname: isSet(object.nickname) ? String(object.nickname) : '', + address: isSet(object.address) + ? bytesFromBase64(object.address) + : new Uint8Array(), + powerFlags: Array.isArray(object?.powerFlags) + ? object.powerFlags.map((e: any) => String(e)) + : [], + submitter: isSet(object.submitter) + ? 
bytesFromBase64(object.submitter) + : new Uint8Array(), + }; + }, + toJSON(message: MsgProvision): unknown { + const obj: any = {}; + message.nickname !== undefined && (obj.nickname = message.nickname); + message.address !== undefined && + (obj.address = base64FromBytes( + message.address !== undefined ? message.address : new Uint8Array(), + )); + if (message.powerFlags) { + obj.powerFlags = message.powerFlags.map(e => e); + } else { + obj.powerFlags = []; + } + message.submitter !== undefined && + (obj.submitter = base64FromBytes( + message.submitter !== undefined ? message.submitter : new Uint8Array(), + )); + return obj; + }, + fromPartial(object: Partial): MsgProvision { + const message = createBaseMsgProvision(); + message.nickname = object.nickname ?? ''; + message.address = object.address ?? new Uint8Array(); + message.powerFlags = object.powerFlags?.map(e => e) || []; + message.submitter = object.submitter ?? new Uint8Array(); + return message; + }, + fromAmino(object: MsgProvisionAmino): MsgProvision { + const message = createBaseMsgProvision(); + if (object.nickname !== undefined && object.nickname !== null) { + message.nickname = object.nickname; + } + if (object.address !== undefined && object.address !== null) { + message.address = bytesFromBase64(object.address); + } + message.powerFlags = object.power_flags?.map(e => e) || []; + if (object.submitter !== undefined && object.submitter !== null) { + message.submitter = bytesFromBase64(object.submitter); + } + return message; + }, + toAmino(message: MsgProvision): MsgProvisionAmino { + const obj: any = {}; + obj.nickname = message.nickname; + obj.address = message.address + ? base64FromBytes(message.address) + : undefined; + if (message.powerFlags) { + obj.power_flags = message.powerFlags.map(e => e); + } else { + obj.power_flags = []; + } + obj.submitter = message.submitter + ? base64FromBytes(message.submitter) + : undefined; + return obj; + }, + fromAminoMsg(object: MsgProvisionAminoMsg): MsgProvision { + return MsgProvision.fromAmino(object.value); + }, + fromProtoMsg(message: MsgProvisionProtoMsg): MsgProvision { + return MsgProvision.decode(message.value); + }, + toProto(message: MsgProvision): Uint8Array { + return MsgProvision.encode(message).finish(); + }, + toProtoMsg(message: MsgProvision): MsgProvisionProtoMsg { + return { + typeUrl: '/agoric.swingset.MsgProvision', + value: MsgProvision.encode(message).finish(), + }; + }, +}; +function createBaseMsgProvisionResponse(): MsgProvisionResponse { + return {}; +} +export const MsgProvisionResponse = { + typeUrl: '/agoric.swingset.MsgProvisionResponse', + encode( + _: MsgProvisionResponse, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): MsgProvisionResponse { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgProvisionResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(_: any): MsgProvisionResponse { + return {}; + }, + toJSON(_: MsgProvisionResponse): unknown { + const obj: any = {}; + return obj; + }, + fromPartial(_: Partial): MsgProvisionResponse { + const message = createBaseMsgProvisionResponse(); + return message; + }, + fromAmino(_: MsgProvisionResponseAmino): MsgProvisionResponse { + const message = createBaseMsgProvisionResponse(); + return message; + }, + toAmino(_: MsgProvisionResponse): MsgProvisionResponseAmino { + const obj: any = {}; + return obj; + }, + fromAminoMsg(object: MsgProvisionResponseAminoMsg): MsgProvisionResponse { + return MsgProvisionResponse.fromAmino(object.value); + }, + fromProtoMsg(message: MsgProvisionResponseProtoMsg): MsgProvisionResponse { + return MsgProvisionResponse.decode(message.value); + }, + toProto(message: MsgProvisionResponse): Uint8Array { + return MsgProvisionResponse.encode(message).finish(); + }, + toProtoMsg(message: MsgProvisionResponse): MsgProvisionResponseProtoMsg { + return { + typeUrl: '/agoric.swingset.MsgProvisionResponse', + value: MsgProvisionResponse.encode(message).finish(), + }; + }, +}; +function createBaseMsgInstallBundle(): MsgInstallBundle { + return { + bundle: '', + submitter: new Uint8Array(), + compressedBundle: new Uint8Array(), + uncompressedSize: BigInt(0), + }; +} +export const MsgInstallBundle = { + typeUrl: '/agoric.swingset.MsgInstallBundle', + encode( + message: MsgInstallBundle, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.bundle !== '') { + writer.uint32(10).string(message.bundle); + } + if (message.submitter.length !== 0) { + writer.uint32(18).bytes(message.submitter); + } + if (message.compressedBundle.length !== 0) { + writer.uint32(26).bytes(message.compressedBundle); + } + if (message.uncompressedSize !== BigInt(0)) { + writer.uint32(32).int64(message.uncompressedSize); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): MsgInstallBundle { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgInstallBundle(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.bundle = reader.string(); + break; + case 2: + message.submitter = reader.bytes(); + break; + case 3: + message.compressedBundle = reader.bytes(); + break; + case 4: + message.uncompressedSize = reader.int64(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): MsgInstallBundle { + return { + bundle: isSet(object.bundle) ? String(object.bundle) : '', + submitter: isSet(object.submitter) + ? bytesFromBase64(object.submitter) + : new Uint8Array(), + compressedBundle: isSet(object.compressedBundle) + ? bytesFromBase64(object.compressedBundle) + : new Uint8Array(), + uncompressedSize: isSet(object.uncompressedSize) + ? BigInt(object.uncompressedSize.toString()) + : BigInt(0), + }; + }, + toJSON(message: MsgInstallBundle): unknown { + const obj: any = {}; + message.bundle !== undefined && (obj.bundle = message.bundle); + message.submitter !== undefined && + (obj.submitter = base64FromBytes( + message.submitter !== undefined ? 
message.submitter : new Uint8Array(), + )); + message.compressedBundle !== undefined && + (obj.compressedBundle = base64FromBytes( + message.compressedBundle !== undefined + ? message.compressedBundle + : new Uint8Array(), + )); + message.uncompressedSize !== undefined && + (obj.uncompressedSize = ( + message.uncompressedSize || BigInt(0) + ).toString()); + return obj; + }, + fromPartial(object: Partial): MsgInstallBundle { + const message = createBaseMsgInstallBundle(); + message.bundle = object.bundle ?? ''; + message.submitter = object.submitter ?? new Uint8Array(); + message.compressedBundle = object.compressedBundle ?? new Uint8Array(); + message.uncompressedSize = + object.uncompressedSize !== undefined && object.uncompressedSize !== null + ? BigInt(object.uncompressedSize.toString()) + : BigInt(0); + return message; + }, + fromAmino(object: MsgInstallBundleAmino): MsgInstallBundle { + const message = createBaseMsgInstallBundle(); + if (object.bundle !== undefined && object.bundle !== null) { + message.bundle = object.bundle; + } + if (object.submitter !== undefined && object.submitter !== null) { + message.submitter = bytesFromBase64(object.submitter); + } + if ( + object.compressed_bundle !== undefined && + object.compressed_bundle !== null + ) { + message.compressedBundle = bytesFromBase64(object.compressed_bundle); + } + if ( + object.uncompressed_size !== undefined && + object.uncompressed_size !== null + ) { + message.uncompressedSize = BigInt(object.uncompressed_size); + } + return message; + }, + toAmino(message: MsgInstallBundle): MsgInstallBundleAmino { + const obj: any = {}; + obj.bundle = message.bundle; + obj.submitter = message.submitter + ? base64FromBytes(message.submitter) + : undefined; + obj.compressed_bundle = message.compressedBundle + ? base64FromBytes(message.compressedBundle) + : undefined; + obj.uncompressed_size = message.uncompressedSize + ? message.uncompressedSize.toString() + : undefined; + return obj; + }, + fromAminoMsg(object: MsgInstallBundleAminoMsg): MsgInstallBundle { + return MsgInstallBundle.fromAmino(object.value); + }, + fromProtoMsg(message: MsgInstallBundleProtoMsg): MsgInstallBundle { + return MsgInstallBundle.decode(message.value); + }, + toProto(message: MsgInstallBundle): Uint8Array { + return MsgInstallBundle.encode(message).finish(); + }, + toProtoMsg(message: MsgInstallBundle): MsgInstallBundleProtoMsg { + return { + typeUrl: '/agoric.swingset.MsgInstallBundle', + value: MsgInstallBundle.encode(message).finish(), + }; + }, +}; +function createBaseMsgInstallBundleResponse(): MsgInstallBundleResponse { + return {}; +} +export const MsgInstallBundleResponse = { + typeUrl: '/agoric.swingset.MsgInstallBundleResponse', + encode( + _: MsgInstallBundleResponse, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): MsgInstallBundleResponse { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
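// Illustrative aside, not part of the generated patch: the Amino converters
// above rename fields to snake_case and re-encode bytes and bigints as strings.
// The bundle string and sizes below are placeholder values for the example.
const bundleMsg = MsgInstallBundle.fromPartial({
  bundle: '{"moduleFormat":"endoZipBase64"}', // placeholder bundle JSON
  submitter: new Uint8Array(20),
  uncompressedSize: BigInt(123),
});
const bundleAmino = MsgInstallBundle.toAmino(bundleMsg);
// bundleAmino.uncompressed_size === '123' (bigint rendered as a decimal string)
// bundleAmino.submitter is the base64 encoding of the submitter bytes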
reader.len : reader.pos + length;
+    const message = createBaseMsgInstallBundleResponse();
+    while (reader.pos < end) {
+      const tag = reader.uint32();
+      switch (tag >>> 3) {
+        default:
+          reader.skipType(tag & 7);
+          break;
+      }
+    }
+    return message;
+  },
+  fromJSON(_: any): MsgInstallBundleResponse {
+    return {};
+  },
+  toJSON(_: MsgInstallBundleResponse): unknown {
+    const obj: any = {};
+    return obj;
+  },
+  fromPartial(_: Partial<MsgInstallBundleResponse>): MsgInstallBundleResponse {
+    const message = createBaseMsgInstallBundleResponse();
+    return message;
+  },
+  fromAmino(_: MsgInstallBundleResponseAmino): MsgInstallBundleResponse {
+    const message = createBaseMsgInstallBundleResponse();
+    return message;
+  },
+  toAmino(_: MsgInstallBundleResponse): MsgInstallBundleResponseAmino {
+    const obj: any = {};
+    return obj;
+  },
+  fromAminoMsg(
+    object: MsgInstallBundleResponseAminoMsg,
+  ): MsgInstallBundleResponse {
+    return MsgInstallBundleResponse.fromAmino(object.value);
+  },
+  fromProtoMsg(
+    message: MsgInstallBundleResponseProtoMsg,
+  ): MsgInstallBundleResponse {
+    return MsgInstallBundleResponse.decode(message.value);
+  },
+  toProto(message: MsgInstallBundleResponse): Uint8Array {
+    return MsgInstallBundleResponse.encode(message).finish();
+  },
+  toProtoMsg(
+    message: MsgInstallBundleResponse,
+  ): MsgInstallBundleResponseProtoMsg {
+    return {
+      typeUrl: '/agoric.swingset.MsgInstallBundleResponse',
+      value: MsgInstallBundleResponse.encode(message).finish(),
+    };
+  },
+};
diff --git a/packages/cosmic-proto/src/codegen/agoric/swingset/query.rpc.Query.ts b/packages/cosmic-proto/src/codegen/agoric/swingset/query.rpc.Query.ts
new file mode 100644
index 00000000000..85bc0cbd1fe
--- /dev/null
+++ b/packages/cosmic-proto/src/codegen/agoric/swingset/query.rpc.Query.ts
@@ -0,0 +1,66 @@
+//@ts-nocheck
+import { Rpc } from '../../helpers.js';
+import { BinaryReader } from '../../binary.js';
+import { QueryClient, createProtobufRpcClient } from '@cosmjs/stargate';
+import {
+  QueryParamsRequest,
+  QueryParamsResponse,
+  QueryEgressRequest,
+  QueryEgressResponse,
+  QueryMailboxRequest,
+  QueryMailboxResponse,
+} from './query.js';
+/** Query defines the gRPC querier service */
+export interface Query {
+  /** Params queries params of the swingset module. */
+  params(request?: QueryParamsRequest): Promise<QueryParamsResponse>;
+  /** Egress queries a provisioned egress. */
+  egress(request: QueryEgressRequest): Promise<QueryEgressResponse>;
+  /** Return the contents of a peer's outbound mailbox.
 */
+  mailbox(request: QueryMailboxRequest): Promise<QueryMailboxResponse>;
+}
+export class QueryClientImpl implements Query {
+  private readonly rpc: Rpc;
+  constructor(rpc: Rpc) {
+    this.rpc = rpc;
+    this.params = this.params.bind(this);
+    this.egress = this.egress.bind(this);
+    this.mailbox = this.mailbox.bind(this);
+  }
+  params(request: QueryParamsRequest = {}): Promise<QueryParamsResponse> {
+    const data = QueryParamsRequest.encode(request).finish();
+    const promise = this.rpc.request('agoric.swingset.Query', 'Params', data);
+    return promise.then(data =>
+      QueryParamsResponse.decode(new BinaryReader(data)),
+    );
+  }
+  egress(request: QueryEgressRequest): Promise<QueryEgressResponse> {
+    const data = QueryEgressRequest.encode(request).finish();
+    const promise = this.rpc.request('agoric.swingset.Query', 'Egress', data);
+    return promise.then(data =>
+      QueryEgressResponse.decode(new BinaryReader(data)),
+    );
+  }
+  mailbox(request: QueryMailboxRequest): Promise<QueryMailboxResponse> {
+    const data = QueryMailboxRequest.encode(request).finish();
+    const promise = this.rpc.request('agoric.swingset.Query', 'Mailbox', data);
+    return promise.then(data =>
+      QueryMailboxResponse.decode(new BinaryReader(data)),
+    );
+  }
+}
+export const createRpcQueryExtension = (base: QueryClient) => {
+  const rpc = createProtobufRpcClient(base);
+  const queryService = new QueryClientImpl(rpc);
+  return {
+    params(request?: QueryParamsRequest): Promise<QueryParamsResponse> {
+      return queryService.params(request);
+    },
+    egress(request: QueryEgressRequest): Promise<QueryEgressResponse> {
+      return queryService.egress(request);
+    },
+    mailbox(request: QueryMailboxRequest): Promise<QueryMailboxResponse> {
+      return queryService.mailbox(request);
+    },
+  };
+};
diff --git a/packages/cosmic-proto/src/codegen/agoric/swingset/query.ts b/packages/cosmic-proto/src/codegen/agoric/swingset/query.ts
new file mode 100644
index 00000000000..e523b0bbf05
--- /dev/null
+++ b/packages/cosmic-proto/src/codegen/agoric/swingset/query.ts
@@ -0,0 +1,609 @@
+//@ts-nocheck
+import {
+  Params,
+  ParamsAmino,
+  ParamsSDKType,
+  Egress,
+  EgressAmino,
+  EgressSDKType,
+} from './swingset.js';
+import { BinaryReader, BinaryWriter } from '../../binary.js';
+import { isSet, bytesFromBase64, base64FromBytes } from '../../helpers.js';
+/** QueryParamsRequest is the request type for the Query/Params RPC method. */
+export interface QueryParamsRequest {}
+export interface QueryParamsRequestProtoMsg {
+  typeUrl: '/agoric.swingset.QueryParamsRequest';
+  value: Uint8Array;
+}
+/** QueryParamsRequest is the request type for the Query/Params RPC method. */
+export interface QueryParamsRequestAmino {}
+export interface QueryParamsRequestAminoMsg {
+  type: '/agoric.swingset.QueryParamsRequest';
+  value: QueryParamsRequestAmino;
+}
+/** QueryParamsRequest is the request type for the Query/Params RPC method. */
+export interface QueryParamsRequestSDKType {}
+/** QueryParamsResponse is the response type for the Query/Params RPC method. */
+export interface QueryParamsResponse {
+  /** params defines the parameters of the module. */
+  params: Params;
+}
+export interface QueryParamsResponseProtoMsg {
+  typeUrl: '/agoric.swingset.QueryParamsResponse';
+  value: Uint8Array;
+}
+/** QueryParamsResponse is the response type for the Query/Params RPC method. */
+export interface QueryParamsResponseAmino {
+  /** params defines the parameters of the module. */
+  params?: ParamsAmino;
+}
+export interface QueryParamsResponseAminoMsg {
+  type: '/agoric.swingset.QueryParamsResponse';
+  value: QueryParamsResponseAmino;
+}
+/** QueryParamsResponse is the response type for the Query/Params RPC method.
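// Illustrative aside, not part of the generated patch: one way the Query
// client from query.rpc.Query.ts above might be wired up, assuming a local
// Tendermint RPC endpoint and @cosmjs/tendermint-rpc. A sketch only, not the
// package's documented entry point.
import { Tendermint34Client } from '@cosmjs/tendermint-rpc';
import { QueryClient } from '@cosmjs/stargate';
import { createRpcQueryExtension } from './query.rpc.Query.js'; // assumed path

const tm = await Tendermint34Client.connect('http://localhost:26657'); // placeholder endpoint
const swingsetQuery = QueryClient.withExtensions(tm, createRpcQueryExtension);
const { params } = await swingsetQuery.params(); // QueryParamsResponse
console.log(params.beansPerUnit); // e.g. [{ key: 'feeUnit', beans: '...' }, ...]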
*/ +export interface QueryParamsResponseSDKType { + params: ParamsSDKType; +} +/** QueryEgressRequest is the request type for the Query/Egress RPC method */ +export interface QueryEgressRequest { + peer: Uint8Array; +} +export interface QueryEgressRequestProtoMsg { + typeUrl: '/agoric.swingset.QueryEgressRequest'; + value: Uint8Array; +} +/** QueryEgressRequest is the request type for the Query/Egress RPC method */ +export interface QueryEgressRequestAmino { + peer?: string; +} +export interface QueryEgressRequestAminoMsg { + type: '/agoric.swingset.QueryEgressRequest'; + value: QueryEgressRequestAmino; +} +/** QueryEgressRequest is the request type for the Query/Egress RPC method */ +export interface QueryEgressRequestSDKType { + peer: Uint8Array; +} +/** QueryEgressResponse is the egress response. */ +export interface QueryEgressResponse { + egress?: Egress; +} +export interface QueryEgressResponseProtoMsg { + typeUrl: '/agoric.swingset.QueryEgressResponse'; + value: Uint8Array; +} +/** QueryEgressResponse is the egress response. */ +export interface QueryEgressResponseAmino { + egress?: EgressAmino; +} +export interface QueryEgressResponseAminoMsg { + type: '/agoric.swingset.QueryEgressResponse'; + value: QueryEgressResponseAmino; +} +/** QueryEgressResponse is the egress response. */ +export interface QueryEgressResponseSDKType { + egress?: EgressSDKType; +} +/** QueryMailboxRequest is the mailbox query. */ +export interface QueryMailboxRequest { + peer: Uint8Array; +} +export interface QueryMailboxRequestProtoMsg { + typeUrl: '/agoric.swingset.QueryMailboxRequest'; + value: Uint8Array; +} +/** QueryMailboxRequest is the mailbox query. */ +export interface QueryMailboxRequestAmino { + peer?: string; +} +export interface QueryMailboxRequestAminoMsg { + type: '/agoric.swingset.QueryMailboxRequest'; + value: QueryMailboxRequestAmino; +} +/** QueryMailboxRequest is the mailbox query. */ +export interface QueryMailboxRequestSDKType { + peer: Uint8Array; +} +/** QueryMailboxResponse is the mailbox response. */ +export interface QueryMailboxResponse { + value: string; +} +export interface QueryMailboxResponseProtoMsg { + typeUrl: '/agoric.swingset.QueryMailboxResponse'; + value: Uint8Array; +} +/** QueryMailboxResponse is the mailbox response. */ +export interface QueryMailboxResponseAmino { + value?: string; +} +export interface QueryMailboxResponseAminoMsg { + type: '/agoric.swingset.QueryMailboxResponse'; + value: QueryMailboxResponseAmino; +} +/** QueryMailboxResponse is the mailbox response. */ +export interface QueryMailboxResponseSDKType { + value: string; +} +function createBaseQueryParamsRequest(): QueryParamsRequest { + return {}; +} +export const QueryParamsRequest = { + typeUrl: '/agoric.swingset.QueryParamsRequest', + encode( + _: QueryParamsRequest, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): QueryParamsRequest { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryParamsRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(_: any): QueryParamsRequest { + return {}; + }, + toJSON(_: QueryParamsRequest): unknown { + const obj: any = {}; + return obj; + }, + fromPartial(_: Partial): QueryParamsRequest { + const message = createBaseQueryParamsRequest(); + return message; + }, + fromAmino(_: QueryParamsRequestAmino): QueryParamsRequest { + const message = createBaseQueryParamsRequest(); + return message; + }, + toAmino(_: QueryParamsRequest): QueryParamsRequestAmino { + const obj: any = {}; + return obj; + }, + fromAminoMsg(object: QueryParamsRequestAminoMsg): QueryParamsRequest { + return QueryParamsRequest.fromAmino(object.value); + }, + fromProtoMsg(message: QueryParamsRequestProtoMsg): QueryParamsRequest { + return QueryParamsRequest.decode(message.value); + }, + toProto(message: QueryParamsRequest): Uint8Array { + return QueryParamsRequest.encode(message).finish(); + }, + toProtoMsg(message: QueryParamsRequest): QueryParamsRequestProtoMsg { + return { + typeUrl: '/agoric.swingset.QueryParamsRequest', + value: QueryParamsRequest.encode(message).finish(), + }; + }, +}; +function createBaseQueryParamsResponse(): QueryParamsResponse { + return { + params: Params.fromPartial({}), + }; +} +export const QueryParamsResponse = { + typeUrl: '/agoric.swingset.QueryParamsResponse', + encode( + message: QueryParamsResponse, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): QueryParamsResponse { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.params = Params.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): QueryParamsResponse { + return { + params: isSet(object.params) ? Params.fromJSON(object.params) : undefined, + }; + }, + toJSON(message: QueryParamsResponse): unknown { + const obj: any = {}; + message.params !== undefined && + (obj.params = message.params ? Params.toJSON(message.params) : undefined); + return obj; + }, + fromPartial(object: Partial): QueryParamsResponse { + const message = createBaseQueryParamsResponse(); + message.params = + object.params !== undefined && object.params !== null + ? Params.fromPartial(object.params) + : undefined; + return message; + }, + fromAmino(object: QueryParamsResponseAmino): QueryParamsResponse { + const message = createBaseQueryParamsResponse(); + if (object.params !== undefined && object.params !== null) { + message.params = Params.fromAmino(object.params); + } + return message; + }, + toAmino(message: QueryParamsResponse): QueryParamsResponseAmino { + const obj: any = {}; + obj.params = message.params ? 
Params.toAmino(message.params) : undefined; + return obj; + }, + fromAminoMsg(object: QueryParamsResponseAminoMsg): QueryParamsResponse { + return QueryParamsResponse.fromAmino(object.value); + }, + fromProtoMsg(message: QueryParamsResponseProtoMsg): QueryParamsResponse { + return QueryParamsResponse.decode(message.value); + }, + toProto(message: QueryParamsResponse): Uint8Array { + return QueryParamsResponse.encode(message).finish(); + }, + toProtoMsg(message: QueryParamsResponse): QueryParamsResponseProtoMsg { + return { + typeUrl: '/agoric.swingset.QueryParamsResponse', + value: QueryParamsResponse.encode(message).finish(), + }; + }, +}; +function createBaseQueryEgressRequest(): QueryEgressRequest { + return { + peer: new Uint8Array(), + }; +} +export const QueryEgressRequest = { + typeUrl: '/agoric.swingset.QueryEgressRequest', + encode( + message: QueryEgressRequest, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.peer.length !== 0) { + writer.uint32(10).bytes(message.peer); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): QueryEgressRequest { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryEgressRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.peer = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): QueryEgressRequest { + return { + peer: isSet(object.peer) + ? bytesFromBase64(object.peer) + : new Uint8Array(), + }; + }, + toJSON(message: QueryEgressRequest): unknown { + const obj: any = {}; + message.peer !== undefined && + (obj.peer = base64FromBytes( + message.peer !== undefined ? message.peer : new Uint8Array(), + )); + return obj; + }, + fromPartial(object: Partial): QueryEgressRequest { + const message = createBaseQueryEgressRequest(); + message.peer = object.peer ?? new Uint8Array(); + return message; + }, + fromAmino(object: QueryEgressRequestAmino): QueryEgressRequest { + const message = createBaseQueryEgressRequest(); + if (object.peer !== undefined && object.peer !== null) { + message.peer = bytesFromBase64(object.peer); + } + return message; + }, + toAmino(message: QueryEgressRequest): QueryEgressRequestAmino { + const obj: any = {}; + obj.peer = message.peer ? 
base64FromBytes(message.peer) : undefined; + return obj; + }, + fromAminoMsg(object: QueryEgressRequestAminoMsg): QueryEgressRequest { + return QueryEgressRequest.fromAmino(object.value); + }, + fromProtoMsg(message: QueryEgressRequestProtoMsg): QueryEgressRequest { + return QueryEgressRequest.decode(message.value); + }, + toProto(message: QueryEgressRequest): Uint8Array { + return QueryEgressRequest.encode(message).finish(); + }, + toProtoMsg(message: QueryEgressRequest): QueryEgressRequestProtoMsg { + return { + typeUrl: '/agoric.swingset.QueryEgressRequest', + value: QueryEgressRequest.encode(message).finish(), + }; + }, +}; +function createBaseQueryEgressResponse(): QueryEgressResponse { + return { + egress: undefined, + }; +} +export const QueryEgressResponse = { + typeUrl: '/agoric.swingset.QueryEgressResponse', + encode( + message: QueryEgressResponse, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.egress !== undefined) { + Egress.encode(message.egress, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): QueryEgressResponse { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryEgressResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.egress = Egress.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): QueryEgressResponse { + return { + egress: isSet(object.egress) ? Egress.fromJSON(object.egress) : undefined, + }; + }, + toJSON(message: QueryEgressResponse): unknown { + const obj: any = {}; + message.egress !== undefined && + (obj.egress = message.egress ? Egress.toJSON(message.egress) : undefined); + return obj; + }, + fromPartial(object: Partial): QueryEgressResponse { + const message = createBaseQueryEgressResponse(); + message.egress = + object.egress !== undefined && object.egress !== null + ? Egress.fromPartial(object.egress) + : undefined; + return message; + }, + fromAmino(object: QueryEgressResponseAmino): QueryEgressResponse { + const message = createBaseQueryEgressResponse(); + if (object.egress !== undefined && object.egress !== null) { + message.egress = Egress.fromAmino(object.egress); + } + return message; + }, + toAmino(message: QueryEgressResponse): QueryEgressResponseAmino { + const obj: any = {}; + obj.egress = message.egress ? 
Egress.toAmino(message.egress) : undefined; + return obj; + }, + fromAminoMsg(object: QueryEgressResponseAminoMsg): QueryEgressResponse { + return QueryEgressResponse.fromAmino(object.value); + }, + fromProtoMsg(message: QueryEgressResponseProtoMsg): QueryEgressResponse { + return QueryEgressResponse.decode(message.value); + }, + toProto(message: QueryEgressResponse): Uint8Array { + return QueryEgressResponse.encode(message).finish(); + }, + toProtoMsg(message: QueryEgressResponse): QueryEgressResponseProtoMsg { + return { + typeUrl: '/agoric.swingset.QueryEgressResponse', + value: QueryEgressResponse.encode(message).finish(), + }; + }, +}; +function createBaseQueryMailboxRequest(): QueryMailboxRequest { + return { + peer: new Uint8Array(), + }; +} +export const QueryMailboxRequest = { + typeUrl: '/agoric.swingset.QueryMailboxRequest', + encode( + message: QueryMailboxRequest, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.peer.length !== 0) { + writer.uint32(10).bytes(message.peer); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): QueryMailboxRequest { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryMailboxRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.peer = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): QueryMailboxRequest { + return { + peer: isSet(object.peer) + ? bytesFromBase64(object.peer) + : new Uint8Array(), + }; + }, + toJSON(message: QueryMailboxRequest): unknown { + const obj: any = {}; + message.peer !== undefined && + (obj.peer = base64FromBytes( + message.peer !== undefined ? message.peer : new Uint8Array(), + )); + return obj; + }, + fromPartial(object: Partial): QueryMailboxRequest { + const message = createBaseQueryMailboxRequest(); + message.peer = object.peer ?? new Uint8Array(); + return message; + }, + fromAmino(object: QueryMailboxRequestAmino): QueryMailboxRequest { + const message = createBaseQueryMailboxRequest(); + if (object.peer !== undefined && object.peer !== null) { + message.peer = bytesFromBase64(object.peer); + } + return message; + }, + toAmino(message: QueryMailboxRequest): QueryMailboxRequestAmino { + const obj: any = {}; + obj.peer = message.peer ? 
base64FromBytes(message.peer) : undefined; + return obj; + }, + fromAminoMsg(object: QueryMailboxRequestAminoMsg): QueryMailboxRequest { + return QueryMailboxRequest.fromAmino(object.value); + }, + fromProtoMsg(message: QueryMailboxRequestProtoMsg): QueryMailboxRequest { + return QueryMailboxRequest.decode(message.value); + }, + toProto(message: QueryMailboxRequest): Uint8Array { + return QueryMailboxRequest.encode(message).finish(); + }, + toProtoMsg(message: QueryMailboxRequest): QueryMailboxRequestProtoMsg { + return { + typeUrl: '/agoric.swingset.QueryMailboxRequest', + value: QueryMailboxRequest.encode(message).finish(), + }; + }, +}; +function createBaseQueryMailboxResponse(): QueryMailboxResponse { + return { + value: '', + }; +} +export const QueryMailboxResponse = { + typeUrl: '/agoric.swingset.QueryMailboxResponse', + encode( + message: QueryMailboxResponse, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.value !== '') { + writer.uint32(10).string(message.value); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): QueryMailboxResponse { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryMailboxResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.value = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): QueryMailboxResponse { + return { + value: isSet(object.value) ? String(object.value) : '', + }; + }, + toJSON(message: QueryMailboxResponse): unknown { + const obj: any = {}; + message.value !== undefined && (obj.value = message.value); + return obj; + }, + fromPartial(object: Partial): QueryMailboxResponse { + const message = createBaseQueryMailboxResponse(); + message.value = object.value ?? 
''; + return message; + }, + fromAmino(object: QueryMailboxResponseAmino): QueryMailboxResponse { + const message = createBaseQueryMailboxResponse(); + if (object.value !== undefined && object.value !== null) { + message.value = object.value; + } + return message; + }, + toAmino(message: QueryMailboxResponse): QueryMailboxResponseAmino { + const obj: any = {}; + obj.value = message.value; + return obj; + }, + fromAminoMsg(object: QueryMailboxResponseAminoMsg): QueryMailboxResponse { + return QueryMailboxResponse.fromAmino(object.value); + }, + fromProtoMsg(message: QueryMailboxResponseProtoMsg): QueryMailboxResponse { + return QueryMailboxResponse.decode(message.value); + }, + toProto(message: QueryMailboxResponse): Uint8Array { + return QueryMailboxResponse.encode(message).finish(); + }, + toProtoMsg(message: QueryMailboxResponse): QueryMailboxResponseProtoMsg { + return { + typeUrl: '/agoric.swingset.QueryMailboxResponse', + value: QueryMailboxResponse.encode(message).finish(), + }; + }, +}; diff --git a/packages/cosmic-proto/src/codegen/agoric/swingset/swingset.ts b/packages/cosmic-proto/src/codegen/agoric/swingset/swingset.ts new file mode 100644 index 00000000000..9c93417f166 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/agoric/swingset/swingset.ts @@ -0,0 +1,1372 @@ +//@ts-nocheck +import { + Coin, + CoinAmino, + CoinSDKType, +} from '../../cosmos/base/v1beta1/coin.js'; +import { BinaryReader, BinaryWriter } from '../../binary.js'; +import { isSet, bytesFromBase64, base64FromBytes } from '../../helpers.js'; +/** + * CoreEvalProposal is a gov Content type for evaluating code in the SwingSet + * core. + * See `agoric-sdk/packages/vats/src/core/eval.js`. + */ +export interface CoreEvalProposal { + title: string; + description: string; + /** + * Although evals are sequential, they may run concurrently, since they each + * can return a Promise. + */ + evals: CoreEval[]; +} +export interface CoreEvalProposalProtoMsg { + typeUrl: '/agoric.swingset.CoreEvalProposal'; + value: Uint8Array; +} +/** + * CoreEvalProposal is a gov Content type for evaluating code in the SwingSet + * core. + * See `agoric-sdk/packages/vats/src/core/eval.js`. + */ +export interface CoreEvalProposalAmino { + title?: string; + description?: string; + /** + * Although evals are sequential, they may run concurrently, since they each + * can return a Promise. + */ + evals?: CoreEvalAmino[]; +} +export interface CoreEvalProposalAminoMsg { + type: '/agoric.swingset.CoreEvalProposal'; + value: CoreEvalProposalAmino; +} +/** + * CoreEvalProposal is a gov Content type for evaluating code in the SwingSet + * core. + * See `agoric-sdk/packages/vats/src/core/eval.js`. + */ +export interface CoreEvalProposalSDKType { + title: string; + description: string; + evals: CoreEvalSDKType[]; +} +/** + * CoreEval defines an individual SwingSet core evaluation, for use in + * CoreEvalProposal. + */ +export interface CoreEval { + /** + * Grant these JSON-stringified core bootstrap permits to the jsCode, as the + * `powers` endowment. + */ + jsonPermits: string; + /** + * Evaluate this JavaScript code in a Compartment endowed with `powers` as + * well as some powerless helpers. + */ + jsCode: string; +} +export interface CoreEvalProtoMsg { + typeUrl: '/agoric.swingset.CoreEval'; + value: Uint8Array; +} +/** + * CoreEval defines an individual SwingSet core evaluation, for use in + * CoreEvalProposal. 
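// Illustrative aside, not part of the generated patch: a sketch of building a
// CoreEvalProposal value with the codecs defined later in this file. The
// permit and code strings are toy placeholders, not a runnable core eval.
const coreEval = CoreEval.fromPartial({
  jsonPermits: JSON.stringify({ consume: { chainStorage: true } }),
  jsCode: 'powers => harden({})', // placeholder script body
});
const coreEvalProposal = CoreEvalProposal.fromPartial({
  title: 'Example core eval',
  description: 'Illustration only',
  evals: [coreEval],
});
// CoreEvalProposal.toAmino(coreEvalProposal).evals[0].json_permits === coreEval.jsonPermits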
+ */ +export interface CoreEvalAmino { + /** + * Grant these JSON-stringified core bootstrap permits to the jsCode, as the + * `powers` endowment. + */ + json_permits?: string; + /** + * Evaluate this JavaScript code in a Compartment endowed with `powers` as + * well as some powerless helpers. + */ + js_code?: string; +} +export interface CoreEvalAminoMsg { + type: '/agoric.swingset.CoreEval'; + value: CoreEvalAmino; +} +/** + * CoreEval defines an individual SwingSet core evaluation, for use in + * CoreEvalProposal. + */ +export interface CoreEvalSDKType { + json_permits: string; + js_code: string; +} +/** Params are the swingset configuration/governance parameters. */ +export interface Params { + /** + * Map from unit name to a value in SwingSet "beans". + * Must not be negative. + * + * These values are used by SwingSet to normalize named per-resource charges + * (maybe rent) in a single Nat usage unit, the "bean". + * + * There is no required order to this list of entries, but all the chain + * nodes must all serialize and deserialize the existing order without + * permuting it. + */ + beansPerUnit: StringBeans[]; + /** + * The price in Coins per the unit named "fee". This value is used by + * cosmic-swingset JS code to decide how many tokens to charge. + * + * cost = beans_used * fee_unit_price / beans_per_unit["fee"] + */ + feeUnitPrice: Coin[]; + /** + * The SwingSet bootstrap vat configuration file. Not usefully modifiable + * via governance as it is only referenced by the chain's initial + * construction. + */ + bootstrapVatConfig: string; + /** + * If the provision submitter doesn't hold a provisionpass, their requested + * power flags are looked up in this fee menu (first match wins) and the sum + * is charged. If any power flag is not found in this menu, the request is + * rejected. + */ + powerFlagFees: PowerFlagFee[]; + /** + * Maximum sizes for queues. + * These values are used by SwingSet to compute how many messages should be + * accepted in a block. + * + * There is no required order to this list of entries, but all the chain + * nodes must all serialize and deserialize the existing order without + * permuting it. + */ + queueMax: QueueSize[]; +} +export interface ParamsProtoMsg { + typeUrl: '/agoric.swingset.Params'; + value: Uint8Array; +} +/** Params are the swingset configuration/governance parameters. */ +export interface ParamsAmino { + /** + * Map from unit name to a value in SwingSet "beans". + * Must not be negative. + * + * These values are used by SwingSet to normalize named per-resource charges + * (maybe rent) in a single Nat usage unit, the "bean". + * + * There is no required order to this list of entries, but all the chain + * nodes must all serialize and deserialize the existing order without + * permuting it. + */ + beans_per_unit?: StringBeansAmino[]; + /** + * The price in Coins per the unit named "fee". This value is used by + * cosmic-swingset JS code to decide how many tokens to charge. + * + * cost = beans_used * fee_unit_price / beans_per_unit["fee"] + */ + fee_unit_price?: CoinAmino[]; + /** + * The SwingSet bootstrap vat configuration file. Not usefully modifiable + * via governance as it is only referenced by the chain's initial + * construction. + */ + bootstrap_vat_config?: string; + /** + * If the provision submitter doesn't hold a provisionpass, their requested + * power flags are looked up in this fee menu (first match wins) and the sum + * is charged. If any power flag is not found in this menu, the request is + * rejected. 
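// Illustrative aside, not part of the generated patch: a worked instance of
// the fee formula documented above, with made-up numbers. Assume
// beans_per_unit['fee'] is 1e12 beans and fee_unit_price is 1,000,000 ubld
// per fee unit; then a transaction that consumed 5e12 beans costs
// 5e12 * 1e6 / 1e12 = 5,000,000 ubld. Likewise, a provision request is
// charged the `fee` coins of the first PowerFlagFee entry matching its flag.
const beansUsed = 5_000_000_000_000n;
const feeUnitPriceUbld = 1_000_000n; // placeholder price per fee unit
const beansPerFeeUnit = 1_000_000_000_000n; // placeholder beans_per_unit['fee']
const costUbld = (beansUsed * feeUnitPriceUbld) / beansPerFeeUnit; // 5_000_000n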
+ */ + power_flag_fees?: PowerFlagFeeAmino[]; + /** + * Maximum sizes for queues. + * These values are used by SwingSet to compute how many messages should be + * accepted in a block. + * + * There is no required order to this list of entries, but all the chain + * nodes must all serialize and deserialize the existing order without + * permuting it. + */ + queue_max?: QueueSizeAmino[]; +} +export interface ParamsAminoMsg { + type: '/agoric.swingset.Params'; + value: ParamsAmino; +} +/** Params are the swingset configuration/governance parameters. */ +export interface ParamsSDKType { + beans_per_unit: StringBeansSDKType[]; + fee_unit_price: CoinSDKType[]; + bootstrap_vat_config: string; + power_flag_fees: PowerFlagFeeSDKType[]; + queue_max: QueueSizeSDKType[]; +} +/** The current state of the module. */ +export interface State { + /** + * The allowed number of items to add to queues, as determined by SwingSet. + * Transactions which attempt to enqueue more should be rejected. + */ + queueAllowed: QueueSize[]; +} +export interface StateProtoMsg { + typeUrl: '/agoric.swingset.State'; + value: Uint8Array; +} +/** The current state of the module. */ +export interface StateAmino { + /** + * The allowed number of items to add to queues, as determined by SwingSet. + * Transactions which attempt to enqueue more should be rejected. + */ + queue_allowed?: QueueSizeAmino[]; +} +export interface StateAminoMsg { + type: '/agoric.swingset.State'; + value: StateAmino; +} +/** The current state of the module. */ +export interface StateSDKType { + queue_allowed: QueueSizeSDKType[]; +} +/** Map element of a string key to a Nat bean count. */ +export interface StringBeans { + /** What the beans are for. */ + key: string; + /** The actual bean value. */ + beans: string; +} +export interface StringBeansProtoMsg { + typeUrl: '/agoric.swingset.StringBeans'; + value: Uint8Array; +} +/** Map element of a string key to a Nat bean count. */ +export interface StringBeansAmino { + /** What the beans are for. */ + key?: string; + /** The actual bean value. */ + beans?: string; +} +export interface StringBeansAminoMsg { + type: '/agoric.swingset.StringBeans'; + value: StringBeansAmino; +} +/** Map element of a string key to a Nat bean count. */ +export interface StringBeansSDKType { + key: string; + beans: string; +} +/** Map a provisioning power flag to its corresponding fee. */ +export interface PowerFlagFee { + powerFlag: string; + fee: Coin[]; +} +export interface PowerFlagFeeProtoMsg { + typeUrl: '/agoric.swingset.PowerFlagFee'; + value: Uint8Array; +} +/** Map a provisioning power flag to its corresponding fee. */ +export interface PowerFlagFeeAmino { + power_flag?: string; + fee?: CoinAmino[]; +} +export interface PowerFlagFeeAminoMsg { + type: '/agoric.swingset.PowerFlagFee'; + value: PowerFlagFeeAmino; +} +/** Map a provisioning power flag to its corresponding fee. */ +export interface PowerFlagFeeSDKType { + power_flag: string; + fee: CoinSDKType[]; +} +/** Map element of a string key to a size. */ +export interface QueueSize { + /** What the size is for. */ + key: string; + /** The actual size value. */ + size: number; +} +export interface QueueSizeProtoMsg { + typeUrl: '/agoric.swingset.QueueSize'; + value: Uint8Array; +} +/** Map element of a string key to a size. */ +export interface QueueSizeAmino { + /** What the size is for. */ + key?: string; + /** The actual size value. 
*/ + size?: number; +} +export interface QueueSizeAminoMsg { + type: '/agoric.swingset.QueueSize'; + value: QueueSizeAmino; +} +/** Map element of a string key to a size. */ +export interface QueueSizeSDKType { + key: string; + size: number; +} +/** Egress is the format for a swingset egress. */ +export interface Egress { + nickname: string; + peer: Uint8Array; + /** TODO: Remove these power flags as they are deprecated and have no effect. */ + powerFlags: string[]; +} +export interface EgressProtoMsg { + typeUrl: '/agoric.swingset.Egress'; + value: Uint8Array; +} +/** Egress is the format for a swingset egress. */ +export interface EgressAmino { + nickname?: string; + peer?: string; + /** TODO: Remove these power flags as they are deprecated and have no effect. */ + power_flags?: string[]; +} +export interface EgressAminoMsg { + type: '/agoric.swingset.Egress'; + value: EgressAmino; +} +/** Egress is the format for a swingset egress. */ +export interface EgressSDKType { + nickname: string; + peer: Uint8Array; + power_flags: string[]; +} +/** + * SwingStoreArtifact encodes an artifact of a swing-store export. + * Artifacts may be stored or transmitted in any order. Most handlers do + * maintain the artifact order from their original source as an effect of how + * they handle the artifacts. + */ +export interface SwingStoreArtifact { + name: string; + data: Uint8Array; +} +export interface SwingStoreArtifactProtoMsg { + typeUrl: '/agoric.swingset.SwingStoreArtifact'; + value: Uint8Array; +} +/** + * SwingStoreArtifact encodes an artifact of a swing-store export. + * Artifacts may be stored or transmitted in any order. Most handlers do + * maintain the artifact order from their original source as an effect of how + * they handle the artifacts. + */ +export interface SwingStoreArtifactAmino { + name?: string; + data?: string; +} +export interface SwingStoreArtifactAminoMsg { + type: '/agoric.swingset.SwingStoreArtifact'; + value: SwingStoreArtifactAmino; +} +/** + * SwingStoreArtifact encodes an artifact of a swing-store export. + * Artifacts may be stored or transmitted in any order. Most handlers do + * maintain the artifact order from their original source as an effect of how + * they handle the artifacts. + */ +export interface SwingStoreArtifactSDKType { + name: string; + data: Uint8Array; +} +function createBaseCoreEvalProposal(): CoreEvalProposal { + return { + title: '', + description: '', + evals: [], + }; +} +export const CoreEvalProposal = { + typeUrl: '/agoric.swingset.CoreEvalProposal', + encode( + message: CoreEvalProposal, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.title !== '') { + writer.uint32(10).string(message.title); + } + if (message.description !== '') { + writer.uint32(18).string(message.description); + } + for (const v of message.evals) { + CoreEval.encode(v!, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): CoreEvalProposal { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseCoreEvalProposal(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + case 3: + message.evals.push(CoreEval.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): CoreEvalProposal { + return { + title: isSet(object.title) ? String(object.title) : '', + description: isSet(object.description) ? String(object.description) : '', + evals: Array.isArray(object?.evals) + ? object.evals.map((e: any) => CoreEval.fromJSON(e)) + : [], + }; + }, + toJSON(message: CoreEvalProposal): unknown { + const obj: any = {}; + message.title !== undefined && (obj.title = message.title); + message.description !== undefined && + (obj.description = message.description); + if (message.evals) { + obj.evals = message.evals.map(e => (e ? CoreEval.toJSON(e) : undefined)); + } else { + obj.evals = []; + } + return obj; + }, + fromPartial(object: Partial): CoreEvalProposal { + const message = createBaseCoreEvalProposal(); + message.title = object.title ?? ''; + message.description = object.description ?? ''; + message.evals = object.evals?.map(e => CoreEval.fromPartial(e)) || []; + return message; + }, + fromAmino(object: CoreEvalProposalAmino): CoreEvalProposal { + const message = createBaseCoreEvalProposal(); + if (object.title !== undefined && object.title !== null) { + message.title = object.title; + } + if (object.description !== undefined && object.description !== null) { + message.description = object.description; + } + message.evals = object.evals?.map(e => CoreEval.fromAmino(e)) || []; + return message; + }, + toAmino(message: CoreEvalProposal): CoreEvalProposalAmino { + const obj: any = {}; + obj.title = message.title; + obj.description = message.description; + if (message.evals) { + obj.evals = message.evals.map(e => (e ? CoreEval.toAmino(e) : undefined)); + } else { + obj.evals = []; + } + return obj; + }, + fromAminoMsg(object: CoreEvalProposalAminoMsg): CoreEvalProposal { + return CoreEvalProposal.fromAmino(object.value); + }, + fromProtoMsg(message: CoreEvalProposalProtoMsg): CoreEvalProposal { + return CoreEvalProposal.decode(message.value); + }, + toProto(message: CoreEvalProposal): Uint8Array { + return CoreEvalProposal.encode(message).finish(); + }, + toProtoMsg(message: CoreEvalProposal): CoreEvalProposalProtoMsg { + return { + typeUrl: '/agoric.swingset.CoreEvalProposal', + value: CoreEvalProposal.encode(message).finish(), + }; + }, +}; +function createBaseCoreEval(): CoreEval { + return { + jsonPermits: '', + jsCode: '', + }; +} +export const CoreEval = { + typeUrl: '/agoric.swingset.CoreEval', + encode( + message: CoreEval, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.jsonPermits !== '') { + writer.uint32(10).string(message.jsonPermits); + } + if (message.jsCode !== '') { + writer.uint32(18).string(message.jsCode); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): CoreEval { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseCoreEval(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.jsonPermits = reader.string(); + break; + case 2: + message.jsCode = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): CoreEval { + return { + jsonPermits: isSet(object.jsonPermits) ? String(object.jsonPermits) : '', + jsCode: isSet(object.jsCode) ? String(object.jsCode) : '', + }; + }, + toJSON(message: CoreEval): unknown { + const obj: any = {}; + message.jsonPermits !== undefined && + (obj.jsonPermits = message.jsonPermits); + message.jsCode !== undefined && (obj.jsCode = message.jsCode); + return obj; + }, + fromPartial(object: Partial): CoreEval { + const message = createBaseCoreEval(); + message.jsonPermits = object.jsonPermits ?? ''; + message.jsCode = object.jsCode ?? ''; + return message; + }, + fromAmino(object: CoreEvalAmino): CoreEval { + const message = createBaseCoreEval(); + if (object.json_permits !== undefined && object.json_permits !== null) { + message.jsonPermits = object.json_permits; + } + if (object.js_code !== undefined && object.js_code !== null) { + message.jsCode = object.js_code; + } + return message; + }, + toAmino(message: CoreEval): CoreEvalAmino { + const obj: any = {}; + obj.json_permits = message.jsonPermits; + obj.js_code = message.jsCode; + return obj; + }, + fromAminoMsg(object: CoreEvalAminoMsg): CoreEval { + return CoreEval.fromAmino(object.value); + }, + fromProtoMsg(message: CoreEvalProtoMsg): CoreEval { + return CoreEval.decode(message.value); + }, + toProto(message: CoreEval): Uint8Array { + return CoreEval.encode(message).finish(); + }, + toProtoMsg(message: CoreEval): CoreEvalProtoMsg { + return { + typeUrl: '/agoric.swingset.CoreEval', + value: CoreEval.encode(message).finish(), + }; + }, +}; +function createBaseParams(): Params { + return { + beansPerUnit: [], + feeUnitPrice: [], + bootstrapVatConfig: '', + powerFlagFees: [], + queueMax: [], + }; +} +export const Params = { + typeUrl: '/agoric.swingset.Params', + encode( + message: Params, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + for (const v of message.beansPerUnit) { + StringBeans.encode(v!, writer.uint32(10).fork()).ldelim(); + } + for (const v of message.feeUnitPrice) { + Coin.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.bootstrapVatConfig !== '') { + writer.uint32(26).string(message.bootstrapVatConfig); + } + for (const v of message.powerFlagFees) { + PowerFlagFee.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.queueMax) { + QueueSize.encode(v!, writer.uint32(42).fork()).ldelim(); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): Params { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseParams(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.beansPerUnit.push( + StringBeans.decode(reader, reader.uint32()), + ); + break; + case 2: + message.feeUnitPrice.push(Coin.decode(reader, reader.uint32())); + break; + case 3: + message.bootstrapVatConfig = reader.string(); + break; + case 4: + message.powerFlagFees.push( + PowerFlagFee.decode(reader, reader.uint32()), + ); + break; + case 5: + message.queueMax.push(QueueSize.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): Params { + return { + beansPerUnit: Array.isArray(object?.beansPerUnit) + ? object.beansPerUnit.map((e: any) => StringBeans.fromJSON(e)) + : [], + feeUnitPrice: Array.isArray(object?.feeUnitPrice) + ? object.feeUnitPrice.map((e: any) => Coin.fromJSON(e)) + : [], + bootstrapVatConfig: isSet(object.bootstrapVatConfig) + ? String(object.bootstrapVatConfig) + : '', + powerFlagFees: Array.isArray(object?.powerFlagFees) + ? object.powerFlagFees.map((e: any) => PowerFlagFee.fromJSON(e)) + : [], + queueMax: Array.isArray(object?.queueMax) + ? object.queueMax.map((e: any) => QueueSize.fromJSON(e)) + : [], + }; + }, + toJSON(message: Params): unknown { + const obj: any = {}; + if (message.beansPerUnit) { + obj.beansPerUnit = message.beansPerUnit.map(e => + e ? StringBeans.toJSON(e) : undefined, + ); + } else { + obj.beansPerUnit = []; + } + if (message.feeUnitPrice) { + obj.feeUnitPrice = message.feeUnitPrice.map(e => + e ? Coin.toJSON(e) : undefined, + ); + } else { + obj.feeUnitPrice = []; + } + message.bootstrapVatConfig !== undefined && + (obj.bootstrapVatConfig = message.bootstrapVatConfig); + if (message.powerFlagFees) { + obj.powerFlagFees = message.powerFlagFees.map(e => + e ? PowerFlagFee.toJSON(e) : undefined, + ); + } else { + obj.powerFlagFees = []; + } + if (message.queueMax) { + obj.queueMax = message.queueMax.map(e => + e ? QueueSize.toJSON(e) : undefined, + ); + } else { + obj.queueMax = []; + } + return obj; + }, + fromPartial(object: Partial): Params { + const message = createBaseParams(); + message.beansPerUnit = + object.beansPerUnit?.map(e => StringBeans.fromPartial(e)) || []; + message.feeUnitPrice = + object.feeUnitPrice?.map(e => Coin.fromPartial(e)) || []; + message.bootstrapVatConfig = object.bootstrapVatConfig ?? ''; + message.powerFlagFees = + object.powerFlagFees?.map(e => PowerFlagFee.fromPartial(e)) || []; + message.queueMax = + object.queueMax?.map(e => QueueSize.fromPartial(e)) || []; + return message; + }, + fromAmino(object: ParamsAmino): Params { + const message = createBaseParams(); + message.beansPerUnit = + object.beans_per_unit?.map(e => StringBeans.fromAmino(e)) || []; + message.feeUnitPrice = + object.fee_unit_price?.map(e => Coin.fromAmino(e)) || []; + if ( + object.bootstrap_vat_config !== undefined && + object.bootstrap_vat_config !== null + ) { + message.bootstrapVatConfig = object.bootstrap_vat_config; + } + message.powerFlagFees = + object.power_flag_fees?.map(e => PowerFlagFee.fromAmino(e)) || []; + message.queueMax = object.queue_max?.map(e => QueueSize.fromAmino(e)) || []; + return message; + }, + toAmino(message: Params): ParamsAmino { + const obj: any = {}; + if (message.beansPerUnit) { + obj.beans_per_unit = message.beansPerUnit.map(e => + e ? 
StringBeans.toAmino(e) : undefined, + ); + } else { + obj.beans_per_unit = []; + } + if (message.feeUnitPrice) { + obj.fee_unit_price = message.feeUnitPrice.map(e => + e ? Coin.toAmino(e) : undefined, + ); + } else { + obj.fee_unit_price = []; + } + obj.bootstrap_vat_config = message.bootstrapVatConfig; + if (message.powerFlagFees) { + obj.power_flag_fees = message.powerFlagFees.map(e => + e ? PowerFlagFee.toAmino(e) : undefined, + ); + } else { + obj.power_flag_fees = []; + } + if (message.queueMax) { + obj.queue_max = message.queueMax.map(e => + e ? QueueSize.toAmino(e) : undefined, + ); + } else { + obj.queue_max = []; + } + return obj; + }, + fromAminoMsg(object: ParamsAminoMsg): Params { + return Params.fromAmino(object.value); + }, + fromProtoMsg(message: ParamsProtoMsg): Params { + return Params.decode(message.value); + }, + toProto(message: Params): Uint8Array { + return Params.encode(message).finish(); + }, + toProtoMsg(message: Params): ParamsProtoMsg { + return { + typeUrl: '/agoric.swingset.Params', + value: Params.encode(message).finish(), + }; + }, +}; +function createBaseState(): State { + return { + queueAllowed: [], + }; +} +export const State = { + typeUrl: '/agoric.swingset.State', + encode( + message: State, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + for (const v of message.queueAllowed) { + QueueSize.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): State { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseState(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.queueAllowed.push(QueueSize.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): State { + return { + queueAllowed: Array.isArray(object?.queueAllowed) + ? object.queueAllowed.map((e: any) => QueueSize.fromJSON(e)) + : [], + }; + }, + toJSON(message: State): unknown { + const obj: any = {}; + if (message.queueAllowed) { + obj.queueAllowed = message.queueAllowed.map(e => + e ? QueueSize.toJSON(e) : undefined, + ); + } else { + obj.queueAllowed = []; + } + return obj; + }, + fromPartial(object: Partial): State { + const message = createBaseState(); + message.queueAllowed = + object.queueAllowed?.map(e => QueueSize.fromPartial(e)) || []; + return message; + }, + fromAmino(object: StateAmino): State { + const message = createBaseState(); + message.queueAllowed = + object.queue_allowed?.map(e => QueueSize.fromAmino(e)) || []; + return message; + }, + toAmino(message: State): StateAmino { + const obj: any = {}; + if (message.queueAllowed) { + obj.queue_allowed = message.queueAllowed.map(e => + e ? 
QueueSize.toAmino(e) : undefined, + ); + } else { + obj.queue_allowed = []; + } + return obj; + }, + fromAminoMsg(object: StateAminoMsg): State { + return State.fromAmino(object.value); + }, + fromProtoMsg(message: StateProtoMsg): State { + return State.decode(message.value); + }, + toProto(message: State): Uint8Array { + return State.encode(message).finish(); + }, + toProtoMsg(message: State): StateProtoMsg { + return { + typeUrl: '/agoric.swingset.State', + value: State.encode(message).finish(), + }; + }, +}; +function createBaseStringBeans(): StringBeans { + return { + key: '', + beans: '', + }; +} +export const StringBeans = { + typeUrl: '/agoric.swingset.StringBeans', + encode( + message: StringBeans, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.key !== '') { + writer.uint32(10).string(message.key); + } + if (message.beans !== '') { + writer.uint32(18).string(message.beans); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): StringBeans { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseStringBeans(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.key = reader.string(); + break; + case 2: + message.beans = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): StringBeans { + return { + key: isSet(object.key) ? String(object.key) : '', + beans: isSet(object.beans) ? String(object.beans) : '', + }; + }, + toJSON(message: StringBeans): unknown { + const obj: any = {}; + message.key !== undefined && (obj.key = message.key); + message.beans !== undefined && (obj.beans = message.beans); + return obj; + }, + fromPartial(object: Partial): StringBeans { + const message = createBaseStringBeans(); + message.key = object.key ?? ''; + message.beans = object.beans ?? 
''; + return message; + }, + fromAmino(object: StringBeansAmino): StringBeans { + const message = createBaseStringBeans(); + if (object.key !== undefined && object.key !== null) { + message.key = object.key; + } + if (object.beans !== undefined && object.beans !== null) { + message.beans = object.beans; + } + return message; + }, + toAmino(message: StringBeans): StringBeansAmino { + const obj: any = {}; + obj.key = message.key; + obj.beans = message.beans; + return obj; + }, + fromAminoMsg(object: StringBeansAminoMsg): StringBeans { + return StringBeans.fromAmino(object.value); + }, + fromProtoMsg(message: StringBeansProtoMsg): StringBeans { + return StringBeans.decode(message.value); + }, + toProto(message: StringBeans): Uint8Array { + return StringBeans.encode(message).finish(); + }, + toProtoMsg(message: StringBeans): StringBeansProtoMsg { + return { + typeUrl: '/agoric.swingset.StringBeans', + value: StringBeans.encode(message).finish(), + }; + }, +}; +function createBasePowerFlagFee(): PowerFlagFee { + return { + powerFlag: '', + fee: [], + }; +} +export const PowerFlagFee = { + typeUrl: '/agoric.swingset.PowerFlagFee', + encode( + message: PowerFlagFee, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.powerFlag !== '') { + writer.uint32(10).string(message.powerFlag); + } + for (const v of message.fee) { + Coin.encode(v!, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): PowerFlagFee { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePowerFlagFee(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.powerFlag = reader.string(); + break; + case 2: + message.fee.push(Coin.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): PowerFlagFee { + return { + powerFlag: isSet(object.powerFlag) ? String(object.powerFlag) : '', + fee: Array.isArray(object?.fee) + ? object.fee.map((e: any) => Coin.fromJSON(e)) + : [], + }; + }, + toJSON(message: PowerFlagFee): unknown { + const obj: any = {}; + message.powerFlag !== undefined && (obj.powerFlag = message.powerFlag); + if (message.fee) { + obj.fee = message.fee.map(e => (e ? Coin.toJSON(e) : undefined)); + } else { + obj.fee = []; + } + return obj; + }, + fromPartial(object: Partial): PowerFlagFee { + const message = createBasePowerFlagFee(); + message.powerFlag = object.powerFlag ?? ''; + message.fee = object.fee?.map(e => Coin.fromPartial(e)) || []; + return message; + }, + fromAmino(object: PowerFlagFeeAmino): PowerFlagFee { + const message = createBasePowerFlagFee(); + if (object.power_flag !== undefined && object.power_flag !== null) { + message.powerFlag = object.power_flag; + } + message.fee = object.fee?.map(e => Coin.fromAmino(e)) || []; + return message; + }, + toAmino(message: PowerFlagFee): PowerFlagFeeAmino { + const obj: any = {}; + obj.power_flag = message.powerFlag; + if (message.fee) { + obj.fee = message.fee.map(e => (e ? 
Coin.toAmino(e) : undefined)); + } else { + obj.fee = []; + } + return obj; + }, + fromAminoMsg(object: PowerFlagFeeAminoMsg): PowerFlagFee { + return PowerFlagFee.fromAmino(object.value); + }, + fromProtoMsg(message: PowerFlagFeeProtoMsg): PowerFlagFee { + return PowerFlagFee.decode(message.value); + }, + toProto(message: PowerFlagFee): Uint8Array { + return PowerFlagFee.encode(message).finish(); + }, + toProtoMsg(message: PowerFlagFee): PowerFlagFeeProtoMsg { + return { + typeUrl: '/agoric.swingset.PowerFlagFee', + value: PowerFlagFee.encode(message).finish(), + }; + }, +}; +function createBaseQueueSize(): QueueSize { + return { + key: '', + size: 0, + }; +} +export const QueueSize = { + typeUrl: '/agoric.swingset.QueueSize', + encode( + message: QueueSize, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.key !== '') { + writer.uint32(10).string(message.key); + } + if (message.size !== 0) { + writer.uint32(16).int32(message.size); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): QueueSize { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueueSize(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.key = reader.string(); + break; + case 2: + message.size = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): QueueSize { + return { + key: isSet(object.key) ? String(object.key) : '', + size: isSet(object.size) ? Number(object.size) : 0, + }; + }, + toJSON(message: QueueSize): unknown { + const obj: any = {}; + message.key !== undefined && (obj.key = message.key); + message.size !== undefined && (obj.size = Math.round(message.size)); + return obj; + }, + fromPartial(object: Partial): QueueSize { + const message = createBaseQueueSize(); + message.key = object.key ?? ''; + message.size = object.size ?? 
0; + return message; + }, + fromAmino(object: QueueSizeAmino): QueueSize { + const message = createBaseQueueSize(); + if (object.key !== undefined && object.key !== null) { + message.key = object.key; + } + if (object.size !== undefined && object.size !== null) { + message.size = object.size; + } + return message; + }, + toAmino(message: QueueSize): QueueSizeAmino { + const obj: any = {}; + obj.key = message.key; + obj.size = message.size; + return obj; + }, + fromAminoMsg(object: QueueSizeAminoMsg): QueueSize { + return QueueSize.fromAmino(object.value); + }, + fromProtoMsg(message: QueueSizeProtoMsg): QueueSize { + return QueueSize.decode(message.value); + }, + toProto(message: QueueSize): Uint8Array { + return QueueSize.encode(message).finish(); + }, + toProtoMsg(message: QueueSize): QueueSizeProtoMsg { + return { + typeUrl: '/agoric.swingset.QueueSize', + value: QueueSize.encode(message).finish(), + }; + }, +}; +function createBaseEgress(): Egress { + return { + nickname: '', + peer: new Uint8Array(), + powerFlags: [], + }; +} +export const Egress = { + typeUrl: '/agoric.swingset.Egress', + encode( + message: Egress, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.nickname !== '') { + writer.uint32(10).string(message.nickname); + } + if (message.peer.length !== 0) { + writer.uint32(18).bytes(message.peer); + } + for (const v of message.powerFlags) { + writer.uint32(26).string(v!); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): Egress { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEgress(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.nickname = reader.string(); + break; + case 2: + message.peer = reader.bytes(); + break; + case 3: + message.powerFlags.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): Egress { + return { + nickname: isSet(object.nickname) ? String(object.nickname) : '', + peer: isSet(object.peer) + ? bytesFromBase64(object.peer) + : new Uint8Array(), + powerFlags: Array.isArray(object?.powerFlags) + ? object.powerFlags.map((e: any) => String(e)) + : [], + }; + }, + toJSON(message: Egress): unknown { + const obj: any = {}; + message.nickname !== undefined && (obj.nickname = message.nickname); + message.peer !== undefined && + (obj.peer = base64FromBytes( + message.peer !== undefined ? message.peer : new Uint8Array(), + )); + if (message.powerFlags) { + obj.powerFlags = message.powerFlags.map(e => e); + } else { + obj.powerFlags = []; + } + return obj; + }, + fromPartial(object: Partial): Egress { + const message = createBaseEgress(); + message.nickname = object.nickname ?? ''; + message.peer = object.peer ?? new Uint8Array(); + message.powerFlags = object.powerFlags?.map(e => e) || []; + return message; + }, + fromAmino(object: EgressAmino): Egress { + const message = createBaseEgress(); + if (object.nickname !== undefined && object.nickname !== null) { + message.nickname = object.nickname; + } + if (object.peer !== undefined && object.peer !== null) { + message.peer = bytesFromBase64(object.peer); + } + message.powerFlags = object.power_flags?.map(e => e) || []; + return message; + }, + toAmino(message: Egress): EgressAmino { + const obj: any = {}; + obj.nickname = message.nickname; + obj.peer = message.peer ? 
base64FromBytes(message.peer) : undefined; + if (message.powerFlags) { + obj.power_flags = message.powerFlags.map(e => e); + } else { + obj.power_flags = []; + } + return obj; + }, + fromAminoMsg(object: EgressAminoMsg): Egress { + return Egress.fromAmino(object.value); + }, + fromProtoMsg(message: EgressProtoMsg): Egress { + return Egress.decode(message.value); + }, + toProto(message: Egress): Uint8Array { + return Egress.encode(message).finish(); + }, + toProtoMsg(message: Egress): EgressProtoMsg { + return { + typeUrl: '/agoric.swingset.Egress', + value: Egress.encode(message).finish(), + }; + }, +}; +function createBaseSwingStoreArtifact(): SwingStoreArtifact { + return { + name: '', + data: new Uint8Array(), + }; +} +export const SwingStoreArtifact = { + typeUrl: '/agoric.swingset.SwingStoreArtifact', + encode( + message: SwingStoreArtifact, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.name !== '') { + writer.uint32(10).string(message.name); + } + if (message.data.length !== 0) { + writer.uint32(18).bytes(message.data); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): SwingStoreArtifact { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSwingStoreArtifact(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.data = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): SwingStoreArtifact { + return { + name: isSet(object.name) ? String(object.name) : '', + data: isSet(object.data) + ? bytesFromBase64(object.data) + : new Uint8Array(), + }; + }, + toJSON(message: SwingStoreArtifact): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.data !== undefined && + (obj.data = base64FromBytes( + message.data !== undefined ? message.data : new Uint8Array(), + )); + return obj; + }, + fromPartial(object: Partial): SwingStoreArtifact { + const message = createBaseSwingStoreArtifact(); + message.name = object.name ?? ''; + message.data = object.data ?? new Uint8Array(); + return message; + }, + fromAmino(object: SwingStoreArtifactAmino): SwingStoreArtifact { + const message = createBaseSwingStoreArtifact(); + if (object.name !== undefined && object.name !== null) { + message.name = object.name; + } + if (object.data !== undefined && object.data !== null) { + message.data = bytesFromBase64(object.data); + } + return message; + }, + toAmino(message: SwingStoreArtifact): SwingStoreArtifactAmino { + const obj: any = {}; + obj.name = message.name; + obj.data = message.data ? 
base64FromBytes(message.data) : undefined; + return obj; + }, + fromAminoMsg(object: SwingStoreArtifactAminoMsg): SwingStoreArtifact { + return SwingStoreArtifact.fromAmino(object.value); + }, + fromProtoMsg(message: SwingStoreArtifactProtoMsg): SwingStoreArtifact { + return SwingStoreArtifact.decode(message.value); + }, + toProto(message: SwingStoreArtifact): Uint8Array { + return SwingStoreArtifact.encode(message).finish(); + }, + toProtoMsg(message: SwingStoreArtifact): SwingStoreArtifactProtoMsg { + return { + typeUrl: '/agoric.swingset.SwingStoreArtifact', + value: SwingStoreArtifact.encode(message).finish(), + }; + }, +}; diff --git a/packages/cosmic-proto/src/codegen/agoric/vbank/genesis.ts b/packages/cosmic-proto/src/codegen/agoric/vbank/genesis.ts new file mode 100644 index 00000000000..2c066bb722e --- /dev/null +++ b/packages/cosmic-proto/src/codegen/agoric/vbank/genesis.ts @@ -0,0 +1,137 @@ +//@ts-nocheck +import { + Params, + ParamsAmino, + ParamsSDKType, + State, + StateAmino, + StateSDKType, +} from './vbank.js'; +import { BinaryReader, BinaryWriter } from '../../binary.js'; +import { isSet } from '../../helpers.js'; +/** The initial and exported module state. */ +export interface GenesisState { + /** parms defines all the parameters of the module. */ + params: Params; + /** state is the current operation state. */ + state: State; +} +export interface GenesisStateProtoMsg { + typeUrl: '/agoric.vbank.GenesisState'; + value: Uint8Array; +} +/** The initial and exported module state. */ +export interface GenesisStateAmino { + /** parms defines all the parameters of the module. */ + params?: ParamsAmino; + /** state is the current operation state. */ + state?: StateAmino; +} +export interface GenesisStateAminoMsg { + type: '/agoric.vbank.GenesisState'; + value: GenesisStateAmino; +} +/** The initial and exported module state. */ +export interface GenesisStateSDKType { + params: ParamsSDKType; + state: StateSDKType; +} +function createBaseGenesisState(): GenesisState { + return { + params: Params.fromPartial({}), + state: State.fromPartial({}), + }; +} +export const GenesisState = { + typeUrl: '/agoric.vbank.GenesisState', + encode( + message: GenesisState, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + if (message.state !== undefined) { + State.encode(message.state, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): GenesisState { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGenesisState(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.params = Params.decode(reader, reader.uint32()); + break; + case 2: + message.state = State.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): GenesisState { + return { + params: isSet(object.params) ? Params.fromJSON(object.params) : undefined, + state: isSet(object.state) ? State.fromJSON(object.state) : undefined, + }; + }, + toJSON(message: GenesisState): unknown { + const obj: any = {}; + message.params !== undefined && + (obj.params = message.params ? Params.toJSON(message.params) : undefined); + message.state !== undefined && + (obj.state = message.state ? 
State.toJSON(message.state) : undefined); + return obj; + }, + fromPartial(object: Partial): GenesisState { + const message = createBaseGenesisState(); + message.params = + object.params !== undefined && object.params !== null + ? Params.fromPartial(object.params) + : undefined; + message.state = + object.state !== undefined && object.state !== null + ? State.fromPartial(object.state) + : undefined; + return message; + }, + fromAmino(object: GenesisStateAmino): GenesisState { + const message = createBaseGenesisState(); + if (object.params !== undefined && object.params !== null) { + message.params = Params.fromAmino(object.params); + } + if (object.state !== undefined && object.state !== null) { + message.state = State.fromAmino(object.state); + } + return message; + }, + toAmino(message: GenesisState): GenesisStateAmino { + const obj: any = {}; + obj.params = message.params ? Params.toAmino(message.params) : undefined; + obj.state = message.state ? State.toAmino(message.state) : undefined; + return obj; + }, + fromAminoMsg(object: GenesisStateAminoMsg): GenesisState { + return GenesisState.fromAmino(object.value); + }, + fromProtoMsg(message: GenesisStateProtoMsg): GenesisState { + return GenesisState.decode(message.value); + }, + toProto(message: GenesisState): Uint8Array { + return GenesisState.encode(message).finish(); + }, + toProtoMsg(message: GenesisState): GenesisStateProtoMsg { + return { + typeUrl: '/agoric.vbank.GenesisState', + value: GenesisState.encode(message).finish(), + }; + }, +}; diff --git a/packages/cosmic-proto/src/codegen/agoric/vbank/msgs.ts b/packages/cosmic-proto/src/codegen/agoric/vbank/msgs.ts new file mode 100644 index 00000000000..cb0ff5c3b54 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/agoric/vbank/msgs.ts @@ -0,0 +1 @@ +export {}; diff --git a/packages/cosmic-proto/src/codegen/agoric/vbank/query.rpc.Query.ts b/packages/cosmic-proto/src/codegen/agoric/vbank/query.rpc.Query.ts new file mode 100644 index 00000000000..730360b9661 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/agoric/vbank/query.rpc.Query.ts @@ -0,0 +1,51 @@ +//@ts-nocheck +import { Rpc } from '../../helpers.js'; +import { BinaryReader } from '../../binary.js'; +import { QueryClient, createProtobufRpcClient } from '@cosmjs/stargate'; +import { + QueryParamsRequest, + QueryParamsResponse, + QueryStateRequest, + QueryStateResponse, +} from './query.js'; +/** Query defines the gRPC querier service for vbank module. */ +export interface Query { + /** Params queries params of the vbank module. */ + params(request?: QueryParamsRequest): Promise; + /** State queries current state of the vbank module. 
*/ + state(request?: QueryStateRequest): Promise; +} +export class QueryClientImpl implements Query { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.params = this.params.bind(this); + this.state = this.state.bind(this); + } + params(request: QueryParamsRequest = {}): Promise { + const data = QueryParamsRequest.encode(request).finish(); + const promise = this.rpc.request('agoric.vbank.Query', 'Params', data); + return promise.then(data => + QueryParamsResponse.decode(new BinaryReader(data)), + ); + } + state(request: QueryStateRequest = {}): Promise { + const data = QueryStateRequest.encode(request).finish(); + const promise = this.rpc.request('agoric.vbank.Query', 'State', data); + return promise.then(data => + QueryStateResponse.decode(new BinaryReader(data)), + ); + } +} +export const createRpcQueryExtension = (base: QueryClient) => { + const rpc = createProtobufRpcClient(base); + const queryService = new QueryClientImpl(rpc); + return { + params(request?: QueryParamsRequest): Promise { + return queryService.params(request); + }, + state(request?: QueryStateRequest): Promise { + return queryService.state(request); + }, + }; +}; diff --git a/packages/cosmic-proto/src/codegen/agoric/vbank/query.ts b/packages/cosmic-proto/src/codegen/agoric/vbank/query.ts new file mode 100644 index 00000000000..cb84b32bb14 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/agoric/vbank/query.ts @@ -0,0 +1,376 @@ +//@ts-nocheck +import { + Params, + ParamsAmino, + ParamsSDKType, + State, + StateAmino, + StateSDKType, +} from './vbank.js'; +import { BinaryReader, BinaryWriter } from '../../binary.js'; +import { isSet } from '../../helpers.js'; +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ +export interface QueryParamsRequest {} +export interface QueryParamsRequestProtoMsg { + typeUrl: '/agoric.vbank.QueryParamsRequest'; + value: Uint8Array; +} +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ +export interface QueryParamsRequestAmino {} +export interface QueryParamsRequestAminoMsg { + type: '/agoric.vbank.QueryParamsRequest'; + value: QueryParamsRequestAmino; +} +/** QueryParamsRequest is the request type for the Query/Params RPC method. */ +export interface QueryParamsRequestSDKType {} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ +export interface QueryParamsResponse { + /** params defines the parameters of the module. */ + params: Params; +} +export interface QueryParamsResponseProtoMsg { + typeUrl: '/agoric.vbank.QueryParamsResponse'; + value: Uint8Array; +} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ +export interface QueryParamsResponseAmino { + /** params defines the parameters of the module. */ + params?: ParamsAmino; +} +export interface QueryParamsResponseAminoMsg { + type: '/agoric.vbank.QueryParamsResponse'; + value: QueryParamsResponseAmino; +} +/** QueryParamsResponse is the response type for the Query/Params RPC method. */ +export interface QueryParamsResponseSDKType { + params: ParamsSDKType; +} +/** QueryStateRequest is the request type for the Query/State RPC method. */ +export interface QueryStateRequest {} +export interface QueryStateRequestProtoMsg { + typeUrl: '/agoric.vbank.QueryStateRequest'; + value: Uint8Array; +} +/** QueryStateRequest is the request type for the Query/State RPC method. 
*/ +export interface QueryStateRequestAmino {} +export interface QueryStateRequestAminoMsg { + type: '/agoric.vbank.QueryStateRequest'; + value: QueryStateRequestAmino; +} +/** QueryStateRequest is the request type for the Query/State RPC method. */ +export interface QueryStateRequestSDKType {} +/** QueryStateResponse is the response type for the Query/State RPC method. */ +export interface QueryStateResponse { + /** state defines the parameters of the module. */ + state: State; +} +export interface QueryStateResponseProtoMsg { + typeUrl: '/agoric.vbank.QueryStateResponse'; + value: Uint8Array; +} +/** QueryStateResponse is the response type for the Query/State RPC method. */ +export interface QueryStateResponseAmino { + /** state defines the parameters of the module. */ + state?: StateAmino; +} +export interface QueryStateResponseAminoMsg { + type: '/agoric.vbank.QueryStateResponse'; + value: QueryStateResponseAmino; +} +/** QueryStateResponse is the response type for the Query/State RPC method. */ +export interface QueryStateResponseSDKType { + state: StateSDKType; +} +function createBaseQueryParamsRequest(): QueryParamsRequest { + return {}; +} +export const QueryParamsRequest = { + typeUrl: '/agoric.vbank.QueryParamsRequest', + encode( + _: QueryParamsRequest, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): QueryParamsRequest { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(_: any): QueryParamsRequest { + return {}; + }, + toJSON(_: QueryParamsRequest): unknown { + const obj: any = {}; + return obj; + }, + fromPartial(_: Partial): QueryParamsRequest { + const message = createBaseQueryParamsRequest(); + return message; + }, + fromAmino(_: QueryParamsRequestAmino): QueryParamsRequest { + const message = createBaseQueryParamsRequest(); + return message; + }, + toAmino(_: QueryParamsRequest): QueryParamsRequestAmino { + const obj: any = {}; + return obj; + }, + fromAminoMsg(object: QueryParamsRequestAminoMsg): QueryParamsRequest { + return QueryParamsRequest.fromAmino(object.value); + }, + fromProtoMsg(message: QueryParamsRequestProtoMsg): QueryParamsRequest { + return QueryParamsRequest.decode(message.value); + }, + toProto(message: QueryParamsRequest): Uint8Array { + return QueryParamsRequest.encode(message).finish(); + }, + toProtoMsg(message: QueryParamsRequest): QueryParamsRequestProtoMsg { + return { + typeUrl: '/agoric.vbank.QueryParamsRequest', + value: QueryParamsRequest.encode(message).finish(), + }; + }, +}; +function createBaseQueryParamsResponse(): QueryParamsResponse { + return { + params: Params.fromPartial({}), + }; +} +export const QueryParamsResponse = { + typeUrl: '/agoric.vbank.QueryParamsResponse', + encode( + message: QueryParamsResponse, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.params !== undefined) { + Params.encode(message.params, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): QueryParamsResponse { + const reader = + input instanceof BinaryReader ? 
input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryParamsResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.params = Params.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): QueryParamsResponse { + return { + params: isSet(object.params) ? Params.fromJSON(object.params) : undefined, + }; + }, + toJSON(message: QueryParamsResponse): unknown { + const obj: any = {}; + message.params !== undefined && + (obj.params = message.params ? Params.toJSON(message.params) : undefined); + return obj; + }, + fromPartial(object: Partial): QueryParamsResponse { + const message = createBaseQueryParamsResponse(); + message.params = + object.params !== undefined && object.params !== null + ? Params.fromPartial(object.params) + : undefined; + return message; + }, + fromAmino(object: QueryParamsResponseAmino): QueryParamsResponse { + const message = createBaseQueryParamsResponse(); + if (object.params !== undefined && object.params !== null) { + message.params = Params.fromAmino(object.params); + } + return message; + }, + toAmino(message: QueryParamsResponse): QueryParamsResponseAmino { + const obj: any = {}; + obj.params = message.params ? Params.toAmino(message.params) : undefined; + return obj; + }, + fromAminoMsg(object: QueryParamsResponseAminoMsg): QueryParamsResponse { + return QueryParamsResponse.fromAmino(object.value); + }, + fromProtoMsg(message: QueryParamsResponseProtoMsg): QueryParamsResponse { + return QueryParamsResponse.decode(message.value); + }, + toProto(message: QueryParamsResponse): Uint8Array { + return QueryParamsResponse.encode(message).finish(); + }, + toProtoMsg(message: QueryParamsResponse): QueryParamsResponseProtoMsg { + return { + typeUrl: '/agoric.vbank.QueryParamsResponse', + value: QueryParamsResponse.encode(message).finish(), + }; + }, +}; +function createBaseQueryStateRequest(): QueryStateRequest { + return {}; +} +export const QueryStateRequest = { + typeUrl: '/agoric.vbank.QueryStateRequest', + encode( + _: QueryStateRequest, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): QueryStateRequest { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseQueryStateRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(_: any): QueryStateRequest { + return {}; + }, + toJSON(_: QueryStateRequest): unknown { + const obj: any = {}; + return obj; + }, + fromPartial(_: Partial): QueryStateRequest { + const message = createBaseQueryStateRequest(); + return message; + }, + fromAmino(_: QueryStateRequestAmino): QueryStateRequest { + const message = createBaseQueryStateRequest(); + return message; + }, + toAmino(_: QueryStateRequest): QueryStateRequestAmino { + const obj: any = {}; + return obj; + }, + fromAminoMsg(object: QueryStateRequestAminoMsg): QueryStateRequest { + return QueryStateRequest.fromAmino(object.value); + }, + fromProtoMsg(message: QueryStateRequestProtoMsg): QueryStateRequest { + return QueryStateRequest.decode(message.value); + }, + toProto(message: QueryStateRequest): Uint8Array { + return QueryStateRequest.encode(message).finish(); + }, + toProtoMsg(message: QueryStateRequest): QueryStateRequestProtoMsg { + return { + typeUrl: '/agoric.vbank.QueryStateRequest', + value: QueryStateRequest.encode(message).finish(), + }; + }, +}; +function createBaseQueryStateResponse(): QueryStateResponse { + return { + state: State.fromPartial({}), + }; +} +export const QueryStateResponse = { + typeUrl: '/agoric.vbank.QueryStateResponse', + encode( + message: QueryStateResponse, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.state !== undefined) { + State.encode(message.state, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): QueryStateResponse { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryStateResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.state = State.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): QueryStateResponse { + return { + state: isSet(object.state) ? State.fromJSON(object.state) : undefined, + }; + }, + toJSON(message: QueryStateResponse): unknown { + const obj: any = {}; + message.state !== undefined && + (obj.state = message.state ? State.toJSON(message.state) : undefined); + return obj; + }, + fromPartial(object: Partial): QueryStateResponse { + const message = createBaseQueryStateResponse(); + message.state = + object.state !== undefined && object.state !== null + ? State.fromPartial(object.state) + : undefined; + return message; + }, + fromAmino(object: QueryStateResponseAmino): QueryStateResponse { + const message = createBaseQueryStateResponse(); + if (object.state !== undefined && object.state !== null) { + message.state = State.fromAmino(object.state); + } + return message; + }, + toAmino(message: QueryStateResponse): QueryStateResponseAmino { + const obj: any = {}; + obj.state = message.state ? 
State.toAmino(message.state) : undefined; + return obj; + }, + fromAminoMsg(object: QueryStateResponseAminoMsg): QueryStateResponse { + return QueryStateResponse.fromAmino(object.value); + }, + fromProtoMsg(message: QueryStateResponseProtoMsg): QueryStateResponse { + return QueryStateResponse.decode(message.value); + }, + toProto(message: QueryStateResponse): Uint8Array { + return QueryStateResponse.encode(message).finish(); + }, + toProtoMsg(message: QueryStateResponse): QueryStateResponseProtoMsg { + return { + typeUrl: '/agoric.vbank.QueryStateResponse', + value: QueryStateResponse.encode(message).finish(), + }; + }, +}; diff --git a/packages/cosmic-proto/src/codegen/agoric/vbank/vbank.ts b/packages/cosmic-proto/src/codegen/agoric/vbank/vbank.ts new file mode 100644 index 00000000000..907fd18ab71 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/agoric/vbank/vbank.ts @@ -0,0 +1,431 @@ +//@ts-nocheck +import { + Coin, + CoinAmino, + CoinSDKType, +} from '../../cosmos/base/v1beta1/coin.js'; +import { BinaryReader, BinaryWriter } from '../../binary.js'; +import { Decimal } from '@cosmjs/math'; +import { isSet } from '../../helpers.js'; +/** The module governance/configuration parameters. */ +export interface Params { + /** + * reward_epoch_duration_blocks is the length of a reward epoch, in blocks. + * A value of zero has the same meaning as a value of one: + * the full reward buffer should be distributed immediately. + */ + rewardEpochDurationBlocks: bigint; + /** + * per_epoch_reward_fraction is a fraction of the reward pool to distrubute + * once every reward epoch. If less than zero, use approximately continuous + * per-block distribution. + */ + perEpochRewardFraction: string; + /** + * reward_smoothing_blocks is the number of blocks over which to distribute + * an epoch's rewards. If zero, use the same value as + * reward_epoch_duration_blocks. + */ + rewardSmoothingBlocks: bigint; +} +export interface ParamsProtoMsg { + typeUrl: '/agoric.vbank.Params'; + value: Uint8Array; +} +/** The module governance/configuration parameters. */ +export interface ParamsAmino { + /** + * reward_epoch_duration_blocks is the length of a reward epoch, in blocks. + * A value of zero has the same meaning as a value of one: + * the full reward buffer should be distributed immediately. + */ + reward_epoch_duration_blocks?: string; + /** + * per_epoch_reward_fraction is a fraction of the reward pool to distrubute + * once every reward epoch. If less than zero, use approximately continuous + * per-block distribution. + */ + per_epoch_reward_fraction?: string; + /** + * reward_smoothing_blocks is the number of blocks over which to distribute + * an epoch's rewards. If zero, use the same value as + * reward_epoch_duration_blocks. + */ + reward_smoothing_blocks?: string; +} +export interface ParamsAminoMsg { + type: '/agoric.vbank.Params'; + value: ParamsAmino; +} +/** The module governance/configuration parameters. */ +export interface ParamsSDKType { + reward_epoch_duration_blocks: bigint; + per_epoch_reward_fraction: string; + reward_smoothing_blocks: bigint; +} +/** The current state of the module. */ +export interface State { + /** + * rewardPool is the current balance of rewards in the module account. + * NOTE: Tracking manually since there is no bank call for getting a + * module account balance by name. + */ + rewardPool: Coin[]; + /** + * reward_block_amount is the amount of reward, if available, to send to the + * fee collector module on every block. 
+ */ + rewardBlockAmount: Coin[]; + /** last_sequence is a sequence number for communicating with the VM. */ + lastSequence: bigint; + lastRewardDistributionBlock: bigint; +} +export interface StateProtoMsg { + typeUrl: '/agoric.vbank.State'; + value: Uint8Array; +} +/** The current state of the module. */ +export interface StateAmino { + /** + * rewardPool is the current balance of rewards in the module account. + * NOTE: Tracking manually since there is no bank call for getting a + * module account balance by name. + */ + reward_pool?: CoinAmino[]; + /** + * reward_block_amount is the amount of reward, if available, to send to the + * fee collector module on every block. + */ + reward_block_amount?: CoinAmino[]; + /** last_sequence is a sequence number for communicating with the VM. */ + last_sequence?: string; + last_reward_distribution_block?: string; +} +export interface StateAminoMsg { + type: '/agoric.vbank.State'; + value: StateAmino; +} +/** The current state of the module. */ +export interface StateSDKType { + reward_pool: CoinSDKType[]; + reward_block_amount: CoinSDKType[]; + last_sequence: bigint; + last_reward_distribution_block: bigint; +} +function createBaseParams(): Params { + return { + rewardEpochDurationBlocks: BigInt(0), + perEpochRewardFraction: '', + rewardSmoothingBlocks: BigInt(0), + }; +} +export const Params = { + typeUrl: '/agoric.vbank.Params', + encode( + message: Params, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.rewardEpochDurationBlocks !== BigInt(0)) { + writer.uint32(8).int64(message.rewardEpochDurationBlocks); + } + if (message.perEpochRewardFraction !== '') { + writer + .uint32(18) + .string( + Decimal.fromUserInput(message.perEpochRewardFraction, 18).atomics, + ); + } + if (message.rewardSmoothingBlocks !== BigInt(0)) { + writer.uint32(24).int64(message.rewardSmoothingBlocks); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): Params { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseParams(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rewardEpochDurationBlocks = reader.int64(); + break; + case 2: + message.perEpochRewardFraction = Decimal.fromAtomics( + reader.string(), + 18, + ).toString(); + break; + case 3: + message.rewardSmoothingBlocks = reader.int64(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): Params { + return { + rewardEpochDurationBlocks: isSet(object.rewardEpochDurationBlocks) + ? BigInt(object.rewardEpochDurationBlocks.toString()) + : BigInt(0), + perEpochRewardFraction: isSet(object.perEpochRewardFraction) + ? String(object.perEpochRewardFraction) + : '', + rewardSmoothingBlocks: isSet(object.rewardSmoothingBlocks) + ? 
BigInt(object.rewardSmoothingBlocks.toString()) + : BigInt(0), + }; + }, + toJSON(message: Params): unknown { + const obj: any = {}; + message.rewardEpochDurationBlocks !== undefined && + (obj.rewardEpochDurationBlocks = ( + message.rewardEpochDurationBlocks || BigInt(0) + ).toString()); + message.perEpochRewardFraction !== undefined && + (obj.perEpochRewardFraction = message.perEpochRewardFraction); + message.rewardSmoothingBlocks !== undefined && + (obj.rewardSmoothingBlocks = ( + message.rewardSmoothingBlocks || BigInt(0) + ).toString()); + return obj; + }, + fromPartial(object: Partial): Params { + const message = createBaseParams(); + message.rewardEpochDurationBlocks = + object.rewardEpochDurationBlocks !== undefined && + object.rewardEpochDurationBlocks !== null + ? BigInt(object.rewardEpochDurationBlocks.toString()) + : BigInt(0); + message.perEpochRewardFraction = object.perEpochRewardFraction ?? ''; + message.rewardSmoothingBlocks = + object.rewardSmoothingBlocks !== undefined && + object.rewardSmoothingBlocks !== null + ? BigInt(object.rewardSmoothingBlocks.toString()) + : BigInt(0); + return message; + }, + fromAmino(object: ParamsAmino): Params { + const message = createBaseParams(); + if ( + object.reward_epoch_duration_blocks !== undefined && + object.reward_epoch_duration_blocks !== null + ) { + message.rewardEpochDurationBlocks = BigInt( + object.reward_epoch_duration_blocks, + ); + } + if ( + object.per_epoch_reward_fraction !== undefined && + object.per_epoch_reward_fraction !== null + ) { + message.perEpochRewardFraction = object.per_epoch_reward_fraction; + } + if ( + object.reward_smoothing_blocks !== undefined && + object.reward_smoothing_blocks !== null + ) { + message.rewardSmoothingBlocks = BigInt(object.reward_smoothing_blocks); + } + return message; + }, + toAmino(message: Params): ParamsAmino { + const obj: any = {}; + obj.reward_epoch_duration_blocks = message.rewardEpochDurationBlocks + ? message.rewardEpochDurationBlocks.toString() + : undefined; + obj.per_epoch_reward_fraction = message.perEpochRewardFraction; + obj.reward_smoothing_blocks = message.rewardSmoothingBlocks + ? message.rewardSmoothingBlocks.toString() + : undefined; + return obj; + }, + fromAminoMsg(object: ParamsAminoMsg): Params { + return Params.fromAmino(object.value); + }, + fromProtoMsg(message: ParamsProtoMsg): Params { + return Params.decode(message.value); + }, + toProto(message: Params): Uint8Array { + return Params.encode(message).finish(); + }, + toProtoMsg(message: Params): ParamsProtoMsg { + return { + typeUrl: '/agoric.vbank.Params', + value: Params.encode(message).finish(), + }; + }, +}; +function createBaseState(): State { + return { + rewardPool: [], + rewardBlockAmount: [], + lastSequence: BigInt(0), + lastRewardDistributionBlock: BigInt(0), + }; +} +export const State = { + typeUrl: '/agoric.vbank.State', + encode( + message: State, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + for (const v of message.rewardPool) { + Coin.encode(v!, writer.uint32(10).fork()).ldelim(); + } + for (const v of message.rewardBlockAmount) { + Coin.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.lastSequence !== BigInt(0)) { + writer.uint32(24).uint64(message.lastSequence); + } + if (message.lastRewardDistributionBlock !== BigInt(0)) { + writer.uint32(32).int64(message.lastRewardDistributionBlock); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): State { + const reader = + input instanceof BinaryReader ? 
input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseState(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rewardPool.push(Coin.decode(reader, reader.uint32())); + break; + case 2: + message.rewardBlockAmount.push(Coin.decode(reader, reader.uint32())); + break; + case 3: + message.lastSequence = reader.uint64(); + break; + case 4: + message.lastRewardDistributionBlock = reader.int64(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): State { + return { + rewardPool: Array.isArray(object?.rewardPool) + ? object.rewardPool.map((e: any) => Coin.fromJSON(e)) + : [], + rewardBlockAmount: Array.isArray(object?.rewardBlockAmount) + ? object.rewardBlockAmount.map((e: any) => Coin.fromJSON(e)) + : [], + lastSequence: isSet(object.lastSequence) + ? BigInt(object.lastSequence.toString()) + : BigInt(0), + lastRewardDistributionBlock: isSet(object.lastRewardDistributionBlock) + ? BigInt(object.lastRewardDistributionBlock.toString()) + : BigInt(0), + }; + }, + toJSON(message: State): unknown { + const obj: any = {}; + if (message.rewardPool) { + obj.rewardPool = message.rewardPool.map(e => + e ? Coin.toJSON(e) : undefined, + ); + } else { + obj.rewardPool = []; + } + if (message.rewardBlockAmount) { + obj.rewardBlockAmount = message.rewardBlockAmount.map(e => + e ? Coin.toJSON(e) : undefined, + ); + } else { + obj.rewardBlockAmount = []; + } + message.lastSequence !== undefined && + (obj.lastSequence = (message.lastSequence || BigInt(0)).toString()); + message.lastRewardDistributionBlock !== undefined && + (obj.lastRewardDistributionBlock = ( + message.lastRewardDistributionBlock || BigInt(0) + ).toString()); + return obj; + }, + fromPartial(object: Partial): State { + const message = createBaseState(); + message.rewardPool = object.rewardPool?.map(e => Coin.fromPartial(e)) || []; + message.rewardBlockAmount = + object.rewardBlockAmount?.map(e => Coin.fromPartial(e)) || []; + message.lastSequence = + object.lastSequence !== undefined && object.lastSequence !== null + ? BigInt(object.lastSequence.toString()) + : BigInt(0); + message.lastRewardDistributionBlock = + object.lastRewardDistributionBlock !== undefined && + object.lastRewardDistributionBlock !== null + ? BigInt(object.lastRewardDistributionBlock.toString()) + : BigInt(0); + return message; + }, + fromAmino(object: StateAmino): State { + const message = createBaseState(); + message.rewardPool = object.reward_pool?.map(e => Coin.fromAmino(e)) || []; + message.rewardBlockAmount = + object.reward_block_amount?.map(e => Coin.fromAmino(e)) || []; + if (object.last_sequence !== undefined && object.last_sequence !== null) { + message.lastSequence = BigInt(object.last_sequence); + } + if ( + object.last_reward_distribution_block !== undefined && + object.last_reward_distribution_block !== null + ) { + message.lastRewardDistributionBlock = BigInt( + object.last_reward_distribution_block, + ); + } + return message; + }, + toAmino(message: State): StateAmino { + const obj: any = {}; + if (message.rewardPool) { + obj.reward_pool = message.rewardPool.map(e => + e ? Coin.toAmino(e) : undefined, + ); + } else { + obj.reward_pool = []; + } + if (message.rewardBlockAmount) { + obj.reward_block_amount = message.rewardBlockAmount.map(e => + e ? 
Coin.toAmino(e) : undefined, + ); + } else { + obj.reward_block_amount = []; + } + obj.last_sequence = message.lastSequence + ? message.lastSequence.toString() + : undefined; + obj.last_reward_distribution_block = message.lastRewardDistributionBlock + ? message.lastRewardDistributionBlock.toString() + : undefined; + return obj; + }, + fromAminoMsg(object: StateAminoMsg): State { + return State.fromAmino(object.value); + }, + fromProtoMsg(message: StateProtoMsg): State { + return State.decode(message.value); + }, + toProto(message: State): Uint8Array { + return State.encode(message).finish(); + }, + toProtoMsg(message: State): StateProtoMsg { + return { + typeUrl: '/agoric.vbank.State', + value: State.encode(message).finish(), + }; + }, +}; diff --git a/packages/cosmic-proto/src/codegen/agoric/vibc/msgs.amino.ts b/packages/cosmic-proto/src/codegen/agoric/vibc/msgs.amino.ts new file mode 100644 index 00000000000..75219fc4e4d --- /dev/null +++ b/packages/cosmic-proto/src/codegen/agoric/vibc/msgs.amino.ts @@ -0,0 +1,9 @@ +//@ts-nocheck +import { MsgSendPacket } from './msgs.js'; +export const AminoConverter = { + '/agoric.vibc.MsgSendPacket': { + aminoType: '/agoric.vibc.MsgSendPacket', + toAmino: MsgSendPacket.toAmino, + fromAmino: MsgSendPacket.fromAmino, + }, +}; diff --git a/packages/cosmic-proto/src/codegen/agoric/vibc/msgs.registry.ts b/packages/cosmic-proto/src/codegen/agoric/vibc/msgs.registry.ts new file mode 100644 index 00000000000..95dfa18472f --- /dev/null +++ b/packages/cosmic-proto/src/codegen/agoric/vibc/msgs.registry.ts @@ -0,0 +1,53 @@ +//@ts-nocheck +import { GeneratedType, Registry } from '@cosmjs/proto-signing'; +import { MsgSendPacket } from './msgs.js'; +export const registry: ReadonlyArray<[string, GeneratedType]> = [ + ['/agoric.vibc.MsgSendPacket', MsgSendPacket], +]; +export const load = (protoRegistry: Registry) => { + registry.forEach(([typeUrl, mod]) => { + protoRegistry.register(typeUrl, mod); + }); +}; +export const MessageComposer = { + encoded: { + sendPacket(value: MsgSendPacket) { + return { + typeUrl: '/agoric.vibc.MsgSendPacket', + value: MsgSendPacket.encode(value).finish(), + }; + }, + }, + withTypeUrl: { + sendPacket(value: MsgSendPacket) { + return { + typeUrl: '/agoric.vibc.MsgSendPacket', + value, + }; + }, + }, + toJSON: { + sendPacket(value: MsgSendPacket) { + return { + typeUrl: '/agoric.vibc.MsgSendPacket', + value: MsgSendPacket.toJSON(value), + }; + }, + }, + fromJSON: { + sendPacket(value: any) { + return { + typeUrl: '/agoric.vibc.MsgSendPacket', + value: MsgSendPacket.fromJSON(value), + }; + }, + }, + fromPartial: { + sendPacket(value: MsgSendPacket) { + return { + typeUrl: '/agoric.vibc.MsgSendPacket', + value: MsgSendPacket.fromPartial(value), + }; + }, + }, +}; diff --git a/packages/cosmic-proto/src/codegen/agoric/vibc/msgs.rpc.msg.ts b/packages/cosmic-proto/src/codegen/agoric/vibc/msgs.rpc.msg.ts new file mode 100644 index 00000000000..4f82327410f --- /dev/null +++ b/packages/cosmic-proto/src/codegen/agoric/vibc/msgs.rpc.msg.ts @@ -0,0 +1,23 @@ +//@ts-nocheck +import { Rpc } from '../../helpers.js'; +import { BinaryReader } from '../../binary.js'; +import { MsgSendPacket, MsgSendPacketResponse } from './msgs.js'; +/** The module transactions. */ +export interface Msg { + /** Force sending an arbitrary packet on a channel. 
*/ + sendPacket(request: MsgSendPacket): Promise; +} +export class MsgClientImpl implements Msg { + private readonly rpc: Rpc; + constructor(rpc: Rpc) { + this.rpc = rpc; + this.sendPacket = this.sendPacket.bind(this); + } + sendPacket(request: MsgSendPacket): Promise { + const data = MsgSendPacket.encode(request).finish(); + const promise = this.rpc.request('agoric.vibc.Msg', 'SendPacket', data); + return promise.then(data => + MsgSendPacketResponse.decode(new BinaryReader(data)), + ); + } +} diff --git a/packages/cosmic-proto/src/codegen/agoric/vibc/msgs.ts b/packages/cosmic-proto/src/codegen/agoric/vibc/msgs.ts new file mode 100644 index 00000000000..136a7bd8311 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/agoric/vibc/msgs.ts @@ -0,0 +1,209 @@ +//@ts-nocheck +import { + Packet, + PacketAmino, + PacketSDKType, +} from '../../ibc/core/channel/v1/channel.js'; +import { BinaryReader, BinaryWriter } from '../../binary.js'; +import { isSet, bytesFromBase64, base64FromBytes } from '../../helpers.js'; +/** MsgSendPacket is an SDK message for sending an outgoing IBC packet */ +export interface MsgSendPacket { + packet: Packet; + sender: Uint8Array; +} +export interface MsgSendPacketProtoMsg { + typeUrl: '/agoric.vibc.MsgSendPacket'; + value: Uint8Array; +} +/** MsgSendPacket is an SDK message for sending an outgoing IBC packet */ +export interface MsgSendPacketAmino { + packet?: PacketAmino; + sender?: string; +} +export interface MsgSendPacketAminoMsg { + type: '/agoric.vibc.MsgSendPacket'; + value: MsgSendPacketAmino; +} +/** MsgSendPacket is an SDK message for sending an outgoing IBC packet */ +export interface MsgSendPacketSDKType { + packet: PacketSDKType; + sender: Uint8Array; +} +/** Empty response for SendPacket. */ +export interface MsgSendPacketResponse {} +export interface MsgSendPacketResponseProtoMsg { + typeUrl: '/agoric.vibc.MsgSendPacketResponse'; + value: Uint8Array; +} +/** Empty response for SendPacket. */ +export interface MsgSendPacketResponseAmino {} +export interface MsgSendPacketResponseAminoMsg { + type: '/agoric.vibc.MsgSendPacketResponse'; + value: MsgSendPacketResponseAmino; +} +/** Empty response for SendPacket. */ +export interface MsgSendPacketResponseSDKType {} +function createBaseMsgSendPacket(): MsgSendPacket { + return { + packet: Packet.fromPartial({}), + sender: new Uint8Array(), + }; +} +export const MsgSendPacket = { + typeUrl: '/agoric.vibc.MsgSendPacket', + encode( + message: MsgSendPacket, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.packet !== undefined) { + Packet.encode(message.packet, writer.uint32(10).fork()).ldelim(); + } + if (message.sender.length !== 0) { + writer.uint32(18).bytes(message.sender); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): MsgSendPacket { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMsgSendPacket(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.packet = Packet.decode(reader, reader.uint32()); + break; + case 2: + message.sender = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): MsgSendPacket { + return { + packet: isSet(object.packet) ? Packet.fromJSON(object.packet) : undefined, + sender: isSet(object.sender) + ? 
bytesFromBase64(object.sender) + : new Uint8Array(), + }; + }, + toJSON(message: MsgSendPacket): unknown { + const obj: any = {}; + message.packet !== undefined && + (obj.packet = message.packet ? Packet.toJSON(message.packet) : undefined); + message.sender !== undefined && + (obj.sender = base64FromBytes( + message.sender !== undefined ? message.sender : new Uint8Array(), + )); + return obj; + }, + fromPartial(object: Partial): MsgSendPacket { + const message = createBaseMsgSendPacket(); + message.packet = + object.packet !== undefined && object.packet !== null + ? Packet.fromPartial(object.packet) + : undefined; + message.sender = object.sender ?? new Uint8Array(); + return message; + }, + fromAmino(object: MsgSendPacketAmino): MsgSendPacket { + const message = createBaseMsgSendPacket(); + if (object.packet !== undefined && object.packet !== null) { + message.packet = Packet.fromAmino(object.packet); + } + if (object.sender !== undefined && object.sender !== null) { + message.sender = bytesFromBase64(object.sender); + } + return message; + }, + toAmino(message: MsgSendPacket): MsgSendPacketAmino { + const obj: any = {}; + obj.packet = message.packet ? Packet.toAmino(message.packet) : undefined; + obj.sender = message.sender ? base64FromBytes(message.sender) : undefined; + return obj; + }, + fromAminoMsg(object: MsgSendPacketAminoMsg): MsgSendPacket { + return MsgSendPacket.fromAmino(object.value); + }, + fromProtoMsg(message: MsgSendPacketProtoMsg): MsgSendPacket { + return MsgSendPacket.decode(message.value); + }, + toProto(message: MsgSendPacket): Uint8Array { + return MsgSendPacket.encode(message).finish(); + }, + toProtoMsg(message: MsgSendPacket): MsgSendPacketProtoMsg { + return { + typeUrl: '/agoric.vibc.MsgSendPacket', + value: MsgSendPacket.encode(message).finish(), + }; + }, +}; +function createBaseMsgSendPacketResponse(): MsgSendPacketResponse { + return {}; +} +export const MsgSendPacketResponse = { + typeUrl: '/agoric.vibc.MsgSendPacketResponse', + encode( + _: MsgSendPacketResponse, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): MsgSendPacketResponse { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMsgSendPacketResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(_: any): MsgSendPacketResponse { + return {}; + }, + toJSON(_: MsgSendPacketResponse): unknown { + const obj: any = {}; + return obj; + }, + fromPartial(_: Partial): MsgSendPacketResponse { + const message = createBaseMsgSendPacketResponse(); + return message; + }, + fromAmino(_: MsgSendPacketResponseAmino): MsgSendPacketResponse { + const message = createBaseMsgSendPacketResponse(); + return message; + }, + toAmino(_: MsgSendPacketResponse): MsgSendPacketResponseAmino { + const obj: any = {}; + return obj; + }, + fromAminoMsg(object: MsgSendPacketResponseAminoMsg): MsgSendPacketResponse { + return MsgSendPacketResponse.fromAmino(object.value); + }, + fromProtoMsg(message: MsgSendPacketResponseProtoMsg): MsgSendPacketResponse { + return MsgSendPacketResponse.decode(message.value); + }, + toProto(message: MsgSendPacketResponse): Uint8Array { + return MsgSendPacketResponse.encode(message).finish(); + }, + toProtoMsg(message: MsgSendPacketResponse): MsgSendPacketResponseProtoMsg { + return { + typeUrl: '/agoric.vibc.MsgSendPacketResponse', + value: MsgSendPacketResponse.encode(message).finish(), + }; + }, +}; diff --git a/packages/cosmic-proto/src/codegen/agoric/vlocalchain/vlocalchain.ts b/packages/cosmic-proto/src/codegen/agoric/vlocalchain/vlocalchain.ts new file mode 100644 index 00000000000..0ca8c176357 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/agoric/vlocalchain/vlocalchain.ts @@ -0,0 +1,507 @@ +//@ts-nocheck +import { Any, AnyAmino, AnySDKType } from '../../google/protobuf/any.js'; +import { BinaryReader, BinaryWriter } from '../../binary.js'; +import { isSet } from '../../helpers.js'; +/** + * CosmosTx contains a list of sdk.Msg's. It should be used when sending + * transactions to a local chain. + */ +export interface CosmosTx { + messages: Any[]; +} +export interface CosmosTxProtoMsg { + typeUrl: '/agoric.vlocalchain.CosmosTx'; + value: Uint8Array; +} +/** + * CosmosTx contains a list of sdk.Msg's. It should be used when sending + * transactions to a local chain. + */ +export interface CosmosTxAmino { + messages?: AnyAmino[]; +} +export interface CosmosTxAminoMsg { + type: '/agoric.vlocalchain.CosmosTx'; + value: CosmosTxAmino; +} +/** + * CosmosTx contains a list of sdk.Msg's. It should be used when sending + * transactions to a local chain. + */ +export interface CosmosTxSDKType { + messages: AnySDKType[]; +} +/** QueryRequest is used internally to describe a query for the local chain. */ +export interface QueryRequest { + fullMethod: string; + request?: Any; + replyType: string; +} +export interface QueryRequestProtoMsg { + typeUrl: '/agoric.vlocalchain.QueryRequest'; + value: Uint8Array; +} +/** QueryRequest is used internally to describe a query for the local chain. */ +export interface QueryRequestAmino { + full_method?: string; + request?: AnyAmino; + reply_type?: string; +} +export interface QueryRequestAminoMsg { + type: '/agoric.vlocalchain.QueryRequest'; + value: QueryRequestAmino; +} +/** QueryRequest is used internally to describe a query for the local chain. */ +export interface QueryRequestSDKType { + full_method: string; + request?: AnySDKType; + reply_type: string; +} +/** QueryResponse is used internally to describe a response from the local chain. 
*/ +export interface QueryResponse { + height: bigint; + reply?: Any; + error: string; +} +export interface QueryResponseProtoMsg { + typeUrl: '/agoric.vlocalchain.QueryResponse'; + value: Uint8Array; +} +/** QueryResponse is used internally to describe a response from the local chain. */ +export interface QueryResponseAmino { + height?: string; + reply?: AnyAmino; + error?: string; +} +export interface QueryResponseAminoMsg { + type: '/agoric.vlocalchain.QueryResponse'; + value: QueryResponseAmino; +} +/** QueryResponse is used internally to describe a response from the local chain. */ +export interface QueryResponseSDKType { + height: bigint; + reply?: AnySDKType; + error: string; +} +/** QueryResponses is used to group multiple QueryResponse messages. */ +export interface QueryResponses { + responses: QueryResponse[]; +} +export interface QueryResponsesProtoMsg { + typeUrl: '/agoric.vlocalchain.QueryResponses'; + value: Uint8Array; +} +/** QueryResponses is used to group multiple QueryResponse messages. */ +export interface QueryResponsesAmino { + responses?: QueryResponseAmino[]; +} +export interface QueryResponsesAminoMsg { + type: '/agoric.vlocalchain.QueryResponses'; + value: QueryResponsesAmino; +} +/** QueryResponses is used to group multiple QueryResponse messages. */ +export interface QueryResponsesSDKType { + responses: QueryResponseSDKType[]; +} +function createBaseCosmosTx(): CosmosTx { + return { + messages: [], + }; +} +export const CosmosTx = { + typeUrl: '/agoric.vlocalchain.CosmosTx', + encode( + message: CosmosTx, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + for (const v of message.messages) { + Any.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): CosmosTx { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCosmosTx(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.messages.push(Any.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): CosmosTx { + return { + messages: Array.isArray(object?.messages) + ? object.messages.map((e: any) => Any.fromJSON(e)) + : [], + }; + }, + toJSON(message: CosmosTx): unknown { + const obj: any = {}; + if (message.messages) { + obj.messages = message.messages.map(e => (e ? Any.toJSON(e) : undefined)); + } else { + obj.messages = []; + } + return obj; + }, + fromPartial(object: Partial): CosmosTx { + const message = createBaseCosmosTx(); + message.messages = object.messages?.map(e => Any.fromPartial(e)) || []; + return message; + }, + fromAmino(object: CosmosTxAmino): CosmosTx { + const message = createBaseCosmosTx(); + message.messages = object.messages?.map(e => Any.fromAmino(e)) || []; + return message; + }, + toAmino(message: CosmosTx): CosmosTxAmino { + const obj: any = {}; + if (message.messages) { + obj.messages = message.messages.map(e => + e ? 
Any.toAmino(e) : undefined, + ); + } else { + obj.messages = []; + } + return obj; + }, + fromAminoMsg(object: CosmosTxAminoMsg): CosmosTx { + return CosmosTx.fromAmino(object.value); + }, + fromProtoMsg(message: CosmosTxProtoMsg): CosmosTx { + return CosmosTx.decode(message.value); + }, + toProto(message: CosmosTx): Uint8Array { + return CosmosTx.encode(message).finish(); + }, + toProtoMsg(message: CosmosTx): CosmosTxProtoMsg { + return { + typeUrl: '/agoric.vlocalchain.CosmosTx', + value: CosmosTx.encode(message).finish(), + }; + }, +}; +function createBaseQueryRequest(): QueryRequest { + return { + fullMethod: '', + request: undefined, + replyType: '', + }; +} +export const QueryRequest = { + typeUrl: '/agoric.vlocalchain.QueryRequest', + encode( + message: QueryRequest, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.fullMethod !== '') { + writer.uint32(10).string(message.fullMethod); + } + if (message.request !== undefined) { + Any.encode(message.request, writer.uint32(18).fork()).ldelim(); + } + if (message.replyType !== '') { + writer.uint32(26).string(message.replyType); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): QueryRequest { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.fullMethod = reader.string(); + break; + case 2: + message.request = Any.decode(reader, reader.uint32()); + break; + case 3: + message.replyType = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): QueryRequest { + return { + fullMethod: isSet(object.fullMethod) ? String(object.fullMethod) : '', + request: isSet(object.request) ? Any.fromJSON(object.request) : undefined, + replyType: isSet(object.replyType) ? String(object.replyType) : '', + }; + }, + toJSON(message: QueryRequest): unknown { + const obj: any = {}; + message.fullMethod !== undefined && (obj.fullMethod = message.fullMethod); + message.request !== undefined && + (obj.request = message.request ? Any.toJSON(message.request) : undefined); + message.replyType !== undefined && (obj.replyType = message.replyType); + return obj; + }, + fromPartial(object: Partial): QueryRequest { + const message = createBaseQueryRequest(); + message.fullMethod = object.fullMethod ?? ''; + message.request = + object.request !== undefined && object.request !== null + ? Any.fromPartial(object.request) + : undefined; + message.replyType = object.replyType ?? ''; + return message; + }, + fromAmino(object: QueryRequestAmino): QueryRequest { + const message = createBaseQueryRequest(); + if (object.full_method !== undefined && object.full_method !== null) { + message.fullMethod = object.full_method; + } + if (object.request !== undefined && object.request !== null) { + message.request = Any.fromAmino(object.request); + } + if (object.reply_type !== undefined && object.reply_type !== null) { + message.replyType = object.reply_type; + } + return message; + }, + toAmino(message: QueryRequest): QueryRequestAmino { + const obj: any = {}; + obj.full_method = message.fullMethod; + obj.request = message.request ? 
Any.toAmino(message.request) : undefined; + obj.reply_type = message.replyType; + return obj; + }, + fromAminoMsg(object: QueryRequestAminoMsg): QueryRequest { + return QueryRequest.fromAmino(object.value); + }, + fromProtoMsg(message: QueryRequestProtoMsg): QueryRequest { + return QueryRequest.decode(message.value); + }, + toProto(message: QueryRequest): Uint8Array { + return QueryRequest.encode(message).finish(); + }, + toProtoMsg(message: QueryRequest): QueryRequestProtoMsg { + return { + typeUrl: '/agoric.vlocalchain.QueryRequest', + value: QueryRequest.encode(message).finish(), + }; + }, +}; +function createBaseQueryResponse(): QueryResponse { + return { + height: BigInt(0), + reply: undefined, + error: '', + }; +} +export const QueryResponse = { + typeUrl: '/agoric.vlocalchain.QueryResponse', + encode( + message: QueryResponse, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.height !== BigInt(0)) { + writer.uint32(8).int64(message.height); + } + if (message.reply !== undefined) { + Any.encode(message.reply, writer.uint32(18).fork()).ldelim(); + } + if (message.error !== '') { + writer.uint32(26).string(message.error); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): QueryResponse { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.height = reader.int64(); + break; + case 2: + message.reply = Any.decode(reader, reader.uint32()); + break; + case 3: + message.error = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): QueryResponse { + return { + height: isSet(object.height) + ? BigInt(object.height.toString()) + : BigInt(0), + reply: isSet(object.reply) ? Any.fromJSON(object.reply) : undefined, + error: isSet(object.error) ? String(object.error) : '', + }; + }, + toJSON(message: QueryResponse): unknown { + const obj: any = {}; + message.height !== undefined && + (obj.height = (message.height || BigInt(0)).toString()); + message.reply !== undefined && + (obj.reply = message.reply ? Any.toJSON(message.reply) : undefined); + message.error !== undefined && (obj.error = message.error); + return obj; + }, + fromPartial(object: Partial): QueryResponse { + const message = createBaseQueryResponse(); + message.height = + object.height !== undefined && object.height !== null + ? BigInt(object.height.toString()) + : BigInt(0); + message.reply = + object.reply !== undefined && object.reply !== null + ? Any.fromPartial(object.reply) + : undefined; + message.error = object.error ?? ''; + return message; + }, + fromAmino(object: QueryResponseAmino): QueryResponse { + const message = createBaseQueryResponse(); + if (object.height !== undefined && object.height !== null) { + message.height = BigInt(object.height); + } + if (object.reply !== undefined && object.reply !== null) { + message.reply = Any.fromAmino(object.reply); + } + if (object.error !== undefined && object.error !== null) { + message.error = object.error; + } + return message; + }, + toAmino(message: QueryResponse): QueryResponseAmino { + const obj: any = {}; + obj.height = message.height ? message.height.toString() : undefined; + obj.reply = message.reply ? 
Any.toAmino(message.reply) : undefined; + obj.error = message.error; + return obj; + }, + fromAminoMsg(object: QueryResponseAminoMsg): QueryResponse { + return QueryResponse.fromAmino(object.value); + }, + fromProtoMsg(message: QueryResponseProtoMsg): QueryResponse { + return QueryResponse.decode(message.value); + }, + toProto(message: QueryResponse): Uint8Array { + return QueryResponse.encode(message).finish(); + }, + toProtoMsg(message: QueryResponse): QueryResponseProtoMsg { + return { + typeUrl: '/agoric.vlocalchain.QueryResponse', + value: QueryResponse.encode(message).finish(), + }; + }, +}; +function createBaseQueryResponses(): QueryResponses { + return { + responses: [], + }; +} +export const QueryResponses = { + typeUrl: '/agoric.vlocalchain.QueryResponses', + encode( + message: QueryResponses, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + for (const v of message.responses) { + QueryResponse.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): QueryResponses { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryResponses(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.responses.push(QueryResponse.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): QueryResponses { + return { + responses: Array.isArray(object?.responses) + ? object.responses.map((e: any) => QueryResponse.fromJSON(e)) + : [], + }; + }, + toJSON(message: QueryResponses): unknown { + const obj: any = {}; + if (message.responses) { + obj.responses = message.responses.map(e => + e ? QueryResponse.toJSON(e) : undefined, + ); + } else { + obj.responses = []; + } + return obj; + }, + fromPartial(object: Partial): QueryResponses { + const message = createBaseQueryResponses(); + message.responses = + object.responses?.map(e => QueryResponse.fromPartial(e)) || []; + return message; + }, + fromAmino(object: QueryResponsesAmino): QueryResponses { + const message = createBaseQueryResponses(); + message.responses = + object.responses?.map(e => QueryResponse.fromAmino(e)) || []; + return message; + }, + toAmino(message: QueryResponses): QueryResponsesAmino { + const obj: any = {}; + if (message.responses) { + obj.responses = message.responses.map(e => + e ? 
QueryResponse.toAmino(e) : undefined, + ); + } else { + obj.responses = []; + } + return obj; + }, + fromAminoMsg(object: QueryResponsesAminoMsg): QueryResponses { + return QueryResponses.fromAmino(object.value); + }, + fromProtoMsg(message: QueryResponsesProtoMsg): QueryResponses { + return QueryResponses.decode(message.value); + }, + toProto(message: QueryResponses): Uint8Array { + return QueryResponses.encode(message).finish(); + }, + toProtoMsg(message: QueryResponses): QueryResponsesProtoMsg { + return { + typeUrl: '/agoric.vlocalchain.QueryResponses', + value: QueryResponses.encode(message).finish(), + }; + }, +}; diff --git a/packages/cosmic-proto/src/codegen/agoric/vstorage/genesis.ts b/packages/cosmic-proto/src/codegen/agoric/vstorage/genesis.ts new file mode 100644 index 00000000000..32251d1413a --- /dev/null +++ b/packages/cosmic-proto/src/codegen/agoric/vstorage/genesis.ts @@ -0,0 +1,239 @@ +//@ts-nocheck +import { BinaryReader, BinaryWriter } from '../../binary.js'; +import { isSet } from '../../helpers.js'; +/** The initial or exported state. */ +export interface GenesisState { + data: DataEntry[]; +} +export interface GenesisStateProtoMsg { + typeUrl: '/agoric.vstorage.GenesisState'; + value: Uint8Array; +} +/** The initial or exported state. */ +export interface GenesisStateAmino { + data?: DataEntryAmino[]; +} +export interface GenesisStateAminoMsg { + type: '/agoric.vstorage.GenesisState'; + value: GenesisStateAmino; +} +/** The initial or exported state. */ +export interface GenesisStateSDKType { + data: DataEntrySDKType[]; +} +/** + * A vstorage entry. The only necessary entries are those with data, as the + * ancestor nodes are reconstructed on import. + */ +export interface DataEntry { + /** + * A "."-separated path with individual path elements matching + * `[-_A-Za-z0-9]+` + */ + path: string; + value: string; +} +export interface DataEntryProtoMsg { + typeUrl: '/agoric.vstorage.DataEntry'; + value: Uint8Array; +} +/** + * A vstorage entry. The only necessary entries are those with data, as the + * ancestor nodes are reconstructed on import. + */ +export interface DataEntryAmino { + /** + * A "."-separated path with individual path elements matching + * `[-_A-Za-z0-9]+` + */ + path?: string; + value?: string; +} +export interface DataEntryAminoMsg { + type: '/agoric.vstorage.DataEntry'; + value: DataEntryAmino; +} +/** + * A vstorage entry. The only necessary entries are those with data, as the + * ancestor nodes are reconstructed on import. + */ +export interface DataEntrySDKType { + path: string; + value: string; +} +function createBaseGenesisState(): GenesisState { + return { + data: [], + }; +} +export const GenesisState = { + typeUrl: '/agoric.vstorage.GenesisState', + encode( + message: GenesisState, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + for (const v of message.data) { + DataEntry.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): GenesisState { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseGenesisState(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.data.push(DataEntry.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): GenesisState { + return { + data: Array.isArray(object?.data) + ? object.data.map((e: any) => DataEntry.fromJSON(e)) + : [], + }; + }, + toJSON(message: GenesisState): unknown { + const obj: any = {}; + if (message.data) { + obj.data = message.data.map(e => (e ? DataEntry.toJSON(e) : undefined)); + } else { + obj.data = []; + } + return obj; + }, + fromPartial(object: Partial): GenesisState { + const message = createBaseGenesisState(); + message.data = object.data?.map(e => DataEntry.fromPartial(e)) || []; + return message; + }, + fromAmino(object: GenesisStateAmino): GenesisState { + const message = createBaseGenesisState(); + message.data = object.data?.map(e => DataEntry.fromAmino(e)) || []; + return message; + }, + toAmino(message: GenesisState): GenesisStateAmino { + const obj: any = {}; + if (message.data) { + obj.data = message.data.map(e => (e ? DataEntry.toAmino(e) : undefined)); + } else { + obj.data = []; + } + return obj; + }, + fromAminoMsg(object: GenesisStateAminoMsg): GenesisState { + return GenesisState.fromAmino(object.value); + }, + fromProtoMsg(message: GenesisStateProtoMsg): GenesisState { + return GenesisState.decode(message.value); + }, + toProto(message: GenesisState): Uint8Array { + return GenesisState.encode(message).finish(); + }, + toProtoMsg(message: GenesisState): GenesisStateProtoMsg { + return { + typeUrl: '/agoric.vstorage.GenesisState', + value: GenesisState.encode(message).finish(), + }; + }, +}; +function createBaseDataEntry(): DataEntry { + return { + path: '', + value: '', + }; +} +export const DataEntry = { + typeUrl: '/agoric.vstorage.DataEntry', + encode( + message: DataEntry, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.path !== '') { + writer.uint32(10).string(message.path); + } + if (message.value !== '') { + writer.uint32(18).string(message.value); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): DataEntry { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDataEntry(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.path = reader.string(); + break; + case 2: + message.value = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): DataEntry { + return { + path: isSet(object.path) ? String(object.path) : '', + value: isSet(object.value) ? String(object.value) : '', + }; + }, + toJSON(message: DataEntry): unknown { + const obj: any = {}; + message.path !== undefined && (obj.path = message.path); + message.value !== undefined && (obj.value = message.value); + return obj; + }, + fromPartial(object: Partial): DataEntry { + const message = createBaseDataEntry(); + message.path = object.path ?? ''; + message.value = object.value ?? 
'';
+    return message;
+  },
+  fromAmino(object: DataEntryAmino): DataEntry {
+    const message = createBaseDataEntry();
+    if (object.path !== undefined && object.path !== null) {
+      message.path = object.path;
+    }
+    if (object.value !== undefined && object.value !== null) {
+      message.value = object.value;
+    }
+    return message;
+  },
+  toAmino(message: DataEntry): DataEntryAmino {
+    const obj: any = {};
+    obj.path = message.path;
+    obj.value = message.value;
+    return obj;
+  },
+  fromAminoMsg(object: DataEntryAminoMsg): DataEntry {
+    return DataEntry.fromAmino(object.value);
+  },
+  fromProtoMsg(message: DataEntryProtoMsg): DataEntry {
+    return DataEntry.decode(message.value);
+  },
+  toProto(message: DataEntry): Uint8Array {
+    return DataEntry.encode(message).finish();
+  },
+  toProtoMsg(message: DataEntry): DataEntryProtoMsg {
+    return {
+      typeUrl: '/agoric.vstorage.DataEntry',
+      value: DataEntry.encode(message).finish(),
+    };
+  },
+};
diff --git a/packages/cosmic-proto/src/codegen/agoric/vstorage/query.rpc.Query.ts b/packages/cosmic-proto/src/codegen/agoric/vstorage/query.rpc.Query.ts
new file mode 100644
index 00000000000..0b37969c04d
--- /dev/null
+++ b/packages/cosmic-proto/src/codegen/agoric/vstorage/query.rpc.Query.ts
@@ -0,0 +1,69 @@
+//@ts-nocheck
+import { Rpc } from '../../helpers.js';
+import { BinaryReader } from '../../binary.js';
+import { QueryClient, createProtobufRpcClient } from '@cosmjs/stargate';
+import {
+  QueryDataRequest,
+  QueryDataResponse,
+  QueryCapDataRequest,
+  QueryCapDataResponse,
+  QueryChildrenRequest,
+  QueryChildrenResponse,
+} from './query.js';
+/** Query defines the gRPC querier service */
+export interface Query {
+  /** Return the raw string value of an arbitrary vstorage datum. */
+  data(request: QueryDataRequest): Promise<QueryDataResponse>;
+  /**
+   * Return a formatted representation of a vstorage datum that must be
+   * a valid StreamCell with CapData values, or standalone CapData.
+   */
+  capData(request: QueryCapDataRequest): Promise<QueryCapDataResponse>;
+  /** Return the children of a given vstorage path. */
+  children(request: QueryChildrenRequest): Promise<QueryChildrenResponse>;
+}
+export class QueryClientImpl implements Query {
+  private readonly rpc: Rpc;
+  constructor(rpc: Rpc) {
+    this.rpc = rpc;
+    this.data = this.data.bind(this);
+    this.capData = this.capData.bind(this);
+    this.children = this.children.bind(this);
+  }
+  data(request: QueryDataRequest): Promise<QueryDataResponse> {
+    const data = QueryDataRequest.encode(request).finish();
+    const promise = this.rpc.request('agoric.vstorage.Query', 'Data', data);
+    return promise.then(data =>
+      QueryDataResponse.decode(new BinaryReader(data)),
+    );
+  }
+  capData(request: QueryCapDataRequest): Promise<QueryCapDataResponse> {
+    const data = QueryCapDataRequest.encode(request).finish();
+    const promise = this.rpc.request('agoric.vstorage.Query', 'CapData', data);
+    return promise.then(data =>
+      QueryCapDataResponse.decode(new BinaryReader(data)),
+    );
+  }
+  children(request: QueryChildrenRequest): Promise<QueryChildrenResponse> {
+    const data = QueryChildrenRequest.encode(request).finish();
+    const promise = this.rpc.request('agoric.vstorage.Query', 'Children', data);
+    return promise.then(data =>
+      QueryChildrenResponse.decode(new BinaryReader(data)),
+    );
+  }
+}
+export const createRpcQueryExtension = (base: QueryClient) => {
+  const rpc = createProtobufRpcClient(base);
+  const queryService = new QueryClientImpl(rpc);
+  return {
+    data(request: QueryDataRequest): Promise<QueryDataResponse> {
+      return queryService.data(request);
+    },
+    capData(request: QueryCapDataRequest): Promise<QueryCapDataResponse> {
+      return queryService.capData(request);
+    },
+    children(request: QueryChildrenRequest): Promise<QueryChildrenResponse> {
+      return queryService.children(request);
+    },
+  };
+};
diff --git a/packages/cosmic-proto/src/codegen/agoric/vstorage/query.ts b/packages/cosmic-proto/src/codegen/agoric/vstorage/query.ts
new file mode 100644
index 00000000000..c2161f3a4ce
--- /dev/null
+++ b/packages/cosmic-proto/src/codegen/agoric/vstorage/query.ts
@@ -0,0 +1,790 @@
+//@ts-nocheck
+import {
+  PageRequest,
+  PageRequestAmino,
+  PageRequestSDKType,
+  PageResponse,
+  PageResponseAmino,
+  PageResponseSDKType,
+} from '../../cosmos/base/query/v1beta1/pagination.js';
+import { BinaryReader, BinaryWriter } from '../../binary.js';
+import { isSet } from '../../helpers.js';
+/** QueryDataRequest is the vstorage path data query. */
+export interface QueryDataRequest {
+  path: string;
+}
+export interface QueryDataRequestProtoMsg {
+  typeUrl: '/agoric.vstorage.QueryDataRequest';
+  value: Uint8Array;
+}
+/** QueryDataRequest is the vstorage path data query. */
+export interface QueryDataRequestAmino {
+  path?: string;
+}
+export interface QueryDataRequestAminoMsg {
+  type: '/agoric.vstorage.QueryDataRequest';
+  value: QueryDataRequestAmino;
+}
+/** QueryDataRequest is the vstorage path data query. */
+export interface QueryDataRequestSDKType {
+  path: string;
+}
+/** QueryDataResponse is the vstorage path data response. */
+export interface QueryDataResponse {
+  value: string;
+}
+export interface QueryDataResponseProtoMsg {
+  typeUrl: '/agoric.vstorage.QueryDataResponse';
+  value: Uint8Array;
+}
+/** QueryDataResponse is the vstorage path data response. */
+export interface QueryDataResponseAmino {
+  value?: string;
+}
+export interface QueryDataResponseAminoMsg {
+  type: '/agoric.vstorage.QueryDataResponse';
+  value: QueryDataResponseAmino;
+}
+/** QueryDataResponse is the vstorage path data response. */
+export interface QueryDataResponseSDKType {
+  value: string;
+}
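+// NOTE (illustrative sketch, not part of the generated bindings): the
+// createRpcQueryExtension helper added above in query.rpc.Query.ts is meant
+// to be combined with @cosmjs/stargate's QueryClient. Assuming a Tendermint
+// RPC endpoint and a vstorage path of your choosing (both hypothetical
+// values here), usage might look like:
+//
+//   import { QueryClient } from '@cosmjs/stargate';
+//   import { Tendermint34Client } from '@cosmjs/tendermint-rpc';
+//   import { createRpcQueryExtension } from './query.rpc.Query.js';
+//
+//   const tm = await Tendermint34Client.connect('http://localhost:26657');
+//   const queryClient = QueryClient.withExtensions(tm, createRpcQueryExtension);
+//   const { value } = await queryClient.data({ path: 'published.agoricNames' });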
+/** QueryCapDataRequest contains a path and formatting configuration. */
+export interface QueryCapDataRequest {
+  path: string;
+  /**
+   * mediaType must be an actual media type in the registry at
+   * https://www.iana.org/assignments/media-types/media-types.xhtml
+   * or a special value that does not conflict with the media type syntax.
+   * The only valid value is "JSON Lines", which is also the default.
+   */
+  mediaType: string;
+  /**
+   * itemFormat, if present, must be the special value "flat" to indicate that
+   * the deep structure of each item should be flattened into a single level
+   * with kebab-case keys (e.g., `{ "metrics": { "min": 0, "max": 88 } }` as
+   * `{ "metrics-min": 0, "metrics-max": 88 }`).
+   */
+  itemFormat: string;
+  /**
+   * remotableValueFormat indicates how to transform references to opaque but
+   * distinguishable Remotables into readable embedded representations.
+   * * "object" represents each Remotable as an `{ id, allegedName }` object, e.g. `{ "id": "board007", "allegedName": "IST brand" }`.
+   * * "string" represents each Remotable as a string with bracket-wrapped contents including its alleged name and id, e.g. "[Alleged: IST brand <board007>]".
+   */
+  remotableValueFormat: string;
+}
+export interface QueryCapDataRequestProtoMsg {
+  typeUrl: '/agoric.vstorage.QueryCapDataRequest';
+  value: Uint8Array;
+}
+/** QueryCapDataRequest contains a path and formatting configuration. */
+export interface QueryCapDataRequestAmino {
+  path?: string;
+  /**
+   * mediaType must be an actual media type in the registry at
+   * https://www.iana.org/assignments/media-types/media-types.xhtml
+   * or a special value that does not conflict with the media type syntax.
+   * The only valid value is "JSON Lines", which is also the default.
+   */
+  media_type?: string;
+  /**
+   * itemFormat, if present, must be the special value "flat" to indicate that
+   * the deep structure of each item should be flattened into a single level
+   * with kebab-case keys (e.g., `{ "metrics": { "min": 0, "max": 88 } }` as
+   * `{ "metrics-min": 0, "metrics-max": 88 }`).
+   */
+  item_format?: string;
+  /**
+   * remotableValueFormat indicates how to transform references to opaque but
+   * distinguishable Remotables into readable embedded representations.
+   * * "object" represents each Remotable as an `{ id, allegedName }` object, e.g. `{ "id": "board007", "allegedName": "IST brand" }`.
+   * * "string" represents each Remotable as a string with bracket-wrapped contents including its alleged name and id, e.g. "[Alleged: IST brand <board007>]".
+   */
+  remotable_value_format?: string;
+}
+export interface QueryCapDataRequestAminoMsg {
+  type: '/agoric.vstorage.QueryCapDataRequest';
+  value: QueryCapDataRequestAmino;
+}
+/** QueryCapDataRequest contains a path and formatting configuration. */
+export interface QueryCapDataRequestSDKType {
+  path: string;
+  media_type: string;
+  item_format: string;
+  remotable_value_format: string;
+}
+/**
+ * QueryCapDataResponse represents the result with the requested formatting,
+ * reserving space for future metadata such as media type.
+ */
+export interface QueryCapDataResponse {
+  blockHeight: string;
+  value: string;
+}
+export interface QueryCapDataResponseProtoMsg {
+  typeUrl: '/agoric.vstorage.QueryCapDataResponse';
+  value: Uint8Array;
+}
+/**
+ * QueryCapDataResponse represents the result with the requested formatting,
+ * reserving space for future metadata such as media type.
+ */ +export interface QueryCapDataResponseAmino { + block_height?: string; + value?: string; +} +export interface QueryCapDataResponseAminoMsg { + type: '/agoric.vstorage.QueryCapDataResponse'; + value: QueryCapDataResponseAmino; +} +/** + * QueryCapDataResponse represents the result with the requested formatting, + * reserving space for future metadata such as media type. + */ +export interface QueryCapDataResponseSDKType { + block_height: string; + value: string; +} +/** QueryChildrenRequest is the vstorage path children query. */ +export interface QueryChildrenRequest { + path: string; + pagination?: PageRequest; +} +export interface QueryChildrenRequestProtoMsg { + typeUrl: '/agoric.vstorage.QueryChildrenRequest'; + value: Uint8Array; +} +/** QueryChildrenRequest is the vstorage path children query. */ +export interface QueryChildrenRequestAmino { + path?: string; + pagination?: PageRequestAmino; +} +export interface QueryChildrenRequestAminoMsg { + type: '/agoric.vstorage.QueryChildrenRequest'; + value: QueryChildrenRequestAmino; +} +/** QueryChildrenRequest is the vstorage path children query. */ +export interface QueryChildrenRequestSDKType { + path: string; + pagination?: PageRequestSDKType; +} +/** QueryChildrenResponse is the vstorage path children response. */ +export interface QueryChildrenResponse { + children: string[]; + pagination?: PageResponse; +} +export interface QueryChildrenResponseProtoMsg { + typeUrl: '/agoric.vstorage.QueryChildrenResponse'; + value: Uint8Array; +} +/** QueryChildrenResponse is the vstorage path children response. */ +export interface QueryChildrenResponseAmino { + children?: string[]; + pagination?: PageResponseAmino; +} +export interface QueryChildrenResponseAminoMsg { + type: '/agoric.vstorage.QueryChildrenResponse'; + value: QueryChildrenResponseAmino; +} +/** QueryChildrenResponse is the vstorage path children response. */ +export interface QueryChildrenResponseSDKType { + children: string[]; + pagination?: PageResponseSDKType; +} +function createBaseQueryDataRequest(): QueryDataRequest { + return { + path: '', + }; +} +export const QueryDataRequest = { + typeUrl: '/agoric.vstorage.QueryDataRequest', + encode( + message: QueryDataRequest, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.path !== '') { + writer.uint32(10).string(message.path); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): QueryDataRequest { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDataRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.path = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): QueryDataRequest { + return { + path: isSet(object.path) ? String(object.path) : '', + }; + }, + toJSON(message: QueryDataRequest): unknown { + const obj: any = {}; + message.path !== undefined && (obj.path = message.path); + return obj; + }, + fromPartial(object: Partial): QueryDataRequest { + const message = createBaseQueryDataRequest(); + message.path = object.path ?? 
''; + return message; + }, + fromAmino(object: QueryDataRequestAmino): QueryDataRequest { + const message = createBaseQueryDataRequest(); + if (object.path !== undefined && object.path !== null) { + message.path = object.path; + } + return message; + }, + toAmino(message: QueryDataRequest): QueryDataRequestAmino { + const obj: any = {}; + obj.path = message.path; + return obj; + }, + fromAminoMsg(object: QueryDataRequestAminoMsg): QueryDataRequest { + return QueryDataRequest.fromAmino(object.value); + }, + fromProtoMsg(message: QueryDataRequestProtoMsg): QueryDataRequest { + return QueryDataRequest.decode(message.value); + }, + toProto(message: QueryDataRequest): Uint8Array { + return QueryDataRequest.encode(message).finish(); + }, + toProtoMsg(message: QueryDataRequest): QueryDataRequestProtoMsg { + return { + typeUrl: '/agoric.vstorage.QueryDataRequest', + value: QueryDataRequest.encode(message).finish(), + }; + }, +}; +function createBaseQueryDataResponse(): QueryDataResponse { + return { + value: '', + }; +} +export const QueryDataResponse = { + typeUrl: '/agoric.vstorage.QueryDataResponse', + encode( + message: QueryDataResponse, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.value !== '') { + writer.uint32(10).string(message.value); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): QueryDataResponse { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryDataResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.value = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): QueryDataResponse { + return { + value: isSet(object.value) ? String(object.value) : '', + }; + }, + toJSON(message: QueryDataResponse): unknown { + const obj: any = {}; + message.value !== undefined && (obj.value = message.value); + return obj; + }, + fromPartial(object: Partial): QueryDataResponse { + const message = createBaseQueryDataResponse(); + message.value = object.value ?? 
''; + return message; + }, + fromAmino(object: QueryDataResponseAmino): QueryDataResponse { + const message = createBaseQueryDataResponse(); + if (object.value !== undefined && object.value !== null) { + message.value = object.value; + } + return message; + }, + toAmino(message: QueryDataResponse): QueryDataResponseAmino { + const obj: any = {}; + obj.value = message.value; + return obj; + }, + fromAminoMsg(object: QueryDataResponseAminoMsg): QueryDataResponse { + return QueryDataResponse.fromAmino(object.value); + }, + fromProtoMsg(message: QueryDataResponseProtoMsg): QueryDataResponse { + return QueryDataResponse.decode(message.value); + }, + toProto(message: QueryDataResponse): Uint8Array { + return QueryDataResponse.encode(message).finish(); + }, + toProtoMsg(message: QueryDataResponse): QueryDataResponseProtoMsg { + return { + typeUrl: '/agoric.vstorage.QueryDataResponse', + value: QueryDataResponse.encode(message).finish(), + }; + }, +}; +function createBaseQueryCapDataRequest(): QueryCapDataRequest { + return { + path: '', + mediaType: '', + itemFormat: '', + remotableValueFormat: '', + }; +} +export const QueryCapDataRequest = { + typeUrl: '/agoric.vstorage.QueryCapDataRequest', + encode( + message: QueryCapDataRequest, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.path !== '') { + writer.uint32(10).string(message.path); + } + if (message.mediaType !== '') { + writer.uint32(18).string(message.mediaType); + } + if (message.itemFormat !== '') { + writer.uint32(26).string(message.itemFormat); + } + if (message.remotableValueFormat !== '') { + writer.uint32(82).string(message.remotableValueFormat); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): QueryCapDataRequest { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryCapDataRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.path = reader.string(); + break; + case 2: + message.mediaType = reader.string(); + break; + case 3: + message.itemFormat = reader.string(); + break; + case 10: + message.remotableValueFormat = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): QueryCapDataRequest { + return { + path: isSet(object.path) ? String(object.path) : '', + mediaType: isSet(object.mediaType) ? String(object.mediaType) : '', + itemFormat: isSet(object.itemFormat) ? String(object.itemFormat) : '', + remotableValueFormat: isSet(object.remotableValueFormat) + ? String(object.remotableValueFormat) + : '', + }; + }, + toJSON(message: QueryCapDataRequest): unknown { + const obj: any = {}; + message.path !== undefined && (obj.path = message.path); + message.mediaType !== undefined && (obj.mediaType = message.mediaType); + message.itemFormat !== undefined && (obj.itemFormat = message.itemFormat); + message.remotableValueFormat !== undefined && + (obj.remotableValueFormat = message.remotableValueFormat); + return obj; + }, + fromPartial(object: Partial): QueryCapDataRequest { + const message = createBaseQueryCapDataRequest(); + message.path = object.path ?? ''; + message.mediaType = object.mediaType ?? ''; + message.itemFormat = object.itemFormat ?? ''; + message.remotableValueFormat = object.remotableValueFormat ?? 
''; + return message; + }, + fromAmino(object: QueryCapDataRequestAmino): QueryCapDataRequest { + const message = createBaseQueryCapDataRequest(); + if (object.path !== undefined && object.path !== null) { + message.path = object.path; + } + if (object.media_type !== undefined && object.media_type !== null) { + message.mediaType = object.media_type; + } + if (object.item_format !== undefined && object.item_format !== null) { + message.itemFormat = object.item_format; + } + if ( + object.remotable_value_format !== undefined && + object.remotable_value_format !== null + ) { + message.remotableValueFormat = object.remotable_value_format; + } + return message; + }, + toAmino(message: QueryCapDataRequest): QueryCapDataRequestAmino { + const obj: any = {}; + obj.path = message.path; + obj.media_type = message.mediaType; + obj.item_format = message.itemFormat; + obj.remotable_value_format = message.remotableValueFormat; + return obj; + }, + fromAminoMsg(object: QueryCapDataRequestAminoMsg): QueryCapDataRequest { + return QueryCapDataRequest.fromAmino(object.value); + }, + fromProtoMsg(message: QueryCapDataRequestProtoMsg): QueryCapDataRequest { + return QueryCapDataRequest.decode(message.value); + }, + toProto(message: QueryCapDataRequest): Uint8Array { + return QueryCapDataRequest.encode(message).finish(); + }, + toProtoMsg(message: QueryCapDataRequest): QueryCapDataRequestProtoMsg { + return { + typeUrl: '/agoric.vstorage.QueryCapDataRequest', + value: QueryCapDataRequest.encode(message).finish(), + }; + }, +}; +function createBaseQueryCapDataResponse(): QueryCapDataResponse { + return { + blockHeight: '', + value: '', + }; +} +export const QueryCapDataResponse = { + typeUrl: '/agoric.vstorage.QueryCapDataResponse', + encode( + message: QueryCapDataResponse, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.blockHeight !== '') { + writer.uint32(10).string(message.blockHeight); + } + if (message.value !== '') { + writer.uint32(82).string(message.value); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): QueryCapDataResponse { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryCapDataResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.blockHeight = reader.string(); + break; + case 10: + message.value = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): QueryCapDataResponse { + return { + blockHeight: isSet(object.blockHeight) ? String(object.blockHeight) : '', + value: isSet(object.value) ? String(object.value) : '', + }; + }, + toJSON(message: QueryCapDataResponse): unknown { + const obj: any = {}; + message.blockHeight !== undefined && + (obj.blockHeight = message.blockHeight); + message.value !== undefined && (obj.value = message.value); + return obj; + }, + fromPartial(object: Partial): QueryCapDataResponse { + const message = createBaseQueryCapDataResponse(); + message.blockHeight = object.blockHeight ?? ''; + message.value = object.value ?? 
''; + return message; + }, + fromAmino(object: QueryCapDataResponseAmino): QueryCapDataResponse { + const message = createBaseQueryCapDataResponse(); + if (object.block_height !== undefined && object.block_height !== null) { + message.blockHeight = object.block_height; + } + if (object.value !== undefined && object.value !== null) { + message.value = object.value; + } + return message; + }, + toAmino(message: QueryCapDataResponse): QueryCapDataResponseAmino { + const obj: any = {}; + obj.block_height = message.blockHeight; + obj.value = message.value; + return obj; + }, + fromAminoMsg(object: QueryCapDataResponseAminoMsg): QueryCapDataResponse { + return QueryCapDataResponse.fromAmino(object.value); + }, + fromProtoMsg(message: QueryCapDataResponseProtoMsg): QueryCapDataResponse { + return QueryCapDataResponse.decode(message.value); + }, + toProto(message: QueryCapDataResponse): Uint8Array { + return QueryCapDataResponse.encode(message).finish(); + }, + toProtoMsg(message: QueryCapDataResponse): QueryCapDataResponseProtoMsg { + return { + typeUrl: '/agoric.vstorage.QueryCapDataResponse', + value: QueryCapDataResponse.encode(message).finish(), + }; + }, +}; +function createBaseQueryChildrenRequest(): QueryChildrenRequest { + return { + path: '', + pagination: undefined, + }; +} +export const QueryChildrenRequest = { + typeUrl: '/agoric.vstorage.QueryChildrenRequest', + encode( + message: QueryChildrenRequest, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.path !== '') { + writer.uint32(10).string(message.path); + } + if (message.pagination !== undefined) { + PageRequest.encode(message.pagination, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): QueryChildrenRequest { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryChildrenRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.path = reader.string(); + break; + case 2: + message.pagination = PageRequest.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): QueryChildrenRequest { + return { + path: isSet(object.path) ? String(object.path) : '', + pagination: isSet(object.pagination) + ? PageRequest.fromJSON(object.pagination) + : undefined, + }; + }, + toJSON(message: QueryChildrenRequest): unknown { + const obj: any = {}; + message.path !== undefined && (obj.path = message.path); + message.pagination !== undefined && + (obj.pagination = message.pagination + ? PageRequest.toJSON(message.pagination) + : undefined); + return obj; + }, + fromPartial(object: Partial): QueryChildrenRequest { + const message = createBaseQueryChildrenRequest(); + message.path = object.path ?? ''; + message.pagination = + object.pagination !== undefined && object.pagination !== null + ? 
PageRequest.fromPartial(object.pagination) + : undefined; + return message; + }, + fromAmino(object: QueryChildrenRequestAmino): QueryChildrenRequest { + const message = createBaseQueryChildrenRequest(); + if (object.path !== undefined && object.path !== null) { + message.path = object.path; + } + if (object.pagination !== undefined && object.pagination !== null) { + message.pagination = PageRequest.fromAmino(object.pagination); + } + return message; + }, + toAmino(message: QueryChildrenRequest): QueryChildrenRequestAmino { + const obj: any = {}; + obj.path = message.path; + obj.pagination = message.pagination + ? PageRequest.toAmino(message.pagination) + : undefined; + return obj; + }, + fromAminoMsg(object: QueryChildrenRequestAminoMsg): QueryChildrenRequest { + return QueryChildrenRequest.fromAmino(object.value); + }, + fromProtoMsg(message: QueryChildrenRequestProtoMsg): QueryChildrenRequest { + return QueryChildrenRequest.decode(message.value); + }, + toProto(message: QueryChildrenRequest): Uint8Array { + return QueryChildrenRequest.encode(message).finish(); + }, + toProtoMsg(message: QueryChildrenRequest): QueryChildrenRequestProtoMsg { + return { + typeUrl: '/agoric.vstorage.QueryChildrenRequest', + value: QueryChildrenRequest.encode(message).finish(), + }; + }, +}; +function createBaseQueryChildrenResponse(): QueryChildrenResponse { + return { + children: [], + pagination: undefined, + }; +} +export const QueryChildrenResponse = { + typeUrl: '/agoric.vstorage.QueryChildrenResponse', + encode( + message: QueryChildrenResponse, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + for (const v of message.children) { + writer.uint32(10).string(v!); + } + if (message.pagination !== undefined) { + PageResponse.encode( + message.pagination, + writer.uint32(18).fork(), + ).ldelim(); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): QueryChildrenResponse { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseQueryChildrenResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.children.push(reader.string()); + break; + case 2: + message.pagination = PageResponse.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): QueryChildrenResponse { + return { + children: Array.isArray(object?.children) + ? object.children.map((e: any) => String(e)) + : [], + pagination: isSet(object.pagination) + ? PageResponse.fromJSON(object.pagination) + : undefined, + }; + }, + toJSON(message: QueryChildrenResponse): unknown { + const obj: any = {}; + if (message.children) { + obj.children = message.children.map(e => e); + } else { + obj.children = []; + } + message.pagination !== undefined && + (obj.pagination = message.pagination + ? PageResponse.toJSON(message.pagination) + : undefined); + return obj; + }, + fromPartial(object: Partial): QueryChildrenResponse { + const message = createBaseQueryChildrenResponse(); + message.children = object.children?.map(e => e) || []; + message.pagination = + object.pagination !== undefined && object.pagination !== null + ? 
PageResponse.fromPartial(object.pagination) + : undefined; + return message; + }, + fromAmino(object: QueryChildrenResponseAmino): QueryChildrenResponse { + const message = createBaseQueryChildrenResponse(); + message.children = object.children?.map(e => e) || []; + if (object.pagination !== undefined && object.pagination !== null) { + message.pagination = PageResponse.fromAmino(object.pagination); + } + return message; + }, + toAmino(message: QueryChildrenResponse): QueryChildrenResponseAmino { + const obj: any = {}; + if (message.children) { + obj.children = message.children.map(e => e); + } else { + obj.children = []; + } + obj.pagination = message.pagination + ? PageResponse.toAmino(message.pagination) + : undefined; + return obj; + }, + fromAminoMsg(object: QueryChildrenResponseAminoMsg): QueryChildrenResponse { + return QueryChildrenResponse.fromAmino(object.value); + }, + fromProtoMsg(message: QueryChildrenResponseProtoMsg): QueryChildrenResponse { + return QueryChildrenResponse.decode(message.value); + }, + toProto(message: QueryChildrenResponse): Uint8Array { + return QueryChildrenResponse.encode(message).finish(); + }, + toProtoMsg(message: QueryChildrenResponse): QueryChildrenResponseProtoMsg { + return { + typeUrl: '/agoric.vstorage.QueryChildrenResponse', + value: QueryChildrenResponse.encode(message).finish(), + }; + }, +}; diff --git a/packages/cosmic-proto/src/codegen/agoric/vstorage/vstorage.ts b/packages/cosmic-proto/src/codegen/agoric/vstorage/vstorage.ts new file mode 100644 index 00000000000..cc4767f506d --- /dev/null +++ b/packages/cosmic-proto/src/codegen/agoric/vstorage/vstorage.ts @@ -0,0 +1,214 @@ +//@ts-nocheck +import { BinaryReader, BinaryWriter } from '../../binary.js'; +import { isSet } from '../../helpers.js'; +/** Data is the vstorage node data. */ +export interface Data { + value: string; +} +export interface DataProtoMsg { + typeUrl: '/agoric.vstorage.Data'; + value: Uint8Array; +} +/** Data is the vstorage node data. */ +export interface DataAmino { + value?: string; +} +export interface DataAminoMsg { + type: '/agoric.vstorage.Data'; + value: DataAmino; +} +/** Data is the vstorage node data. */ +export interface DataSDKType { + value: string; +} +/** + * Children are the immediate names (just one level deep) of subnodes leading to + * more data from a given vstorage node. + */ +export interface Children { + children: string[]; +} +export interface ChildrenProtoMsg { + typeUrl: '/agoric.vstorage.Children'; + value: Uint8Array; +} +/** + * Children are the immediate names (just one level deep) of subnodes leading to + * more data from a given vstorage node. + */ +export interface ChildrenAmino { + children?: string[]; +} +export interface ChildrenAminoMsg { + type: '/agoric.vstorage.Children'; + value: ChildrenAmino; +} +/** + * Children are the immediate names (just one level deep) of subnodes leading to + * more data from a given vstorage node. + */ +export interface ChildrenSDKType { + children: string[]; +} +function createBaseData(): Data { + return { + value: '', + }; +} +export const Data = { + typeUrl: '/agoric.vstorage.Data', + encode( + message: Data, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.value !== '') { + writer.uint32(10).string(message.value); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): Data { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseData(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.value = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): Data { + return { + value: isSet(object.value) ? String(object.value) : '', + }; + }, + toJSON(message: Data): unknown { + const obj: any = {}; + message.value !== undefined && (obj.value = message.value); + return obj; + }, + fromPartial(object: Partial): Data { + const message = createBaseData(); + message.value = object.value ?? ''; + return message; + }, + fromAmino(object: DataAmino): Data { + const message = createBaseData(); + if (object.value !== undefined && object.value !== null) { + message.value = object.value; + } + return message; + }, + toAmino(message: Data): DataAmino { + const obj: any = {}; + obj.value = message.value; + return obj; + }, + fromAminoMsg(object: DataAminoMsg): Data { + return Data.fromAmino(object.value); + }, + fromProtoMsg(message: DataProtoMsg): Data { + return Data.decode(message.value); + }, + toProto(message: Data): Uint8Array { + return Data.encode(message).finish(); + }, + toProtoMsg(message: Data): DataProtoMsg { + return { + typeUrl: '/agoric.vstorage.Data', + value: Data.encode(message).finish(), + }; + }, +}; +function createBaseChildren(): Children { + return { + children: [], + }; +} +export const Children = { + typeUrl: '/agoric.vstorage.Children', + encode( + message: Children, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + for (const v of message.children) { + writer.uint32(10).string(v!); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): Children { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseChildren(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.children.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): Children { + return { + children: Array.isArray(object?.children) + ? 
object.children.map((e: any) => String(e)) + : [], + }; + }, + toJSON(message: Children): unknown { + const obj: any = {}; + if (message.children) { + obj.children = message.children.map(e => e); + } else { + obj.children = []; + } + return obj; + }, + fromPartial(object: Partial): Children { + const message = createBaseChildren(); + message.children = object.children?.map(e => e) || []; + return message; + }, + fromAmino(object: ChildrenAmino): Children { + const message = createBaseChildren(); + message.children = object.children?.map(e => e) || []; + return message; + }, + toAmino(message: Children): ChildrenAmino { + const obj: any = {}; + if (message.children) { + obj.children = message.children.map(e => e); + } else { + obj.children = []; + } + return obj; + }, + fromAminoMsg(object: ChildrenAminoMsg): Children { + return Children.fromAmino(object.value); + }, + fromProtoMsg(message: ChildrenProtoMsg): Children { + return Children.decode(message.value); + }, + toProto(message: Children): Uint8Array { + return Children.encode(message).finish(); + }, + toProtoMsg(message: Children): ChildrenProtoMsg { + return { + typeUrl: '/agoric.vstorage.Children', + value: Children.encode(message).finish(), + }; + }, +}; diff --git a/packages/cosmic-proto/src/codegen/binary.ts b/packages/cosmic-proto/src/codegen/binary.ts new file mode 100644 index 00000000000..0a087cd7587 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/binary.ts @@ -0,0 +1,534 @@ +//@ts-nocheck +/** + * This file and any referenced files were automatically generated by @cosmology/telescope@1.4.12 + * DO NOT MODIFY BY HAND. Instead, download the latest proto files for your chain + * and run the transpile command or yarn proto command to regenerate this bundle. + */ + +// Copyright (c) 2016, Daniel Wirtz All rights reserved. + +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: + +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution. +// * Neither the name of its author, nor the names of its contributors +// may be used to endorse or promote products derived from this software +// without specific prior written permission. + +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// --- + +// Code generated by the command line utilities is owned by the owner +// of the input file used when generating it. This code is not +// standalone and requires a support library to be linked with it. 
This +// support library is itself covered by the above license. + +import { utf8Length, utf8Read, utf8Write } from './utf8.js'; +import { + int64ToString, + readInt32, + readUInt32, + uInt64ToString, + varint32read, + varint64read, + writeVarint32, + writeVarint64, + int64FromString, + int64Length, + writeFixed32, + writeByte, + zzDecode, + zzEncode, +} from './varint.js'; + +export enum WireType { + Varint = 0, + + Fixed64 = 1, + + Bytes = 2, + + Fixed32 = 5, +} + +// Reader +export interface IBinaryReader { + buf: Uint8Array; + pos: number; + type: number; + len: number; + tag(): [number, WireType, number]; + skip(length?: number): this; + skipType(wireType: number): this; + uint32(): number; + int32(): number; + sint32(): number; + fixed32(): number; + sfixed32(): number; + int64(): bigint; + uint64(): bigint; + sint64(): bigint; + fixed64(): bigint; + sfixed64(): bigint; + float(): number; + double(): number; + bool(): boolean; + bytes(): Uint8Array; + string(): string; +} + +export class BinaryReader implements IBinaryReader { + buf: Uint8Array; + pos: number; + type: number; + len: number; + + assertBounds(): void { + if (this.pos > this.len) throw new RangeError('premature EOF'); + } + + constructor(buf?: ArrayLike) { + this.buf = buf ? new Uint8Array(buf) : new Uint8Array(0); + this.pos = 0; + this.type = 0; + this.len = this.buf.length; + } + + tag(): [number, WireType, number] { + const tag = this.uint32(), + fieldNo = tag >>> 3, + wireType = tag & 7; + if (fieldNo <= 0 || wireType < 0 || wireType > 5) + throw new Error( + 'illegal tag: field no ' + fieldNo + ' wire type ' + wireType, + ); + return [fieldNo, wireType, tag]; + } + + skip(length?: number) { + if (typeof length === 'number') { + if (this.pos + length > this.len) throw indexOutOfRange(this, length); + this.pos += length; + } else { + do { + if (this.pos >= this.len) throw indexOutOfRange(this); + } while (this.buf[this.pos++] & 128); + } + return this; + } + + skipType(wireType: number) { + switch (wireType) { + case WireType.Varint: + this.skip(); + break; + case WireType.Fixed64: + this.skip(8); + break; + case WireType.Bytes: + this.skip(this.uint32()); + break; + case 3: + while ((wireType = this.uint32() & 7) !== 4) { + this.skipType(wireType); + } + break; + case WireType.Fixed32: + this.skip(4); + break; + + /* istanbul ignore next */ + default: + throw Error('invalid wire type ' + wireType + ' at offset ' + this.pos); + } + return this; + } + + uint32(): number { + return varint32read.bind(this)(); + } + + int32(): number { + return this.uint32() | 0; + } + + sint32(): number { + const num = this.uint32(); + return num % 2 === 1 ? 
(num + 1) / -2 : num / 2; // zigzag encoding + } + + fixed32(): number { + const val = readUInt32(this.buf, this.pos); + this.pos += 4; + return val; + } + + sfixed32(): number { + const val = readInt32(this.buf, this.pos); + this.pos += 4; + return val; + } + + int64(): bigint { + const [lo, hi] = varint64read.bind(this)(); + return BigInt(int64ToString(lo, hi)); + } + + uint64(): bigint { + const [lo, hi] = varint64read.bind(this)(); + return BigInt(uInt64ToString(lo, hi)); + } + + sint64(): bigint { + let [lo, hi] = varint64read.bind(this)(); + // zig zag + [lo, hi] = zzDecode(lo, hi); + return BigInt(int64ToString(lo, hi)); + } + + fixed64(): bigint { + const lo = this.sfixed32(); + const hi = this.sfixed32(); + return BigInt(uInt64ToString(lo, hi)); + } + sfixed64(): bigint { + const lo = this.sfixed32(); + const hi = this.sfixed32(); + return BigInt(int64ToString(lo, hi)); + } + + float(): number { + throw new Error('float not supported'); + } + + double(): number { + throw new Error('double not supported'); + } + + bool(): boolean { + const [lo, hi] = varint64read.bind(this)(); + return lo !== 0 || hi !== 0; + } + + bytes(): Uint8Array { + const len = this.uint32(), + start = this.pos; + this.pos += len; + this.assertBounds(); + return this.buf.subarray(start, start + len); + } + + string(): string { + const bytes = this.bytes(); + return utf8Read(bytes, 0, bytes.length); + } +} + +// Writer +export interface IBinaryWriter { + len: number; + head: IOp; + tail: IOp; + states: State | null; + finish(): Uint8Array; + fork(): IBinaryWriter; + reset(): IBinaryWriter; + ldelim(): IBinaryWriter; + tag(fieldNo: number, type: WireType): IBinaryWriter; + uint32(value: number): IBinaryWriter; + int32(value: number): IBinaryWriter; + sint32(value: number): IBinaryWriter; + int64(value: string | number | bigint): IBinaryWriter; + uint64: (value: string | number | bigint) => IBinaryWriter; + sint64(value: string | number | bigint): IBinaryWriter; + fixed64(value: string | number | bigint): IBinaryWriter; + sfixed64: (value: string | number | bigint) => IBinaryWriter; + bool(value: boolean): IBinaryWriter; + fixed32(value: number): IBinaryWriter; + sfixed32: (value: number) => IBinaryWriter; + float(value: number): IBinaryWriter; + double(value: number): IBinaryWriter; + bytes(value: Uint8Array): IBinaryWriter; + string(value: string): IBinaryWriter; +} + +interface IOp { + len: number; + next?: IOp; + proceed(buf: Uint8Array | number[], pos: number): void; +} + +class Op implements IOp { + fn?: ((val: T, buf: Uint8Array | number[], pos: number) => void) | null; + len: number; + val: T; + next?: IOp; + + constructor( + fn: + | (( + val: T, + buf: Uint8Array | number[], + pos: number, + ) => void | undefined | null) + | null, + len: number, + val: T, + ) { + this.fn = fn; + this.len = len; + this.val = val; + } + + proceed(buf: Uint8Array | number[], pos: number) { + if (this.fn) { + this.fn(this.val, buf, pos); + } + } +} + +class State { + head: IOp; + tail: IOp; + len: number; + next: State | null; + + constructor(writer: BinaryWriter) { + this.head = writer.head; + this.tail = writer.tail; + this.len = writer.len; + this.next = writer.states; + } +} + +export class BinaryWriter implements IBinaryWriter { + len = 0; + head: IOp; + tail: IOp; + states: State | null; + + constructor() { + this.head = new Op(null, 0, 0); + this.tail = this.head; + this.states = null; + } + + static create() { + return new BinaryWriter(); + } + + static alloc(size: number): Uint8Array | number[] { + if (typeof 
Uint8Array !== 'undefined') { + return pool( + size => new Uint8Array(size), + Uint8Array.prototype.subarray, + )(size); + } else { + return new Array(size); + } + } + + private _push( + fn: (val: T, buf: Uint8Array | number[], pos: number) => void, + len: number, + val: T, + ) { + this.tail = this.tail.next = new Op(fn, len, val); + this.len += len; + return this; + } + + finish(): Uint8Array { + let head = this.head.next, + pos = 0; + const buf = BinaryWriter.alloc(this.len); + while (head) { + head.proceed(buf, pos); + pos += head.len; + head = head.next; + } + return buf as Uint8Array; + } + + fork(): BinaryWriter { + this.states = new State(this); + this.head = this.tail = new Op(null, 0, 0); + this.len = 0; + return this; + } + + reset(): BinaryWriter { + if (this.states) { + this.head = this.states.head; + this.tail = this.states.tail; + this.len = this.states.len; + this.states = this.states.next; + } else { + this.head = this.tail = new Op(null, 0, 0); + this.len = 0; + } + return this; + } + + ldelim(): BinaryWriter { + const head = this.head, + tail = this.tail, + len = this.len; + this.reset().uint32(len); + if (len) { + this.tail.next = head.next; // skip noop + this.tail = tail; + this.len += len; + } + return this; + } + + tag(fieldNo: number, type: WireType): BinaryWriter { + return this.uint32(((fieldNo << 3) | type) >>> 0); + } + + uint32(value: number): BinaryWriter { + this.len += (this.tail = this.tail.next = + new Op( + writeVarint32, + (value = value >>> 0) < 128 + ? 1 + : value < 16384 + ? 2 + : value < 2097152 + ? 3 + : value < 268435456 + ? 4 + : 5, + value, + )).len; + return this; + } + + int32(value: number): BinaryWriter { + return value < 0 + ? this._push(writeVarint64, 10, int64FromString(value.toString())) // 10 bytes per spec + : this.uint32(value); + } + + sint32(value: number): BinaryWriter { + return this.uint32(((value << 1) ^ (value >> 31)) >>> 0); + } + + int64(value: string | number | bigint): BinaryWriter { + const { lo, hi } = int64FromString(value.toString()); + return this._push(writeVarint64, int64Length(lo, hi), { lo, hi }); + } + + // uint64 is the same with int64 + uint64 = BinaryWriter.prototype.int64; + + sint64(value: string | number | bigint): BinaryWriter { + let { lo, hi } = int64FromString(value.toString()); + // zig zag + [lo, hi] = zzEncode(lo, hi); + return this._push(writeVarint64, int64Length(lo, hi), { lo, hi }); + } + + fixed64(value: string | number | bigint): BinaryWriter { + const { lo, hi } = int64FromString(value.toString()); + return this._push(writeFixed32, 4, lo)._push(writeFixed32, 4, hi); + } + + // sfixed64 is the same with fixed64 + sfixed64 = BinaryWriter.prototype.fixed64; + + bool(value: boolean): BinaryWriter { + return this._push(writeByte, 1, value ? 1 : 0); + } + + fixed32(value: number): BinaryWriter { + return this._push(writeFixed32, 4, value >>> 0); + } + + // sfixed32 is the same with fixed32 + sfixed32 = BinaryWriter.prototype.fixed32; + + float(value: number): BinaryWriter { + throw new Error('float not supported' + value); + } + + double(value: number): BinaryWriter { + throw new Error('double not supported' + value); + } + + bytes(value: Uint8Array): BinaryWriter { + const len = value.length >>> 0; + if (!len) return this._push(writeByte, 1, 0); + return this.uint32(len)._push(writeBytes, len, value); + } + + string(value: string): BinaryWriter { + const len = utf8Length(value); + return len + ? 
this.uint32(len)._push(utf8Write, len, value) + : this._push(writeByte, 1, 0); + } +} + +function writeBytes( + val: Uint8Array | number[], + buf: Uint8Array | number[], + pos: number, +) { + if (typeof Uint8Array !== 'undefined') { + (buf as Uint8Array).set(val, pos); + } else { + for (let i = 0; i < val.length; ++i) buf[pos + i] = val[i]; + } +} + +function pool( + alloc: (size: number) => Uint8Array, + slice: (begin?: number, end?: number) => Uint8Array, + size?: number, +): (size: number) => Uint8Array { + const SIZE = size || 8192; + const MAX = SIZE >>> 1; + let slab: Uint8Array | null = null; + let offset = SIZE; + return function pool_alloc(size): Uint8Array { + if (size < 1 || size > MAX) return alloc(size); + if (offset + size > SIZE) { + slab = alloc(SIZE); + offset = 0; + } + const buf: Uint8Array = slice.call(slab, offset, (offset += size)); + if (offset & 7) + // align to 32 bit + offset = (offset | 7) + 1; + return buf; + }; +} + +function indexOutOfRange(reader: BinaryReader, writeLength?: number) { + return RangeError( + 'index out of range: ' + + reader.pos + + ' + ' + + (writeLength || 1) + + ' > ' + + reader.len, + ); +} diff --git a/packages/cosmic-proto/src/codegen/cosmos/base/query/v1beta1/pagination.ts b/packages/cosmic-proto/src/codegen/cosmos/base/query/v1beta1/pagination.ts new file mode 100644 index 00000000000..936758b145b --- /dev/null +++ b/packages/cosmic-proto/src/codegen/cosmos/base/query/v1beta1/pagination.ts @@ -0,0 +1,438 @@ +//@ts-nocheck +import { BinaryReader, BinaryWriter } from '../../../../binary.js'; +import { + isSet, + bytesFromBase64, + base64FromBytes, +} from '../../../../helpers.js'; +/** + * PageRequest is to be embedded in gRPC request messages for efficient + * pagination. Ex: + * + * message SomeRequest { + * Foo some_parameter = 1; + * PageRequest pagination = 2; + * } + */ +export interface PageRequest { + /** + * key is a value returned in PageResponse.next_key to begin + * querying the next page most efficiently. Only one of offset or key + * should be set. + */ + key: Uint8Array; + /** + * offset is a numeric offset that can be used when key is unavailable. + * It is less efficient than using key. Only one of offset or key should + * be set. + */ + offset: bigint; + /** + * limit is the total number of results to be returned in the result page. + * If left empty it will default to a value to be set by each app. + */ + limit: bigint; + /** + * count_total is set to true to indicate that the result set should include + * a count of the total number of items available for pagination in UIs. + * count_total is only respected when offset is used. It is ignored when key + * is set. + */ + countTotal: boolean; + /** + * reverse is set to true if results are to be returned in the descending order. + * + * Since: cosmos-sdk 0.43 + */ + reverse: boolean; +} +export interface PageRequestProtoMsg { + typeUrl: '/cosmos.base.query.v1beta1.PageRequest'; + value: Uint8Array; +} +/** + * PageRequest is to be embedded in gRPC request messages for efficient + * pagination. Ex: + * + * message SomeRequest { + * Foo some_parameter = 1; + * PageRequest pagination = 2; + * } + */ +export interface PageRequestAmino { + /** + * key is a value returned in PageResponse.next_key to begin + * querying the next page most efficiently. Only one of offset or key + * should be set. + */ + key?: string; + /** + * offset is a numeric offset that can be used when key is unavailable. + * It is less efficient than using key. Only one of offset or key should + * be set. 
+ */ + offset?: string; + /** + * limit is the total number of results to be returned in the result page. + * If left empty it will default to a value to be set by each app. + */ + limit?: string; + /** + * count_total is set to true to indicate that the result set should include + * a count of the total number of items available for pagination in UIs. + * count_total is only respected when offset is used. It is ignored when key + * is set. + */ + count_total?: boolean; + /** + * reverse is set to true if results are to be returned in the descending order. + * + * Since: cosmos-sdk 0.43 + */ + reverse?: boolean; +} +export interface PageRequestAminoMsg { + type: 'cosmos-sdk/PageRequest'; + value: PageRequestAmino; +} +/** + * PageRequest is to be embedded in gRPC request messages for efficient + * pagination. Ex: + * + * message SomeRequest { + * Foo some_parameter = 1; + * PageRequest pagination = 2; + * } + */ +export interface PageRequestSDKType { + key: Uint8Array; + offset: bigint; + limit: bigint; + count_total: boolean; + reverse: boolean; +} +/** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ +export interface PageResponse { + /** + * next_key is the key to be passed to PageRequest.key to + * query the next page most efficiently. It will be empty if + * there are no more results. + */ + nextKey: Uint8Array; + /** + * total is total number of results available if PageRequest.count_total + * was set, its value is undefined otherwise + */ + total: bigint; +} +export interface PageResponseProtoMsg { + typeUrl: '/cosmos.base.query.v1beta1.PageResponse'; + value: Uint8Array; +} +/** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. + * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ +export interface PageResponseAmino { + /** + * next_key is the key to be passed to PageRequest.key to + * query the next page most efficiently. It will be empty if + * there are no more results. + */ + next_key?: string; + /** + * total is total number of results available if PageRequest.count_total + * was set, its value is undefined otherwise + */ + total?: string; +} +export interface PageResponseAminoMsg { + type: 'cosmos-sdk/PageResponse'; + value: PageResponseAmino; +} +/** + * PageResponse is to be embedded in gRPC response messages where the + * corresponding request message has used PageRequest. 
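+ */
+// Illustrative sketch, not generated output: how the BinaryWriter/BinaryReader
+// imported above are driven by the generated codecs in this file. For field
+// number 1 with wire type 2 (length-delimited), the tag is (1 << 3) | 2 = 10,
+// which is why the encoders below call writer.uint32(10) before writing field 1.
+// The value written here is made up for the example.
+const exampleWriter = BinaryWriter.create();
+exampleWriter.uint32(10).string('next-key'); // tag for field 1, then the payload
+const exampleBytes = exampleWriter.finish();
+const exampleReader = new BinaryReader(exampleBytes);
+const exampleTag = exampleReader.uint32();
+if (exampleTag >>> 3 === 1) {
+  // string() reads the length prefix and decodes the UTF-8 payload
+  console.log(exampleReader.string()); // -> 'next-key'
+}
+/**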
+ * + * message SomeResponse { + * repeated Bar results = 1; + * PageResponse page = 2; + * } + */ +export interface PageResponseSDKType { + next_key: Uint8Array; + total: bigint; +} +function createBasePageRequest(): PageRequest { + return { + key: new Uint8Array(), + offset: BigInt(0), + limit: BigInt(0), + countTotal: false, + reverse: false, + }; +} +export const PageRequest = { + typeUrl: '/cosmos.base.query.v1beta1.PageRequest', + encode( + message: PageRequest, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.key.length !== 0) { + writer.uint32(10).bytes(message.key); + } + if (message.offset !== BigInt(0)) { + writer.uint32(16).uint64(message.offset); + } + if (message.limit !== BigInt(0)) { + writer.uint32(24).uint64(message.limit); + } + if (message.countTotal === true) { + writer.uint32(32).bool(message.countTotal); + } + if (message.reverse === true) { + writer.uint32(40).bool(message.reverse); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): PageRequest { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePageRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.key = reader.bytes(); + break; + case 2: + message.offset = reader.uint64(); + break; + case 3: + message.limit = reader.uint64(); + break; + case 4: + message.countTotal = reader.bool(); + break; + case 5: + message.reverse = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): PageRequest { + return { + key: isSet(object.key) ? bytesFromBase64(object.key) : new Uint8Array(), + offset: isSet(object.offset) + ? BigInt(object.offset.toString()) + : BigInt(0), + limit: isSet(object.limit) ? BigInt(object.limit.toString()) : BigInt(0), + countTotal: isSet(object.countTotal) ? Boolean(object.countTotal) : false, + reverse: isSet(object.reverse) ? Boolean(object.reverse) : false, + }; + }, + toJSON(message: PageRequest): unknown { + const obj: any = {}; + message.key !== undefined && + (obj.key = base64FromBytes( + message.key !== undefined ? message.key : new Uint8Array(), + )); + message.offset !== undefined && + (obj.offset = (message.offset || BigInt(0)).toString()); + message.limit !== undefined && + (obj.limit = (message.limit || BigInt(0)).toString()); + message.countTotal !== undefined && (obj.countTotal = message.countTotal); + message.reverse !== undefined && (obj.reverse = message.reverse); + return obj; + }, + fromPartial(object: Partial): PageRequest { + const message = createBasePageRequest(); + message.key = object.key ?? new Uint8Array(); + message.offset = + object.offset !== undefined && object.offset !== null + ? BigInt(object.offset.toString()) + : BigInt(0); + message.limit = + object.limit !== undefined && object.limit !== null + ? BigInt(object.limit.toString()) + : BigInt(0); + message.countTotal = object.countTotal ?? false; + message.reverse = object.reverse ?? 
false; + return message; + }, + fromAmino(object: PageRequestAmino): PageRequest { + const message = createBasePageRequest(); + if (object.key !== undefined && object.key !== null) { + message.key = bytesFromBase64(object.key); + } + if (object.offset !== undefined && object.offset !== null) { + message.offset = BigInt(object.offset); + } + if (object.limit !== undefined && object.limit !== null) { + message.limit = BigInt(object.limit); + } + if (object.count_total !== undefined && object.count_total !== null) { + message.countTotal = object.count_total; + } + if (object.reverse !== undefined && object.reverse !== null) { + message.reverse = object.reverse; + } + return message; + }, + toAmino(message: PageRequest): PageRequestAmino { + const obj: any = {}; + obj.key = message.key ? base64FromBytes(message.key) : undefined; + obj.offset = message.offset ? message.offset.toString() : undefined; + obj.limit = message.limit ? message.limit.toString() : undefined; + obj.count_total = message.countTotal; + obj.reverse = message.reverse; + return obj; + }, + fromAminoMsg(object: PageRequestAminoMsg): PageRequest { + return PageRequest.fromAmino(object.value); + }, + toAminoMsg(message: PageRequest): PageRequestAminoMsg { + return { + type: 'cosmos-sdk/PageRequest', + value: PageRequest.toAmino(message), + }; + }, + fromProtoMsg(message: PageRequestProtoMsg): PageRequest { + return PageRequest.decode(message.value); + }, + toProto(message: PageRequest): Uint8Array { + return PageRequest.encode(message).finish(); + }, + toProtoMsg(message: PageRequest): PageRequestProtoMsg { + return { + typeUrl: '/cosmos.base.query.v1beta1.PageRequest', + value: PageRequest.encode(message).finish(), + }; + }, +}; +function createBasePageResponse(): PageResponse { + return { + nextKey: new Uint8Array(), + total: BigInt(0), + }; +} +export const PageResponse = { + typeUrl: '/cosmos.base.query.v1beta1.PageResponse', + encode( + message: PageResponse, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.nextKey.length !== 0) { + writer.uint32(10).bytes(message.nextKey); + } + if (message.total !== BigInt(0)) { + writer.uint32(16).uint64(message.total); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): PageResponse { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePageResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.nextKey = reader.bytes(); + break; + case 2: + message.total = reader.uint64(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): PageResponse { + return { + nextKey: isSet(object.nextKey) + ? bytesFromBase64(object.nextKey) + : new Uint8Array(), + total: isSet(object.total) ? BigInt(object.total.toString()) : BigInt(0), + }; + }, + toJSON(message: PageResponse): unknown { + const obj: any = {}; + message.nextKey !== undefined && + (obj.nextKey = base64FromBytes( + message.nextKey !== undefined ? message.nextKey : new Uint8Array(), + )); + message.total !== undefined && + (obj.total = (message.total || BigInt(0)).toString()); + return obj; + }, + fromPartial(object: Partial): PageResponse { + const message = createBasePageResponse(); + message.nextKey = object.nextKey ?? new Uint8Array(); + message.total = + object.total !== undefined && object.total !== null + ? 
BigInt(object.total.toString()) + : BigInt(0); + return message; + }, + fromAmino(object: PageResponseAmino): PageResponse { + const message = createBasePageResponse(); + if (object.next_key !== undefined && object.next_key !== null) { + message.nextKey = bytesFromBase64(object.next_key); + } + if (object.total !== undefined && object.total !== null) { + message.total = BigInt(object.total); + } + return message; + }, + toAmino(message: PageResponse): PageResponseAmino { + const obj: any = {}; + obj.next_key = message.nextKey + ? base64FromBytes(message.nextKey) + : undefined; + obj.total = message.total ? message.total.toString() : undefined; + return obj; + }, + fromAminoMsg(object: PageResponseAminoMsg): PageResponse { + return PageResponse.fromAmino(object.value); + }, + toAminoMsg(message: PageResponse): PageResponseAminoMsg { + return { + type: 'cosmos-sdk/PageResponse', + value: PageResponse.toAmino(message), + }; + }, + fromProtoMsg(message: PageResponseProtoMsg): PageResponse { + return PageResponse.decode(message.value); + }, + toProto(message: PageResponse): Uint8Array { + return PageResponse.encode(message).finish(); + }, + toProtoMsg(message: PageResponse): PageResponseProtoMsg { + return { + typeUrl: '/cosmos.base.query.v1beta1.PageResponse', + value: PageResponse.encode(message).finish(), + }; + }, +}; diff --git a/packages/cosmic-proto/src/codegen/cosmos/base/v1beta1/coin.ts b/packages/cosmic-proto/src/codegen/cosmos/base/v1beta1/coin.ts new file mode 100644 index 00000000000..b4fc1bc4497 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/cosmos/base/v1beta1/coin.ts @@ -0,0 +1,479 @@ +//@ts-nocheck +import { BinaryReader, BinaryWriter } from '../../../binary.js'; +import { isSet } from '../../../helpers.js'; +/** + * Coin defines a token with a denomination and an amount. + * + * NOTE: The amount field is an Int which implements the custom method + * signatures required by gogoproto. + */ +export interface Coin { + denom: string; + amount: string; +} +export interface CoinProtoMsg { + typeUrl: '/cosmos.base.v1beta1.Coin'; + value: Uint8Array; +} +/** + * Coin defines a token with a denomination and an amount. + * + * NOTE: The amount field is an Int which implements the custom method + * signatures required by gogoproto. + */ +export interface CoinAmino { + denom?: string; + amount?: string; +} +export interface CoinAminoMsg { + type: 'cosmos-sdk/Coin'; + value: CoinAmino; +} +/** + * Coin defines a token with a denomination and an amount. + * + * NOTE: The amount field is an Int which implements the custom method + * signatures required by gogoproto. + */ +export interface CoinSDKType { + denom: string; + amount: string; +} +/** + * DecCoin defines a token with a denomination and a decimal amount. + * + * NOTE: The amount field is an Dec which implements the custom method + * signatures required by gogoproto. + */ +export interface DecCoin { + denom: string; + amount: string; +} +export interface DecCoinProtoMsg { + typeUrl: '/cosmos.base.v1beta1.DecCoin'; + value: Uint8Array; +} +/** + * DecCoin defines a token with a denomination and a decimal amount. + * + * NOTE: The amount field is an Dec which implements the custom method + * signatures required by gogoproto. + */ +export interface DecCoinAmino { + denom?: string; + amount?: string; +} +export interface DecCoinAminoMsg { + type: 'cosmos-sdk/DecCoin'; + value: DecCoinAmino; +} +/** + * DecCoin defines a token with a denomination and a decimal amount. 
+ * + * NOTE: The amount field is an Dec which implements the custom method + * signatures required by gogoproto. + */ +export interface DecCoinSDKType { + denom: string; + amount: string; +} +/** IntProto defines a Protobuf wrapper around an Int object. */ +export interface IntProto { + int: string; +} +export interface IntProtoProtoMsg { + typeUrl: '/cosmos.base.v1beta1.IntProto'; + value: Uint8Array; +} +/** IntProto defines a Protobuf wrapper around an Int object. */ +export interface IntProtoAmino { + int?: string; +} +export interface IntProtoAminoMsg { + type: 'cosmos-sdk/IntProto'; + value: IntProtoAmino; +} +/** IntProto defines a Protobuf wrapper around an Int object. */ +export interface IntProtoSDKType { + int: string; +} +/** DecProto defines a Protobuf wrapper around a Dec object. */ +export interface DecProto { + dec: string; +} +export interface DecProtoProtoMsg { + typeUrl: '/cosmos.base.v1beta1.DecProto'; + value: Uint8Array; +} +/** DecProto defines a Protobuf wrapper around a Dec object. */ +export interface DecProtoAmino { + dec?: string; +} +export interface DecProtoAminoMsg { + type: 'cosmos-sdk/DecProto'; + value: DecProtoAmino; +} +/** DecProto defines a Protobuf wrapper around a Dec object. */ +export interface DecProtoSDKType { + dec: string; +} +function createBaseCoin(): Coin { + return { + denom: '', + amount: '', + }; +} +export const Coin = { + typeUrl: '/cosmos.base.v1beta1.Coin', + encode( + message: Coin, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.denom !== '') { + writer.uint32(10).string(message.denom); + } + if (message.amount !== '') { + writer.uint32(18).string(message.amount); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): Coin { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCoin(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.denom = reader.string(); + break; + case 2: + message.amount = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): Coin { + return { + denom: isSet(object.denom) ? String(object.denom) : '', + amount: isSet(object.amount) ? String(object.amount) : '', + }; + }, + toJSON(message: Coin): unknown { + const obj: any = {}; + message.denom !== undefined && (obj.denom = message.denom); + message.amount !== undefined && (obj.amount = message.amount); + return obj; + }, + fromPartial(object: Partial): Coin { + const message = createBaseCoin(); + message.denom = object.denom ?? ''; + message.amount = object.amount ?? 
''; + return message; + }, + fromAmino(object: CoinAmino): Coin { + const message = createBaseCoin(); + if (object.denom !== undefined && object.denom !== null) { + message.denom = object.denom; + } + if (object.amount !== undefined && object.amount !== null) { + message.amount = object.amount; + } + return message; + }, + toAmino(message: Coin): CoinAmino { + const obj: any = {}; + obj.denom = message.denom; + obj.amount = message.amount; + return obj; + }, + fromAminoMsg(object: CoinAminoMsg): Coin { + return Coin.fromAmino(object.value); + }, + toAminoMsg(message: Coin): CoinAminoMsg { + return { + type: 'cosmos-sdk/Coin', + value: Coin.toAmino(message), + }; + }, + fromProtoMsg(message: CoinProtoMsg): Coin { + return Coin.decode(message.value); + }, + toProto(message: Coin): Uint8Array { + return Coin.encode(message).finish(); + }, + toProtoMsg(message: Coin): CoinProtoMsg { + return { + typeUrl: '/cosmos.base.v1beta1.Coin', + value: Coin.encode(message).finish(), + }; + }, +}; +function createBaseDecCoin(): DecCoin { + return { + denom: '', + amount: '', + }; +} +export const DecCoin = { + typeUrl: '/cosmos.base.v1beta1.DecCoin', + encode( + message: DecCoin, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.denom !== '') { + writer.uint32(10).string(message.denom); + } + if (message.amount !== '') { + writer.uint32(18).string(message.amount); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): DecCoin { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDecCoin(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.denom = reader.string(); + break; + case 2: + message.amount = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): DecCoin { + return { + denom: isSet(object.denom) ? String(object.denom) : '', + amount: isSet(object.amount) ? String(object.amount) : '', + }; + }, + toJSON(message: DecCoin): unknown { + const obj: any = {}; + message.denom !== undefined && (obj.denom = message.denom); + message.amount !== undefined && (obj.amount = message.amount); + return obj; + }, + fromPartial(object: Partial): DecCoin { + const message = createBaseDecCoin(); + message.denom = object.denom ?? ''; + message.amount = object.amount ?? 
''; + return message; + }, + fromAmino(object: DecCoinAmino): DecCoin { + const message = createBaseDecCoin(); + if (object.denom !== undefined && object.denom !== null) { + message.denom = object.denom; + } + if (object.amount !== undefined && object.amount !== null) { + message.amount = object.amount; + } + return message; + }, + toAmino(message: DecCoin): DecCoinAmino { + const obj: any = {}; + obj.denom = message.denom; + obj.amount = message.amount; + return obj; + }, + fromAminoMsg(object: DecCoinAminoMsg): DecCoin { + return DecCoin.fromAmino(object.value); + }, + toAminoMsg(message: DecCoin): DecCoinAminoMsg { + return { + type: 'cosmos-sdk/DecCoin', + value: DecCoin.toAmino(message), + }; + }, + fromProtoMsg(message: DecCoinProtoMsg): DecCoin { + return DecCoin.decode(message.value); + }, + toProto(message: DecCoin): Uint8Array { + return DecCoin.encode(message).finish(); + }, + toProtoMsg(message: DecCoin): DecCoinProtoMsg { + return { + typeUrl: '/cosmos.base.v1beta1.DecCoin', + value: DecCoin.encode(message).finish(), + }; + }, +}; +function createBaseIntProto(): IntProto { + return { + int: '', + }; +} +export const IntProto = { + typeUrl: '/cosmos.base.v1beta1.IntProto', + encode( + message: IntProto, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.int !== '') { + writer.uint32(10).string(message.int); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): IntProto { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseIntProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.int = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): IntProto { + return { + int: isSet(object.int) ? String(object.int) : '', + }; + }, + toJSON(message: IntProto): unknown { + const obj: any = {}; + message.int !== undefined && (obj.int = message.int); + return obj; + }, + fromPartial(object: Partial): IntProto { + const message = createBaseIntProto(); + message.int = object.int ?? 
''; + return message; + }, + fromAmino(object: IntProtoAmino): IntProto { + const message = createBaseIntProto(); + if (object.int !== undefined && object.int !== null) { + message.int = object.int; + } + return message; + }, + toAmino(message: IntProto): IntProtoAmino { + const obj: any = {}; + obj.int = message.int; + return obj; + }, + fromAminoMsg(object: IntProtoAminoMsg): IntProto { + return IntProto.fromAmino(object.value); + }, + toAminoMsg(message: IntProto): IntProtoAminoMsg { + return { + type: 'cosmos-sdk/IntProto', + value: IntProto.toAmino(message), + }; + }, + fromProtoMsg(message: IntProtoProtoMsg): IntProto { + return IntProto.decode(message.value); + }, + toProto(message: IntProto): Uint8Array { + return IntProto.encode(message).finish(); + }, + toProtoMsg(message: IntProto): IntProtoProtoMsg { + return { + typeUrl: '/cosmos.base.v1beta1.IntProto', + value: IntProto.encode(message).finish(), + }; + }, +}; +function createBaseDecProto(): DecProto { + return { + dec: '', + }; +} +export const DecProto = { + typeUrl: '/cosmos.base.v1beta1.DecProto', + encode( + message: DecProto, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.dec !== '') { + writer.uint32(10).string(message.dec); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): DecProto { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDecProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.dec = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): DecProto { + return { + dec: isSet(object.dec) ? String(object.dec) : '', + }; + }, + toJSON(message: DecProto): unknown { + const obj: any = {}; + message.dec !== undefined && (obj.dec = message.dec); + return obj; + }, + fromPartial(object: Partial): DecProto { + const message = createBaseDecProto(); + message.dec = object.dec ?? 
''; + return message; + }, + fromAmino(object: DecProtoAmino): DecProto { + const message = createBaseDecProto(); + if (object.dec !== undefined && object.dec !== null) { + message.dec = object.dec; + } + return message; + }, + toAmino(message: DecProto): DecProtoAmino { + const obj: any = {}; + obj.dec = message.dec; + return obj; + }, + fromAminoMsg(object: DecProtoAminoMsg): DecProto { + return DecProto.fromAmino(object.value); + }, + toAminoMsg(message: DecProto): DecProtoAminoMsg { + return { + type: 'cosmos-sdk/DecProto', + value: DecProto.toAmino(message), + }; + }, + fromProtoMsg(message: DecProtoProtoMsg): DecProto { + return DecProto.decode(message.value); + }, + toProto(message: DecProto): Uint8Array { + return DecProto.encode(message).finish(); + }, + toProtoMsg(message: DecProto): DecProtoProtoMsg { + return { + typeUrl: '/cosmos.base.v1beta1.DecProto', + value: DecProto.encode(message).finish(), + }; + }, +}; diff --git a/packages/cosmic-proto/src/codegen/cosmos/bundle.ts b/packages/cosmic-proto/src/codegen/cosmos/bundle.ts new file mode 100644 index 00000000000..e5ba386ee84 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/cosmos/bundle.ts @@ -0,0 +1,21 @@ +//@ts-nocheck +import * as _16 from './base/query/v1beta1/pagination.js'; +import * as _17 from './base/v1beta1/coin.js'; +import * as _18 from './upgrade/v1beta1/upgrade.js'; +export namespace cosmos { + export namespace base { + export namespace query { + export const v1beta1 = { + ..._16, + }; + } + export const v1beta1 = { + ..._17, + }; + } + export namespace upgrade { + export const v1beta1 = { + ..._18, + }; + } +} diff --git a/packages/cosmic-proto/src/codegen/cosmos/upgrade/v1beta1/upgrade.ts b/packages/cosmic-proto/src/codegen/cosmos/upgrade/v1beta1/upgrade.ts new file mode 100644 index 00000000000..1116515afac --- /dev/null +++ b/packages/cosmic-proto/src/codegen/cosmos/upgrade/v1beta1/upgrade.ts @@ -0,0 +1,748 @@ +//@ts-nocheck +import { Timestamp } from '../../../google/protobuf/timestamp.js'; +import { Any, AnyAmino, AnySDKType } from '../../../google/protobuf/any.js'; +import { BinaryReader, BinaryWriter } from '../../../binary.js'; +import { + toTimestamp, + fromTimestamp, + isSet, + fromJsonTimestamp, +} from '../../../helpers.js'; +/** Plan specifies information about a planned upgrade and when it should occur. */ +export interface Plan { + /** + * Sets the name for the upgrade. This name will be used by the upgraded + * version of the software to apply any special "on-upgrade" commands during + * the first BeginBlock method after the upgrade is applied. It is also used + * to detect whether a software version can handle a given upgrade. If no + * upgrade handler with this name has been set in the software, it will be + * assumed that the software is out-of-date when the upgrade Time or Height is + * reached and the software will exit. + */ + name: string; + /** + * Deprecated: Time based upgrades have been deprecated. Time based upgrade logic + * has been removed from the SDK. + * If this field is not empty, an error will be thrown. + */ + /** @deprecated */ + time: Date; + /** + * The height at which the upgrade must be performed. + * Only used if Time is not set. + */ + height: bigint; + /** + * Any application specific upgrade info to be included on-chain + * such as a git commit that validators could automatically upgrade to + */ + info: string; + /** + * Deprecated: UpgradedClientState field has been deprecated. IBC upgrade logic has been + * moved to the IBC module in the sub module 02-client. 
+ * If this field is not empty, an error will be thrown. + */ + /** @deprecated */ + upgradedClientState?: Any; +} +export interface PlanProtoMsg { + typeUrl: '/cosmos.upgrade.v1beta1.Plan'; + value: Uint8Array; +} +/** Plan specifies information about a planned upgrade and when it should occur. */ +export interface PlanAmino { + /** + * Sets the name for the upgrade. This name will be used by the upgraded + * version of the software to apply any special "on-upgrade" commands during + * the first BeginBlock method after the upgrade is applied. It is also used + * to detect whether a software version can handle a given upgrade. If no + * upgrade handler with this name has been set in the software, it will be + * assumed that the software is out-of-date when the upgrade Time or Height is + * reached and the software will exit. + */ + name?: string; + /** + * Deprecated: Time based upgrades have been deprecated. Time based upgrade logic + * has been removed from the SDK. + * If this field is not empty, an error will be thrown. + */ + /** @deprecated */ + time?: string; + /** + * The height at which the upgrade must be performed. + * Only used if Time is not set. + */ + height?: string; + /** + * Any application specific upgrade info to be included on-chain + * such as a git commit that validators could automatically upgrade to + */ + info?: string; + /** + * Deprecated: UpgradedClientState field has been deprecated. IBC upgrade logic has been + * moved to the IBC module in the sub module 02-client. + * If this field is not empty, an error will be thrown. + */ + /** @deprecated */ + upgraded_client_state?: AnyAmino; +} +export interface PlanAminoMsg { + type: 'cosmos-sdk/Plan'; + value: PlanAmino; +} +/** Plan specifies information about a planned upgrade and when it should occur. */ +export interface PlanSDKType { + name: string; + /** @deprecated */ + time: Date; + height: bigint; + info: string; + /** @deprecated */ + upgraded_client_state?: AnySDKType; +} +/** + * SoftwareUpgradeProposal is a gov Content type for initiating a software + * upgrade. + * Deprecated: This legacy proposal is deprecated in favor of Msg-based gov + * proposals, see MsgSoftwareUpgrade. + */ +/** @deprecated */ +export interface SoftwareUpgradeProposal { + $typeUrl?: '/cosmos.upgrade.v1beta1.SoftwareUpgradeProposal'; + title: string; + description: string; + plan: Plan; +} +export interface SoftwareUpgradeProposalProtoMsg { + typeUrl: '/cosmos.upgrade.v1beta1.SoftwareUpgradeProposal'; + value: Uint8Array; +} +/** + * SoftwareUpgradeProposal is a gov Content type for initiating a software + * upgrade. + * Deprecated: This legacy proposal is deprecated in favor of Msg-based gov + * proposals, see MsgSoftwareUpgrade. + */ +/** @deprecated */ +export interface SoftwareUpgradeProposalAmino { + title?: string; + description?: string; + plan?: PlanAmino; +} +export interface SoftwareUpgradeProposalAminoMsg { + type: 'cosmos-sdk/SoftwareUpgradeProposal'; + value: SoftwareUpgradeProposalAmino; +} +/** + * SoftwareUpgradeProposal is a gov Content type for initiating a software + * upgrade. + * Deprecated: This legacy proposal is deprecated in favor of Msg-based gov + * proposals, see MsgSoftwareUpgrade. + */ +/** @deprecated */ +export interface SoftwareUpgradeProposalSDKType { + $typeUrl?: '/cosmos.upgrade.v1beta1.SoftwareUpgradeProposal'; + title: string; + description: string; + plan: PlanSDKType; +} +/** + * CancelSoftwareUpgradeProposal is a gov Content type for cancelling a software + * upgrade. 
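+ */
+// Illustrative sketch, not generated output: the round-trip API shared by every
+// codec in this patch, shown here with Coin because its codec is already
+// complete at this point; Plan and the proposal types below expose the same
+// surface (fromPartial, encode().finish(), decode, toAmino). The import path
+// and the values are assumptions for the example.
+import { Coin } from '../../base/v1beta1/coin.js';
+const exampleFee = Coin.fromPartial({ denom: 'ubld', amount: '25' });
+const exampleFeeBytes = Coin.encode(exampleFee).finish(); // protobuf wire bytes
+const exampleFeeBack = Coin.decode(exampleFeeBytes); // { denom: 'ubld', amount: '25' }
+const exampleFeeAmino = Coin.toAmino(exampleFeeBack); // Amino JSON shape (all fields optional)
+/**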
+ * Deprecated: This legacy proposal is deprecated in favor of Msg-based gov + * proposals, see MsgCancelUpgrade. + */ +/** @deprecated */ +export interface CancelSoftwareUpgradeProposal { + $typeUrl?: '/cosmos.upgrade.v1beta1.CancelSoftwareUpgradeProposal'; + title: string; + description: string; +} +export interface CancelSoftwareUpgradeProposalProtoMsg { + typeUrl: '/cosmos.upgrade.v1beta1.CancelSoftwareUpgradeProposal'; + value: Uint8Array; +} +/** + * CancelSoftwareUpgradeProposal is a gov Content type for cancelling a software + * upgrade. + * Deprecated: This legacy proposal is deprecated in favor of Msg-based gov + * proposals, see MsgCancelUpgrade. + */ +/** @deprecated */ +export interface CancelSoftwareUpgradeProposalAmino { + title?: string; + description?: string; +} +export interface CancelSoftwareUpgradeProposalAminoMsg { + type: 'cosmos-sdk/CancelSoftwareUpgradeProposal'; + value: CancelSoftwareUpgradeProposalAmino; +} +/** + * CancelSoftwareUpgradeProposal is a gov Content type for cancelling a software + * upgrade. + * Deprecated: This legacy proposal is deprecated in favor of Msg-based gov + * proposals, see MsgCancelUpgrade. + */ +/** @deprecated */ +export interface CancelSoftwareUpgradeProposalSDKType { + $typeUrl?: '/cosmos.upgrade.v1beta1.CancelSoftwareUpgradeProposal'; + title: string; + description: string; +} +/** + * ModuleVersion specifies a module and its consensus version. + * + * Since: cosmos-sdk 0.43 + */ +export interface ModuleVersion { + /** name of the app module */ + name: string; + /** consensus version of the app module */ + version: bigint; +} +export interface ModuleVersionProtoMsg { + typeUrl: '/cosmos.upgrade.v1beta1.ModuleVersion'; + value: Uint8Array; +} +/** + * ModuleVersion specifies a module and its consensus version. + * + * Since: cosmos-sdk 0.43 + */ +export interface ModuleVersionAmino { + /** name of the app module */ + name?: string; + /** consensus version of the app module */ + version?: string; +} +export interface ModuleVersionAminoMsg { + type: 'cosmos-sdk/ModuleVersion'; + value: ModuleVersionAmino; +} +/** + * ModuleVersion specifies a module and its consensus version. + * + * Since: cosmos-sdk 0.43 + */ +export interface ModuleVersionSDKType { + name: string; + version: bigint; +} +function createBasePlan(): Plan { + return { + name: '', + time: new Date(), + height: BigInt(0), + info: '', + upgradedClientState: undefined, + }; +} +export const Plan = { + typeUrl: '/cosmos.upgrade.v1beta1.Plan', + encode( + message: Plan, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.name !== '') { + writer.uint32(10).string(message.name); + } + if (message.time !== undefined) { + Timestamp.encode( + toTimestamp(message.time), + writer.uint32(18).fork(), + ).ldelim(); + } + if (message.height !== BigInt(0)) { + writer.uint32(24).int64(message.height); + } + if (message.info !== '') { + writer.uint32(34).string(message.info); + } + if (message.upgradedClientState !== undefined) { + Any.encode( + message.upgradedClientState, + writer.uint32(42).fork(), + ).ldelim(); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): Plan { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBasePlan(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.time = fromTimestamp( + Timestamp.decode(reader, reader.uint32()), + ); + break; + case 3: + message.height = reader.int64(); + break; + case 4: + message.info = reader.string(); + break; + case 5: + message.upgradedClientState = Any.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): Plan { + return { + name: isSet(object.name) ? String(object.name) : '', + time: isSet(object.time) ? fromJsonTimestamp(object.time) : undefined, + height: isSet(object.height) + ? BigInt(object.height.toString()) + : BigInt(0), + info: isSet(object.info) ? String(object.info) : '', + upgradedClientState: isSet(object.upgradedClientState) + ? Any.fromJSON(object.upgradedClientState) + : undefined, + }; + }, + toJSON(message: Plan): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.time !== undefined && (obj.time = message.time.toISOString()); + message.height !== undefined && + (obj.height = (message.height || BigInt(0)).toString()); + message.info !== undefined && (obj.info = message.info); + message.upgradedClientState !== undefined && + (obj.upgradedClientState = message.upgradedClientState + ? Any.toJSON(message.upgradedClientState) + : undefined); + return obj; + }, + fromPartial(object: Partial): Plan { + const message = createBasePlan(); + message.name = object.name ?? ''; + message.time = object.time ?? undefined; + message.height = + object.height !== undefined && object.height !== null + ? BigInt(object.height.toString()) + : BigInt(0); + message.info = object.info ?? ''; + message.upgradedClientState = + object.upgradedClientState !== undefined && + object.upgradedClientState !== null + ? Any.fromPartial(object.upgradedClientState) + : undefined; + return message; + }, + fromAmino(object: PlanAmino): Plan { + const message = createBasePlan(); + if (object.name !== undefined && object.name !== null) { + message.name = object.name; + } + if (object.time !== undefined && object.time !== null) { + message.time = fromTimestamp(Timestamp.fromAmino(object.time)); + } + if (object.height !== undefined && object.height !== null) { + message.height = BigInt(object.height); + } + if (object.info !== undefined && object.info !== null) { + message.info = object.info; + } + if ( + object.upgraded_client_state !== undefined && + object.upgraded_client_state !== null + ) { + message.upgradedClientState = Any.fromAmino(object.upgraded_client_state); + } + return message; + }, + toAmino(message: Plan): PlanAmino { + const obj: any = {}; + obj.name = message.name; + obj.time = message.time + ? Timestamp.toAmino(toTimestamp(message.time)) + : undefined; + obj.height = message.height ? message.height.toString() : undefined; + obj.info = message.info; + obj.upgraded_client_state = message.upgradedClientState + ? 
Any.toAmino(message.upgradedClientState) + : undefined; + return obj; + }, + fromAminoMsg(object: PlanAminoMsg): Plan { + return Plan.fromAmino(object.value); + }, + toAminoMsg(message: Plan): PlanAminoMsg { + return { + type: 'cosmos-sdk/Plan', + value: Plan.toAmino(message), + }; + }, + fromProtoMsg(message: PlanProtoMsg): Plan { + return Plan.decode(message.value); + }, + toProto(message: Plan): Uint8Array { + return Plan.encode(message).finish(); + }, + toProtoMsg(message: Plan): PlanProtoMsg { + return { + typeUrl: '/cosmos.upgrade.v1beta1.Plan', + value: Plan.encode(message).finish(), + }; + }, +}; +function createBaseSoftwareUpgradeProposal(): SoftwareUpgradeProposal { + return { + $typeUrl: '/cosmos.upgrade.v1beta1.SoftwareUpgradeProposal', + title: '', + description: '', + plan: Plan.fromPartial({}), + }; +} +export const SoftwareUpgradeProposal = { + typeUrl: '/cosmos.upgrade.v1beta1.SoftwareUpgradeProposal', + encode( + message: SoftwareUpgradeProposal, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.title !== '') { + writer.uint32(10).string(message.title); + } + if (message.description !== '') { + writer.uint32(18).string(message.description); + } + if (message.plan !== undefined) { + Plan.encode(message.plan, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): SoftwareUpgradeProposal { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSoftwareUpgradeProposal(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + case 3: + message.plan = Plan.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): SoftwareUpgradeProposal { + return { + title: isSet(object.title) ? String(object.title) : '', + description: isSet(object.description) ? String(object.description) : '', + plan: isSet(object.plan) ? Plan.fromJSON(object.plan) : undefined, + }; + }, + toJSON(message: SoftwareUpgradeProposal): unknown { + const obj: any = {}; + message.title !== undefined && (obj.title = message.title); + message.description !== undefined && + (obj.description = message.description); + message.plan !== undefined && + (obj.plan = message.plan ? Plan.toJSON(message.plan) : undefined); + return obj; + }, + fromPartial( + object: Partial, + ): SoftwareUpgradeProposal { + const message = createBaseSoftwareUpgradeProposal(); + message.title = object.title ?? ''; + message.description = object.description ?? ''; + message.plan = + object.plan !== undefined && object.plan !== null + ? 
Plan.fromPartial(object.plan) + : undefined; + return message; + }, + fromAmino(object: SoftwareUpgradeProposalAmino): SoftwareUpgradeProposal { + const message = createBaseSoftwareUpgradeProposal(); + if (object.title !== undefined && object.title !== null) { + message.title = object.title; + } + if (object.description !== undefined && object.description !== null) { + message.description = object.description; + } + if (object.plan !== undefined && object.plan !== null) { + message.plan = Plan.fromAmino(object.plan); + } + return message; + }, + toAmino(message: SoftwareUpgradeProposal): SoftwareUpgradeProposalAmino { + const obj: any = {}; + obj.title = message.title; + obj.description = message.description; + obj.plan = message.plan ? Plan.toAmino(message.plan) : undefined; + return obj; + }, + fromAminoMsg( + object: SoftwareUpgradeProposalAminoMsg, + ): SoftwareUpgradeProposal { + return SoftwareUpgradeProposal.fromAmino(object.value); + }, + toAminoMsg( + message: SoftwareUpgradeProposal, + ): SoftwareUpgradeProposalAminoMsg { + return { + type: 'cosmos-sdk/SoftwareUpgradeProposal', + value: SoftwareUpgradeProposal.toAmino(message), + }; + }, + fromProtoMsg( + message: SoftwareUpgradeProposalProtoMsg, + ): SoftwareUpgradeProposal { + return SoftwareUpgradeProposal.decode(message.value); + }, + toProto(message: SoftwareUpgradeProposal): Uint8Array { + return SoftwareUpgradeProposal.encode(message).finish(); + }, + toProtoMsg( + message: SoftwareUpgradeProposal, + ): SoftwareUpgradeProposalProtoMsg { + return { + typeUrl: '/cosmos.upgrade.v1beta1.SoftwareUpgradeProposal', + value: SoftwareUpgradeProposal.encode(message).finish(), + }; + }, +}; +function createBaseCancelSoftwareUpgradeProposal(): CancelSoftwareUpgradeProposal { + return { + $typeUrl: '/cosmos.upgrade.v1beta1.CancelSoftwareUpgradeProposal', + title: '', + description: '', + }; +} +export const CancelSoftwareUpgradeProposal = { + typeUrl: '/cosmos.upgrade.v1beta1.CancelSoftwareUpgradeProposal', + encode( + message: CancelSoftwareUpgradeProposal, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.title !== '') { + writer.uint32(10).string(message.title); + } + if (message.description !== '') { + writer.uint32(18).string(message.description); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): CancelSoftwareUpgradeProposal { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCancelSoftwareUpgradeProposal(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): CancelSoftwareUpgradeProposal { + return { + title: isSet(object.title) ? String(object.title) : '', + description: isSet(object.description) ? String(object.description) : '', + }; + }, + toJSON(message: CancelSoftwareUpgradeProposal): unknown { + const obj: any = {}; + message.title !== undefined && (obj.title = message.title); + message.description !== undefined && + (obj.description = message.description); + return obj; + }, + fromPartial( + object: Partial, + ): CancelSoftwareUpgradeProposal { + const message = createBaseCancelSoftwareUpgradeProposal(); + message.title = object.title ?? 
''; + message.description = object.description ?? ''; + return message; + }, + fromAmino( + object: CancelSoftwareUpgradeProposalAmino, + ): CancelSoftwareUpgradeProposal { + const message = createBaseCancelSoftwareUpgradeProposal(); + if (object.title !== undefined && object.title !== null) { + message.title = object.title; + } + if (object.description !== undefined && object.description !== null) { + message.description = object.description; + } + return message; + }, + toAmino( + message: CancelSoftwareUpgradeProposal, + ): CancelSoftwareUpgradeProposalAmino { + const obj: any = {}; + obj.title = message.title; + obj.description = message.description; + return obj; + }, + fromAminoMsg( + object: CancelSoftwareUpgradeProposalAminoMsg, + ): CancelSoftwareUpgradeProposal { + return CancelSoftwareUpgradeProposal.fromAmino(object.value); + }, + toAminoMsg( + message: CancelSoftwareUpgradeProposal, + ): CancelSoftwareUpgradeProposalAminoMsg { + return { + type: 'cosmos-sdk/CancelSoftwareUpgradeProposal', + value: CancelSoftwareUpgradeProposal.toAmino(message), + }; + }, + fromProtoMsg( + message: CancelSoftwareUpgradeProposalProtoMsg, + ): CancelSoftwareUpgradeProposal { + return CancelSoftwareUpgradeProposal.decode(message.value); + }, + toProto(message: CancelSoftwareUpgradeProposal): Uint8Array { + return CancelSoftwareUpgradeProposal.encode(message).finish(); + }, + toProtoMsg( + message: CancelSoftwareUpgradeProposal, + ): CancelSoftwareUpgradeProposalProtoMsg { + return { + typeUrl: '/cosmos.upgrade.v1beta1.CancelSoftwareUpgradeProposal', + value: CancelSoftwareUpgradeProposal.encode(message).finish(), + }; + }, +}; +function createBaseModuleVersion(): ModuleVersion { + return { + name: '', + version: BigInt(0), + }; +} +export const ModuleVersion = { + typeUrl: '/cosmos.upgrade.v1beta1.ModuleVersion', + encode( + message: ModuleVersion, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.name !== '') { + writer.uint32(10).string(message.name); + } + if (message.version !== BigInt(0)) { + writer.uint32(16).uint64(message.version); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): ModuleVersion { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseModuleVersion(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.version = reader.uint64(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): ModuleVersion { + return { + name: isSet(object.name) ? String(object.name) : '', + version: isSet(object.version) + ? BigInt(object.version.toString()) + : BigInt(0), + }; + }, + toJSON(message: ModuleVersion): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.version !== undefined && + (obj.version = (message.version || BigInt(0)).toString()); + return obj; + }, + fromPartial(object: Partial): ModuleVersion { + const message = createBaseModuleVersion(); + message.name = object.name ?? ''; + message.version = + object.version !== undefined && object.version !== null + ? 
BigInt(object.version.toString()) + : BigInt(0); + return message; + }, + fromAmino(object: ModuleVersionAmino): ModuleVersion { + const message = createBaseModuleVersion(); + if (object.name !== undefined && object.name !== null) { + message.name = object.name; + } + if (object.version !== undefined && object.version !== null) { + message.version = BigInt(object.version); + } + return message; + }, + toAmino(message: ModuleVersion): ModuleVersionAmino { + const obj: any = {}; + obj.name = message.name; + obj.version = message.version ? message.version.toString() : undefined; + return obj; + }, + fromAminoMsg(object: ModuleVersionAminoMsg): ModuleVersion { + return ModuleVersion.fromAmino(object.value); + }, + toAminoMsg(message: ModuleVersion): ModuleVersionAminoMsg { + return { + type: 'cosmos-sdk/ModuleVersion', + value: ModuleVersion.toAmino(message), + }; + }, + fromProtoMsg(message: ModuleVersionProtoMsg): ModuleVersion { + return ModuleVersion.decode(message.value); + }, + toProto(message: ModuleVersion): Uint8Array { + return ModuleVersion.encode(message).finish(); + }, + toProtoMsg(message: ModuleVersion): ModuleVersionProtoMsg { + return { + typeUrl: '/cosmos.upgrade.v1beta1.ModuleVersion', + value: ModuleVersion.encode(message).finish(), + }; + }, +}; diff --git a/packages/cosmic-proto/src/codegen/cosmos_proto/bundle.ts b/packages/cosmic-proto/src/codegen/cosmos_proto/bundle.ts new file mode 100644 index 00000000000..17dac74010e --- /dev/null +++ b/packages/cosmic-proto/src/codegen/cosmos_proto/bundle.ts @@ -0,0 +1,5 @@ +//@ts-nocheck +import * as _15 from './cosmos.js'; +export const cosmos_proto = { + ..._15, +}; diff --git a/packages/cosmic-proto/src/codegen/cosmos_proto/cosmos.ts b/packages/cosmic-proto/src/codegen/cosmos_proto/cosmos.ts new file mode 100644 index 00000000000..e6bf9fb181a --- /dev/null +++ b/packages/cosmic-proto/src/codegen/cosmos_proto/cosmos.ts @@ -0,0 +1,396 @@ +//@ts-nocheck +import { BinaryReader, BinaryWriter } from '../binary.js'; +import { isSet } from '../helpers.js'; +export enum ScalarType { + SCALAR_TYPE_UNSPECIFIED = 0, + SCALAR_TYPE_STRING = 1, + SCALAR_TYPE_BYTES = 2, + UNRECOGNIZED = -1, +} +export const ScalarTypeSDKType = ScalarType; +export const ScalarTypeAmino = ScalarType; +export function scalarTypeFromJSON(object: any): ScalarType { + switch (object) { + case 0: + case 'SCALAR_TYPE_UNSPECIFIED': + return ScalarType.SCALAR_TYPE_UNSPECIFIED; + case 1: + case 'SCALAR_TYPE_STRING': + return ScalarType.SCALAR_TYPE_STRING; + case 2: + case 'SCALAR_TYPE_BYTES': + return ScalarType.SCALAR_TYPE_BYTES; + case -1: + case 'UNRECOGNIZED': + default: + return ScalarType.UNRECOGNIZED; + } +} +export function scalarTypeToJSON(object: ScalarType): string { + switch (object) { + case ScalarType.SCALAR_TYPE_UNSPECIFIED: + return 'SCALAR_TYPE_UNSPECIFIED'; + case ScalarType.SCALAR_TYPE_STRING: + return 'SCALAR_TYPE_STRING'; + case ScalarType.SCALAR_TYPE_BYTES: + return 'SCALAR_TYPE_BYTES'; + case ScalarType.UNRECOGNIZED: + default: + return 'UNRECOGNIZED'; + } +} +/** + * InterfaceDescriptor describes an interface type to be used with + * accepts_interface and implements_interface and declared by declare_interface. + */ +export interface InterfaceDescriptor { + /** + * name is the name of the interface. It should be a short-name (without + * a period) such that the fully qualified name of the interface will be + * package.name, ex. for the package a.b and interface named C, the + * fully-qualified name will be a.b.C. 
+ */ + name: string; + /** + * description is a human-readable description of the interface and its + * purpose. + */ + description: string; +} +export interface InterfaceDescriptorProtoMsg { + typeUrl: '/cosmos_proto.InterfaceDescriptor'; + value: Uint8Array; +} +/** + * InterfaceDescriptor describes an interface type to be used with + * accepts_interface and implements_interface and declared by declare_interface. + */ +export interface InterfaceDescriptorAmino { + /** + * name is the name of the interface. It should be a short-name (without + * a period) such that the fully qualified name of the interface will be + * package.name, ex. for the package a.b and interface named C, the + * fully-qualified name will be a.b.C. + */ + name?: string; + /** + * description is a human-readable description of the interface and its + * purpose. + */ + description?: string; +} +export interface InterfaceDescriptorAminoMsg { + type: '/cosmos_proto.InterfaceDescriptor'; + value: InterfaceDescriptorAmino; +} +/** + * InterfaceDescriptor describes an interface type to be used with + * accepts_interface and implements_interface and declared by declare_interface. + */ +export interface InterfaceDescriptorSDKType { + name: string; + description: string; +} +/** + * ScalarDescriptor describes an scalar type to be used with + * the scalar field option and declared by declare_scalar. + * Scalars extend simple protobuf built-in types with additional + * syntax and semantics, for instance to represent big integers. + * Scalars should ideally define an encoding such that there is only one + * valid syntactical representation for a given semantic meaning, + * i.e. the encoding should be deterministic. + */ +export interface ScalarDescriptor { + /** + * name is the name of the scalar. It should be a short-name (without + * a period) such that the fully qualified name of the scalar will be + * package.name, ex. for the package a.b and scalar named C, the + * fully-qualified name will be a.b.C. + */ + name: string; + /** + * description is a human-readable description of the scalar and its + * encoding format. For instance a big integer or decimal scalar should + * specify precisely the expected encoding format. + */ + description: string; + /** + * field_type is the type of field with which this scalar can be used. + * Scalars can be used with one and only one type of field so that + * encoding standards and simple and clear. Currently only string and + * bytes fields are supported for scalars. + */ + fieldType: ScalarType[]; +} +export interface ScalarDescriptorProtoMsg { + typeUrl: '/cosmos_proto.ScalarDescriptor'; + value: Uint8Array; +} +/** + * ScalarDescriptor describes an scalar type to be used with + * the scalar field option and declared by declare_scalar. + * Scalars extend simple protobuf built-in types with additional + * syntax and semantics, for instance to represent big integers. + * Scalars should ideally define an encoding such that there is only one + * valid syntactical representation for a given semantic meaning, + * i.e. the encoding should be deterministic. + */ +export interface ScalarDescriptorAmino { + /** + * name is the name of the scalar. It should be a short-name (without + * a period) such that the fully qualified name of the scalar will be + * package.name, ex. for the package a.b and scalar named C, the + * fully-qualified name will be a.b.C. + */ + name?: string; + /** + * description is a human-readable description of the scalar and its + * encoding format. 
For instance a big integer or decimal scalar should + * specify precisely the expected encoding format. + */ + description?: string; + /** + * field_type is the type of field with which this scalar can be used. + * Scalars can be used with one and only one type of field so that + * encoding standards and simple and clear. Currently only string and + * bytes fields are supported for scalars. + */ + field_type?: ScalarType[]; +} +export interface ScalarDescriptorAminoMsg { + type: '/cosmos_proto.ScalarDescriptor'; + value: ScalarDescriptorAmino; +} +/** + * ScalarDescriptor describes an scalar type to be used with + * the scalar field option and declared by declare_scalar. + * Scalars extend simple protobuf built-in types with additional + * syntax and semantics, for instance to represent big integers. + * Scalars should ideally define an encoding such that there is only one + * valid syntactical representation for a given semantic meaning, + * i.e. the encoding should be deterministic. + */ +export interface ScalarDescriptorSDKType { + name: string; + description: string; + field_type: ScalarType[]; +} +function createBaseInterfaceDescriptor(): InterfaceDescriptor { + return { + name: '', + description: '', + }; +} +export const InterfaceDescriptor = { + typeUrl: '/cosmos_proto.InterfaceDescriptor', + encode( + message: InterfaceDescriptor, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.name !== '') { + writer.uint32(10).string(message.name); + } + if (message.description !== '') { + writer.uint32(18).string(message.description); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): InterfaceDescriptor { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseInterfaceDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): InterfaceDescriptor { + return { + name: isSet(object.name) ? String(object.name) : '', + description: isSet(object.description) ? String(object.description) : '', + }; + }, + toJSON(message: InterfaceDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && + (obj.description = message.description); + return obj; + }, + fromPartial(object: Partial): InterfaceDescriptor { + const message = createBaseInterfaceDescriptor(); + message.name = object.name ?? ''; + message.description = object.description ?? 
''; + return message; + }, + fromAmino(object: InterfaceDescriptorAmino): InterfaceDescriptor { + const message = createBaseInterfaceDescriptor(); + if (object.name !== undefined && object.name !== null) { + message.name = object.name; + } + if (object.description !== undefined && object.description !== null) { + message.description = object.description; + } + return message; + }, + toAmino(message: InterfaceDescriptor): InterfaceDescriptorAmino { + const obj: any = {}; + obj.name = message.name; + obj.description = message.description; + return obj; + }, + fromAminoMsg(object: InterfaceDescriptorAminoMsg): InterfaceDescriptor { + return InterfaceDescriptor.fromAmino(object.value); + }, + fromProtoMsg(message: InterfaceDescriptorProtoMsg): InterfaceDescriptor { + return InterfaceDescriptor.decode(message.value); + }, + toProto(message: InterfaceDescriptor): Uint8Array { + return InterfaceDescriptor.encode(message).finish(); + }, + toProtoMsg(message: InterfaceDescriptor): InterfaceDescriptorProtoMsg { + return { + typeUrl: '/cosmos_proto.InterfaceDescriptor', + value: InterfaceDescriptor.encode(message).finish(), + }; + }, +}; +function createBaseScalarDescriptor(): ScalarDescriptor { + return { + name: '', + description: '', + fieldType: [], + }; +} +export const ScalarDescriptor = { + typeUrl: '/cosmos_proto.ScalarDescriptor', + encode( + message: ScalarDescriptor, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.name !== '') { + writer.uint32(10).string(message.name); + } + if (message.description !== '') { + writer.uint32(18).string(message.description); + } + writer.uint32(26).fork(); + for (const v of message.fieldType) { + writer.int32(v); + } + writer.ldelim(); + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): ScalarDescriptor { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseScalarDescriptor(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + case 3: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.fieldType.push(reader.int32() as any); + } + } else { + message.fieldType.push(reader.int32() as any); + } + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): ScalarDescriptor { + return { + name: isSet(object.name) ? String(object.name) : '', + description: isSet(object.description) ? String(object.description) : '', + fieldType: Array.isArray(object?.fieldType) + ? object.fieldType.map((e: any) => scalarTypeFromJSON(e)) + : [], + }; + }, + toJSON(message: ScalarDescriptor): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.description !== undefined && + (obj.description = message.description); + if (message.fieldType) { + obj.fieldType = message.fieldType.map(e => scalarTypeToJSON(e)); + } else { + obj.fieldType = []; + } + return obj; + }, + fromPartial(object: Partial): ScalarDescriptor { + const message = createBaseScalarDescriptor(); + message.name = object.name ?? ''; + message.description = object.description ?? 
''; + message.fieldType = object.fieldType?.map(e => e) || []; + return message; + }, + fromAmino(object: ScalarDescriptorAmino): ScalarDescriptor { + const message = createBaseScalarDescriptor(); + if (object.name !== undefined && object.name !== null) { + message.name = object.name; + } + if (object.description !== undefined && object.description !== null) { + message.description = object.description; + } + message.fieldType = + object.field_type?.map(e => scalarTypeFromJSON(e)) || []; + return message; + }, + toAmino(message: ScalarDescriptor): ScalarDescriptorAmino { + const obj: any = {}; + obj.name = message.name; + obj.description = message.description; + if (message.fieldType) { + obj.field_type = message.fieldType.map(e => e); + } else { + obj.field_type = []; + } + return obj; + }, + fromAminoMsg(object: ScalarDescriptorAminoMsg): ScalarDescriptor { + return ScalarDescriptor.fromAmino(object.value); + }, + fromProtoMsg(message: ScalarDescriptorProtoMsg): ScalarDescriptor { + return ScalarDescriptor.decode(message.value); + }, + toProto(message: ScalarDescriptor): Uint8Array { + return ScalarDescriptor.encode(message).finish(); + }, + toProtoMsg(message: ScalarDescriptor): ScalarDescriptorProtoMsg { + return { + typeUrl: '/cosmos_proto.ScalarDescriptor', + value: ScalarDescriptor.encode(message).finish(), + }; + }, +}; diff --git a/packages/cosmic-proto/src/codegen/gogoproto/bundle.ts b/packages/cosmic-proto/src/codegen/gogoproto/bundle.ts new file mode 100644 index 00000000000..7ac16c4eb9d --- /dev/null +++ b/packages/cosmic-proto/src/codegen/gogoproto/bundle.ts @@ -0,0 +1,5 @@ +//@ts-nocheck +import * as _19 from './gogo.js'; +export const gogoproto = { + ..._19, +}; diff --git a/packages/cosmic-proto/src/codegen/gogoproto/gogo.ts b/packages/cosmic-proto/src/codegen/gogoproto/gogo.ts new file mode 100644 index 00000000000..cb0ff5c3b54 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/gogoproto/gogo.ts @@ -0,0 +1 @@ +export {}; diff --git a/packages/cosmic-proto/src/codegen/google/api/annotations.ts b/packages/cosmic-proto/src/codegen/google/api/annotations.ts new file mode 100644 index 00000000000..cb0ff5c3b54 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/google/api/annotations.ts @@ -0,0 +1 @@ +export {}; diff --git a/packages/cosmic-proto/src/codegen/google/api/http.ts b/packages/cosmic-proto/src/codegen/google/api/http.ts new file mode 100644 index 00000000000..8df19b32c42 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/google/api/http.ts @@ -0,0 +1,1472 @@ +//@ts-nocheck +import { BinaryReader, BinaryWriter } from '../../binary.js'; +import { isSet } from '../../helpers.js'; +/** + * Defines the HTTP configuration for an API service. It contains a list of + * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method + * to one or more HTTP REST API methods. + */ +export interface Http { + /** + * A list of HTTP configuration rules that apply to individual API methods. + * + * **NOTE:** All service configuration rules follow "last one wins" order. + */ + rules: HttpRule[]; + /** + * When set to true, URL path parameters will be fully URI-decoded except in + * cases of single segment matches in reserved expansion, where "%2F" will be + * left encoded. + * + * The default behavior is to not decode RFC 6570 reserved characters in multi + * segment matches. 
+ */ + fullyDecodeReservedExpansion: boolean; +} +export interface HttpProtoMsg { + typeUrl: '/google.api.Http'; + value: Uint8Array; +} +/** + * Defines the HTTP configuration for an API service. It contains a list of + * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method + * to one or more HTTP REST API methods. + */ +export interface HttpAmino { + /** + * A list of HTTP configuration rules that apply to individual API methods. + * + * **NOTE:** All service configuration rules follow "last one wins" order. + */ + rules?: HttpRuleAmino[]; + /** + * When set to true, URL path parameters will be fully URI-decoded except in + * cases of single segment matches in reserved expansion, where "%2F" will be + * left encoded. + * + * The default behavior is to not decode RFC 6570 reserved characters in multi + * segment matches. + */ + fully_decode_reserved_expansion?: boolean; +} +export interface HttpAminoMsg { + type: '/google.api.Http'; + value: HttpAmino; +} +/** + * Defines the HTTP configuration for an API service. It contains a list of + * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method + * to one or more HTTP REST API methods. + */ +export interface HttpSDKType { + rules: HttpRuleSDKType[]; + fully_decode_reserved_expansion: boolean; +} +/** + * # gRPC Transcoding + * + * gRPC Transcoding is a feature for mapping between a gRPC method and one or + * more HTTP REST endpoints. It allows developers to build a single API service + * that supports both gRPC APIs and REST APIs. Many systems, including [Google + * APIs](https://github.com/googleapis/googleapis), + * [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC + * Gateway](https://github.com/grpc-ecosystem/grpc-gateway), + * and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature + * and use it for large scale production services. + * + * `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies + * how different portions of the gRPC request message are mapped to the URL + * path, URL query parameters, and HTTP request body. It also controls how the + * gRPC response message is mapped to the HTTP response body. `HttpRule` is + * typically specified as an `google.api.http` annotation on the gRPC method. + * + * Each mapping specifies a URL path template and an HTTP method. The path + * template may refer to one or more fields in the gRPC request message, as long + * as each field is a non-repeated field with a primitive (non-message) type. + * The path template controls how fields of the request message are mapped to + * the URL path. + * + * Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/{name=messages/*}" + * }; + * } + * } + * message GetMessageRequest { + * string name = 1; // Mapped to URL path. + * } + * message Message { + * string text = 1; // The resource content. + * } + * + * This enables an HTTP REST to gRPC mapping as below: + * + * HTTP | gRPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(name: "messages/123456")` + * + * Any fields in the request message which are not bound by the path template + * automatically become HTTP query parameters if there is no HTTP request body. 
+ * For example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get:"/v1/messages/{message_id}" + * }; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // Mapped to URL path. + * int64 revision = 2; // Mapped to URL query parameter `revision`. + * SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`. + * } + * + * This enables a HTTP JSON to RPC mapping as below: + * + * HTTP | gRPC + * -----|----- + * `GET /v1/messages/123456?revision=2&sub.subfield=foo` | + * `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: + * "foo"))` + * + * Note that fields which are mapped to URL query parameters must have a + * primitive type or a repeated primitive type or a non-repeated message type. + * In the case of a repeated type, the parameter can be repeated in the URL + * as `...?param=A¶m=B`. In the case of a message type, each field of the + * message is mapped to a separate parameter, such as + * `...?foo.a=A&foo.b=B&foo.c=C`. + * + * For HTTP methods that allow a request body, the `body` field + * specifies the mapping. Consider a REST update method on the + * message resource collection: + * + * service Messaging { + * rpc UpdateMessage(UpdateMessageRequest) returns (Message) { + * option (google.api.http) = { + * patch: "/v1/messages/{message_id}" + * body: "message" + * }; + * } + * } + * message UpdateMessageRequest { + * string message_id = 1; // mapped to the URL + * Message message = 2; // mapped to the body + * } + * + * The following HTTP JSON to RPC mapping is enabled, where the + * representation of the JSON in the request body is determined by + * protos JSON encoding: + * + * HTTP | gRPC + * -----|----- + * `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: + * "123456" message { text: "Hi!" })` + * + * The special name `*` can be used in the body mapping to define that + * every field not bound by the path template should be mapped to the + * request body. This enables the following alternative definition of + * the update method: + * + * service Messaging { + * rpc UpdateMessage(Message) returns (Message) { + * option (google.api.http) = { + * patch: "/v1/messages/{message_id}" + * body: "*" + * }; + * } + * } + * message Message { + * string message_id = 1; + * string text = 2; + * } + * + * + * The following HTTP JSON to RPC mapping is enabled: + * + * HTTP | gRPC + * -----|----- + * `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: + * "123456" text: "Hi!")` + * + * Note that when using `*` in the body mapping, it is not possible to + * have HTTP parameters, as all fields not bound by the path end in + * the body. This makes this option more rarely used in practice when + * defining REST APIs. The common usage of `*` is in custom methods + * which don't use the URL at all for transferring data. + * + * It is possible to define multiple HTTP methods for one RPC by using + * the `additional_bindings` option. 
Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/messages/{message_id}" + * additional_bindings { + * get: "/v1/users/{user_id}/messages/{message_id}" + * } + * }; + * } + * } + * message GetMessageRequest { + * string message_id = 1; + * string user_id = 2; + * } + * + * This enables the following two alternative HTTP JSON to RPC mappings: + * + * HTTP | gRPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` + * `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: + * "123456")` + * + * ## Rules for HTTP mapping + * + * 1. Leaf request fields (recursive expansion nested messages in the request + * message) are classified into three categories: + * - Fields referred by the path template. They are passed via the URL path. + * - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They + * are passed via the HTTP + * request body. + * - All other fields are passed via the URL query parameters, and the + * parameter name is the field path in the request message. A repeated + * field can be represented as multiple query parameters under the same + * name. + * 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL + * query parameter, all fields + * are passed via URL path and HTTP request body. + * 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP + * request body, all + * fields are passed via URL path and URL query parameters. + * + * ### Path template syntax + * + * Template = "/" Segments [ Verb ] ; + * Segments = Segment { "/" Segment } ; + * Segment = "*" | "**" | LITERAL | Variable ; + * Variable = "{" FieldPath [ "=" Segments ] "}" ; + * FieldPath = IDENT { "." IDENT } ; + * Verb = ":" LITERAL ; + * + * The syntax `*` matches a single URL path segment. The syntax `**` matches + * zero or more URL path segments, which must be the last part of the URL path + * except the `Verb`. + * + * The syntax `Variable` matches part of the URL path as specified by its + * template. A variable template must not contain other variables. If a variable + * matches a single path segment, its template may be omitted, e.g. `{var}` + * is equivalent to `{var=*}`. + * + * The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL` + * contains any reserved character, such characters should be percent-encoded + * before the matching. + * + * If a variable contains exactly one path segment, such as `"{var}"` or + * `"{var=*}"`, when such a variable is expanded into a URL path on the client + * side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The + * server side does the reverse decoding. Such variables show up in the + * [Discovery + * Document](https://developers.google.com/discovery/v1/reference/apis) as + * `{var}`. + * + * If a variable contains multiple path segments, such as `"{var=foo/*}"` + * or `"{var=**}"`, when such a variable is expanded into a URL path on the + * client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. + * The server side does the reverse decoding, except "%2F" and "%2f" are left + * unchanged. Such variables show up in the + * [Discovery + * Document](https://developers.google.com/discovery/v1/reference/apis) as + * `{+var}`. + * + * ## Using gRPC API Service Configuration + * + * gRPC API Service Configuration (service config) is a configuration language + * for configuring a gRPC service to become a user-facing product. 
The + * service config is simply the YAML representation of the `google.api.Service` + * proto message. + * + * As an alternative to annotating your proto file, you can configure gRPC + * transcoding in your service config YAML files. You do this by specifying a + * `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same + * effect as the proto annotation. This can be particularly useful if you + * have a proto that is reused in multiple services. Note that any transcoding + * specified in the service config will override any matching transcoding + * configuration in the proto. + * + * Example: + * + * http: + * rules: + * # Selects a gRPC method and applies HttpRule to it. + * - selector: example.v1.Messaging.GetMessage + * get: /v1/messages/{message_id}/{sub.subfield} + * + * ## Special notes + * + * When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the + * proto to JSON conversion must follow the [proto3 + * specification](https://developers.google.com/protocol-buffers/docs/proto3#json). + * + * While the single segment variable follows the semantics of + * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String + * Expansion, the multi segment variable **does not** follow RFC 6570 Section + * 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion + * does not expand special characters like `?` and `#`, which would lead + * to invalid URLs. As the result, gRPC Transcoding uses a custom encoding + * for multi segment variables. + * + * The path variables **must not** refer to any repeated or mapped field, + * because client libraries are not capable of handling such variable expansion. + * + * The path variables **must not** capture the leading "/" character. The reason + * is that the most common use case "{var}" does not capture the leading "/" + * character. For consistency, all path variables must share the same behavior. + * + * Repeated message fields must not be mapped to URL query parameters, because + * no client library can support such complicated mapping. + * + * If an API needs to use a JSON array for request or response body, it can map + * the request or response body to a repeated field. However, some gRPC + * Transcoding implementations may not support this feature. + */ +export interface HttpRule { + /** + * Selects a method to which this rule applies. + * + * Refer to [selector][google.api.DocumentationRule.selector] for syntax + * details. + */ + selector: string; + /** + * Maps to HTTP GET. Used for listing and getting information about + * resources. + */ + get?: string; + /** Maps to HTTP PUT. Used for replacing a resource. */ + put?: string; + /** Maps to HTTP POST. Used for creating a resource or performing an action. */ + post?: string; + /** Maps to HTTP DELETE. Used for deleting a resource. */ + delete?: string; + /** Maps to HTTP PATCH. Used for updating a resource. */ + patch?: string; + /** + * The custom pattern is used for specifying an HTTP method that is not + * included in the `pattern` field, such as HEAD, or "*" to leave the + * HTTP method unspecified for this rule. The wild-card rule is useful + * for services that provide content to Web (HTML) clients. + */ + custom?: CustomHttpPattern; + /** + * The name of the request field whose value is mapped to the HTTP request + * body, or `*` for mapping all request fields not captured by the path + * pattern to the HTTP body, or omitted for not having any HTTP request body. 
+ * + * NOTE: the referred field must be present at the top-level of the request + * message type. + */ + body: string; + /** + * Optional. The name of the response field whose value is mapped to the HTTP + * response body. When omitted, the entire response message will be used + * as the HTTP response body. + * + * NOTE: The referred field must be present at the top-level of the response + * message type. + */ + responseBody: string; + /** + * Additional HTTP bindings for the selector. Nested bindings must + * not contain an `additional_bindings` field themselves (that is, + * the nesting may only be one level deep). + */ + additionalBindings: HttpRule[]; +} +export interface HttpRuleProtoMsg { + typeUrl: '/google.api.HttpRule'; + value: Uint8Array; +} +/** + * # gRPC Transcoding + * + * gRPC Transcoding is a feature for mapping between a gRPC method and one or + * more HTTP REST endpoints. It allows developers to build a single API service + * that supports both gRPC APIs and REST APIs. Many systems, including [Google + * APIs](https://github.com/googleapis/googleapis), + * [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC + * Gateway](https://github.com/grpc-ecosystem/grpc-gateway), + * and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature + * and use it for large scale production services. + * + * `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies + * how different portions of the gRPC request message are mapped to the URL + * path, URL query parameters, and HTTP request body. It also controls how the + * gRPC response message is mapped to the HTTP response body. `HttpRule` is + * typically specified as an `google.api.http` annotation on the gRPC method. + * + * Each mapping specifies a URL path template and an HTTP method. The path + * template may refer to one or more fields in the gRPC request message, as long + * as each field is a non-repeated field with a primitive (non-message) type. + * The path template controls how fields of the request message are mapped to + * the URL path. + * + * Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/{name=messages/*}" + * }; + * } + * } + * message GetMessageRequest { + * string name = 1; // Mapped to URL path. + * } + * message Message { + * string text = 1; // The resource content. + * } + * + * This enables an HTTP REST to gRPC mapping as below: + * + * HTTP | gRPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(name: "messages/123456")` + * + * Any fields in the request message which are not bound by the path template + * automatically become HTTP query parameters if there is no HTTP request body. + * For example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get:"/v1/messages/{message_id}" + * }; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // Mapped to URL path. + * int64 revision = 2; // Mapped to URL query parameter `revision`. + * SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`. 
+ * } + * + * This enables a HTTP JSON to RPC mapping as below: + * + * HTTP | gRPC + * -----|----- + * `GET /v1/messages/123456?revision=2&sub.subfield=foo` | + * `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: + * "foo"))` + * + * Note that fields which are mapped to URL query parameters must have a + * primitive type or a repeated primitive type or a non-repeated message type. + * In the case of a repeated type, the parameter can be repeated in the URL + * as `...?param=A¶m=B`. In the case of a message type, each field of the + * message is mapped to a separate parameter, such as + * `...?foo.a=A&foo.b=B&foo.c=C`. + * + * For HTTP methods that allow a request body, the `body` field + * specifies the mapping. Consider a REST update method on the + * message resource collection: + * + * service Messaging { + * rpc UpdateMessage(UpdateMessageRequest) returns (Message) { + * option (google.api.http) = { + * patch: "/v1/messages/{message_id}" + * body: "message" + * }; + * } + * } + * message UpdateMessageRequest { + * string message_id = 1; // mapped to the URL + * Message message = 2; // mapped to the body + * } + * + * The following HTTP JSON to RPC mapping is enabled, where the + * representation of the JSON in the request body is determined by + * protos JSON encoding: + * + * HTTP | gRPC + * -----|----- + * `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: + * "123456" message { text: "Hi!" })` + * + * The special name `*` can be used in the body mapping to define that + * every field not bound by the path template should be mapped to the + * request body. This enables the following alternative definition of + * the update method: + * + * service Messaging { + * rpc UpdateMessage(Message) returns (Message) { + * option (google.api.http) = { + * patch: "/v1/messages/{message_id}" + * body: "*" + * }; + * } + * } + * message Message { + * string message_id = 1; + * string text = 2; + * } + * + * + * The following HTTP JSON to RPC mapping is enabled: + * + * HTTP | gRPC + * -----|----- + * `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: + * "123456" text: "Hi!")` + * + * Note that when using `*` in the body mapping, it is not possible to + * have HTTP parameters, as all fields not bound by the path end in + * the body. This makes this option more rarely used in practice when + * defining REST APIs. The common usage of `*` is in custom methods + * which don't use the URL at all for transferring data. + * + * It is possible to define multiple HTTP methods for one RPC by using + * the `additional_bindings` option. Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/messages/{message_id}" + * additional_bindings { + * get: "/v1/users/{user_id}/messages/{message_id}" + * } + * }; + * } + * } + * message GetMessageRequest { + * string message_id = 1; + * string user_id = 2; + * } + * + * This enables the following two alternative HTTP JSON to RPC mappings: + * + * HTTP | gRPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` + * `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: + * "123456")` + * + * ## Rules for HTTP mapping + * + * 1. Leaf request fields (recursive expansion nested messages in the request + * message) are classified into three categories: + * - Fields referred by the path template. They are passed via the URL path. 
+ * - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They + * are passed via the HTTP + * request body. + * - All other fields are passed via the URL query parameters, and the + * parameter name is the field path in the request message. A repeated + * field can be represented as multiple query parameters under the same + * name. + * 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL + * query parameter, all fields + * are passed via URL path and HTTP request body. + * 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP + * request body, all + * fields are passed via URL path and URL query parameters. + * + * ### Path template syntax + * + * Template = "/" Segments [ Verb ] ; + * Segments = Segment { "/" Segment } ; + * Segment = "*" | "**" | LITERAL | Variable ; + * Variable = "{" FieldPath [ "=" Segments ] "}" ; + * FieldPath = IDENT { "." IDENT } ; + * Verb = ":" LITERAL ; + * + * The syntax `*` matches a single URL path segment. The syntax `**` matches + * zero or more URL path segments, which must be the last part of the URL path + * except the `Verb`. + * + * The syntax `Variable` matches part of the URL path as specified by its + * template. A variable template must not contain other variables. If a variable + * matches a single path segment, its template may be omitted, e.g. `{var}` + * is equivalent to `{var=*}`. + * + * The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL` + * contains any reserved character, such characters should be percent-encoded + * before the matching. + * + * If a variable contains exactly one path segment, such as `"{var}"` or + * `"{var=*}"`, when such a variable is expanded into a URL path on the client + * side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The + * server side does the reverse decoding. Such variables show up in the + * [Discovery + * Document](https://developers.google.com/discovery/v1/reference/apis) as + * `{var}`. + * + * If a variable contains multiple path segments, such as `"{var=foo/*}"` + * or `"{var=**}"`, when such a variable is expanded into a URL path on the + * client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. + * The server side does the reverse decoding, except "%2F" and "%2f" are left + * unchanged. Such variables show up in the + * [Discovery + * Document](https://developers.google.com/discovery/v1/reference/apis) as + * `{+var}`. + * + * ## Using gRPC API Service Configuration + * + * gRPC API Service Configuration (service config) is a configuration language + * for configuring a gRPC service to become a user-facing product. The + * service config is simply the YAML representation of the `google.api.Service` + * proto message. + * + * As an alternative to annotating your proto file, you can configure gRPC + * transcoding in your service config YAML files. You do this by specifying a + * `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same + * effect as the proto annotation. This can be particularly useful if you + * have a proto that is reused in multiple services. Note that any transcoding + * specified in the service config will override any matching transcoding + * configuration in the proto. + * + * Example: + * + * http: + * rules: + * # Selects a gRPC method and applies HttpRule to it. 
+ * - selector: example.v1.Messaging.GetMessage + * get: /v1/messages/{message_id}/{sub.subfield} + * + * ## Special notes + * + * When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the + * proto to JSON conversion must follow the [proto3 + * specification](https://developers.google.com/protocol-buffers/docs/proto3#json). + * + * While the single segment variable follows the semantics of + * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String + * Expansion, the multi segment variable **does not** follow RFC 6570 Section + * 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion + * does not expand special characters like `?` and `#`, which would lead + * to invalid URLs. As the result, gRPC Transcoding uses a custom encoding + * for multi segment variables. + * + * The path variables **must not** refer to any repeated or mapped field, + * because client libraries are not capable of handling such variable expansion. + * + * The path variables **must not** capture the leading "/" character. The reason + * is that the most common use case "{var}" does not capture the leading "/" + * character. For consistency, all path variables must share the same behavior. + * + * Repeated message fields must not be mapped to URL query parameters, because + * no client library can support such complicated mapping. + * + * If an API needs to use a JSON array for request or response body, it can map + * the request or response body to a repeated field. However, some gRPC + * Transcoding implementations may not support this feature. + */ +export interface HttpRuleAmino { + /** + * Selects a method to which this rule applies. + * + * Refer to [selector][google.api.DocumentationRule.selector] for syntax + * details. + */ + selector?: string; + /** + * Maps to HTTP GET. Used for listing and getting information about + * resources. + */ + get?: string; + /** Maps to HTTP PUT. Used for replacing a resource. */ + put?: string; + /** Maps to HTTP POST. Used for creating a resource or performing an action. */ + post?: string; + /** Maps to HTTP DELETE. Used for deleting a resource. */ + delete?: string; + /** Maps to HTTP PATCH. Used for updating a resource. */ + patch?: string; + /** + * The custom pattern is used for specifying an HTTP method that is not + * included in the `pattern` field, such as HEAD, or "*" to leave the + * HTTP method unspecified for this rule. The wild-card rule is useful + * for services that provide content to Web (HTML) clients. + */ + custom?: CustomHttpPatternAmino; + /** + * The name of the request field whose value is mapped to the HTTP request + * body, or `*` for mapping all request fields not captured by the path + * pattern to the HTTP body, or omitted for not having any HTTP request body. + * + * NOTE: the referred field must be present at the top-level of the request + * message type. + */ + body?: string; + /** + * Optional. The name of the response field whose value is mapped to the HTTP + * response body. When omitted, the entire response message will be used + * as the HTTP response body. + * + * NOTE: The referred field must be present at the top-level of the response + * message type. + */ + response_body?: string; + /** + * Additional HTTP bindings for the selector. Nested bindings must + * not contain an `additional_bindings` field themselves (that is, + * the nesting may only be one level deep). 
+ */ + additional_bindings?: HttpRuleAmino[]; +} +export interface HttpRuleAminoMsg { + type: '/google.api.HttpRule'; + value: HttpRuleAmino; +} +/** + * # gRPC Transcoding + * + * gRPC Transcoding is a feature for mapping between a gRPC method and one or + * more HTTP REST endpoints. It allows developers to build a single API service + * that supports both gRPC APIs and REST APIs. Many systems, including [Google + * APIs](https://github.com/googleapis/googleapis), + * [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC + * Gateway](https://github.com/grpc-ecosystem/grpc-gateway), + * and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature + * and use it for large scale production services. + * + * `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies + * how different portions of the gRPC request message are mapped to the URL + * path, URL query parameters, and HTTP request body. It also controls how the + * gRPC response message is mapped to the HTTP response body. `HttpRule` is + * typically specified as an `google.api.http` annotation on the gRPC method. + * + * Each mapping specifies a URL path template and an HTTP method. The path + * template may refer to one or more fields in the gRPC request message, as long + * as each field is a non-repeated field with a primitive (non-message) type. + * The path template controls how fields of the request message are mapped to + * the URL path. + * + * Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/{name=messages/*}" + * }; + * } + * } + * message GetMessageRequest { + * string name = 1; // Mapped to URL path. + * } + * message Message { + * string text = 1; // The resource content. + * } + * + * This enables an HTTP REST to gRPC mapping as below: + * + * HTTP | gRPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(name: "messages/123456")` + * + * Any fields in the request message which are not bound by the path template + * automatically become HTTP query parameters if there is no HTTP request body. + * For example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get:"/v1/messages/{message_id}" + * }; + * } + * } + * message GetMessageRequest { + * message SubMessage { + * string subfield = 1; + * } + * string message_id = 1; // Mapped to URL path. + * int64 revision = 2; // Mapped to URL query parameter `revision`. + * SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`. + * } + * + * This enables a HTTP JSON to RPC mapping as below: + * + * HTTP | gRPC + * -----|----- + * `GET /v1/messages/123456?revision=2&sub.subfield=foo` | + * `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: + * "foo"))` + * + * Note that fields which are mapped to URL query parameters must have a + * primitive type or a repeated primitive type or a non-repeated message type. + * In the case of a repeated type, the parameter can be repeated in the URL + * as `...?param=A¶m=B`. In the case of a message type, each field of the + * message is mapped to a separate parameter, such as + * `...?foo.a=A&foo.b=B&foo.c=C`. + * + * For HTTP methods that allow a request body, the `body` field + * specifies the mapping. 
Consider a REST update method on the + * message resource collection: + * + * service Messaging { + * rpc UpdateMessage(UpdateMessageRequest) returns (Message) { + * option (google.api.http) = { + * patch: "/v1/messages/{message_id}" + * body: "message" + * }; + * } + * } + * message UpdateMessageRequest { + * string message_id = 1; // mapped to the URL + * Message message = 2; // mapped to the body + * } + * + * The following HTTP JSON to RPC mapping is enabled, where the + * representation of the JSON in the request body is determined by + * protos JSON encoding: + * + * HTTP | gRPC + * -----|----- + * `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: + * "123456" message { text: "Hi!" })` + * + * The special name `*` can be used in the body mapping to define that + * every field not bound by the path template should be mapped to the + * request body. This enables the following alternative definition of + * the update method: + * + * service Messaging { + * rpc UpdateMessage(Message) returns (Message) { + * option (google.api.http) = { + * patch: "/v1/messages/{message_id}" + * body: "*" + * }; + * } + * } + * message Message { + * string message_id = 1; + * string text = 2; + * } + * + * + * The following HTTP JSON to RPC mapping is enabled: + * + * HTTP | gRPC + * -----|----- + * `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: + * "123456" text: "Hi!")` + * + * Note that when using `*` in the body mapping, it is not possible to + * have HTTP parameters, as all fields not bound by the path end in + * the body. This makes this option more rarely used in practice when + * defining REST APIs. The common usage of `*` is in custom methods + * which don't use the URL at all for transferring data. + * + * It is possible to define multiple HTTP methods for one RPC by using + * the `additional_bindings` option. Example: + * + * service Messaging { + * rpc GetMessage(GetMessageRequest) returns (Message) { + * option (google.api.http) = { + * get: "/v1/messages/{message_id}" + * additional_bindings { + * get: "/v1/users/{user_id}/messages/{message_id}" + * } + * }; + * } + * } + * message GetMessageRequest { + * string message_id = 1; + * string user_id = 2; + * } + * + * This enables the following two alternative HTTP JSON to RPC mappings: + * + * HTTP | gRPC + * -----|----- + * `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` + * `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: + * "123456")` + * + * ## Rules for HTTP mapping + * + * 1. Leaf request fields (recursive expansion nested messages in the request + * message) are classified into three categories: + * - Fields referred by the path template. They are passed via the URL path. + * - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They + * are passed via the HTTP + * request body. + * - All other fields are passed via the URL query parameters, and the + * parameter name is the field path in the request message. A repeated + * field can be represented as multiple query parameters under the same + * name. + * 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL + * query parameter, all fields + * are passed via URL path and HTTP request body. + * 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP + * request body, all + * fields are passed via URL path and URL query parameters. 
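// ---------------------------------------------------------------------------
// Illustrative sketch (editor's example, not part of the generated output):
// exercising the HttpRule codec defined later in this file. The field choices
// mirror the mapping categories listed above -- the path template binds
// `message_id`, and `body: '*'` maps every remaining request field to the
// HTTP request body. The import specifier is an assumption; it depends on how
// @agoric/cosmic-proto is consumed.
import { HttpRule } from './http.js'; // hypothetical import path

const rule = HttpRule.fromPartial({
  selector: 'example.v1.Messaging.UpdateMessage',
  patch: '/v1/messages/{message_id}',
  body: '*',
});

// Round-trip through the generated binary codec.
const bytes: Uint8Array = HttpRule.encode(rule).finish();
const decoded = HttpRule.decode(bytes);
console.log(HttpRule.toJSON(decoded)); // restores the fields set above
// ---------------------------------------------------------------------------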
+ * + * ### Path template syntax + * + * Template = "/" Segments [ Verb ] ; + * Segments = Segment { "/" Segment } ; + * Segment = "*" | "**" | LITERAL | Variable ; + * Variable = "{" FieldPath [ "=" Segments ] "}" ; + * FieldPath = IDENT { "." IDENT } ; + * Verb = ":" LITERAL ; + * + * The syntax `*` matches a single URL path segment. The syntax `**` matches + * zero or more URL path segments, which must be the last part of the URL path + * except the `Verb`. + * + * The syntax `Variable` matches part of the URL path as specified by its + * template. A variable template must not contain other variables. If a variable + * matches a single path segment, its template may be omitted, e.g. `{var}` + * is equivalent to `{var=*}`. + * + * The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL` + * contains any reserved character, such characters should be percent-encoded + * before the matching. + * + * If a variable contains exactly one path segment, such as `"{var}"` or + * `"{var=*}"`, when such a variable is expanded into a URL path on the client + * side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The + * server side does the reverse decoding. Such variables show up in the + * [Discovery + * Document](https://developers.google.com/discovery/v1/reference/apis) as + * `{var}`. + * + * If a variable contains multiple path segments, such as `"{var=foo/*}"` + * or `"{var=**}"`, when such a variable is expanded into a URL path on the + * client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. + * The server side does the reverse decoding, except "%2F" and "%2f" are left + * unchanged. Such variables show up in the + * [Discovery + * Document](https://developers.google.com/discovery/v1/reference/apis) as + * `{+var}`. + * + * ## Using gRPC API Service Configuration + * + * gRPC API Service Configuration (service config) is a configuration language + * for configuring a gRPC service to become a user-facing product. The + * service config is simply the YAML representation of the `google.api.Service` + * proto message. + * + * As an alternative to annotating your proto file, you can configure gRPC + * transcoding in your service config YAML files. You do this by specifying a + * `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same + * effect as the proto annotation. This can be particularly useful if you + * have a proto that is reused in multiple services. Note that any transcoding + * specified in the service config will override any matching transcoding + * configuration in the proto. + * + * Example: + * + * http: + * rules: + * # Selects a gRPC method and applies HttpRule to it. + * - selector: example.v1.Messaging.GetMessage + * get: /v1/messages/{message_id}/{sub.subfield} + * + * ## Special notes + * + * When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the + * proto to JSON conversion must follow the [proto3 + * specification](https://developers.google.com/protocol-buffers/docs/proto3#json). + * + * While the single segment variable follows the semantics of + * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String + * Expansion, the multi segment variable **does not** follow RFC 6570 Section + * 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion + * does not expand special characters like `?` and `#`, which would lead + * to invalid URLs. As the result, gRPC Transcoding uses a custom encoding + * for multi segment variables. 
+ * + * The path variables **must not** refer to any repeated or mapped field, + * because client libraries are not capable of handling such variable expansion. + * + * The path variables **must not** capture the leading "/" character. The reason + * is that the most common use case "{var}" does not capture the leading "/" + * character. For consistency, all path variables must share the same behavior. + * + * Repeated message fields must not be mapped to URL query parameters, because + * no client library can support such complicated mapping. + * + * If an API needs to use a JSON array for request or response body, it can map + * the request or response body to a repeated field. However, some gRPC + * Transcoding implementations may not support this feature. + */ +export interface HttpRuleSDKType { + selector: string; + get?: string; + put?: string; + post?: string; + delete?: string; + patch?: string; + custom?: CustomHttpPatternSDKType; + body: string; + response_body: string; + additional_bindings: HttpRuleSDKType[]; +} +/** A custom pattern is used for defining custom HTTP verb. */ +export interface CustomHttpPattern { + /** The name of this custom HTTP verb. */ + kind: string; + /** The path matched by this custom verb. */ + path: string; +} +export interface CustomHttpPatternProtoMsg { + typeUrl: '/google.api.CustomHttpPattern'; + value: Uint8Array; +} +/** A custom pattern is used for defining custom HTTP verb. */ +export interface CustomHttpPatternAmino { + /** The name of this custom HTTP verb. */ + kind?: string; + /** The path matched by this custom verb. */ + path?: string; +} +export interface CustomHttpPatternAminoMsg { + type: '/google.api.CustomHttpPattern'; + value: CustomHttpPatternAmino; +} +/** A custom pattern is used for defining custom HTTP verb. */ +export interface CustomHttpPatternSDKType { + kind: string; + path: string; +} +function createBaseHttp(): Http { + return { + rules: [], + fullyDecodeReservedExpansion: false, + }; +} +export const Http = { + typeUrl: '/google.api.Http', + encode( + message: Http, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + for (const v of message.rules) { + HttpRule.encode(v!, writer.uint32(10).fork()).ldelim(); + } + if (message.fullyDecodeReservedExpansion === true) { + writer.uint32(16).bool(message.fullyDecodeReservedExpansion); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): Http { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHttp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.rules.push(HttpRule.decode(reader, reader.uint32())); + break; + case 2: + message.fullyDecodeReservedExpansion = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): Http { + return { + rules: Array.isArray(object?.rules) + ? object.rules.map((e: any) => HttpRule.fromJSON(e)) + : [], + fullyDecodeReservedExpansion: isSet(object.fullyDecodeReservedExpansion) + ? Boolean(object.fullyDecodeReservedExpansion) + : false, + }; + }, + toJSON(message: Http): unknown { + const obj: any = {}; + if (message.rules) { + obj.rules = message.rules.map(e => (e ? 
HttpRule.toJSON(e) : undefined)); + } else { + obj.rules = []; + } + message.fullyDecodeReservedExpansion !== undefined && + (obj.fullyDecodeReservedExpansion = message.fullyDecodeReservedExpansion); + return obj; + }, + fromPartial(object: Partial): Http { + const message = createBaseHttp(); + message.rules = object.rules?.map(e => HttpRule.fromPartial(e)) || []; + message.fullyDecodeReservedExpansion = + object.fullyDecodeReservedExpansion ?? false; + return message; + }, + fromAmino(object: HttpAmino): Http { + const message = createBaseHttp(); + message.rules = object.rules?.map(e => HttpRule.fromAmino(e)) || []; + if ( + object.fully_decode_reserved_expansion !== undefined && + object.fully_decode_reserved_expansion !== null + ) { + message.fullyDecodeReservedExpansion = + object.fully_decode_reserved_expansion; + } + return message; + }, + toAmino(message: Http): HttpAmino { + const obj: any = {}; + if (message.rules) { + obj.rules = message.rules.map(e => (e ? HttpRule.toAmino(e) : undefined)); + } else { + obj.rules = []; + } + obj.fully_decode_reserved_expansion = message.fullyDecodeReservedExpansion; + return obj; + }, + fromAminoMsg(object: HttpAminoMsg): Http { + return Http.fromAmino(object.value); + }, + fromProtoMsg(message: HttpProtoMsg): Http { + return Http.decode(message.value); + }, + toProto(message: Http): Uint8Array { + return Http.encode(message).finish(); + }, + toProtoMsg(message: Http): HttpProtoMsg { + return { + typeUrl: '/google.api.Http', + value: Http.encode(message).finish(), + }; + }, +}; +function createBaseHttpRule(): HttpRule { + return { + selector: '', + get: undefined, + put: undefined, + post: undefined, + delete: undefined, + patch: undefined, + custom: undefined, + body: '', + responseBody: '', + additionalBindings: [], + }; +} +export const HttpRule = { + typeUrl: '/google.api.HttpRule', + encode( + message: HttpRule, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.selector !== '') { + writer.uint32(10).string(message.selector); + } + if (message.get !== undefined) { + writer.uint32(18).string(message.get); + } + if (message.put !== undefined) { + writer.uint32(26).string(message.put); + } + if (message.post !== undefined) { + writer.uint32(34).string(message.post); + } + if (message.delete !== undefined) { + writer.uint32(42).string(message.delete); + } + if (message.patch !== undefined) { + writer.uint32(50).string(message.patch); + } + if (message.custom !== undefined) { + CustomHttpPattern.encode( + message.custom, + writer.uint32(66).fork(), + ).ldelim(); + } + if (message.body !== '') { + writer.uint32(58).string(message.body); + } + if (message.responseBody !== '') { + writer.uint32(98).string(message.responseBody); + } + for (const v of message.additionalBindings) { + HttpRule.encode(v!, writer.uint32(90).fork()).ldelim(); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): HttpRule { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseHttpRule(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.selector = reader.string(); + break; + case 2: + message.get = reader.string(); + break; + case 3: + message.put = reader.string(); + break; + case 4: + message.post = reader.string(); + break; + case 5: + message.delete = reader.string(); + break; + case 6: + message.patch = reader.string(); + break; + case 8: + message.custom = CustomHttpPattern.decode(reader, reader.uint32()); + break; + case 7: + message.body = reader.string(); + break; + case 12: + message.responseBody = reader.string(); + break; + case 11: + message.additionalBindings.push( + HttpRule.decode(reader, reader.uint32()), + ); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): HttpRule { + return { + selector: isSet(object.selector) ? String(object.selector) : '', + get: isSet(object.get) ? String(object.get) : undefined, + put: isSet(object.put) ? String(object.put) : undefined, + post: isSet(object.post) ? String(object.post) : undefined, + delete: isSet(object.delete) ? String(object.delete) : undefined, + patch: isSet(object.patch) ? String(object.patch) : undefined, + custom: isSet(object.custom) + ? CustomHttpPattern.fromJSON(object.custom) + : undefined, + body: isSet(object.body) ? String(object.body) : '', + responseBody: isSet(object.responseBody) + ? String(object.responseBody) + : '', + additionalBindings: Array.isArray(object?.additionalBindings) + ? object.additionalBindings.map((e: any) => HttpRule.fromJSON(e)) + : [], + }; + }, + toJSON(message: HttpRule): unknown { + const obj: any = {}; + message.selector !== undefined && (obj.selector = message.selector); + message.get !== undefined && (obj.get = message.get); + message.put !== undefined && (obj.put = message.put); + message.post !== undefined && (obj.post = message.post); + message.delete !== undefined && (obj.delete = message.delete); + message.patch !== undefined && (obj.patch = message.patch); + message.custom !== undefined && + (obj.custom = message.custom + ? CustomHttpPattern.toJSON(message.custom) + : undefined); + message.body !== undefined && (obj.body = message.body); + message.responseBody !== undefined && + (obj.responseBody = message.responseBody); + if (message.additionalBindings) { + obj.additionalBindings = message.additionalBindings.map(e => + e ? HttpRule.toJSON(e) : undefined, + ); + } else { + obj.additionalBindings = []; + } + return obj; + }, + fromPartial(object: Partial): HttpRule { + const message = createBaseHttpRule(); + message.selector = object.selector ?? ''; + message.get = object.get ?? undefined; + message.put = object.put ?? undefined; + message.post = object.post ?? undefined; + message.delete = object.delete ?? undefined; + message.patch = object.patch ?? undefined; + message.custom = + object.custom !== undefined && object.custom !== null + ? CustomHttpPattern.fromPartial(object.custom) + : undefined; + message.body = object.body ?? ''; + message.responseBody = object.responseBody ?? 
''; + message.additionalBindings = + object.additionalBindings?.map(e => HttpRule.fromPartial(e)) || []; + return message; + }, + fromAmino(object: HttpRuleAmino): HttpRule { + const message = createBaseHttpRule(); + if (object.selector !== undefined && object.selector !== null) { + message.selector = object.selector; + } + if (object.get !== undefined && object.get !== null) { + message.get = object.get; + } + if (object.put !== undefined && object.put !== null) { + message.put = object.put; + } + if (object.post !== undefined && object.post !== null) { + message.post = object.post; + } + if (object.delete !== undefined && object.delete !== null) { + message.delete = object.delete; + } + if (object.patch !== undefined && object.patch !== null) { + message.patch = object.patch; + } + if (object.custom !== undefined && object.custom !== null) { + message.custom = CustomHttpPattern.fromAmino(object.custom); + } + if (object.body !== undefined && object.body !== null) { + message.body = object.body; + } + if (object.response_body !== undefined && object.response_body !== null) { + message.responseBody = object.response_body; + } + message.additionalBindings = + object.additional_bindings?.map(e => HttpRule.fromAmino(e)) || []; + return message; + }, + toAmino(message: HttpRule): HttpRuleAmino { + const obj: any = {}; + obj.selector = message.selector; + obj.get = message.get; + obj.put = message.put; + obj.post = message.post; + obj.delete = message.delete; + obj.patch = message.patch; + obj.custom = message.custom + ? CustomHttpPattern.toAmino(message.custom) + : undefined; + obj.body = message.body; + obj.response_body = message.responseBody; + if (message.additionalBindings) { + obj.additional_bindings = message.additionalBindings.map(e => + e ? HttpRule.toAmino(e) : undefined, + ); + } else { + obj.additional_bindings = []; + } + return obj; + }, + fromAminoMsg(object: HttpRuleAminoMsg): HttpRule { + return HttpRule.fromAmino(object.value); + }, + fromProtoMsg(message: HttpRuleProtoMsg): HttpRule { + return HttpRule.decode(message.value); + }, + toProto(message: HttpRule): Uint8Array { + return HttpRule.encode(message).finish(); + }, + toProtoMsg(message: HttpRule): HttpRuleProtoMsg { + return { + typeUrl: '/google.api.HttpRule', + value: HttpRule.encode(message).finish(), + }; + }, +}; +function createBaseCustomHttpPattern(): CustomHttpPattern { + return { + kind: '', + path: '', + }; +} +export const CustomHttpPattern = { + typeUrl: '/google.api.CustomHttpPattern', + encode( + message: CustomHttpPattern, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.kind !== '') { + writer.uint32(10).string(message.kind); + } + if (message.path !== '') { + writer.uint32(18).string(message.path); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): CustomHttpPattern { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCustomHttpPattern(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.kind = reader.string(); + break; + case 2: + message.path = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): CustomHttpPattern { + return { + kind: isSet(object.kind) ? String(object.kind) : '', + path: isSet(object.path) ? 
String(object.path) : '', + }; + }, + toJSON(message: CustomHttpPattern): unknown { + const obj: any = {}; + message.kind !== undefined && (obj.kind = message.kind); + message.path !== undefined && (obj.path = message.path); + return obj; + }, + fromPartial(object: Partial): CustomHttpPattern { + const message = createBaseCustomHttpPattern(); + message.kind = object.kind ?? ''; + message.path = object.path ?? ''; + return message; + }, + fromAmino(object: CustomHttpPatternAmino): CustomHttpPattern { + const message = createBaseCustomHttpPattern(); + if (object.kind !== undefined && object.kind !== null) { + message.kind = object.kind; + } + if (object.path !== undefined && object.path !== null) { + message.path = object.path; + } + return message; + }, + toAmino(message: CustomHttpPattern): CustomHttpPatternAmino { + const obj: any = {}; + obj.kind = message.kind; + obj.path = message.path; + return obj; + }, + fromAminoMsg(object: CustomHttpPatternAminoMsg): CustomHttpPattern { + return CustomHttpPattern.fromAmino(object.value); + }, + fromProtoMsg(message: CustomHttpPatternProtoMsg): CustomHttpPattern { + return CustomHttpPattern.decode(message.value); + }, + toProto(message: CustomHttpPattern): Uint8Array { + return CustomHttpPattern.encode(message).finish(); + }, + toProtoMsg(message: CustomHttpPattern): CustomHttpPatternProtoMsg { + return { + typeUrl: '/google.api.CustomHttpPattern', + value: CustomHttpPattern.encode(message).finish(), + }; + }, +}; diff --git a/packages/cosmic-proto/src/codegen/google/bundle.ts b/packages/cosmic-proto/src/codegen/google/bundle.ts new file mode 100644 index 00000000000..fd6f3159257 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/google/bundle.ts @@ -0,0 +1,11 @@ +//@ts-nocheck +import * as _20 from './protobuf/any.js'; +import * as _21 from './protobuf/descriptor.js'; +import * as _22 from './protobuf/timestamp.js'; +export namespace google { + export const protobuf = { + ..._20, + ..._21, + ..._22, + }; +} diff --git a/packages/cosmic-proto/src/codegen/google/protobuf/any.ts b/packages/cosmic-proto/src/codegen/google/protobuf/any.ts new file mode 100644 index 00000000000..292bd43c063 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/google/protobuf/any.ts @@ -0,0 +1,421 @@ +//@ts-nocheck +import { BinaryReader, BinaryWriter } from '../../binary.js'; +import { isSet, bytesFromBase64, base64FromBytes } from '../../helpers.js'; +/** + * `Any` contains an arbitrary serialized protocol buffer message along with a + * URL that describes the type of the serialized message. + * + * Protobuf library provides support to pack/unpack Any values in the form + * of utility functions or additional generated methods of the Any type. + * + * Example 1: Pack and unpack a message in C++. + * + * Foo foo = ...; + * Any any; + * any.PackFrom(foo); + * ... + * if (any.UnpackTo(&foo)) { + * ... + * } + * + * Example 2: Pack and unpack a message in Java. + * + * Foo foo = ...; + * Any any = Any.pack(foo); + * ... + * if (any.is(Foo.class)) { + * foo = any.unpack(Foo.class); + * } + * + * Example 3: Pack and unpack a message in Python. + * + * foo = Foo(...) + * any = Any() + * any.Pack(foo) + * ... + * if any.Is(Foo.DESCRIPTOR): + * any.Unpack(foo) + * ... + * + * Example 4: Pack and unpack a message in Go + * + * foo := &pb.Foo{...} + * any, err := ptypes.MarshalAny(foo) + * ... + * foo := &pb.Foo{} + * if err := ptypes.UnmarshalAny(any, foo); err != nil { + * ... 
+ * } + * + * The pack methods provided by protobuf library will by default use + * 'type.googleapis.com/full.type.name' as the type URL and the unpack + * methods only use the fully qualified type name after the last '/' + * in the type URL, for example "foo.bar.com/x/y.z" will yield type + * name "y.z". + * + * + * JSON + * ==== + * The JSON representation of an `Any` value uses the regular + * representation of the deserialized, embedded message, with an + * additional field `@type` which contains the type URL. Example: + * + * package google.profile; + * message Person { + * string first_name = 1; + * string last_name = 2; + * } + * + * { + * "@type": "type.googleapis.com/google.profile.Person", + * "firstName": , + * "lastName": + * } + * + * If the embedded message type is well-known and has a custom JSON + * representation, that representation will be embedded adding a field + * `value` which holds the custom JSON in addition to the `@type` + * field. Example (for message [google.protobuf.Duration][]): + * + * { + * "@type": "type.googleapis.com/google.protobuf.Duration", + * "value": "1.212s" + * } + */ +export interface Any { + $typeUrl?: '/google.protobuf.Any' | string; + /** + * A URL/resource name that uniquely identifies the type of the serialized + * protocol buffer message. This string must contain at least + * one "/" character. The last segment of the URL's path must represent + * the fully qualified name of the type (as in + * `path/google.protobuf.Duration`). The name should be in a canonical form + * (e.g., leading "." is not accepted). + * + * In practice, teams usually precompile into the binary all types that they + * expect it to use in the context of Any. However, for URLs which use the + * scheme `http`, `https`, or no scheme, one can optionally set up a type + * server that maps type URLs to message definitions as follows: + * + * * If no scheme is provided, `https` is assumed. + * * An HTTP GET on the URL must yield a [google.protobuf.Type][] + * value in binary format, or produce an error. + * * Applications are allowed to cache lookup results based on the + * URL, or have them precompiled into a binary to avoid any + * lookup. Therefore, binary compatibility needs to be preserved + * on changes to types. (Use versioned type names to manage + * breaking changes.) + * + * Note: this functionality is not currently available in the official + * protobuf release, and it is not used for type URLs beginning with + * type.googleapis.com. + * + * Schemes other than `http`, `https` (or the empty scheme) might be + * used with implementation specific semantics. + */ + typeUrl: string; + /** Must be a valid serialized protocol buffer of the above specified type. */ + value: Uint8Array; +} +export interface AnyProtoMsg { + typeUrl: '/google.protobuf.Any'; + value: Uint8Array; +} +/** + * `Any` contains an arbitrary serialized protocol buffer message along with a + * URL that describes the type of the serialized message. + * + * Protobuf library provides support to pack/unpack Any values in the form + * of utility functions or additional generated methods of the Any type. + * + * Example 1: Pack and unpack a message in C++. + * + * Foo foo = ...; + * Any any; + * any.PackFrom(foo); + * ... + * if (any.UnpackTo(&foo)) { + * ... + * } + * + * Example 2: Pack and unpack a message in Java. + * + * Foo foo = ...; + * Any any = Any.pack(foo); + * ... + * if (any.is(Foo.class)) { + * foo = any.unpack(Foo.class); + * } + * + * Example 3: Pack and unpack a message in Python. 
+ * + * foo = Foo(...) + * any = Any() + * any.Pack(foo) + * ... + * if any.Is(Foo.DESCRIPTOR): + * any.Unpack(foo) + * ... + * + * Example 4: Pack and unpack a message in Go + * + * foo := &pb.Foo{...} + * any, err := ptypes.MarshalAny(foo) + * ... + * foo := &pb.Foo{} + * if err := ptypes.UnmarshalAny(any, foo); err != nil { + * ... + * } + * + * The pack methods provided by protobuf library will by default use + * 'type.googleapis.com/full.type.name' as the type URL and the unpack + * methods only use the fully qualified type name after the last '/' + * in the type URL, for example "foo.bar.com/x/y.z" will yield type + * name "y.z". + * + * + * JSON + * ==== + * The JSON representation of an `Any` value uses the regular + * representation of the deserialized, embedded message, with an + * additional field `@type` which contains the type URL. Example: + * + * package google.profile; + * message Person { + * string first_name = 1; + * string last_name = 2; + * } + * + * { + * "@type": "type.googleapis.com/google.profile.Person", + * "firstName": , + * "lastName": + * } + * + * If the embedded message type is well-known and has a custom JSON + * representation, that representation will be embedded adding a field + * `value` which holds the custom JSON in addition to the `@type` + * field. Example (for message [google.protobuf.Duration][]): + * + * { + * "@type": "type.googleapis.com/google.protobuf.Duration", + * "value": "1.212s" + * } + */ +export interface AnyAmino { + /** + * A URL/resource name that uniquely identifies the type of the serialized + * protocol buffer message. This string must contain at least + * one "/" character. The last segment of the URL's path must represent + * the fully qualified name of the type (as in + * `path/google.protobuf.Duration`). The name should be in a canonical form + * (e.g., leading "." is not accepted). + * + * In practice, teams usually precompile into the binary all types that they + * expect it to use in the context of Any. However, for URLs which use the + * scheme `http`, `https`, or no scheme, one can optionally set up a type + * server that maps type URLs to message definitions as follows: + * + * * If no scheme is provided, `https` is assumed. + * * An HTTP GET on the URL must yield a [google.protobuf.Type][] + * value in binary format, or produce an error. + * * Applications are allowed to cache lookup results based on the + * URL, or have them precompiled into a binary to avoid any + * lookup. Therefore, binary compatibility needs to be preserved + * on changes to types. (Use versioned type names to manage + * breaking changes.) + * + * Note: this functionality is not currently available in the official + * protobuf release, and it is not used for type URLs beginning with + * type.googleapis.com. + * + * Schemes other than `http`, `https` (or the empty scheme) might be + * used with implementation specific semantics. + */ + type: string; + /** Must be a valid serialized protocol buffer of the above specified type. */ + value: any; +} +export interface AnyAminoMsg { + type: string; + value: AnyAmino; +} +/** + * `Any` contains an arbitrary serialized protocol buffer message along with a + * URL that describes the type of the serialized message. + * + * Protobuf library provides support to pack/unpack Any values in the form + * of utility functions or additional generated methods of the Any type. + * + * Example 1: Pack and unpack a message in C++. + * + * Foo foo = ...; + * Any any; + * any.PackFrom(foo); + * ... 
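The doc comment above walks through pack/unpack in C++, Java, Python, and Go; for completeness, here is a minimal TypeScript sketch of the same round trip using the generated `Any` codec defined further down in this file. The import specifier, type URL, and payload bytes are illustrative placeholders, not values taken from this patch.

import { Any } from '@agoric/cosmic-proto/google/protobuf/any.js'; // hypothetical import specifier

// Build an Any from a partial object; unset fields fall back to the
// defaults produced by createBaseAny().
const packed = Any.fromPartial({
  typeUrl: '/google.protobuf.Duration', // placeholder type URL
  value: new Uint8Array([10, 2, 49, 115]), // placeholder serialized payload
});

// encode() returns a BinaryWriter; finish() yields the wire bytes.
const bytes = Any.encode(packed).finish();

// decode() reverses the process, restoring typeUrl and value.
const unpacked = Any.decode(bytes);
// unpacked.typeUrl === '/google.protobuf.Duration'
// unpacked.value is byte-for-byte equal to packed.value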
+ * if (any.UnpackTo(&foo)) { + * ... + * } + * + * Example 2: Pack and unpack a message in Java. + * + * Foo foo = ...; + * Any any = Any.pack(foo); + * ... + * if (any.is(Foo.class)) { + * foo = any.unpack(Foo.class); + * } + * + * Example 3: Pack and unpack a message in Python. + * + * foo = Foo(...) + * any = Any() + * any.Pack(foo) + * ... + * if any.Is(Foo.DESCRIPTOR): + * any.Unpack(foo) + * ... + * + * Example 4: Pack and unpack a message in Go + * + * foo := &pb.Foo{...} + * any, err := ptypes.MarshalAny(foo) + * ... + * foo := &pb.Foo{} + * if err := ptypes.UnmarshalAny(any, foo); err != nil { + * ... + * } + * + * The pack methods provided by protobuf library will by default use + * 'type.googleapis.com/full.type.name' as the type URL and the unpack + * methods only use the fully qualified type name after the last '/' + * in the type URL, for example "foo.bar.com/x/y.z" will yield type + * name "y.z". + * + * + * JSON + * ==== + * The JSON representation of an `Any` value uses the regular + * representation of the deserialized, embedded message, with an + * additional field `@type` which contains the type URL. Example: + * + * package google.profile; + * message Person { + * string first_name = 1; + * string last_name = 2; + * } + * + * { + * "@type": "type.googleapis.com/google.profile.Person", + * "firstName": , + * "lastName": + * } + * + * If the embedded message type is well-known and has a custom JSON + * representation, that representation will be embedded adding a field + * `value` which holds the custom JSON in addition to the `@type` + * field. Example (for message [google.protobuf.Duration][]): + * + * { + * "@type": "type.googleapis.com/google.protobuf.Duration", + * "value": "1.212s" + * } + */ +export interface AnySDKType { + $typeUrl?: '/google.protobuf.Any' | string; + type_url: string; + value: Uint8Array; +} +function createBaseAny(): Any { + return { + $typeUrl: '/google.protobuf.Any', + typeUrl: '', + value: new Uint8Array(), + }; +} +export const Any = { + typeUrl: '/google.protobuf.Any', + encode( + message: Any, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.typeUrl !== '') { + writer.uint32(10).string(message.typeUrl); + } + if (message.value.length !== 0) { + writer.uint32(18).bytes(message.value); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): Any { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAny(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.typeUrl = reader.string(); + break; + case 2: + message.value = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): Any { + return { + typeUrl: isSet(object.typeUrl) ? String(object.typeUrl) : '', + value: isSet(object.value) + ? bytesFromBase64(object.value) + : new Uint8Array(), + }; + }, + toJSON(message: Any): unknown { + const obj: any = {}; + message.typeUrl !== undefined && (obj.typeUrl = message.typeUrl); + message.value !== undefined && + (obj.value = base64FromBytes( + message.value !== undefined ? message.value : new Uint8Array(), + )); + return obj; + }, + fromPartial(object: Partial): Any { + const message = createBaseAny(); + message.typeUrl = object.typeUrl ?? ''; + message.value = object.value ?? 
new Uint8Array(); + return message; + }, + fromAmino(object: AnyAmino): Any { + return { + typeUrl: object.type, + value: object.value, + }; + }, + toAmino(message: Any): AnyAmino { + const obj: any = {}; + obj.type = message.typeUrl; + obj.value = message.value; + return obj; + }, + fromAminoMsg(object: AnyAminoMsg): Any { + return Any.fromAmino(object.value); + }, + fromProtoMsg(message: AnyProtoMsg): Any { + return Any.decode(message.value); + }, + toProto(message: Any): Uint8Array { + return Any.encode(message).finish(); + }, + toProtoMsg(message: Any): AnyProtoMsg { + return { + typeUrl: '/google.protobuf.Any', + value: Any.encode(message).finish(), + }; + }, +}; diff --git a/packages/cosmic-proto/src/codegen/google/protobuf/descriptor.ts b/packages/cosmic-proto/src/codegen/google/protobuf/descriptor.ts new file mode 100644 index 00000000000..c6f1a69e565 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/google/protobuf/descriptor.ts @@ -0,0 +1,6741 @@ +//@ts-nocheck +import { BinaryReader, BinaryWriter } from '../../binary.js'; +import { isSet, bytesFromBase64, base64FromBytes } from '../../helpers.js'; +export enum FieldDescriptorProto_Type { + /** + * TYPE_DOUBLE - 0 is reserved for errors. + * Order is weird for historical reasons. + */ + TYPE_DOUBLE = 1, + TYPE_FLOAT = 2, + /** + * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + * negative values are likely. + */ + TYPE_INT64 = 3, + TYPE_UINT64 = 4, + /** + * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + * negative values are likely. + */ + TYPE_INT32 = 5, + TYPE_FIXED64 = 6, + TYPE_FIXED32 = 7, + TYPE_BOOL = 8, + TYPE_STRING = 9, + /** + * TYPE_GROUP - Tag-delimited aggregate. + * Group type is deprecated and not supported in proto3. However, Proto3 + * implementations should still be able to parse the group wire format and + * treat group fields as unknown fields. + */ + TYPE_GROUP = 10, + TYPE_MESSAGE = 11, + /** TYPE_BYTES - New in version 2. */ + TYPE_BYTES = 12, + TYPE_UINT32 = 13, + TYPE_ENUM = 14, + TYPE_SFIXED32 = 15, + TYPE_SFIXED64 = 16, + /** TYPE_SINT32 - Uses ZigZag encoding. */ + TYPE_SINT32 = 17, + /** TYPE_SINT64 - Uses ZigZag encoding. 
*/ + TYPE_SINT64 = 18, + UNRECOGNIZED = -1, +} +export const FieldDescriptorProto_TypeSDKType = FieldDescriptorProto_Type; +export const FieldDescriptorProto_TypeAmino = FieldDescriptorProto_Type; +export function fieldDescriptorProto_TypeFromJSON( + object: any, +): FieldDescriptorProto_Type { + switch (object) { + case 1: + case 'TYPE_DOUBLE': + return FieldDescriptorProto_Type.TYPE_DOUBLE; + case 2: + case 'TYPE_FLOAT': + return FieldDescriptorProto_Type.TYPE_FLOAT; + case 3: + case 'TYPE_INT64': + return FieldDescriptorProto_Type.TYPE_INT64; + case 4: + case 'TYPE_UINT64': + return FieldDescriptorProto_Type.TYPE_UINT64; + case 5: + case 'TYPE_INT32': + return FieldDescriptorProto_Type.TYPE_INT32; + case 6: + case 'TYPE_FIXED64': + return FieldDescriptorProto_Type.TYPE_FIXED64; + case 7: + case 'TYPE_FIXED32': + return FieldDescriptorProto_Type.TYPE_FIXED32; + case 8: + case 'TYPE_BOOL': + return FieldDescriptorProto_Type.TYPE_BOOL; + case 9: + case 'TYPE_STRING': + return FieldDescriptorProto_Type.TYPE_STRING; + case 10: + case 'TYPE_GROUP': + return FieldDescriptorProto_Type.TYPE_GROUP; + case 11: + case 'TYPE_MESSAGE': + return FieldDescriptorProto_Type.TYPE_MESSAGE; + case 12: + case 'TYPE_BYTES': + return FieldDescriptorProto_Type.TYPE_BYTES; + case 13: + case 'TYPE_UINT32': + return FieldDescriptorProto_Type.TYPE_UINT32; + case 14: + case 'TYPE_ENUM': + return FieldDescriptorProto_Type.TYPE_ENUM; + case 15: + case 'TYPE_SFIXED32': + return FieldDescriptorProto_Type.TYPE_SFIXED32; + case 16: + case 'TYPE_SFIXED64': + return FieldDescriptorProto_Type.TYPE_SFIXED64; + case 17: + case 'TYPE_SINT32': + return FieldDescriptorProto_Type.TYPE_SINT32; + case 18: + case 'TYPE_SINT64': + return FieldDescriptorProto_Type.TYPE_SINT64; + case -1: + case 'UNRECOGNIZED': + default: + return FieldDescriptorProto_Type.UNRECOGNIZED; + } +} +export function fieldDescriptorProto_TypeToJSON( + object: FieldDescriptorProto_Type, +): string { + switch (object) { + case FieldDescriptorProto_Type.TYPE_DOUBLE: + return 'TYPE_DOUBLE'; + case FieldDescriptorProto_Type.TYPE_FLOAT: + return 'TYPE_FLOAT'; + case FieldDescriptorProto_Type.TYPE_INT64: + return 'TYPE_INT64'; + case FieldDescriptorProto_Type.TYPE_UINT64: + return 'TYPE_UINT64'; + case FieldDescriptorProto_Type.TYPE_INT32: + return 'TYPE_INT32'; + case FieldDescriptorProto_Type.TYPE_FIXED64: + return 'TYPE_FIXED64'; + case FieldDescriptorProto_Type.TYPE_FIXED32: + return 'TYPE_FIXED32'; + case FieldDescriptorProto_Type.TYPE_BOOL: + return 'TYPE_BOOL'; + case FieldDescriptorProto_Type.TYPE_STRING: + return 'TYPE_STRING'; + case FieldDescriptorProto_Type.TYPE_GROUP: + return 'TYPE_GROUP'; + case FieldDescriptorProto_Type.TYPE_MESSAGE: + return 'TYPE_MESSAGE'; + case FieldDescriptorProto_Type.TYPE_BYTES: + return 'TYPE_BYTES'; + case FieldDescriptorProto_Type.TYPE_UINT32: + return 'TYPE_UINT32'; + case FieldDescriptorProto_Type.TYPE_ENUM: + return 'TYPE_ENUM'; + case FieldDescriptorProto_Type.TYPE_SFIXED32: + return 'TYPE_SFIXED32'; + case FieldDescriptorProto_Type.TYPE_SFIXED64: + return 'TYPE_SFIXED64'; + case FieldDescriptorProto_Type.TYPE_SINT32: + return 'TYPE_SINT32'; + case FieldDescriptorProto_Type.TYPE_SINT64: + return 'TYPE_SINT64'; + case FieldDescriptorProto_Type.UNRECOGNIZED: + default: + return 'UNRECOGNIZED'; + } +} +export enum FieldDescriptorProto_Label { + /** LABEL_OPTIONAL - 0 is reserved for errors */ + LABEL_OPTIONAL = 1, + LABEL_REQUIRED = 2, + LABEL_REPEATED = 3, + UNRECOGNIZED = -1, +} +export const 
FieldDescriptorProto_LabelSDKType = FieldDescriptorProto_Label; +export const FieldDescriptorProto_LabelAmino = FieldDescriptorProto_Label; +export function fieldDescriptorProto_LabelFromJSON( + object: any, +): FieldDescriptorProto_Label { + switch (object) { + case 1: + case 'LABEL_OPTIONAL': + return FieldDescriptorProto_Label.LABEL_OPTIONAL; + case 2: + case 'LABEL_REQUIRED': + return FieldDescriptorProto_Label.LABEL_REQUIRED; + case 3: + case 'LABEL_REPEATED': + return FieldDescriptorProto_Label.LABEL_REPEATED; + case -1: + case 'UNRECOGNIZED': + default: + return FieldDescriptorProto_Label.UNRECOGNIZED; + } +} +export function fieldDescriptorProto_LabelToJSON( + object: FieldDescriptorProto_Label, +): string { + switch (object) { + case FieldDescriptorProto_Label.LABEL_OPTIONAL: + return 'LABEL_OPTIONAL'; + case FieldDescriptorProto_Label.LABEL_REQUIRED: + return 'LABEL_REQUIRED'; + case FieldDescriptorProto_Label.LABEL_REPEATED: + return 'LABEL_REPEATED'; + case FieldDescriptorProto_Label.UNRECOGNIZED: + default: + return 'UNRECOGNIZED'; + } +} +/** Generated classes can be optimized for speed or code size. */ +export enum FileOptions_OptimizeMode { + /** + * SPEED - Generate complete code for parsing, serialization, + * etc. + */ + SPEED = 1, + /** CODE_SIZE - Use ReflectionOps to implement these methods. */ + CODE_SIZE = 2, + /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */ + LITE_RUNTIME = 3, + UNRECOGNIZED = -1, +} +export const FileOptions_OptimizeModeSDKType = FileOptions_OptimizeMode; +export const FileOptions_OptimizeModeAmino = FileOptions_OptimizeMode; +export function fileOptions_OptimizeModeFromJSON( + object: any, +): FileOptions_OptimizeMode { + switch (object) { + case 1: + case 'SPEED': + return FileOptions_OptimizeMode.SPEED; + case 2: + case 'CODE_SIZE': + return FileOptions_OptimizeMode.CODE_SIZE; + case 3: + case 'LITE_RUNTIME': + return FileOptions_OptimizeMode.LITE_RUNTIME; + case -1: + case 'UNRECOGNIZED': + default: + return FileOptions_OptimizeMode.UNRECOGNIZED; + } +} +export function fileOptions_OptimizeModeToJSON( + object: FileOptions_OptimizeMode, +): string { + switch (object) { + case FileOptions_OptimizeMode.SPEED: + return 'SPEED'; + case FileOptions_OptimizeMode.CODE_SIZE: + return 'CODE_SIZE'; + case FileOptions_OptimizeMode.LITE_RUNTIME: + return 'LITE_RUNTIME'; + case FileOptions_OptimizeMode.UNRECOGNIZED: + default: + return 'UNRECOGNIZED'; + } +} +export enum FieldOptions_CType { + /** STRING - Default mode. */ + STRING = 0, + CORD = 1, + STRING_PIECE = 2, + UNRECOGNIZED = -1, +} +export const FieldOptions_CTypeSDKType = FieldOptions_CType; +export const FieldOptions_CTypeAmino = FieldOptions_CType; +export function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType { + switch (object) { + case 0: + case 'STRING': + return FieldOptions_CType.STRING; + case 1: + case 'CORD': + return FieldOptions_CType.CORD; + case 2: + case 'STRING_PIECE': + return FieldOptions_CType.STRING_PIECE; + case -1: + case 'UNRECOGNIZED': + default: + return FieldOptions_CType.UNRECOGNIZED; + } +} +export function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string { + switch (object) { + case FieldOptions_CType.STRING: + return 'STRING'; + case FieldOptions_CType.CORD: + return 'CORD'; + case FieldOptions_CType.STRING_PIECE: + return 'STRING_PIECE'; + case FieldOptions_CType.UNRECOGNIZED: + default: + return 'UNRECOGNIZED'; + } +} +export enum FieldOptions_JSType { + /** JS_NORMAL - Use the default type. 
*/ + JS_NORMAL = 0, + /** JS_STRING - Use JavaScript strings. */ + JS_STRING = 1, + /** JS_NUMBER - Use JavaScript numbers. */ + JS_NUMBER = 2, + UNRECOGNIZED = -1, +} +export const FieldOptions_JSTypeSDKType = FieldOptions_JSType; +export const FieldOptions_JSTypeAmino = FieldOptions_JSType; +export function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType { + switch (object) { + case 0: + case 'JS_NORMAL': + return FieldOptions_JSType.JS_NORMAL; + case 1: + case 'JS_STRING': + return FieldOptions_JSType.JS_STRING; + case 2: + case 'JS_NUMBER': + return FieldOptions_JSType.JS_NUMBER; + case -1: + case 'UNRECOGNIZED': + default: + return FieldOptions_JSType.UNRECOGNIZED; + } +} +export function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string { + switch (object) { + case FieldOptions_JSType.JS_NORMAL: + return 'JS_NORMAL'; + case FieldOptions_JSType.JS_STRING: + return 'JS_STRING'; + case FieldOptions_JSType.JS_NUMBER: + return 'JS_NUMBER'; + case FieldOptions_JSType.UNRECOGNIZED: + default: + return 'UNRECOGNIZED'; + } +} +/** + * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + * or neither? HTTP based RPC implementation may choose GET verb for safe + * methods, and PUT verb for idempotent methods instead of the default POST. + */ +export enum MethodOptions_IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0, + /** NO_SIDE_EFFECTS - implies idempotent */ + NO_SIDE_EFFECTS = 1, + /** IDEMPOTENT - idempotent, but may have side effects */ + IDEMPOTENT = 2, + UNRECOGNIZED = -1, +} +export const MethodOptions_IdempotencyLevelSDKType = + MethodOptions_IdempotencyLevel; +export const MethodOptions_IdempotencyLevelAmino = + MethodOptions_IdempotencyLevel; +export function methodOptions_IdempotencyLevelFromJSON( + object: any, +): MethodOptions_IdempotencyLevel { + switch (object) { + case 0: + case 'IDEMPOTENCY_UNKNOWN': + return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; + case 1: + case 'NO_SIDE_EFFECTS': + return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; + case 2: + case 'IDEMPOTENT': + return MethodOptions_IdempotencyLevel.IDEMPOTENT; + case -1: + case 'UNRECOGNIZED': + default: + return MethodOptions_IdempotencyLevel.UNRECOGNIZED; + } +} +export function methodOptions_IdempotencyLevelToJSON( + object: MethodOptions_IdempotencyLevel, +): string { + switch (object) { + case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: + return 'IDEMPOTENCY_UNKNOWN'; + case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: + return 'NO_SIDE_EFFECTS'; + case MethodOptions_IdempotencyLevel.IDEMPOTENT: + return 'IDEMPOTENT'; + case MethodOptions_IdempotencyLevel.UNRECOGNIZED: + default: + return 'UNRECOGNIZED'; + } +} +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ +export interface FileDescriptorSet { + file: FileDescriptorProto[]; +} +export interface FileDescriptorSetProtoMsg { + typeUrl: '/google.protobuf.FileDescriptorSet'; + value: Uint8Array; +} +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. + */ +export interface FileDescriptorSetAmino { + file?: FileDescriptorProtoAmino[]; +} +export interface FileDescriptorSetAminoMsg { + type: '/google.protobuf.FileDescriptorSet'; + value: FileDescriptorSetAmino; +} +/** + * The protocol compiler can output a FileDescriptorSet containing the .proto + * files it parses. 
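Each enum emitted here is accompanied by a pair of *FromJSON / *ToJSON helpers like the ones above. A short sketch of how they behave, assuming nothing beyond the switch statements shown in this file (the import specifier is a hypothetical placeholder):

import {
  FieldDescriptorProto_Type,
  fieldDescriptorProto_TypeFromJSON,
  fieldDescriptorProto_TypeToJSON,
} from '@agoric/cosmic-proto/google/protobuf/descriptor.js'; // hypothetical import specifier

// FromJSON accepts either the numeric wire value or the proto enum name.
fieldDescriptorProto_TypeFromJSON(9); // FieldDescriptorProto_Type.TYPE_STRING
fieldDescriptorProto_TypeFromJSON('TYPE_STRING'); // FieldDescriptorProto_Type.TYPE_STRING

// ToJSON maps an enum member back to its proto name.
fieldDescriptorProto_TypeToJSON(FieldDescriptorProto_Type.TYPE_BOOL); // 'TYPE_BOOL'

// Anything unrecognized falls through to the UNRECOGNIZED sentinel (-1).
fieldDescriptorProto_TypeFromJSON('TYPE_BOGUS'); // FieldDescriptorProto_Type.UNRECOGNIZED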
+ */ +export interface FileDescriptorSetSDKType { + file: FileDescriptorProtoSDKType[]; +} +/** Describes a complete .proto file. */ +export interface FileDescriptorProto { + /** file name, relative to root of source tree */ + name: string; + package: string; + /** Names of files imported by this file. */ + dependency: string[]; + /** Indexes of the public imported files in the dependency list above. */ + publicDependency: number[]; + /** + * Indexes of the weak imported files in the dependency list. + * For Google-internal migration only. Do not use. + */ + weakDependency: number[]; + /** All top-level definitions in this file. */ + messageType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + service: ServiceDescriptorProto[]; + extension: FieldDescriptorProto[]; + options?: FileOptions; + /** + * This field contains optional information about the original source code. + * You may safely remove this entire field without harming runtime + * functionality of the descriptors -- the information is needed only by + * development tools. + */ + sourceCodeInfo?: SourceCodeInfo; + /** + * The syntax of the proto file. + * The supported values are "proto2" and "proto3". + */ + syntax: string; +} +export interface FileDescriptorProtoProtoMsg { + typeUrl: '/google.protobuf.FileDescriptorProto'; + value: Uint8Array; +} +/** Describes a complete .proto file. */ +export interface FileDescriptorProtoAmino { + /** file name, relative to root of source tree */ + name?: string; + package?: string; + /** Names of files imported by this file. */ + dependency?: string[]; + /** Indexes of the public imported files in the dependency list above. */ + public_dependency?: number[]; + /** + * Indexes of the weak imported files in the dependency list. + * For Google-internal migration only. Do not use. + */ + weak_dependency?: number[]; + /** All top-level definitions in this file. */ + message_type?: DescriptorProtoAmino[]; + enum_type?: EnumDescriptorProtoAmino[]; + service?: ServiceDescriptorProtoAmino[]; + extension?: FieldDescriptorProtoAmino[]; + options?: FileOptionsAmino; + /** + * This field contains optional information about the original source code. + * You may safely remove this entire field without harming runtime + * functionality of the descriptors -- the information is needed only by + * development tools. + */ + source_code_info?: SourceCodeInfoAmino; + /** + * The syntax of the proto file. + * The supported values are "proto2" and "proto3". + */ + syntax?: string; +} +export interface FileDescriptorProtoAminoMsg { + type: '/google.protobuf.FileDescriptorProto'; + value: FileDescriptorProtoAmino; +} +/** Describes a complete .proto file. */ +export interface FileDescriptorProtoSDKType { + name: string; + package: string; + dependency: string[]; + public_dependency: number[]; + weak_dependency: number[]; + message_type: DescriptorProtoSDKType[]; + enum_type: EnumDescriptorProtoSDKType[]; + service: ServiceDescriptorProtoSDKType[]; + extension: FieldDescriptorProtoSDKType[]; + options?: FileOptionsSDKType; + source_code_info?: SourceCodeInfoSDKType; + syntax: string; +} +/** Describes a message type. 
*/ +export interface DescriptorProto { + name: string; + field: FieldDescriptorProto[]; + extension: FieldDescriptorProto[]; + nestedType: DescriptorProto[]; + enumType: EnumDescriptorProto[]; + extensionRange: DescriptorProto_ExtensionRange[]; + oneofDecl: OneofDescriptorProto[]; + options?: MessageOptions; + reservedRange: DescriptorProto_ReservedRange[]; + /** + * Reserved field names, which may not be used by fields in the same message. + * A given name may only be reserved once. + */ + reservedName: string[]; +} +export interface DescriptorProtoProtoMsg { + typeUrl: '/google.protobuf.DescriptorProto'; + value: Uint8Array; +} +/** Describes a message type. */ +export interface DescriptorProtoAmino { + name?: string; + field?: FieldDescriptorProtoAmino[]; + extension?: FieldDescriptorProtoAmino[]; + nested_type?: DescriptorProtoAmino[]; + enum_type?: EnumDescriptorProtoAmino[]; + extension_range?: DescriptorProto_ExtensionRangeAmino[]; + oneof_decl?: OneofDescriptorProtoAmino[]; + options?: MessageOptionsAmino; + reserved_range?: DescriptorProto_ReservedRangeAmino[]; + /** + * Reserved field names, which may not be used by fields in the same message. + * A given name may only be reserved once. + */ + reserved_name?: string[]; +} +export interface DescriptorProtoAminoMsg { + type: '/google.protobuf.DescriptorProto'; + value: DescriptorProtoAmino; +} +/** Describes a message type. */ +export interface DescriptorProtoSDKType { + name: string; + field: FieldDescriptorProtoSDKType[]; + extension: FieldDescriptorProtoSDKType[]; + nested_type: DescriptorProtoSDKType[]; + enum_type: EnumDescriptorProtoSDKType[]; + extension_range: DescriptorProto_ExtensionRangeSDKType[]; + oneof_decl: OneofDescriptorProtoSDKType[]; + options?: MessageOptionsSDKType; + reserved_range: DescriptorProto_ReservedRangeSDKType[]; + reserved_name: string[]; +} +export interface DescriptorProto_ExtensionRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; + options?: ExtensionRangeOptions; +} +export interface DescriptorProto_ExtensionRangeProtoMsg { + typeUrl: '/google.protobuf.ExtensionRange'; + value: Uint8Array; +} +export interface DescriptorProto_ExtensionRangeAmino { + /** Inclusive. */ + start?: number; + /** Exclusive. */ + end?: number; + options?: ExtensionRangeOptionsAmino; +} +export interface DescriptorProto_ExtensionRangeAminoMsg { + type: '/google.protobuf.ExtensionRange'; + value: DescriptorProto_ExtensionRangeAmino; +} +export interface DescriptorProto_ExtensionRangeSDKType { + start: number; + end: number; + options?: ExtensionRangeOptionsSDKType; +} +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRange { + /** Inclusive. */ + start: number; + /** Exclusive. */ + end: number; +} +export interface DescriptorProto_ReservedRangeProtoMsg { + typeUrl: '/google.protobuf.ReservedRange'; + value: Uint8Array; +} +/** + * Range of reserved tag numbers. Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRangeAmino { + /** Inclusive. */ + start?: number; + /** Exclusive. */ + end?: number; +} +export interface DescriptorProto_ReservedRangeAminoMsg { + type: '/google.protobuf.ReservedRange'; + value: DescriptorProto_ReservedRangeAmino; +} +/** + * Range of reserved tag numbers. 
Reserved tag numbers may not be used by + * fields or extension ranges in the same message. Reserved ranges may + * not overlap. + */ +export interface DescriptorProto_ReservedRangeSDKType { + start: number; + end: number; +} +export interface ExtensionRangeOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} +export interface ExtensionRangeOptionsProtoMsg { + typeUrl: '/google.protobuf.ExtensionRangeOptions'; + value: Uint8Array; +} +export interface ExtensionRangeOptionsAmino { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpreted_option?: UninterpretedOptionAmino[]; +} +export interface ExtensionRangeOptionsAminoMsg { + type: '/google.protobuf.ExtensionRangeOptions'; + value: ExtensionRangeOptionsAmino; +} +export interface ExtensionRangeOptionsSDKType { + uninterpreted_option: UninterpretedOptionSDKType[]; +} +/** Describes a field within a message. */ +export interface FieldDescriptorProto { + name: string; + number: number; + label: FieldDescriptorProto_Label; + /** + * If type_name is set, this need not be set. If both this and type_name + * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + */ + type: FieldDescriptorProto_Type; + /** + * For message and enum types, this is the name of the type. If the name + * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + * rules are used to find the type (i.e. first the nested types within this + * message are searched, then within the parent, on up to the root + * namespace). + */ + typeName: string; + /** + * For extensions, this is the name of the type being extended. It is + * resolved in the same manner as type_name. + */ + extendee: string; + /** + * For numeric types, contains the original text representation of the value. + * For booleans, "true" or "false". + * For strings, contains the default text contents (not escaped in any way). + * For bytes, contains the C escaped value. All bytes >= 128 are escaped. + * TODO(kenton): Base-64 encode? + */ + defaultValue: string; + /** + * If set, gives the index of a oneof in the containing type's oneof_decl + * list. This field is a member of that oneof. + */ + oneofIndex: number; + /** + * JSON name of this field. The value is set by protocol compiler. If the + * user has set a "json_name" option on this field, that option's value + * will be used. Otherwise, it's deduced from the field's name by converting + * it to camelCase. + */ + jsonName: string; + options?: FieldOptions; +} +export interface FieldDescriptorProtoProtoMsg { + typeUrl: '/google.protobuf.FieldDescriptorProto'; + value: Uint8Array; +} +/** Describes a field within a message. */ +export interface FieldDescriptorProtoAmino { + name?: string; + number?: number; + label?: FieldDescriptorProto_Label; + /** + * If type_name is set, this need not be set. If both this and type_name + * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + */ + type?: FieldDescriptorProto_Type; + /** + * For message and enum types, this is the name of the type. If the name + * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + * rules are used to find the type (i.e. first the nested types within this + * message are searched, then within the parent, on up to the root + * namespace). + */ + type_name?: string; + /** + * For extensions, this is the name of the type being extended. It is + * resolved in the same manner as type_name. 
+ */ + extendee?: string; + /** + * For numeric types, contains the original text representation of the value. + * For booleans, "true" or "false". + * For strings, contains the default text contents (not escaped in any way). + * For bytes, contains the C escaped value. All bytes >= 128 are escaped. + * TODO(kenton): Base-64 encode? + */ + default_value?: string; + /** + * If set, gives the index of a oneof in the containing type's oneof_decl + * list. This field is a member of that oneof. + */ + oneof_index?: number; + /** + * JSON name of this field. The value is set by protocol compiler. If the + * user has set a "json_name" option on this field, that option's value + * will be used. Otherwise, it's deduced from the field's name by converting + * it to camelCase. + */ + json_name?: string; + options?: FieldOptionsAmino; +} +export interface FieldDescriptorProtoAminoMsg { + type: '/google.protobuf.FieldDescriptorProto'; + value: FieldDescriptorProtoAmino; +} +/** Describes a field within a message. */ +export interface FieldDescriptorProtoSDKType { + name: string; + number: number; + label: FieldDescriptorProto_Label; + type: FieldDescriptorProto_Type; + type_name: string; + extendee: string; + default_value: string; + oneof_index: number; + json_name: string; + options?: FieldOptionsSDKType; +} +/** Describes a oneof. */ +export interface OneofDescriptorProto { + name: string; + options?: OneofOptions; +} +export interface OneofDescriptorProtoProtoMsg { + typeUrl: '/google.protobuf.OneofDescriptorProto'; + value: Uint8Array; +} +/** Describes a oneof. */ +export interface OneofDescriptorProtoAmino { + name?: string; + options?: OneofOptionsAmino; +} +export interface OneofDescriptorProtoAminoMsg { + type: '/google.protobuf.OneofDescriptorProto'; + value: OneofDescriptorProtoAmino; +} +/** Describes a oneof. */ +export interface OneofDescriptorProtoSDKType { + name: string; + options?: OneofOptionsSDKType; +} +/** Describes an enum type. */ +export interface EnumDescriptorProto { + name: string; + value: EnumValueDescriptorProto[]; + options?: EnumOptions; + /** + * Range of reserved numeric values. Reserved numeric values may not be used + * by enum values in the same enum declaration. Reserved ranges may not + * overlap. + */ + reservedRange: EnumDescriptorProto_EnumReservedRange[]; + /** + * Reserved enum value names, which may not be reused. A given name may only + * be reserved once. + */ + reservedName: string[]; +} +export interface EnumDescriptorProtoProtoMsg { + typeUrl: '/google.protobuf.EnumDescriptorProto'; + value: Uint8Array; +} +/** Describes an enum type. */ +export interface EnumDescriptorProtoAmino { + name?: string; + value?: EnumValueDescriptorProtoAmino[]; + options?: EnumOptionsAmino; + /** + * Range of reserved numeric values. Reserved numeric values may not be used + * by enum values in the same enum declaration. Reserved ranges may not + * overlap. + */ + reserved_range?: EnumDescriptorProto_EnumReservedRangeAmino[]; + /** + * Reserved enum value names, which may not be reused. A given name may only + * be reserved once. + */ + reserved_name?: string[]; +} +export interface EnumDescriptorProtoAminoMsg { + type: '/google.protobuf.EnumDescriptorProto'; + value: EnumDescriptorProtoAmino; +} +/** Describes an enum type. 
*/ +export interface EnumDescriptorProtoSDKType { + name: string; + value: EnumValueDescriptorProtoSDKType[]; + options?: EnumOptionsSDKType; + reserved_range: EnumDescriptorProto_EnumReservedRangeSDKType[]; + reserved_name: string[]; +} +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRange { + /** Inclusive. */ + start: number; + /** Inclusive. */ + end: number; +} +export interface EnumDescriptorProto_EnumReservedRangeProtoMsg { + typeUrl: '/google.protobuf.EnumReservedRange'; + value: Uint8Array; +} +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRangeAmino { + /** Inclusive. */ + start?: number; + /** Inclusive. */ + end?: number; +} +export interface EnumDescriptorProto_EnumReservedRangeAminoMsg { + type: '/google.protobuf.EnumReservedRange'; + value: EnumDescriptorProto_EnumReservedRangeAmino; +} +/** + * Range of reserved numeric values. Reserved values may not be used by + * entries in the same enum. Reserved ranges may not overlap. + * + * Note that this is distinct from DescriptorProto.ReservedRange in that it + * is inclusive such that it can appropriately represent the entire int32 + * domain. + */ +export interface EnumDescriptorProto_EnumReservedRangeSDKType { + start: number; + end: number; +} +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProto { + name: string; + number: number; + options?: EnumValueOptions; +} +export interface EnumValueDescriptorProtoProtoMsg { + typeUrl: '/google.protobuf.EnumValueDescriptorProto'; + value: Uint8Array; +} +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProtoAmino { + name?: string; + number?: number; + options?: EnumValueOptionsAmino; +} +export interface EnumValueDescriptorProtoAminoMsg { + type: '/google.protobuf.EnumValueDescriptorProto'; + value: EnumValueDescriptorProtoAmino; +} +/** Describes a value within an enum. */ +export interface EnumValueDescriptorProtoSDKType { + name: string; + number: number; + options?: EnumValueOptionsSDKType; +} +/** Describes a service. */ +export interface ServiceDescriptorProto { + name: string; + method: MethodDescriptorProto[]; + options?: ServiceOptions; +} +export interface ServiceDescriptorProtoProtoMsg { + typeUrl: '/google.protobuf.ServiceDescriptorProto'; + value: Uint8Array; +} +/** Describes a service. */ +export interface ServiceDescriptorProtoAmino { + name?: string; + method?: MethodDescriptorProtoAmino[]; + options?: ServiceOptionsAmino; +} +export interface ServiceDescriptorProtoAminoMsg { + type: '/google.protobuf.ServiceDescriptorProto'; + value: ServiceDescriptorProtoAmino; +} +/** Describes a service. */ +export interface ServiceDescriptorProtoSDKType { + name: string; + method: MethodDescriptorProtoSDKType[]; + options?: ServiceOptionsSDKType; +} +/** Describes a method of a service. */ +export interface MethodDescriptorProto { + name: string; + /** + * Input and output type names. 
These are resolved in the same way as + * FieldDescriptorProto.type_name, but must refer to a message type. + */ + inputType: string; + outputType: string; + options?: MethodOptions; + /** Identifies if client streams multiple client messages */ + clientStreaming: boolean; + /** Identifies if server streams multiple server messages */ + serverStreaming: boolean; +} +export interface MethodDescriptorProtoProtoMsg { + typeUrl: '/google.protobuf.MethodDescriptorProto'; + value: Uint8Array; +} +/** Describes a method of a service. */ +export interface MethodDescriptorProtoAmino { + name?: string; + /** + * Input and output type names. These are resolved in the same way as + * FieldDescriptorProto.type_name, but must refer to a message type. + */ + input_type?: string; + output_type?: string; + options?: MethodOptionsAmino; + /** Identifies if client streams multiple client messages */ + client_streaming?: boolean; + /** Identifies if server streams multiple server messages */ + server_streaming?: boolean; +} +export interface MethodDescriptorProtoAminoMsg { + type: '/google.protobuf.MethodDescriptorProto'; + value: MethodDescriptorProtoAmino; +} +/** Describes a method of a service. */ +export interface MethodDescriptorProtoSDKType { + name: string; + input_type: string; + output_type: string; + options?: MethodOptionsSDKType; + client_streaming: boolean; + server_streaming: boolean; +} +export interface FileOptions { + /** + * Sets the Java package where classes generated from this .proto will be + * placed. By default, the proto package is used, but this is often + * inappropriate because proto packages do not normally start with backwards + * domain names. + */ + javaPackage: string; + /** + * If set, all the classes from the .proto file are wrapped in a single + * outer class with the given name. This applies to both Proto1 + * (equivalent to the old "--one_java_file" option) and Proto2 (where + * a .proto always translates to a single class, but you may want to + * explicitly choose the class name). + */ + javaOuterClassname: string; + /** + * If set true, then the Java code generator will generate a separate .java + * file for each top-level message, enum, and service defined in the .proto + * file. Thus, these types will *not* be nested inside the outer class + * named by java_outer_classname. However, the outer class will still be + * generated to contain the file's getDescriptor() method as well as any + * top-level extensions defined in the file. + */ + javaMultipleFiles: boolean; + /** This option does nothing. */ + /** @deprecated */ + javaGenerateEqualsAndHash: boolean; + /** + * If set true, then the Java2 code generator will generate code that + * throws an exception whenever an attempt is made to assign a non-UTF-8 + * byte sequence to a string field. + * Message reflection will do the same. + * However, an extension field still accepts non-UTF-8 byte sequences. + * This option has no effect on when used with the lite runtime. + */ + javaStringCheckUtf8: boolean; + optimizeFor: FileOptions_OptimizeMode; + /** + * Sets the Go package where structs generated from this .proto will be + * placed. If omitted, the Go package will be derived from the following: + * - The basename of the package import path, if provided. + * - Otherwise, the package statement in the .proto file, if present. + * - Otherwise, the basename of the .proto file, without extension. + */ + goPackage: string; + /** + * Should generic services be generated in each language? 
"Generic" services + * are not specific to any particular RPC system. They are generated by the + * main code generators in each language (without additional plugins). + * Generic services were the only kind of service generation supported by + * early versions of google.protobuf. + * + * Generic services are now considered deprecated in favor of using plugins + * that generate code specific to your particular RPC system. Therefore, + * these default to false. Old code which depends on generic services should + * explicitly set them to true. + */ + ccGenericServices: boolean; + javaGenericServices: boolean; + pyGenericServices: boolean; + phpGenericServices: boolean; + /** + * Is this file deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for everything in the file, or it will be completely ignored; in the very + * least, this is a formalization for deprecating files. + */ + deprecated: boolean; + /** + * Enables the use of arenas for the proto messages in this file. This applies + * only to generated classes for C++. + */ + ccEnableArenas: boolean; + /** + * Sets the objective c class prefix which is prepended to all objective c + * generated classes from this .proto. There is no default. + */ + objcClassPrefix: string; + /** Namespace for generated classes; defaults to the package. */ + csharpNamespace: string; + /** + * By default Swift generators will take the proto package and CamelCase it + * replacing '.' with underscore and use that to prefix the types/symbols + * defined. When this options is provided, they will use this value instead + * to prefix the types/symbols defined. + */ + swiftPrefix: string; + /** + * Sets the php class prefix which is prepended to all php generated classes + * from this .proto. Default is empty. + */ + phpClassPrefix: string; + /** + * Use this option to change the namespace of php generated classes. Default + * is empty. When this option is empty, the package name will be used for + * determining the namespace. + */ + phpNamespace: string; + /** + * Use this option to change the namespace of php generated metadata classes. + * Default is empty. When this option is empty, the proto file name will be + * used for determining the namespace. + */ + phpMetadataNamespace: string; + /** + * Use this option to change the package of ruby generated classes. Default + * is empty. When this option is not set, the package name will be used for + * determining the ruby package. + */ + rubyPackage: string; + /** + * The parser stores options it doesn't recognize here. + * See the documentation for the "Options" section above. + */ + uninterpretedOption: UninterpretedOption[]; +} +export interface FileOptionsProtoMsg { + typeUrl: '/google.protobuf.FileOptions'; + value: Uint8Array; +} +export interface FileOptionsAmino { + /** + * Sets the Java package where classes generated from this .proto will be + * placed. By default, the proto package is used, but this is often + * inappropriate because proto packages do not normally start with backwards + * domain names. + */ + java_package?: string; + /** + * If set, all the classes from the .proto file are wrapped in a single + * outer class with the given name. This applies to both Proto1 + * (equivalent to the old "--one_java_file" option) and Proto2 (where + * a .proto always translates to a single class, but you may want to + * explicitly choose the class name). 
+ */ + java_outer_classname?: string; + /** + * If set true, then the Java code generator will generate a separate .java + * file for each top-level message, enum, and service defined in the .proto + * file. Thus, these types will *not* be nested inside the outer class + * named by java_outer_classname. However, the outer class will still be + * generated to contain the file's getDescriptor() method as well as any + * top-level extensions defined in the file. + */ + java_multiple_files?: boolean; + /** This option does nothing. */ + /** @deprecated */ + java_generate_equals_and_hash?: boolean; + /** + * If set true, then the Java2 code generator will generate code that + * throws an exception whenever an attempt is made to assign a non-UTF-8 + * byte sequence to a string field. + * Message reflection will do the same. + * However, an extension field still accepts non-UTF-8 byte sequences. + * This option has no effect on when used with the lite runtime. + */ + java_string_check_utf8?: boolean; + optimize_for?: FileOptions_OptimizeMode; + /** + * Sets the Go package where structs generated from this .proto will be + * placed. If omitted, the Go package will be derived from the following: + * - The basename of the package import path, if provided. + * - Otherwise, the package statement in the .proto file, if present. + * - Otherwise, the basename of the .proto file, without extension. + */ + go_package?: string; + /** + * Should generic services be generated in each language? "Generic" services + * are not specific to any particular RPC system. They are generated by the + * main code generators in each language (without additional plugins). + * Generic services were the only kind of service generation supported by + * early versions of google.protobuf. + * + * Generic services are now considered deprecated in favor of using plugins + * that generate code specific to your particular RPC system. Therefore, + * these default to false. Old code which depends on generic services should + * explicitly set them to true. + */ + cc_generic_services?: boolean; + java_generic_services?: boolean; + py_generic_services?: boolean; + php_generic_services?: boolean; + /** + * Is this file deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for everything in the file, or it will be completely ignored; in the very + * least, this is a formalization for deprecating files. + */ + deprecated?: boolean; + /** + * Enables the use of arenas for the proto messages in this file. This applies + * only to generated classes for C++. + */ + cc_enable_arenas?: boolean; + /** + * Sets the objective c class prefix which is prepended to all objective c + * generated classes from this .proto. There is no default. + */ + objc_class_prefix?: string; + /** Namespace for generated classes; defaults to the package. */ + csharp_namespace?: string; + /** + * By default Swift generators will take the proto package and CamelCase it + * replacing '.' with underscore and use that to prefix the types/symbols + * defined. When this options is provided, they will use this value instead + * to prefix the types/symbols defined. + */ + swift_prefix?: string; + /** + * Sets the php class prefix which is prepended to all php generated classes + * from this .proto. Default is empty. + */ + php_class_prefix?: string; + /** + * Use this option to change the namespace of php generated classes. Default + * is empty. When this option is empty, the package name will be used for + * determining the namespace. 
+ */ + php_namespace?: string; + /** + * Use this option to change the namespace of php generated metadata classes. + * Default is empty. When this option is empty, the proto file name will be + * used for determining the namespace. + */ + php_metadata_namespace?: string; + /** + * Use this option to change the package of ruby generated classes. Default + * is empty. When this option is not set, the package name will be used for + * determining the ruby package. + */ + ruby_package?: string; + /** + * The parser stores options it doesn't recognize here. + * See the documentation for the "Options" section above. + */ + uninterpreted_option?: UninterpretedOptionAmino[]; +} +export interface FileOptionsAminoMsg { + type: '/google.protobuf.FileOptions'; + value: FileOptionsAmino; +} +export interface FileOptionsSDKType { + java_package: string; + java_outer_classname: string; + java_multiple_files: boolean; + /** @deprecated */ + java_generate_equals_and_hash: boolean; + java_string_check_utf8: boolean; + optimize_for: FileOptions_OptimizeMode; + go_package: string; + cc_generic_services: boolean; + java_generic_services: boolean; + py_generic_services: boolean; + php_generic_services: boolean; + deprecated: boolean; + cc_enable_arenas: boolean; + objc_class_prefix: string; + csharp_namespace: string; + swift_prefix: string; + php_class_prefix: string; + php_namespace: string; + php_metadata_namespace: string; + ruby_package: string; + uninterpreted_option: UninterpretedOptionSDKType[]; +} +export interface MessageOptions { + /** + * Set true to use the old proto1 MessageSet wire format for extensions. + * This is provided for backwards-compatibility with the MessageSet wire + * format. You should not use this for any other reason: It's less + * efficient, has fewer features, and is more complicated. + * + * The message must be defined exactly as follows: + * message Foo { + * option message_set_wire_format = true; + * extensions 4 to max; + * } + * Note that the message cannot have any defined fields; MessageSets only + * have extensions. + * + * All extensions of your type must be singular messages; e.g. they cannot + * be int32s, enums, or repeated messages. + * + * Because this is an option, the above two restrictions are not enforced by + * the protocol compiler. + */ + messageSetWireFormat: boolean; + /** + * Disables the generation of the standard "descriptor()" accessor, which can + * conflict with a field of the same name. This is meant to make migration + * from proto1 easier; new code should avoid fields named "descriptor". + */ + noStandardDescriptorAccessor: boolean; + /** + * Is this message deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the message, or it will be completely ignored; in the very least, + * this is a formalization for deprecating messages. + */ + deprecated: boolean; + /** + * Whether the message is an automatically generated map entry type for the + * maps field. + * + * For maps fields: + * map map_field = 1; + * The parsed descriptor looks like: + * message MapFieldEntry { + * option map_entry = true; + * optional KeyType key = 1; + * optional ValueType value = 2; + * } + * repeated MapFieldEntry map_field = 1; + * + * Implementations may choose not to generate the map_entry=true message, but + * use a native map in the target language to hold the keys and values. + * The reflection APIs in such implementations still need to work as + * if the field is a repeated message field. 
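As the interfaces above illustrate, the codegen emits two parallel shapes per message: the primary interface uses required camelCase fields, while the *Amino interface uses optional snake_case fields. A small sketch of both shapes for FileOptions, restricted to fields defined above (values and the import specifier are placeholders):

import type {
  FileOptions,
  FileOptionsAmino,
} from '@agoric/cosmic-proto/google/protobuf/descriptor.js'; // hypothetical import specifier

// Primary shape: camelCase, fields required (so Partial<> is used here).
const options: Partial<FileOptions> = {
  goPackage: 'example.com/placeholder',
  ccEnableArenas: true,
};

// Amino shape: snake_case, every field optional.
const aminoOptions: FileOptionsAmino = {
  go_package: 'example.com/placeholder',
  cc_enable_arenas: true,
};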
+ * + * NOTE: Do not set the option in .proto files. Always use the maps syntax + * instead. The option should only be implicitly set by the proto compiler + * parser. + */ + mapEntry: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} +export interface MessageOptionsProtoMsg { + typeUrl: '/google.protobuf.MessageOptions'; + value: Uint8Array; +} +export interface MessageOptionsAmino { + /** + * Set true to use the old proto1 MessageSet wire format for extensions. + * This is provided for backwards-compatibility with the MessageSet wire + * format. You should not use this for any other reason: It's less + * efficient, has fewer features, and is more complicated. + * + * The message must be defined exactly as follows: + * message Foo { + * option message_set_wire_format = true; + * extensions 4 to max; + * } + * Note that the message cannot have any defined fields; MessageSets only + * have extensions. + * + * All extensions of your type must be singular messages; e.g. they cannot + * be int32s, enums, or repeated messages. + * + * Because this is an option, the above two restrictions are not enforced by + * the protocol compiler. + */ + message_set_wire_format?: boolean; + /** + * Disables the generation of the standard "descriptor()" accessor, which can + * conflict with a field of the same name. This is meant to make migration + * from proto1 easier; new code should avoid fields named "descriptor". + */ + no_standard_descriptor_accessor?: boolean; + /** + * Is this message deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the message, or it will be completely ignored; in the very least, + * this is a formalization for deprecating messages. + */ + deprecated?: boolean; + /** + * Whether the message is an automatically generated map entry type for the + * maps field. + * + * For maps fields: + * map map_field = 1; + * The parsed descriptor looks like: + * message MapFieldEntry { + * option map_entry = true; + * optional KeyType key = 1; + * optional ValueType value = 2; + * } + * repeated MapFieldEntry map_field = 1; + * + * Implementations may choose not to generate the map_entry=true message, but + * use a native map in the target language to hold the keys and values. + * The reflection APIs in such implementations still need to work as + * if the field is a repeated message field. + * + * NOTE: Do not set the option in .proto files. Always use the maps syntax + * instead. The option should only be implicitly set by the proto compiler + * parser. + */ + map_entry?: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpreted_option?: UninterpretedOptionAmino[]; +} +export interface MessageOptionsAminoMsg { + type: '/google.protobuf.MessageOptions'; + value: MessageOptionsAmino; +} +export interface MessageOptionsSDKType { + message_set_wire_format: boolean; + no_standard_descriptor_accessor: boolean; + deprecated: boolean; + map_entry: boolean; + uninterpreted_option: UninterpretedOptionSDKType[]; +} +export interface FieldOptions { + /** + * The ctype option instructs the C++ code generator to use a different + * representation of the field than it normally would. See the specific + * options below. This option is not yet implemented in the open source + * release -- sorry, we'll try to include it in a future version! 
+ */ + ctype: FieldOptions_CType; + /** + * The packed option can be enabled for repeated primitive fields to enable + * a more efficient representation on the wire. Rather than repeatedly + * writing the tag and type for each element, the entire array is encoded as + * a single length-delimited blob. In proto3, only explicit setting it to + * false will avoid using packed encoding. + */ + packed: boolean; + /** + * The jstype option determines the JavaScript type used for values of the + * field. The option is permitted only for 64 bit integral and fixed types + * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + * is represented as JavaScript string, which avoids loss of precision that + * can happen when a large value is converted to a floating point JavaScript. + * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + * use the JavaScript "number" type. The behavior of the default option + * JS_NORMAL is implementation dependent. + * + * This option is an enum to permit additional types to be added, e.g. + * goog.math.Integer. + */ + jstype: FieldOptions_JSType; + /** + * Should this field be parsed lazily? Lazy applies only to message-type + * fields. It means that when the outer message is initially parsed, the + * inner message's contents will not be parsed but instead stored in encoded + * form. The inner message will actually be parsed when it is first accessed. + * + * This is only a hint. Implementations are free to choose whether to use + * eager or lazy parsing regardless of the value of this option. However, + * setting this option true suggests that the protocol author believes that + * using lazy parsing on this field is worth the additional bookkeeping + * overhead typically needed to implement it. + * + * This option does not affect the public interface of any generated code; + * all method signatures remain the same. Furthermore, thread-safety of the + * interface is not affected by this option; const methods remain safe to + * call from multiple threads concurrently, while non-const methods continue + * to require exclusive access. + * + * + * Note that implementations may choose not to check required fields within + * a lazy sub-message. That is, calling IsInitialized() on the outer message + * may return true even if the inner message has missing required fields. + * This is necessary because otherwise the inner message would have to be + * parsed in order to perform the check, defeating the purpose of lazy + * parsing. An implementation which chooses not to check required fields + * must be consistent about it. That is, for any particular sub-message, the + * implementation must either *always* check its required fields, or *never* + * check its required fields, regardless of whether or not the message has + * been parsed. + */ + lazy: boolean; + /** + * Is this field deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for accessors, or it will be completely ignored; in the very least, this + * is a formalization for deprecating fields. + */ + deprecated: boolean; + /** For Google-internal migration only. Do not use. */ + weak: boolean; + /** The parser stores options it doesn't recognize here. See above. 
*/ + uninterpretedOption: UninterpretedOption[]; +} +export interface FieldOptionsProtoMsg { + typeUrl: '/google.protobuf.FieldOptions'; + value: Uint8Array; +} +export interface FieldOptionsAmino { + /** + * The ctype option instructs the C++ code generator to use a different + * representation of the field than it normally would. See the specific + * options below. This option is not yet implemented in the open source + * release -- sorry, we'll try to include it in a future version! + */ + ctype?: FieldOptions_CType; + /** + * The packed option can be enabled for repeated primitive fields to enable + * a more efficient representation on the wire. Rather than repeatedly + * writing the tag and type for each element, the entire array is encoded as + * a single length-delimited blob. In proto3, only explicit setting it to + * false will avoid using packed encoding. + */ + packed?: boolean; + /** + * The jstype option determines the JavaScript type used for values of the + * field. The option is permitted only for 64 bit integral and fixed types + * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + * is represented as JavaScript string, which avoids loss of precision that + * can happen when a large value is converted to a floating point JavaScript. + * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + * use the JavaScript "number" type. The behavior of the default option + * JS_NORMAL is implementation dependent. + * + * This option is an enum to permit additional types to be added, e.g. + * goog.math.Integer. + */ + jstype?: FieldOptions_JSType; + /** + * Should this field be parsed lazily? Lazy applies only to message-type + * fields. It means that when the outer message is initially parsed, the + * inner message's contents will not be parsed but instead stored in encoded + * form. The inner message will actually be parsed when it is first accessed. + * + * This is only a hint. Implementations are free to choose whether to use + * eager or lazy parsing regardless of the value of this option. However, + * setting this option true suggests that the protocol author believes that + * using lazy parsing on this field is worth the additional bookkeeping + * overhead typically needed to implement it. + * + * This option does not affect the public interface of any generated code; + * all method signatures remain the same. Furthermore, thread-safety of the + * interface is not affected by this option; const methods remain safe to + * call from multiple threads concurrently, while non-const methods continue + * to require exclusive access. + * + * + * Note that implementations may choose not to check required fields within + * a lazy sub-message. That is, calling IsInitialized() on the outer message + * may return true even if the inner message has missing required fields. + * This is necessary because otherwise the inner message would have to be + * parsed in order to perform the check, defeating the purpose of lazy + * parsing. An implementation which chooses not to check required fields + * must be consistent about it. That is, for any particular sub-message, the + * implementation must either *always* check its required fields, or *never* + * check its required fields, regardless of whether or not the message has + * been parsed. + */ + lazy?: boolean; + /** + * Is this field deprecated? 
+ * Depending on the target platform, this can emit Deprecated annotations + * for accessors, or it will be completely ignored; in the very least, this + * is a formalization for deprecating fields. + */ + deprecated?: boolean; + /** For Google-internal migration only. Do not use. */ + weak?: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpreted_option?: UninterpretedOptionAmino[]; +} +export interface FieldOptionsAminoMsg { + type: '/google.protobuf.FieldOptions'; + value: FieldOptionsAmino; +} +export interface FieldOptionsSDKType { + ctype: FieldOptions_CType; + packed: boolean; + jstype: FieldOptions_JSType; + lazy: boolean; + deprecated: boolean; + weak: boolean; + uninterpreted_option: UninterpretedOptionSDKType[]; +} +export interface OneofOptions { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} +export interface OneofOptionsProtoMsg { + typeUrl: '/google.protobuf.OneofOptions'; + value: Uint8Array; +} +export interface OneofOptionsAmino { + /** The parser stores options it doesn't recognize here. See above. */ + uninterpreted_option?: UninterpretedOptionAmino[]; +} +export interface OneofOptionsAminoMsg { + type: '/google.protobuf.OneofOptions'; + value: OneofOptionsAmino; +} +export interface OneofOptionsSDKType { + uninterpreted_option: UninterpretedOptionSDKType[]; +} +export interface EnumOptions { + /** + * Set this option to true to allow mapping different tag names to the same + * value. + */ + allowAlias: boolean; + /** + * Is this enum deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum, or it will be completely ignored; in the very least, this + * is a formalization for deprecating enums. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} +export interface EnumOptionsProtoMsg { + typeUrl: '/google.protobuf.EnumOptions'; + value: Uint8Array; +} +export interface EnumOptionsAmino { + /** + * Set this option to true to allow mapping different tag names to the same + * value. + */ + allow_alias?: boolean; + /** + * Is this enum deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum, or it will be completely ignored; in the very least, this + * is a formalization for deprecating enums. + */ + deprecated?: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpreted_option?: UninterpretedOptionAmino[]; +} +export interface EnumOptionsAminoMsg { + type: '/google.protobuf.EnumOptions'; + value: EnumOptionsAmino; +} +export interface EnumOptionsSDKType { + allow_alias: boolean; + deprecated: boolean; + uninterpreted_option: UninterpretedOptionSDKType[]; +} +export interface EnumValueOptions { + /** + * Is this enum value deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the enum value, or it will be completely ignored; in the very least, + * this is a formalization for deprecating enum values. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} +export interface EnumValueOptionsProtoMsg { + typeUrl: '/google.protobuf.EnumValueOptions'; + value: Uint8Array; +} +export interface EnumValueOptionsAmino { + /** + * Is this enum value deprecated? 
+ * Depending on the target platform, this can emit Deprecated annotations + * for the enum value, or it will be completely ignored; in the very least, + * this is a formalization for deprecating enum values. + */ + deprecated?: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpreted_option?: UninterpretedOptionAmino[]; +} +export interface EnumValueOptionsAminoMsg { + type: '/google.protobuf.EnumValueOptions'; + value: EnumValueOptionsAmino; +} +export interface EnumValueOptionsSDKType { + deprecated: boolean; + uninterpreted_option: UninterpretedOptionSDKType[]; +} +export interface ServiceOptions { + /** + * Is this service deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the service, or it will be completely ignored; in the very least, + * this is a formalization for deprecating services. + */ + deprecated: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} +export interface ServiceOptionsProtoMsg { + typeUrl: '/google.protobuf.ServiceOptions'; + value: Uint8Array; +} +export interface ServiceOptionsAmino { + /** + * Is this service deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the service, or it will be completely ignored; in the very least, + * this is a formalization for deprecating services. + */ + deprecated?: boolean; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpreted_option?: UninterpretedOptionAmino[]; +} +export interface ServiceOptionsAminoMsg { + type: '/google.protobuf.ServiceOptions'; + value: ServiceOptionsAmino; +} +export interface ServiceOptionsSDKType { + deprecated: boolean; + uninterpreted_option: UninterpretedOptionSDKType[]; +} +export interface MethodOptions { + /** + * Is this method deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the method, or it will be completely ignored; in the very least, + * this is a formalization for deprecating methods. + */ + deprecated: boolean; + idempotencyLevel: MethodOptions_IdempotencyLevel; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpretedOption: UninterpretedOption[]; +} +export interface MethodOptionsProtoMsg { + typeUrl: '/google.protobuf.MethodOptions'; + value: Uint8Array; +} +export interface MethodOptionsAmino { + /** + * Is this method deprecated? + * Depending on the target platform, this can emit Deprecated annotations + * for the method, or it will be completely ignored; in the very least, + * this is a formalization for deprecating methods. + */ + deprecated?: boolean; + idempotency_level?: MethodOptions_IdempotencyLevel; + /** The parser stores options it doesn't recognize here. See above. */ + uninterpreted_option?: UninterpretedOptionAmino[]; +} +export interface MethodOptionsAminoMsg { + type: '/google.protobuf.MethodOptions'; + value: MethodOptionsAmino; +} +export interface MethodOptionsSDKType { + deprecated: boolean; + idempotency_level: MethodOptions_IdempotencyLevel; + uninterpreted_option: UninterpretedOptionSDKType[]; +} +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. 
returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOption { + name: UninterpretedOption_NamePart[]; + /** + * The value of the uninterpreted option, in whatever type the tokenizer + * identified it as during parsing. Exactly one of these should be set. + */ + identifierValue: string; + positiveIntValue: bigint; + negativeIntValue: bigint; + doubleValue: number; + stringValue: Uint8Array; + aggregateValue: string; +} +export interface UninterpretedOptionProtoMsg { + typeUrl: '/google.protobuf.UninterpretedOption'; + value: Uint8Array; +} +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOptionAmino { + name?: UninterpretedOption_NamePartAmino[]; + /** + * The value of the uninterpreted option, in whatever type the tokenizer + * identified it as during parsing. Exactly one of these should be set. + */ + identifier_value?: string; + positive_int_value?: string; + negative_int_value?: string; + double_value?: number; + string_value?: string; + aggregate_value?: string; +} +export interface UninterpretedOptionAminoMsg { + type: '/google.protobuf.UninterpretedOption'; + value: UninterpretedOptionAmino; +} +/** + * A message representing a option the parser does not recognize. This only + * appears in options protos created by the compiler::Parser class. + * DescriptorPool resolves these when building Descriptor objects. Therefore, + * options protos in descriptor objects (e.g. returned by Descriptor::options(), + * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + * in them. + */ +export interface UninterpretedOptionSDKType { + name: UninterpretedOption_NamePartSDKType[]; + identifier_value: string; + positive_int_value: bigint; + negative_int_value: bigint; + double_value: number; + string_value: Uint8Array; + aggregate_value: string; +} +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + * "foo.(bar.baz).qux". + */ +export interface UninterpretedOption_NamePart { + namePart: string; + isExtension: boolean; +} +export interface UninterpretedOption_NamePartProtoMsg { + typeUrl: '/google.protobuf.NamePart'; + value: Uint8Array; +} +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + * "foo.(bar.baz).qux". + */ +export interface UninterpretedOption_NamePartAmino { + name_part?: string; + is_extension?: boolean; +} +export interface UninterpretedOption_NamePartAminoMsg { + type: '/google.protobuf.NamePart'; + value: UninterpretedOption_NamePartAmino; +} +/** + * The name of the uninterpreted option. Each string represents a segment in + * a dot-separated name. 
is_extension is true iff a segment represents an + * extension (denoted with parentheses in options specs in .proto files). + * E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + * "foo.(bar.baz).qux". + */ +export interface UninterpretedOption_NamePartSDKType { + name_part: string; + is_extension: boolean; +} +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ +export interface SourceCodeInfo { + /** + * A Location identifies a piece of source code in a .proto file which + * corresponds to a particular definition. This information is intended + * to be useful to IDEs, code indexers, documentation generators, and similar + * tools. + * + * For example, say we have a file like: + * message Foo { + * optional string foo = 1; + * } + * Let's look at just the field definition: + * optional string foo = 1; + * ^ ^^ ^^ ^ ^^^ + * a bc de f ghi + * We have the following locations: + * span path represents + * [a,i) [ 4, 0, 2, 0 ] The whole field definition. + * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + * + * Notes: + * - A location may refer to a repeated field itself (i.e. not to any + * particular index within it). This is used whenever a set of elements are + * logically enclosed in a single code segment. For example, an entire + * extend block (possibly containing multiple extension definitions) will + * have an outer location whose path refers to the "extensions" repeated + * field without an index. + * - Multiple locations may have the same path. This happens when a single + * logical declaration is spread out across multiple places. The most + * obvious example is the "extend" block again -- there may be multiple + * extend blocks in the same scope, each of which will have the same path. + * - A location's span is not always a subset of its parent's span. For + * example, the "extendee" of an extension declaration appears at the + * beginning of the "extend" block and is shared by all extensions within + * the block. + * - Just because a location's span is a subset of some other location's span + * does not mean that it is a descendant. For example, a "group" defines + * both a type and a field in a single declaration. Thus, the locations + * corresponding to the type and field and their components will overlap. + * - Code which tries to interpret locations should probably be designed to + * ignore those that it doesn't understand, as more types of locations could + * be recorded in the future. + */ + location: SourceCodeInfo_Location[]; +} +export interface SourceCodeInfoProtoMsg { + typeUrl: '/google.protobuf.SourceCodeInfo'; + value: Uint8Array; +} +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ +export interface SourceCodeInfoAmino { + /** + * A Location identifies a piece of source code in a .proto file which + * corresponds to a particular definition. This information is intended + * to be useful to IDEs, code indexers, documentation generators, and similar + * tools. 
+ * + * For example, say we have a file like: + * message Foo { + * optional string foo = 1; + * } + * Let's look at just the field definition: + * optional string foo = 1; + * ^ ^^ ^^ ^ ^^^ + * a bc de f ghi + * We have the following locations: + * span path represents + * [a,i) [ 4, 0, 2, 0 ] The whole field definition. + * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + * + * Notes: + * - A location may refer to a repeated field itself (i.e. not to any + * particular index within it). This is used whenever a set of elements are + * logically enclosed in a single code segment. For example, an entire + * extend block (possibly containing multiple extension definitions) will + * have an outer location whose path refers to the "extensions" repeated + * field without an index. + * - Multiple locations may have the same path. This happens when a single + * logical declaration is spread out across multiple places. The most + * obvious example is the "extend" block again -- there may be multiple + * extend blocks in the same scope, each of which will have the same path. + * - A location's span is not always a subset of its parent's span. For + * example, the "extendee" of an extension declaration appears at the + * beginning of the "extend" block and is shared by all extensions within + * the block. + * - Just because a location's span is a subset of some other location's span + * does not mean that it is a descendant. For example, a "group" defines + * both a type and a field in a single declaration. Thus, the locations + * corresponding to the type and field and their components will overlap. + * - Code which tries to interpret locations should probably be designed to + * ignore those that it doesn't understand, as more types of locations could + * be recorded in the future. + */ + location?: SourceCodeInfo_LocationAmino[]; +} +export interface SourceCodeInfoAminoMsg { + type: '/google.protobuf.SourceCodeInfo'; + value: SourceCodeInfoAmino; +} +/** + * Encapsulates information about the original source file from which a + * FileDescriptorProto was generated. + */ +export interface SourceCodeInfoSDKType { + location: SourceCodeInfo_LocationSDKType[]; +} +export interface SourceCodeInfo_Location { + /** + * Identifies which part of the FileDescriptorProto was defined at this + * location. + * + * Each element is a field number or an index. They form a path from + * the root FileDescriptorProto to the place where the definition. For + * example, this path: + * [ 4, 3, 2, 7, 1 ] + * refers to: + * file.message_type(3) // 4, 3 + * .field(7) // 2, 7 + * .name() // 1 + * This is because FileDescriptorProto.message_type has field number 4: + * repeated DescriptorProto message_type = 4; + * and DescriptorProto.field has field number 2: + * repeated FieldDescriptorProto field = 2; + * and FieldDescriptorProto.name has field number 1: + * optional string name = 1; + * + * Thus, the above path gives the location of a field name. If we removed + * the last element: + * [ 4, 3, 2, 7 ] + * this path refers to the whole field declaration (from the beginning + * of the label to the terminating semicolon). + */ + path: number[]; + /** + * Always has exactly three or four elements: start line, start column, + * end line (optional, otherwise assumed same as start line), end column. + * These are packed into a single field for efficiency. 
Note that line + * and column numbers are zero-based -- typically you will want to add + * 1 to each before displaying to a user. + */ + span: number[]; + /** + * If this SourceCodeInfo represents a complete declaration, these are any + * comments appearing before and after the declaration which appear to be + * attached to the declaration. + * + * A series of line comments appearing on consecutive lines, with no other + * tokens appearing on those lines, will be treated as a single comment. + * + * leading_detached_comments will keep paragraphs of comments that appear + * before (but not connected to) the current element. Each paragraph, + * separated by empty lines, will be one comment element in the repeated + * field. + * + * Only the comment content is provided; comment markers (e.g. //) are + * stripped out. For block comments, leading whitespace and an asterisk + * will be stripped from the beginning of each line other than the first. + * Newlines are included in the output. + * + * Examples: + * + * optional int32 foo = 1; // Comment attached to foo. + * // Comment attached to bar. + * optional int32 bar = 2; + * + * optional string baz = 3; + * // Comment attached to baz. + * // Another line attached to baz. + * + * // Comment attached to qux. + * // + * // Another line attached to qux. + * optional double qux = 4; + * + * // Detached comment for corge. This is not leading or trailing comments + * // to qux or corge because there are blank lines separating it from + * // both. + * + * // Detached comment for corge paragraph 2. + * + * optional string corge = 5; + * /* Block comment attached + * * to corge. Leading asterisks + * * will be removed. *\/ + * /* Block comment attached to + * * grault. *\/ + * optional int32 grault = 6; + * + * // ignored detached comments. + */ + leadingComments: string; + trailingComments: string; + leadingDetachedComments: string[]; +} +export interface SourceCodeInfo_LocationProtoMsg { + typeUrl: '/google.protobuf.Location'; + value: Uint8Array; +} +export interface SourceCodeInfo_LocationAmino { + /** + * Identifies which part of the FileDescriptorProto was defined at this + * location. + * + * Each element is a field number or an index. They form a path from + * the root FileDescriptorProto to the place where the definition. For + * example, this path: + * [ 4, 3, 2, 7, 1 ] + * refers to: + * file.message_type(3) // 4, 3 + * .field(7) // 2, 7 + * .name() // 1 + * This is because FileDescriptorProto.message_type has field number 4: + * repeated DescriptorProto message_type = 4; + * and DescriptorProto.field has field number 2: + * repeated FieldDescriptorProto field = 2; + * and FieldDescriptorProto.name has field number 1: + * optional string name = 1; + * + * Thus, the above path gives the location of a field name. If we removed + * the last element: + * [ 4, 3, 2, 7 ] + * this path refers to the whole field declaration (from the beginning + * of the label to the terminating semicolon). + */ + path?: number[]; + /** + * Always has exactly three or four elements: start line, start column, + * end line (optional, otherwise assumed same as start line), end column. + * These are packed into a single field for efficiency. Note that line + * and column numbers are zero-based -- typically you will want to add + * 1 to each before displaying to a user. 
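+   * (Worked example with made-up numbers: a span of [3, 4, 3, 22] describes a
+   * declaration starting at line 4, column 5 and ending at line 4, column 23
+   * once 1 is added to each value for display.)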
+ */ + span?: number[]; + /** + * If this SourceCodeInfo represents a complete declaration, these are any + * comments appearing before and after the declaration which appear to be + * attached to the declaration. + * + * A series of line comments appearing on consecutive lines, with no other + * tokens appearing on those lines, will be treated as a single comment. + * + * leading_detached_comments will keep paragraphs of comments that appear + * before (but not connected to) the current element. Each paragraph, + * separated by empty lines, will be one comment element in the repeated + * field. + * + * Only the comment content is provided; comment markers (e.g. //) are + * stripped out. For block comments, leading whitespace and an asterisk + * will be stripped from the beginning of each line other than the first. + * Newlines are included in the output. + * + * Examples: + * + * optional int32 foo = 1; // Comment attached to foo. + * // Comment attached to bar. + * optional int32 bar = 2; + * + * optional string baz = 3; + * // Comment attached to baz. + * // Another line attached to baz. + * + * // Comment attached to qux. + * // + * // Another line attached to qux. + * optional double qux = 4; + * + * // Detached comment for corge. This is not leading or trailing comments + * // to qux or corge because there are blank lines separating it from + * // both. + * + * // Detached comment for corge paragraph 2. + * + * optional string corge = 5; + * /* Block comment attached + * * to corge. Leading asterisks + * * will be removed. *\/ + * /* Block comment attached to + * * grault. *\/ + * optional int32 grault = 6; + * + * // ignored detached comments. + */ + leading_comments?: string; + trailing_comments?: string; + leading_detached_comments?: string[]; +} +export interface SourceCodeInfo_LocationAminoMsg { + type: '/google.protobuf.Location'; + value: SourceCodeInfo_LocationAmino; +} +export interface SourceCodeInfo_LocationSDKType { + path: number[]; + span: number[]; + leading_comments: string; + trailing_comments: string; + leading_detached_comments: string[]; +} +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ +export interface GeneratedCodeInfo { + /** + * An Annotation connects some span of text in generated code to an element + * of its generating .proto file. + */ + annotation: GeneratedCodeInfo_Annotation[]; +} +export interface GeneratedCodeInfoProtoMsg { + typeUrl: '/google.protobuf.GeneratedCodeInfo'; + value: Uint8Array; +} +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. + */ +export interface GeneratedCodeInfoAmino { + /** + * An Annotation connects some span of text in generated code to an element + * of its generating .proto file. + */ + annotation?: GeneratedCodeInfo_AnnotationAmino[]; +} +export interface GeneratedCodeInfoAminoMsg { + type: '/google.protobuf.GeneratedCodeInfo'; + value: GeneratedCodeInfoAmino; +} +/** + * Describes the relationship between generated code and its original source + * file. A GeneratedCodeInfo message is associated with only one generated + * source file, but may contain references to different source .proto files. 
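+ * (Illustrative example with made-up values: an annotation with path
+ * [4, 0, 2, 1], source_file "foo.proto", begin 120 and end 137 would cover
+ * generated-code bytes 120 through 136 -- end is one past the last byte --
+ * and point back to the second field of the first message in foo.proto.)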
+ */ +export interface GeneratedCodeInfoSDKType { + annotation: GeneratedCodeInfo_AnnotationSDKType[]; +} +export interface GeneratedCodeInfo_Annotation { + /** + * Identifies the element in the original source .proto file. This field + * is formatted the same as SourceCodeInfo.Location.path. + */ + path: number[]; + /** Identifies the filesystem path to the original source .proto. */ + sourceFile: string; + /** + * Identifies the starting offset in bytes in the generated code + * that relates to the identified object. + */ + begin: number; + /** + * Identifies the ending offset in bytes in the generated code that + * relates to the identified offset. The end offset should be one past + * the last relevant byte (so the length of the text = end - begin). + */ + end: number; +} +export interface GeneratedCodeInfo_AnnotationProtoMsg { + typeUrl: '/google.protobuf.Annotation'; + value: Uint8Array; +} +export interface GeneratedCodeInfo_AnnotationAmino { + /** + * Identifies the element in the original source .proto file. This field + * is formatted the same as SourceCodeInfo.Location.path. + */ + path?: number[]; + /** Identifies the filesystem path to the original source .proto. */ + source_file?: string; + /** + * Identifies the starting offset in bytes in the generated code + * that relates to the identified object. + */ + begin?: number; + /** + * Identifies the ending offset in bytes in the generated code that + * relates to the identified offset. The end offset should be one past + * the last relevant byte (so the length of the text = end - begin). + */ + end?: number; +} +export interface GeneratedCodeInfo_AnnotationAminoMsg { + type: '/google.protobuf.Annotation'; + value: GeneratedCodeInfo_AnnotationAmino; +} +export interface GeneratedCodeInfo_AnnotationSDKType { + path: number[]; + source_file: string; + begin: number; + end: number; +} +function createBaseFileDescriptorSet(): FileDescriptorSet { + return { + file: [], + }; +} +export const FileDescriptorSet = { + typeUrl: '/google.protobuf.FileDescriptorSet', + encode( + message: FileDescriptorSet, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + for (const v of message.file) { + FileDescriptorProto.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): FileDescriptorSet { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFileDescriptorSet(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.file.push( + FileDescriptorProto.decode(reader, reader.uint32()), + ); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): FileDescriptorSet { + return { + file: Array.isArray(object?.file) + ? object.file.map((e: any) => FileDescriptorProto.fromJSON(e)) + : [], + }; + }, + toJSON(message: FileDescriptorSet): unknown { + const obj: any = {}; + if (message.file) { + obj.file = message.file.map(e => + e ? 
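+      // Illustrative round-trip using only the codec defined here (values made up):
+      //   const set = FileDescriptorSet.fromPartial({ file: [] });
+      //   const bytes = FileDescriptorSet.encode(set).finish();
+      //   FileDescriptorSet.toJSON(FileDescriptorSet.decode(bytes)); // -> { file: [] }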
FileDescriptorProto.toJSON(e) : undefined, + ); + } else { + obj.file = []; + } + return obj; + }, + fromPartial(object: Partial): FileDescriptorSet { + const message = createBaseFileDescriptorSet(); + message.file = + object.file?.map(e => FileDescriptorProto.fromPartial(e)) || []; + return message; + }, + fromAmino(object: FileDescriptorSetAmino): FileDescriptorSet { + const message = createBaseFileDescriptorSet(); + message.file = + object.file?.map(e => FileDescriptorProto.fromAmino(e)) || []; + return message; + }, + toAmino(message: FileDescriptorSet): FileDescriptorSetAmino { + const obj: any = {}; + if (message.file) { + obj.file = message.file.map(e => + e ? FileDescriptorProto.toAmino(e) : undefined, + ); + } else { + obj.file = []; + } + return obj; + }, + fromAminoMsg(object: FileDescriptorSetAminoMsg): FileDescriptorSet { + return FileDescriptorSet.fromAmino(object.value); + }, + fromProtoMsg(message: FileDescriptorSetProtoMsg): FileDescriptorSet { + return FileDescriptorSet.decode(message.value); + }, + toProto(message: FileDescriptorSet): Uint8Array { + return FileDescriptorSet.encode(message).finish(); + }, + toProtoMsg(message: FileDescriptorSet): FileDescriptorSetProtoMsg { + return { + typeUrl: '/google.protobuf.FileDescriptorSet', + value: FileDescriptorSet.encode(message).finish(), + }; + }, +}; +function createBaseFileDescriptorProto(): FileDescriptorProto { + return { + name: '', + package: '', + dependency: [], + publicDependency: [], + weakDependency: [], + messageType: [], + enumType: [], + service: [], + extension: [], + options: undefined, + sourceCodeInfo: undefined, + syntax: '', + }; +} +export const FileDescriptorProto = { + typeUrl: '/google.protobuf.FileDescriptorProto', + encode( + message: FileDescriptorProto, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.name !== '') { + writer.uint32(10).string(message.name); + } + if (message.package !== '') { + writer.uint32(18).string(message.package); + } + for (const v of message.dependency) { + writer.uint32(26).string(v!); + } + writer.uint32(82).fork(); + for (const v of message.publicDependency) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(90).fork(); + for (const v of message.weakDependency) { + writer.int32(v); + } + writer.ldelim(); + for (const v of message.messageType) { + DescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(42).fork()).ldelim(); + } + for (const v of message.service) { + ServiceDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(58).fork()).ldelim(); + } + if (message.options !== undefined) { + FileOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + if (message.sourceCodeInfo !== undefined) { + SourceCodeInfo.encode( + message.sourceCodeInfo, + writer.uint32(74).fork(), + ).ldelim(); + } + if (message.syntax !== '') { + writer.uint32(98).string(message.syntax); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): FileDescriptorProto { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
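+    // Each tag read below encodes (field number << 3) | wire type; fields 10 and
+    // 11 (public_dependency, weak_dependency) also accept the packed,
+    // length-delimited form, which is why those cases unroll an inner loop.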
reader.len : reader.pos + length; + const message = createBaseFileDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.package = reader.string(); + break; + case 3: + message.dependency.push(reader.string()); + break; + case 10: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.publicDependency.push(reader.int32()); + } + } else { + message.publicDependency.push(reader.int32()); + } + break; + case 11: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.weakDependency.push(reader.int32()); + } + } else { + message.weakDependency.push(reader.int32()); + } + break; + case 4: + message.messageType.push( + DescriptorProto.decode(reader, reader.uint32()), + ); + break; + case 5: + message.enumType.push( + EnumDescriptorProto.decode(reader, reader.uint32()), + ); + break; + case 6: + message.service.push( + ServiceDescriptorProto.decode(reader, reader.uint32()), + ); + break; + case 7: + message.extension.push( + FieldDescriptorProto.decode(reader, reader.uint32()), + ); + break; + case 8: + message.options = FileOptions.decode(reader, reader.uint32()); + break; + case 9: + message.sourceCodeInfo = SourceCodeInfo.decode( + reader, + reader.uint32(), + ); + break; + case 12: + message.syntax = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): FileDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : '', + package: isSet(object.package) ? String(object.package) : '', + dependency: Array.isArray(object?.dependency) + ? object.dependency.map((e: any) => String(e)) + : [], + publicDependency: Array.isArray(object?.publicDependency) + ? object.publicDependency.map((e: any) => Number(e)) + : [], + weakDependency: Array.isArray(object?.weakDependency) + ? object.weakDependency.map((e: any) => Number(e)) + : [], + messageType: Array.isArray(object?.messageType) + ? object.messageType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) + ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) + : [], + service: Array.isArray(object?.service) + ? object.service.map((e: any) => ServiceDescriptorProto.fromJSON(e)) + : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) + ? FileOptions.fromJSON(object.options) + : undefined, + sourceCodeInfo: isSet(object.sourceCodeInfo) + ? SourceCodeInfo.fromJSON(object.sourceCodeInfo) + : undefined, + syntax: isSet(object.syntax) ? String(object.syntax) : '', + }; + }, + toJSON(message: FileDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.package !== undefined && (obj.package = message.package); + if (message.dependency) { + obj.dependency = message.dependency.map(e => e); + } else { + obj.dependency = []; + } + if (message.publicDependency) { + obj.publicDependency = message.publicDependency.map(e => Math.round(e)); + } else { + obj.publicDependency = []; + } + if (message.weakDependency) { + obj.weakDependency = message.weakDependency.map(e => Math.round(e)); + } else { + obj.weakDependency = []; + } + if (message.messageType) { + obj.messageType = message.messageType.map(e => + e ? 
DescriptorProto.toJSON(e) : undefined, + ); + } else { + obj.messageType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map(e => + e ? EnumDescriptorProto.toJSON(e) : undefined, + ); + } else { + obj.enumType = []; + } + if (message.service) { + obj.service = message.service.map(e => + e ? ServiceDescriptorProto.toJSON(e) : undefined, + ); + } else { + obj.service = []; + } + if (message.extension) { + obj.extension = message.extension.map(e => + e ? FieldDescriptorProto.toJSON(e) : undefined, + ); + } else { + obj.extension = []; + } + message.options !== undefined && + (obj.options = message.options + ? FileOptions.toJSON(message.options) + : undefined); + message.sourceCodeInfo !== undefined && + (obj.sourceCodeInfo = message.sourceCodeInfo + ? SourceCodeInfo.toJSON(message.sourceCodeInfo) + : undefined); + message.syntax !== undefined && (obj.syntax = message.syntax); + return obj; + }, + fromPartial(object: Partial): FileDescriptorProto { + const message = createBaseFileDescriptorProto(); + message.name = object.name ?? ''; + message.package = object.package ?? ''; + message.dependency = object.dependency?.map(e => e) || []; + message.publicDependency = object.publicDependency?.map(e => e) || []; + message.weakDependency = object.weakDependency?.map(e => e) || []; + message.messageType = + object.messageType?.map(e => DescriptorProto.fromPartial(e)) || []; + message.enumType = + object.enumType?.map(e => EnumDescriptorProto.fromPartial(e)) || []; + message.service = + object.service?.map(e => ServiceDescriptorProto.fromPartial(e)) || []; + message.extension = + object.extension?.map(e => FieldDescriptorProto.fromPartial(e)) || []; + message.options = + object.options !== undefined && object.options !== null + ? FileOptions.fromPartial(object.options) + : undefined; + message.sourceCodeInfo = + object.sourceCodeInfo !== undefined && object.sourceCodeInfo !== null + ? SourceCodeInfo.fromPartial(object.sourceCodeInfo) + : undefined; + message.syntax = object.syntax ?? 
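+    // fromPartial (above) defaults omitted scalars to '' and omitted repeated
+    // fields to [], while nested messages are converted recursively only when
+    // present.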
''; + return message; + }, + fromAmino(object: FileDescriptorProtoAmino): FileDescriptorProto { + const message = createBaseFileDescriptorProto(); + if (object.name !== undefined && object.name !== null) { + message.name = object.name; + } + if (object.package !== undefined && object.package !== null) { + message.package = object.package; + } + message.dependency = object.dependency?.map(e => e) || []; + message.publicDependency = object.public_dependency?.map(e => e) || []; + message.weakDependency = object.weak_dependency?.map(e => e) || []; + message.messageType = + object.message_type?.map(e => DescriptorProto.fromAmino(e)) || []; + message.enumType = + object.enum_type?.map(e => EnumDescriptorProto.fromAmino(e)) || []; + message.service = + object.service?.map(e => ServiceDescriptorProto.fromAmino(e)) || []; + message.extension = + object.extension?.map(e => FieldDescriptorProto.fromAmino(e)) || []; + if (object.options !== undefined && object.options !== null) { + message.options = FileOptions.fromAmino(object.options); + } + if ( + object.source_code_info !== undefined && + object.source_code_info !== null + ) { + message.sourceCodeInfo = SourceCodeInfo.fromAmino( + object.source_code_info, + ); + } + if (object.syntax !== undefined && object.syntax !== null) { + message.syntax = object.syntax; + } + return message; + }, + toAmino(message: FileDescriptorProto): FileDescriptorProtoAmino { + const obj: any = {}; + obj.name = message.name; + obj.package = message.package; + if (message.dependency) { + obj.dependency = message.dependency.map(e => e); + } else { + obj.dependency = []; + } + if (message.publicDependency) { + obj.public_dependency = message.publicDependency.map(e => e); + } else { + obj.public_dependency = []; + } + if (message.weakDependency) { + obj.weak_dependency = message.weakDependency.map(e => e); + } else { + obj.weak_dependency = []; + } + if (message.messageType) { + obj.message_type = message.messageType.map(e => + e ? DescriptorProto.toAmino(e) : undefined, + ); + } else { + obj.message_type = []; + } + if (message.enumType) { + obj.enum_type = message.enumType.map(e => + e ? EnumDescriptorProto.toAmino(e) : undefined, + ); + } else { + obj.enum_type = []; + } + if (message.service) { + obj.service = message.service.map(e => + e ? ServiceDescriptorProto.toAmino(e) : undefined, + ); + } else { + obj.service = []; + } + if (message.extension) { + obj.extension = message.extension.map(e => + e ? FieldDescriptorProto.toAmino(e) : undefined, + ); + } else { + obj.extension = []; + } + obj.options = message.options + ? FileOptions.toAmino(message.options) + : undefined; + obj.source_code_info = message.sourceCodeInfo + ? 
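+    // toAmino/fromAmino translate between the camelCase fields used at runtime
+    // (e.g. sourceCodeInfo) and the snake_case keys of the Amino JSON shape
+    // (source_code_info); the data itself is unchanged.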
SourceCodeInfo.toAmino(message.sourceCodeInfo) + : undefined; + obj.syntax = message.syntax; + return obj; + }, + fromAminoMsg(object: FileDescriptorProtoAminoMsg): FileDescriptorProto { + return FileDescriptorProto.fromAmino(object.value); + }, + fromProtoMsg(message: FileDescriptorProtoProtoMsg): FileDescriptorProto { + return FileDescriptorProto.decode(message.value); + }, + toProto(message: FileDescriptorProto): Uint8Array { + return FileDescriptorProto.encode(message).finish(); + }, + toProtoMsg(message: FileDescriptorProto): FileDescriptorProtoProtoMsg { + return { + typeUrl: '/google.protobuf.FileDescriptorProto', + value: FileDescriptorProto.encode(message).finish(), + }; + }, +}; +function createBaseDescriptorProto(): DescriptorProto { + return { + name: '', + field: [], + extension: [], + nestedType: [], + enumType: [], + extensionRange: [], + oneofDecl: [], + options: undefined, + reservedRange: [], + reservedName: [], + }; +} +export const DescriptorProto = { + typeUrl: '/google.protobuf.DescriptorProto', + encode( + message: DescriptorProto, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.name !== '') { + writer.uint32(10).string(message.name); + } + for (const v of message.field) { + FieldDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + for (const v of message.extension) { + FieldDescriptorProto.encode(v!, writer.uint32(50).fork()).ldelim(); + } + for (const v of message.nestedType) { + DescriptorProto.encode(v!, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.enumType) { + EnumDescriptorProto.encode(v!, writer.uint32(34).fork()).ldelim(); + } + for (const v of message.extensionRange) { + DescriptorProto_ExtensionRange.encode( + v!, + writer.uint32(42).fork(), + ).ldelim(); + } + for (const v of message.oneofDecl) { + OneofDescriptorProto.encode(v!, writer.uint32(66).fork()).ldelim(); + } + if (message.options !== undefined) { + MessageOptions.encode(message.options, writer.uint32(58).fork()).ldelim(); + } + for (const v of message.reservedRange) { + DescriptorProto_ReservedRange.encode( + v!, + writer.uint32(74).fork(), + ).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(82).string(v!); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): DescriptorProto { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
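+    // The cases below follow DescriptorProto's declaration order, so the wire
+    // field numbers (e.g. extension = 6, nested_type = 3) are not visited in
+    // numeric order; decoding is unaffected.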
reader.len : reader.pos + length; + const message = createBaseDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.field.push( + FieldDescriptorProto.decode(reader, reader.uint32()), + ); + break; + case 6: + message.extension.push( + FieldDescriptorProto.decode(reader, reader.uint32()), + ); + break; + case 3: + message.nestedType.push( + DescriptorProto.decode(reader, reader.uint32()), + ); + break; + case 4: + message.enumType.push( + EnumDescriptorProto.decode(reader, reader.uint32()), + ); + break; + case 5: + message.extensionRange.push( + DescriptorProto_ExtensionRange.decode(reader, reader.uint32()), + ); + break; + case 8: + message.oneofDecl.push( + OneofDescriptorProto.decode(reader, reader.uint32()), + ); + break; + case 7: + message.options = MessageOptions.decode(reader, reader.uint32()); + break; + case 9: + message.reservedRange.push( + DescriptorProto_ReservedRange.decode(reader, reader.uint32()), + ); + break; + case 10: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): DescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : '', + field: Array.isArray(object?.field) + ? object.field.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + extension: Array.isArray(object?.extension) + ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) + : [], + nestedType: Array.isArray(object?.nestedType) + ? object.nestedType.map((e: any) => DescriptorProto.fromJSON(e)) + : [], + enumType: Array.isArray(object?.enumType) + ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) + : [], + extensionRange: Array.isArray(object?.extensionRange) + ? object.extensionRange.map((e: any) => + DescriptorProto_ExtensionRange.fromJSON(e), + ) + : [], + oneofDecl: Array.isArray(object?.oneofDecl) + ? object.oneofDecl.map((e: any) => OneofDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) + ? MessageOptions.fromJSON(object.options) + : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => + DescriptorProto_ReservedRange.fromJSON(e), + ) + : [], + reservedName: Array.isArray(object?.reservedName) + ? object.reservedName.map((e: any) => String(e)) + : [], + }; + }, + toJSON(message: DescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.field) { + obj.field = message.field.map(e => + e ? FieldDescriptorProto.toJSON(e) : undefined, + ); + } else { + obj.field = []; + } + if (message.extension) { + obj.extension = message.extension.map(e => + e ? FieldDescriptorProto.toJSON(e) : undefined, + ); + } else { + obj.extension = []; + } + if (message.nestedType) { + obj.nestedType = message.nestedType.map(e => + e ? DescriptorProto.toJSON(e) : undefined, + ); + } else { + obj.nestedType = []; + } + if (message.enumType) { + obj.enumType = message.enumType.map(e => + e ? EnumDescriptorProto.toJSON(e) : undefined, + ); + } else { + obj.enumType = []; + } + if (message.extensionRange) { + obj.extensionRange = message.extensionRange.map(e => + e ? DescriptorProto_ExtensionRange.toJSON(e) : undefined, + ); + } else { + obj.extensionRange = []; + } + if (message.oneofDecl) { + obj.oneofDecl = message.oneofDecl.map(e => + e ? 
OneofDescriptorProto.toJSON(e) : undefined, + ); + } else { + obj.oneofDecl = []; + } + message.options !== undefined && + (obj.options = message.options + ? MessageOptions.toJSON(message.options) + : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map(e => + e ? DescriptorProto_ReservedRange.toJSON(e) : undefined, + ); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map(e => e); + } else { + obj.reservedName = []; + } + return obj; + }, + fromPartial(object: Partial): DescriptorProto { + const message = createBaseDescriptorProto(); + message.name = object.name ?? ''; + message.field = + object.field?.map(e => FieldDescriptorProto.fromPartial(e)) || []; + message.extension = + object.extension?.map(e => FieldDescriptorProto.fromPartial(e)) || []; + message.nestedType = + object.nestedType?.map(e => DescriptorProto.fromPartial(e)) || []; + message.enumType = + object.enumType?.map(e => EnumDescriptorProto.fromPartial(e)) || []; + message.extensionRange = + object.extensionRange?.map(e => + DescriptorProto_ExtensionRange.fromPartial(e), + ) || []; + message.oneofDecl = + object.oneofDecl?.map(e => OneofDescriptorProto.fromPartial(e)) || []; + message.options = + object.options !== undefined && object.options !== null + ? MessageOptions.fromPartial(object.options) + : undefined; + message.reservedRange = + object.reservedRange?.map(e => + DescriptorProto_ReservedRange.fromPartial(e), + ) || []; + message.reservedName = object.reservedName?.map(e => e) || []; + return message; + }, + fromAmino(object: DescriptorProtoAmino): DescriptorProto { + const message = createBaseDescriptorProto(); + if (object.name !== undefined && object.name !== null) { + message.name = object.name; + } + message.field = + object.field?.map(e => FieldDescriptorProto.fromAmino(e)) || []; + message.extension = + object.extension?.map(e => FieldDescriptorProto.fromAmino(e)) || []; + message.nestedType = + object.nested_type?.map(e => DescriptorProto.fromAmino(e)) || []; + message.enumType = + object.enum_type?.map(e => EnumDescriptorProto.fromAmino(e)) || []; + message.extensionRange = + object.extension_range?.map(e => + DescriptorProto_ExtensionRange.fromAmino(e), + ) || []; + message.oneofDecl = + object.oneof_decl?.map(e => OneofDescriptorProto.fromAmino(e)) || []; + if (object.options !== undefined && object.options !== null) { + message.options = MessageOptions.fromAmino(object.options); + } + message.reservedRange = + object.reserved_range?.map(e => + DescriptorProto_ReservedRange.fromAmino(e), + ) || []; + message.reservedName = object.reserved_name?.map(e => e) || []; + return message; + }, + toAmino(message: DescriptorProto): DescriptorProtoAmino { + const obj: any = {}; + obj.name = message.name; + if (message.field) { + obj.field = message.field.map(e => + e ? FieldDescriptorProto.toAmino(e) : undefined, + ); + } else { + obj.field = []; + } + if (message.extension) { + obj.extension = message.extension.map(e => + e ? FieldDescriptorProto.toAmino(e) : undefined, + ); + } else { + obj.extension = []; + } + if (message.nestedType) { + obj.nested_type = message.nestedType.map(e => + e ? DescriptorProto.toAmino(e) : undefined, + ); + } else { + obj.nested_type = []; + } + if (message.enumType) { + obj.enum_type = message.enumType.map(e => + e ? 
EnumDescriptorProto.toAmino(e) : undefined, + ); + } else { + obj.enum_type = []; + } + if (message.extensionRange) { + obj.extension_range = message.extensionRange.map(e => + e ? DescriptorProto_ExtensionRange.toAmino(e) : undefined, + ); + } else { + obj.extension_range = []; + } + if (message.oneofDecl) { + obj.oneof_decl = message.oneofDecl.map(e => + e ? OneofDescriptorProto.toAmino(e) : undefined, + ); + } else { + obj.oneof_decl = []; + } + obj.options = message.options + ? MessageOptions.toAmino(message.options) + : undefined; + if (message.reservedRange) { + obj.reserved_range = message.reservedRange.map(e => + e ? DescriptorProto_ReservedRange.toAmino(e) : undefined, + ); + } else { + obj.reserved_range = []; + } + if (message.reservedName) { + obj.reserved_name = message.reservedName.map(e => e); + } else { + obj.reserved_name = []; + } + return obj; + }, + fromAminoMsg(object: DescriptorProtoAminoMsg): DescriptorProto { + return DescriptorProto.fromAmino(object.value); + }, + fromProtoMsg(message: DescriptorProtoProtoMsg): DescriptorProto { + return DescriptorProto.decode(message.value); + }, + toProto(message: DescriptorProto): Uint8Array { + return DescriptorProto.encode(message).finish(); + }, + toProtoMsg(message: DescriptorProto): DescriptorProtoProtoMsg { + return { + typeUrl: '/google.protobuf.DescriptorProto', + value: DescriptorProto.encode(message).finish(), + }; + }, +}; +function createBaseDescriptorProto_ExtensionRange(): DescriptorProto_ExtensionRange { + return { + start: 0, + end: 0, + options: undefined, + }; +} +export const DescriptorProto_ExtensionRange = { + typeUrl: '/google.protobuf.ExtensionRange', + encode( + message: DescriptorProto_ExtensionRange, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + if (message.options !== undefined) { + ExtensionRangeOptions.encode( + message.options, + writer.uint32(26).fork(), + ).ldelim(); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): DescriptorProto_ExtensionRange { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseDescriptorProto_ExtensionRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + case 3: + message.options = ExtensionRangeOptions.decode( + reader, + reader.uint32(), + ); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): DescriptorProto_ExtensionRange { + return { + start: isSet(object.start) ? Number(object.start) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + options: isSet(object.options) + ? ExtensionRangeOptions.fromJSON(object.options) + : undefined, + }; + }, + toJSON(message: DescriptorProto_ExtensionRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + message.options !== undefined && + (obj.options = message.options + ? 
ExtensionRangeOptions.toJSON(message.options)
+        : undefined);
+    return obj;
+  },
+  fromPartial(
+    object: Partial<DescriptorProto_ExtensionRange>,
+  ): DescriptorProto_ExtensionRange {
+    const message = createBaseDescriptorProto_ExtensionRange();
+    message.start = object.start ?? 0;
+    message.end = object.end ?? 0;
+    message.options =
+      object.options !== undefined && object.options !== null
+        ? ExtensionRangeOptions.fromPartial(object.options)
+        : undefined;
+    return message;
+  },
+  fromAmino(
+    object: DescriptorProto_ExtensionRangeAmino,
+  ): DescriptorProto_ExtensionRange {
+    const message = createBaseDescriptorProto_ExtensionRange();
+    if (object.start !== undefined && object.start !== null) {
+      message.start = object.start;
+    }
+    if (object.end !== undefined && object.end !== null) {
+      message.end = object.end;
+    }
+    if (object.options !== undefined && object.options !== null) {
+      message.options = ExtensionRangeOptions.fromAmino(object.options);
+    }
+    return message;
+  },
+  toAmino(
+    message: DescriptorProto_ExtensionRange,
+  ): DescriptorProto_ExtensionRangeAmino {
+    const obj: any = {};
+    obj.start = message.start;
+    obj.end = message.end;
+    obj.options = message.options
+      ? ExtensionRangeOptions.toAmino(message.options)
+      : undefined;
+    return obj;
+  },
+  fromAminoMsg(
+    object: DescriptorProto_ExtensionRangeAminoMsg,
+  ): DescriptorProto_ExtensionRange {
+    return DescriptorProto_ExtensionRange.fromAmino(object.value);
+  },
+  fromProtoMsg(
+    message: DescriptorProto_ExtensionRangeProtoMsg,
+  ): DescriptorProto_ExtensionRange {
+    return DescriptorProto_ExtensionRange.decode(message.value);
+  },
+  toProto(message: DescriptorProto_ExtensionRange): Uint8Array {
+    return DescriptorProto_ExtensionRange.encode(message).finish();
+  },
+  toProtoMsg(
+    message: DescriptorProto_ExtensionRange,
+  ): DescriptorProto_ExtensionRangeProtoMsg {
+    return {
+      typeUrl: '/google.protobuf.ExtensionRange',
+      value: DescriptorProto_ExtensionRange.encode(message).finish(),
+    };
+  },
+};
+function createBaseDescriptorProto_ReservedRange(): DescriptorProto_ReservedRange {
+  return {
+    start: 0,
+    end: 0,
+  };
+}
+export const DescriptorProto_ReservedRange = {
+  typeUrl: '/google.protobuf.ReservedRange',
+  encode(
+    message: DescriptorProto_ReservedRange,
+    writer: BinaryWriter = BinaryWriter.create(),
+  ): BinaryWriter {
+    if (message.start !== 0) {
+      writer.uint32(8).int32(message.start);
+    }
+    if (message.end !== 0) {
+      writer.uint32(16).int32(message.end);
+    }
+    return writer;
+  },
+  decode(
+    input: BinaryReader | Uint8Array,
+    length?: number,
+  ): DescriptorProto_ReservedRange {
+    const reader =
+      input instanceof BinaryReader ? input : new BinaryReader(input);
+    let end = length === undefined ? reader.len : reader.pos + length;
+    const message = createBaseDescriptorProto_ReservedRange();
+    while (reader.pos < end) {
+      const tag = reader.uint32();
+      switch (tag >>> 3) {
+        case 1:
+          message.start = reader.int32();
+          break;
+        case 2:
+          message.end = reader.int32();
+          break;
+        default:
+          reader.skipType(tag & 7);
+          break;
+      }
+    }
+    return message;
+  },
+  fromJSON(object: any): DescriptorProto_ReservedRange {
+    return {
+      start: isSet(object.start) ? Number(object.start) : 0,
+      end: isSet(object.end) ? Number(object.end) : 0,
+    };
+  },
+  toJSON(message: DescriptorProto_ReservedRange): unknown {
+    const obj: any = {};
+    message.start !== undefined && (obj.start = Math.round(message.start));
+    message.end !== undefined && (obj.end = Math.round(message.end));
+    return obj;
+  },
+  fromPartial(
+    object: Partial<DescriptorProto_ReservedRange>,
+  ): DescriptorProto_ReservedRange {
+    const message = createBaseDescriptorProto_ReservedRange();
+    message.start = object.start ?? 0;
+    message.end = object.end ?? 0;
+    return message;
+  },
+  fromAmino(
+    object: DescriptorProto_ReservedRangeAmino,
+  ): DescriptorProto_ReservedRange {
+    const message = createBaseDescriptorProto_ReservedRange();
+    if (object.start !== undefined && object.start !== null) {
+      message.start = object.start;
+    }
+    if (object.end !== undefined && object.end !== null) {
+      message.end = object.end;
+    }
+    return message;
+  },
+  toAmino(
+    message: DescriptorProto_ReservedRange,
+  ): DescriptorProto_ReservedRangeAmino {
+    const obj: any = {};
+    obj.start = message.start;
+    obj.end = message.end;
+    return obj;
+  },
+  fromAminoMsg(
+    object: DescriptorProto_ReservedRangeAminoMsg,
+  ): DescriptorProto_ReservedRange {
+    return DescriptorProto_ReservedRange.fromAmino(object.value);
+  },
+  fromProtoMsg(
+    message: DescriptorProto_ReservedRangeProtoMsg,
+  ): DescriptorProto_ReservedRange {
+    return DescriptorProto_ReservedRange.decode(message.value);
+  },
+  toProto(message: DescriptorProto_ReservedRange): Uint8Array {
+    return DescriptorProto_ReservedRange.encode(message).finish();
+  },
+  toProtoMsg(
+    message: DescriptorProto_ReservedRange,
+  ): DescriptorProto_ReservedRangeProtoMsg {
+    return {
+      typeUrl: '/google.protobuf.ReservedRange',
+      value: DescriptorProto_ReservedRange.encode(message).finish(),
+    };
+  },
+};
+function createBaseExtensionRangeOptions(): ExtensionRangeOptions {
+  return {
+    uninterpretedOption: [],
+  };
+}
+export const ExtensionRangeOptions = {
+  typeUrl: '/google.protobuf.ExtensionRangeOptions',
+  encode(
+    message: ExtensionRangeOptions,
+    writer: BinaryWriter = BinaryWriter.create(),
+  ): BinaryWriter {
+    for (const v of message.uninterpretedOption) {
+      UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim();
+    }
+    return writer;
+  },
+  decode(
+    input: BinaryReader | Uint8Array,
+    length?: number,
+  ): ExtensionRangeOptions {
+    const reader =
+      input instanceof BinaryReader ? input : new BinaryReader(input);
+    let end = length === undefined ? reader.len : reader.pos + length;
+    const message = createBaseExtensionRangeOptions();
+    while (reader.pos < end) {
+      const tag = reader.uint32();
+      switch (tag >>> 3) {
+        case 999:
+          message.uninterpretedOption.push(
+            UninterpretedOption.decode(reader, reader.uint32()),
+          );
+          break;
+        default:
+          reader.skipType(tag & 7);
+          break;
+      }
+    }
+    return message;
+  },
+  fromJSON(object: any): ExtensionRangeOptions {
+    return {
+      uninterpretedOption: Array.isArray(object?.uninterpretedOption)
+        ? object.uninterpretedOption.map((e: any) =>
+            UninterpretedOption.fromJSON(e),
+          )
+        : [],
+    };
+  },
+  toJSON(message: ExtensionRangeOptions): unknown {
+    const obj: any = {};
+    if (message.uninterpretedOption) {
+      obj.uninterpretedOption = message.uninterpretedOption.map(e =>
+        e ?
UninterpretedOption.toJSON(e) : undefined, + ); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + fromPartial(object: Partial): ExtensionRangeOptions { + const message = createBaseExtensionRangeOptions(); + message.uninterpretedOption = + object.uninterpretedOption?.map(e => + UninterpretedOption.fromPartial(e), + ) || []; + return message; + }, + fromAmino(object: ExtensionRangeOptionsAmino): ExtensionRangeOptions { + const message = createBaseExtensionRangeOptions(); + message.uninterpretedOption = + object.uninterpreted_option?.map(e => UninterpretedOption.fromAmino(e)) || + []; + return message; + }, + toAmino(message: ExtensionRangeOptions): ExtensionRangeOptionsAmino { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpreted_option = message.uninterpretedOption.map(e => + e ? UninterpretedOption.toAmino(e) : undefined, + ); + } else { + obj.uninterpreted_option = []; + } + return obj; + }, + fromAminoMsg(object: ExtensionRangeOptionsAminoMsg): ExtensionRangeOptions { + return ExtensionRangeOptions.fromAmino(object.value); + }, + fromProtoMsg(message: ExtensionRangeOptionsProtoMsg): ExtensionRangeOptions { + return ExtensionRangeOptions.decode(message.value); + }, + toProto(message: ExtensionRangeOptions): Uint8Array { + return ExtensionRangeOptions.encode(message).finish(); + }, + toProtoMsg(message: ExtensionRangeOptions): ExtensionRangeOptionsProtoMsg { + return { + typeUrl: '/google.protobuf.ExtensionRangeOptions', + value: ExtensionRangeOptions.encode(message).finish(), + }; + }, +}; +function createBaseFieldDescriptorProto(): FieldDescriptorProto { + return { + name: '', + number: 0, + label: 1, + type: 1, + typeName: '', + extendee: '', + defaultValue: '', + oneofIndex: 0, + jsonName: '', + options: undefined, + }; +} +export const FieldDescriptorProto = { + typeUrl: '/google.protobuf.FieldDescriptorProto', + encode( + message: FieldDescriptorProto, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.name !== '') { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(24).int32(message.number); + } + if (message.label !== 1) { + writer.uint32(32).int32(message.label); + } + if (message.type !== 1) { + writer.uint32(40).int32(message.type); + } + if (message.typeName !== '') { + writer.uint32(50).string(message.typeName); + } + if (message.extendee !== '') { + writer.uint32(18).string(message.extendee); + } + if (message.defaultValue !== '') { + writer.uint32(58).string(message.defaultValue); + } + if (message.oneofIndex !== 0) { + writer.uint32(72).int32(message.oneofIndex); + } + if (message.jsonName !== '') { + writer.uint32(82).string(message.jsonName); + } + if (message.options !== undefined) { + FieldOptions.encode(message.options, writer.uint32(66).fork()).ldelim(); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): FieldDescriptorProto { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFieldDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 3: + message.number = reader.int32(); + break; + case 4: + message.label = reader.int32() as any; + break; + case 5: + message.type = reader.int32() as any; + break; + case 6: + message.typeName = reader.string(); + break; + case 2: + message.extendee = reader.string(); + break; + case 7: + message.defaultValue = reader.string(); + break; + case 9: + message.oneofIndex = reader.int32(); + break; + case 10: + message.jsonName = reader.string(); + break; + case 8: + message.options = FieldOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): FieldDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : '', + number: isSet(object.number) ? Number(object.number) : 0, + label: isSet(object.label) + ? fieldDescriptorProto_LabelFromJSON(object.label) + : -1, + type: isSet(object.type) + ? fieldDescriptorProto_TypeFromJSON(object.type) + : -1, + typeName: isSet(object.typeName) ? String(object.typeName) : '', + extendee: isSet(object.extendee) ? String(object.extendee) : '', + defaultValue: isSet(object.defaultValue) + ? String(object.defaultValue) + : '', + oneofIndex: isSet(object.oneofIndex) ? Number(object.oneofIndex) : 0, + jsonName: isSet(object.jsonName) ? String(object.jsonName) : '', + options: isSet(object.options) + ? FieldOptions.fromJSON(object.options) + : undefined, + }; + }, + toJSON(message: FieldDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.label !== undefined && + (obj.label = fieldDescriptorProto_LabelToJSON(message.label)); + message.type !== undefined && + (obj.type = fieldDescriptorProto_TypeToJSON(message.type)); + message.typeName !== undefined && (obj.typeName = message.typeName); + message.extendee !== undefined && (obj.extendee = message.extendee); + message.defaultValue !== undefined && + (obj.defaultValue = message.defaultValue); + message.oneofIndex !== undefined && + (obj.oneofIndex = Math.round(message.oneofIndex)); + message.jsonName !== undefined && (obj.jsonName = message.jsonName); + message.options !== undefined && + (obj.options = message.options + ? FieldOptions.toJSON(message.options) + : undefined); + return obj; + }, + fromPartial(object: Partial): FieldDescriptorProto { + const message = createBaseFieldDescriptorProto(); + message.name = object.name ?? ''; + message.number = object.number ?? 0; + message.label = object.label ?? 1; + message.type = object.type ?? 1; + message.typeName = object.typeName ?? ''; + message.extendee = object.extendee ?? ''; + message.defaultValue = object.defaultValue ?? ''; + message.oneofIndex = object.oneofIndex ?? 0; + message.jsonName = object.jsonName ?? ''; + message.options = + object.options !== undefined && object.options !== null + ? 
FieldOptions.fromPartial(object.options) + : undefined; + return message; + }, + fromAmino(object: FieldDescriptorProtoAmino): FieldDescriptorProto { + const message = createBaseFieldDescriptorProto(); + if (object.name !== undefined && object.name !== null) { + message.name = object.name; + } + if (object.number !== undefined && object.number !== null) { + message.number = object.number; + } + if (object.label !== undefined && object.label !== null) { + message.label = fieldDescriptorProto_LabelFromJSON(object.label); + } + if (object.type !== undefined && object.type !== null) { + message.type = fieldDescriptorProto_TypeFromJSON(object.type); + } + if (object.type_name !== undefined && object.type_name !== null) { + message.typeName = object.type_name; + } + if (object.extendee !== undefined && object.extendee !== null) { + message.extendee = object.extendee; + } + if (object.default_value !== undefined && object.default_value !== null) { + message.defaultValue = object.default_value; + } + if (object.oneof_index !== undefined && object.oneof_index !== null) { + message.oneofIndex = object.oneof_index; + } + if (object.json_name !== undefined && object.json_name !== null) { + message.jsonName = object.json_name; + } + if (object.options !== undefined && object.options !== null) { + message.options = FieldOptions.fromAmino(object.options); + } + return message; + }, + toAmino(message: FieldDescriptorProto): FieldDescriptorProtoAmino { + const obj: any = {}; + obj.name = message.name; + obj.number = message.number; + obj.label = message.label; + obj.type = message.type; + obj.type_name = message.typeName; + obj.extendee = message.extendee; + obj.default_value = message.defaultValue; + obj.oneof_index = message.oneofIndex; + obj.json_name = message.jsonName; + obj.options = message.options + ? FieldOptions.toAmino(message.options) + : undefined; + return obj; + }, + fromAminoMsg(object: FieldDescriptorProtoAminoMsg): FieldDescriptorProto { + return FieldDescriptorProto.fromAmino(object.value); + }, + fromProtoMsg(message: FieldDescriptorProtoProtoMsg): FieldDescriptorProto { + return FieldDescriptorProto.decode(message.value); + }, + toProto(message: FieldDescriptorProto): Uint8Array { + return FieldDescriptorProto.encode(message).finish(); + }, + toProtoMsg(message: FieldDescriptorProto): FieldDescriptorProtoProtoMsg { + return { + typeUrl: '/google.protobuf.FieldDescriptorProto', + value: FieldDescriptorProto.encode(message).finish(), + }; + }, +}; +function createBaseOneofDescriptorProto(): OneofDescriptorProto { + return { + name: '', + options: undefined, + }; +} +export const OneofDescriptorProto = { + typeUrl: '/google.protobuf.OneofDescriptorProto', + encode( + message: OneofDescriptorProto, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.name !== '') { + writer.uint32(10).string(message.name); + } + if (message.options !== undefined) { + OneofOptions.encode(message.options, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): OneofDescriptorProto { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseOneofDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.options = OneofOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): OneofDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : '', + options: isSet(object.options) + ? OneofOptions.fromJSON(object.options) + : undefined, + }; + }, + toJSON(message: OneofDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.options !== undefined && + (obj.options = message.options + ? OneofOptions.toJSON(message.options) + : undefined); + return obj; + }, + fromPartial(object: Partial): OneofDescriptorProto { + const message = createBaseOneofDescriptorProto(); + message.name = object.name ?? ''; + message.options = + object.options !== undefined && object.options !== null + ? OneofOptions.fromPartial(object.options) + : undefined; + return message; + }, + fromAmino(object: OneofDescriptorProtoAmino): OneofDescriptorProto { + const message = createBaseOneofDescriptorProto(); + if (object.name !== undefined && object.name !== null) { + message.name = object.name; + } + if (object.options !== undefined && object.options !== null) { + message.options = OneofOptions.fromAmino(object.options); + } + return message; + }, + toAmino(message: OneofDescriptorProto): OneofDescriptorProtoAmino { + const obj: any = {}; + obj.name = message.name; + obj.options = message.options + ? OneofOptions.toAmino(message.options) + : undefined; + return obj; + }, + fromAminoMsg(object: OneofDescriptorProtoAminoMsg): OneofDescriptorProto { + return OneofDescriptorProto.fromAmino(object.value); + }, + fromProtoMsg(message: OneofDescriptorProtoProtoMsg): OneofDescriptorProto { + return OneofDescriptorProto.decode(message.value); + }, + toProto(message: OneofDescriptorProto): Uint8Array { + return OneofDescriptorProto.encode(message).finish(); + }, + toProtoMsg(message: OneofDescriptorProto): OneofDescriptorProtoProtoMsg { + return { + typeUrl: '/google.protobuf.OneofDescriptorProto', + value: OneofDescriptorProto.encode(message).finish(), + }; + }, +}; +function createBaseEnumDescriptorProto(): EnumDescriptorProto { + return { + name: '', + value: [], + options: undefined, + reservedRange: [], + reservedName: [], + }; +} +export const EnumDescriptorProto = { + typeUrl: '/google.protobuf.EnumDescriptorProto', + encode( + message: EnumDescriptorProto, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.name !== '') { + writer.uint32(10).string(message.name); + } + for (const v of message.value) { + EnumValueDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + EnumOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + for (const v of message.reservedRange) { + EnumDescriptorProto_EnumReservedRange.encode( + v!, + writer.uint32(34).fork(), + ).ldelim(); + } + for (const v of message.reservedName) { + writer.uint32(42).string(v!); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): EnumDescriptorProto { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.value.push( + EnumValueDescriptorProto.decode(reader, reader.uint32()), + ); + break; + case 3: + message.options = EnumOptions.decode(reader, reader.uint32()); + break; + case 4: + message.reservedRange.push( + EnumDescriptorProto_EnumReservedRange.decode( + reader, + reader.uint32(), + ), + ); + break; + case 5: + message.reservedName.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): EnumDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : '', + value: Array.isArray(object?.value) + ? object.value.map((e: any) => EnumValueDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) + ? EnumOptions.fromJSON(object.options) + : undefined, + reservedRange: Array.isArray(object?.reservedRange) + ? object.reservedRange.map((e: any) => + EnumDescriptorProto_EnumReservedRange.fromJSON(e), + ) + : [], + reservedName: Array.isArray(object?.reservedName) + ? object.reservedName.map((e: any) => String(e)) + : [], + }; + }, + toJSON(message: EnumDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.value) { + obj.value = message.value.map(e => + e ? EnumValueDescriptorProto.toJSON(e) : undefined, + ); + } else { + obj.value = []; + } + message.options !== undefined && + (obj.options = message.options + ? EnumOptions.toJSON(message.options) + : undefined); + if (message.reservedRange) { + obj.reservedRange = message.reservedRange.map(e => + e ? EnumDescriptorProto_EnumReservedRange.toJSON(e) : undefined, + ); + } else { + obj.reservedRange = []; + } + if (message.reservedName) { + obj.reservedName = message.reservedName.map(e => e); + } else { + obj.reservedName = []; + } + return obj; + }, + fromPartial(object: Partial): EnumDescriptorProto { + const message = createBaseEnumDescriptorProto(); + message.name = object.name ?? ''; + message.value = + object.value?.map(e => EnumValueDescriptorProto.fromPartial(e)) || []; + message.options = + object.options !== undefined && object.options !== null + ? EnumOptions.fromPartial(object.options) + : undefined; + message.reservedRange = + object.reservedRange?.map(e => + EnumDescriptorProto_EnumReservedRange.fromPartial(e), + ) || []; + message.reservedName = object.reservedName?.map(e => e) || []; + return message; + }, + fromAmino(object: EnumDescriptorProtoAmino): EnumDescriptorProto { + const message = createBaseEnumDescriptorProto(); + if (object.name !== undefined && object.name !== null) { + message.name = object.name; + } + message.value = + object.value?.map(e => EnumValueDescriptorProto.fromAmino(e)) || []; + if (object.options !== undefined && object.options !== null) { + message.options = EnumOptions.fromAmino(object.options); + } + message.reservedRange = + object.reserved_range?.map(e => + EnumDescriptorProto_EnumReservedRange.fromAmino(e), + ) || []; + message.reservedName = object.reserved_name?.map(e => e) || []; + return message; + }, + toAmino(message: EnumDescriptorProto): EnumDescriptorProtoAmino { + const obj: any = {}; + obj.name = message.name; + if (message.value) { + obj.value = message.value.map(e => + e ? 
EnumValueDescriptorProto.toAmino(e) : undefined, + ); + } else { + obj.value = []; + } + obj.options = message.options + ? EnumOptions.toAmino(message.options) + : undefined; + if (message.reservedRange) { + obj.reserved_range = message.reservedRange.map(e => + e ? EnumDescriptorProto_EnumReservedRange.toAmino(e) : undefined, + ); + } else { + obj.reserved_range = []; + } + if (message.reservedName) { + obj.reserved_name = message.reservedName.map(e => e); + } else { + obj.reserved_name = []; + } + return obj; + }, + fromAminoMsg(object: EnumDescriptorProtoAminoMsg): EnumDescriptorProto { + return EnumDescriptorProto.fromAmino(object.value); + }, + fromProtoMsg(message: EnumDescriptorProtoProtoMsg): EnumDescriptorProto { + return EnumDescriptorProto.decode(message.value); + }, + toProto(message: EnumDescriptorProto): Uint8Array { + return EnumDescriptorProto.encode(message).finish(); + }, + toProtoMsg(message: EnumDescriptorProto): EnumDescriptorProtoProtoMsg { + return { + typeUrl: '/google.protobuf.EnumDescriptorProto', + value: EnumDescriptorProto.encode(message).finish(), + }; + }, +}; +function createBaseEnumDescriptorProto_EnumReservedRange(): EnumDescriptorProto_EnumReservedRange { + return { + start: 0, + end: 0, + }; +} +export const EnumDescriptorProto_EnumReservedRange = { + typeUrl: '/google.protobuf.EnumReservedRange', + encode( + message: EnumDescriptorProto_EnumReservedRange, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.start !== 0) { + writer.uint32(8).int32(message.start); + } + if (message.end !== 0) { + writer.uint32(16).int32(message.end); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): EnumDescriptorProto_EnumReservedRange { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.start = reader.int32(); + break; + case 2: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): EnumDescriptorProto_EnumReservedRange { + return { + start: isSet(object.start) ? Number(object.start) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + }; + }, + toJSON(message: EnumDescriptorProto_EnumReservedRange): unknown { + const obj: any = {}; + message.start !== undefined && (obj.start = Math.round(message.start)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + fromPartial( + object: Partial, + ): EnumDescriptorProto_EnumReservedRange { + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + message.start = object.start ?? 0; + message.end = object.end ?? 
0; + return message; + }, + fromAmino( + object: EnumDescriptorProto_EnumReservedRangeAmino, + ): EnumDescriptorProto_EnumReservedRange { + const message = createBaseEnumDescriptorProto_EnumReservedRange(); + if (object.start !== undefined && object.start !== null) { + message.start = object.start; + } + if (object.end !== undefined && object.end !== null) { + message.end = object.end; + } + return message; + }, + toAmino( + message: EnumDescriptorProto_EnumReservedRange, + ): EnumDescriptorProto_EnumReservedRangeAmino { + const obj: any = {}; + obj.start = message.start; + obj.end = message.end; + return obj; + }, + fromAminoMsg( + object: EnumDescriptorProto_EnumReservedRangeAminoMsg, + ): EnumDescriptorProto_EnumReservedRange { + return EnumDescriptorProto_EnumReservedRange.fromAmino(object.value); + }, + fromProtoMsg( + message: EnumDescriptorProto_EnumReservedRangeProtoMsg, + ): EnumDescriptorProto_EnumReservedRange { + return EnumDescriptorProto_EnumReservedRange.decode(message.value); + }, + toProto(message: EnumDescriptorProto_EnumReservedRange): Uint8Array { + return EnumDescriptorProto_EnumReservedRange.encode(message).finish(); + }, + toProtoMsg( + message: EnumDescriptorProto_EnumReservedRange, + ): EnumDescriptorProto_EnumReservedRangeProtoMsg { + return { + typeUrl: '/google.protobuf.EnumReservedRange', + value: EnumDescriptorProto_EnumReservedRange.encode(message).finish(), + }; + }, +}; +function createBaseEnumValueDescriptorProto(): EnumValueDescriptorProto { + return { + name: '', + number: 0, + options: undefined, + }; +} +export const EnumValueDescriptorProto = { + typeUrl: '/google.protobuf.EnumValueDescriptorProto', + encode( + message: EnumValueDescriptorProto, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.name !== '') { + writer.uint32(10).string(message.name); + } + if (message.number !== 0) { + writer.uint32(16).int32(message.number); + } + if (message.options !== undefined) { + EnumValueOptions.encode( + message.options, + writer.uint32(26).fork(), + ).ldelim(); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): EnumValueDescriptorProto { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumValueDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.number = reader.int32(); + break; + case 3: + message.options = EnumValueOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): EnumValueDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : '', + number: isSet(object.number) ? Number(object.number) : 0, + options: isSet(object.options) + ? EnumValueOptions.fromJSON(object.options) + : undefined, + }; + }, + toJSON(message: EnumValueDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.number !== undefined && (obj.number = Math.round(message.number)); + message.options !== undefined && + (obj.options = message.options + ? EnumValueOptions.toJSON(message.options) + : undefined); + return obj; + }, + fromPartial( + object: Partial, + ): EnumValueDescriptorProto { + const message = createBaseEnumValueDescriptorProto(); + message.name = object.name ?? 
''; + message.number = object.number ?? 0; + message.options = + object.options !== undefined && object.options !== null + ? EnumValueOptions.fromPartial(object.options) + : undefined; + return message; + }, + fromAmino(object: EnumValueDescriptorProtoAmino): EnumValueDescriptorProto { + const message = createBaseEnumValueDescriptorProto(); + if (object.name !== undefined && object.name !== null) { + message.name = object.name; + } + if (object.number !== undefined && object.number !== null) { + message.number = object.number; + } + if (object.options !== undefined && object.options !== null) { + message.options = EnumValueOptions.fromAmino(object.options); + } + return message; + }, + toAmino(message: EnumValueDescriptorProto): EnumValueDescriptorProtoAmino { + const obj: any = {}; + obj.name = message.name; + obj.number = message.number; + obj.options = message.options + ? EnumValueOptions.toAmino(message.options) + : undefined; + return obj; + }, + fromAminoMsg( + object: EnumValueDescriptorProtoAminoMsg, + ): EnumValueDescriptorProto { + return EnumValueDescriptorProto.fromAmino(object.value); + }, + fromProtoMsg( + message: EnumValueDescriptorProtoProtoMsg, + ): EnumValueDescriptorProto { + return EnumValueDescriptorProto.decode(message.value); + }, + toProto(message: EnumValueDescriptorProto): Uint8Array { + return EnumValueDescriptorProto.encode(message).finish(); + }, + toProtoMsg( + message: EnumValueDescriptorProto, + ): EnumValueDescriptorProtoProtoMsg { + return { + typeUrl: '/google.protobuf.EnumValueDescriptorProto', + value: EnumValueDescriptorProto.encode(message).finish(), + }; + }, +}; +function createBaseServiceDescriptorProto(): ServiceDescriptorProto { + return { + name: '', + method: [], + options: undefined, + }; +} +export const ServiceDescriptorProto = { + typeUrl: '/google.protobuf.ServiceDescriptorProto', + encode( + message: ServiceDescriptorProto, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.name !== '') { + writer.uint32(10).string(message.name); + } + for (const v of message.method) { + MethodDescriptorProto.encode(v!, writer.uint32(18).fork()).ldelim(); + } + if (message.options !== undefined) { + ServiceOptions.encode(message.options, writer.uint32(26).fork()).ldelim(); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): ServiceDescriptorProto { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.method.push( + MethodDescriptorProto.decode(reader, reader.uint32()), + ); + break; + case 3: + message.options = ServiceOptions.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): ServiceDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : '', + method: Array.isArray(object?.method) + ? object.method.map((e: any) => MethodDescriptorProto.fromJSON(e)) + : [], + options: isSet(object.options) + ? ServiceOptions.fromJSON(object.options) + : undefined, + }; + }, + toJSON(message: ServiceDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + if (message.method) { + obj.method = message.method.map(e => + e ? 
MethodDescriptorProto.toJSON(e) : undefined, + ); + } else { + obj.method = []; + } + message.options !== undefined && + (obj.options = message.options + ? ServiceOptions.toJSON(message.options) + : undefined); + return obj; + }, + fromPartial(object: Partial): ServiceDescriptorProto { + const message = createBaseServiceDescriptorProto(); + message.name = object.name ?? ''; + message.method = + object.method?.map(e => MethodDescriptorProto.fromPartial(e)) || []; + message.options = + object.options !== undefined && object.options !== null + ? ServiceOptions.fromPartial(object.options) + : undefined; + return message; + }, + fromAmino(object: ServiceDescriptorProtoAmino): ServiceDescriptorProto { + const message = createBaseServiceDescriptorProto(); + if (object.name !== undefined && object.name !== null) { + message.name = object.name; + } + message.method = + object.method?.map(e => MethodDescriptorProto.fromAmino(e)) || []; + if (object.options !== undefined && object.options !== null) { + message.options = ServiceOptions.fromAmino(object.options); + } + return message; + }, + toAmino(message: ServiceDescriptorProto): ServiceDescriptorProtoAmino { + const obj: any = {}; + obj.name = message.name; + if (message.method) { + obj.method = message.method.map(e => + e ? MethodDescriptorProto.toAmino(e) : undefined, + ); + } else { + obj.method = []; + } + obj.options = message.options + ? ServiceOptions.toAmino(message.options) + : undefined; + return obj; + }, + fromAminoMsg(object: ServiceDescriptorProtoAminoMsg): ServiceDescriptorProto { + return ServiceDescriptorProto.fromAmino(object.value); + }, + fromProtoMsg( + message: ServiceDescriptorProtoProtoMsg, + ): ServiceDescriptorProto { + return ServiceDescriptorProto.decode(message.value); + }, + toProto(message: ServiceDescriptorProto): Uint8Array { + return ServiceDescriptorProto.encode(message).finish(); + }, + toProtoMsg(message: ServiceDescriptorProto): ServiceDescriptorProtoProtoMsg { + return { + typeUrl: '/google.protobuf.ServiceDescriptorProto', + value: ServiceDescriptorProto.encode(message).finish(), + }; + }, +}; +function createBaseMethodDescriptorProto(): MethodDescriptorProto { + return { + name: '', + inputType: '', + outputType: '', + options: undefined, + clientStreaming: false, + serverStreaming: false, + }; +} +export const MethodDescriptorProto = { + typeUrl: '/google.protobuf.MethodDescriptorProto', + encode( + message: MethodDescriptorProto, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.name !== '') { + writer.uint32(10).string(message.name); + } + if (message.inputType !== '') { + writer.uint32(18).string(message.inputType); + } + if (message.outputType !== '') { + writer.uint32(26).string(message.outputType); + } + if (message.options !== undefined) { + MethodOptions.encode(message.options, writer.uint32(34).fork()).ldelim(); + } + if (message.clientStreaming === true) { + writer.uint32(40).bool(message.clientStreaming); + } + if (message.serverStreaming === true) { + writer.uint32(48).bool(message.serverStreaming); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): MethodDescriptorProto { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMethodDescriptorProto(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.name = reader.string(); + break; + case 2: + message.inputType = reader.string(); + break; + case 3: + message.outputType = reader.string(); + break; + case 4: + message.options = MethodOptions.decode(reader, reader.uint32()); + break; + case 5: + message.clientStreaming = reader.bool(); + break; + case 6: + message.serverStreaming = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): MethodDescriptorProto { + return { + name: isSet(object.name) ? String(object.name) : '', + inputType: isSet(object.inputType) ? String(object.inputType) : '', + outputType: isSet(object.outputType) ? String(object.outputType) : '', + options: isSet(object.options) + ? MethodOptions.fromJSON(object.options) + : undefined, + clientStreaming: isSet(object.clientStreaming) + ? Boolean(object.clientStreaming) + : false, + serverStreaming: isSet(object.serverStreaming) + ? Boolean(object.serverStreaming) + : false, + }; + }, + toJSON(message: MethodDescriptorProto): unknown { + const obj: any = {}; + message.name !== undefined && (obj.name = message.name); + message.inputType !== undefined && (obj.inputType = message.inputType); + message.outputType !== undefined && (obj.outputType = message.outputType); + message.options !== undefined && + (obj.options = message.options + ? MethodOptions.toJSON(message.options) + : undefined); + message.clientStreaming !== undefined && + (obj.clientStreaming = message.clientStreaming); + message.serverStreaming !== undefined && + (obj.serverStreaming = message.serverStreaming); + return obj; + }, + fromPartial(object: Partial): MethodDescriptorProto { + const message = createBaseMethodDescriptorProto(); + message.name = object.name ?? ''; + message.inputType = object.inputType ?? ''; + message.outputType = object.outputType ?? ''; + message.options = + object.options !== undefined && object.options !== null + ? MethodOptions.fromPartial(object.options) + : undefined; + message.clientStreaming = object.clientStreaming ?? false; + message.serverStreaming = object.serverStreaming ?? false; + return message; + }, + fromAmino(object: MethodDescriptorProtoAmino): MethodDescriptorProto { + const message = createBaseMethodDescriptorProto(); + if (object.name !== undefined && object.name !== null) { + message.name = object.name; + } + if (object.input_type !== undefined && object.input_type !== null) { + message.inputType = object.input_type; + } + if (object.output_type !== undefined && object.output_type !== null) { + message.outputType = object.output_type; + } + if (object.options !== undefined && object.options !== null) { + message.options = MethodOptions.fromAmino(object.options); + } + if ( + object.client_streaming !== undefined && + object.client_streaming !== null + ) { + message.clientStreaming = object.client_streaming; + } + if ( + object.server_streaming !== undefined && + object.server_streaming !== null + ) { + message.serverStreaming = object.server_streaming; + } + return message; + }, + toAmino(message: MethodDescriptorProto): MethodDescriptorProtoAmino { + const obj: any = {}; + obj.name = message.name; + obj.input_type = message.inputType; + obj.output_type = message.outputType; + obj.options = message.options + ? 
MethodOptions.toAmino(message.options) + : undefined; + obj.client_streaming = message.clientStreaming; + obj.server_streaming = message.serverStreaming; + return obj; + }, + fromAminoMsg(object: MethodDescriptorProtoAminoMsg): MethodDescriptorProto { + return MethodDescriptorProto.fromAmino(object.value); + }, + fromProtoMsg(message: MethodDescriptorProtoProtoMsg): MethodDescriptorProto { + return MethodDescriptorProto.decode(message.value); + }, + toProto(message: MethodDescriptorProto): Uint8Array { + return MethodDescriptorProto.encode(message).finish(); + }, + toProtoMsg(message: MethodDescriptorProto): MethodDescriptorProtoProtoMsg { + return { + typeUrl: '/google.protobuf.MethodDescriptorProto', + value: MethodDescriptorProto.encode(message).finish(), + }; + }, +}; +function createBaseFileOptions(): FileOptions { + return { + javaPackage: '', + javaOuterClassname: '', + javaMultipleFiles: false, + javaGenerateEqualsAndHash: false, + javaStringCheckUtf8: false, + optimizeFor: 1, + goPackage: '', + ccGenericServices: false, + javaGenericServices: false, + pyGenericServices: false, + phpGenericServices: false, + deprecated: false, + ccEnableArenas: false, + objcClassPrefix: '', + csharpNamespace: '', + swiftPrefix: '', + phpClassPrefix: '', + phpNamespace: '', + phpMetadataNamespace: '', + rubyPackage: '', + uninterpretedOption: [], + }; +} +export const FileOptions = { + typeUrl: '/google.protobuf.FileOptions', + encode( + message: FileOptions, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.javaPackage !== '') { + writer.uint32(10).string(message.javaPackage); + } + if (message.javaOuterClassname !== '') { + writer.uint32(66).string(message.javaOuterClassname); + } + if (message.javaMultipleFiles === true) { + writer.uint32(80).bool(message.javaMultipleFiles); + } + if (message.javaGenerateEqualsAndHash === true) { + writer.uint32(160).bool(message.javaGenerateEqualsAndHash); + } + if (message.javaStringCheckUtf8 === true) { + writer.uint32(216).bool(message.javaStringCheckUtf8); + } + if (message.optimizeFor !== 1) { + writer.uint32(72).int32(message.optimizeFor); + } + if (message.goPackage !== '') { + writer.uint32(90).string(message.goPackage); + } + if (message.ccGenericServices === true) { + writer.uint32(128).bool(message.ccGenericServices); + } + if (message.javaGenericServices === true) { + writer.uint32(136).bool(message.javaGenericServices); + } + if (message.pyGenericServices === true) { + writer.uint32(144).bool(message.pyGenericServices); + } + if (message.phpGenericServices === true) { + writer.uint32(336).bool(message.phpGenericServices); + } + if (message.deprecated === true) { + writer.uint32(184).bool(message.deprecated); + } + if (message.ccEnableArenas === true) { + writer.uint32(248).bool(message.ccEnableArenas); + } + if (message.objcClassPrefix !== '') { + writer.uint32(290).string(message.objcClassPrefix); + } + if (message.csharpNamespace !== '') { + writer.uint32(298).string(message.csharpNamespace); + } + if (message.swiftPrefix !== '') { + writer.uint32(314).string(message.swiftPrefix); + } + if (message.phpClassPrefix !== '') { + writer.uint32(322).string(message.phpClassPrefix); + } + if (message.phpNamespace !== '') { + writer.uint32(330).string(message.phpNamespace); + } + if (message.phpMetadataNamespace !== '') { + writer.uint32(354).string(message.phpMetadataNamespace); + } + if (message.rubyPackage !== '') { + writer.uint32(362).string(message.rubyPackage); + } + for (const v of message.uninterpretedOption) { + 
UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): FileOptions { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseFileOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.javaPackage = reader.string(); + break; + case 8: + message.javaOuterClassname = reader.string(); + break; + case 10: + message.javaMultipleFiles = reader.bool(); + break; + case 20: + message.javaGenerateEqualsAndHash = reader.bool(); + break; + case 27: + message.javaStringCheckUtf8 = reader.bool(); + break; + case 9: + message.optimizeFor = reader.int32() as any; + break; + case 11: + message.goPackage = reader.string(); + break; + case 16: + message.ccGenericServices = reader.bool(); + break; + case 17: + message.javaGenericServices = reader.bool(); + break; + case 18: + message.pyGenericServices = reader.bool(); + break; + case 42: + message.phpGenericServices = reader.bool(); + break; + case 23: + message.deprecated = reader.bool(); + break; + case 31: + message.ccEnableArenas = reader.bool(); + break; + case 36: + message.objcClassPrefix = reader.string(); + break; + case 37: + message.csharpNamespace = reader.string(); + break; + case 39: + message.swiftPrefix = reader.string(); + break; + case 40: + message.phpClassPrefix = reader.string(); + break; + case 41: + message.phpNamespace = reader.string(); + break; + case 44: + message.phpMetadataNamespace = reader.string(); + break; + case 45: + message.rubyPackage = reader.string(); + break; + case 999: + message.uninterpretedOption.push( + UninterpretedOption.decode(reader, reader.uint32()), + ); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): FileOptions { + return { + javaPackage: isSet(object.javaPackage) ? String(object.javaPackage) : '', + javaOuterClassname: isSet(object.javaOuterClassname) + ? String(object.javaOuterClassname) + : '', + javaMultipleFiles: isSet(object.javaMultipleFiles) + ? Boolean(object.javaMultipleFiles) + : false, + javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) + ? Boolean(object.javaGenerateEqualsAndHash) + : false, + javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) + ? Boolean(object.javaStringCheckUtf8) + : false, + optimizeFor: isSet(object.optimizeFor) + ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) + : -1, + goPackage: isSet(object.goPackage) ? String(object.goPackage) : '', + ccGenericServices: isSet(object.ccGenericServices) + ? Boolean(object.ccGenericServices) + : false, + javaGenericServices: isSet(object.javaGenericServices) + ? Boolean(object.javaGenericServices) + : false, + pyGenericServices: isSet(object.pyGenericServices) + ? Boolean(object.pyGenericServices) + : false, + phpGenericServices: isSet(object.phpGenericServices) + ? Boolean(object.phpGenericServices) + : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + ccEnableArenas: isSet(object.ccEnableArenas) + ? Boolean(object.ccEnableArenas) + : false, + objcClassPrefix: isSet(object.objcClassPrefix) + ? String(object.objcClassPrefix) + : '', + csharpNamespace: isSet(object.csharpNamespace) + ? String(object.csharpNamespace) + : '', + swiftPrefix: isSet(object.swiftPrefix) ? 
String(object.swiftPrefix) : '', + phpClassPrefix: isSet(object.phpClassPrefix) + ? String(object.phpClassPrefix) + : '', + phpNamespace: isSet(object.phpNamespace) + ? String(object.phpNamespace) + : '', + phpMetadataNamespace: isSet(object.phpMetadataNamespace) + ? String(object.phpMetadataNamespace) + : '', + rubyPackage: isSet(object.rubyPackage) ? String(object.rubyPackage) : '', + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => + UninterpretedOption.fromJSON(e), + ) + : [], + }; + }, + toJSON(message: FileOptions): unknown { + const obj: any = {}; + message.javaPackage !== undefined && + (obj.javaPackage = message.javaPackage); + message.javaOuterClassname !== undefined && + (obj.javaOuterClassname = message.javaOuterClassname); + message.javaMultipleFiles !== undefined && + (obj.javaMultipleFiles = message.javaMultipleFiles); + message.javaGenerateEqualsAndHash !== undefined && + (obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash); + message.javaStringCheckUtf8 !== undefined && + (obj.javaStringCheckUtf8 = message.javaStringCheckUtf8); + message.optimizeFor !== undefined && + (obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor)); + message.goPackage !== undefined && (obj.goPackage = message.goPackage); + message.ccGenericServices !== undefined && + (obj.ccGenericServices = message.ccGenericServices); + message.javaGenericServices !== undefined && + (obj.javaGenericServices = message.javaGenericServices); + message.pyGenericServices !== undefined && + (obj.pyGenericServices = message.pyGenericServices); + message.phpGenericServices !== undefined && + (obj.phpGenericServices = message.phpGenericServices); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.ccEnableArenas !== undefined && + (obj.ccEnableArenas = message.ccEnableArenas); + message.objcClassPrefix !== undefined && + (obj.objcClassPrefix = message.objcClassPrefix); + message.csharpNamespace !== undefined && + (obj.csharpNamespace = message.csharpNamespace); + message.swiftPrefix !== undefined && + (obj.swiftPrefix = message.swiftPrefix); + message.phpClassPrefix !== undefined && + (obj.phpClassPrefix = message.phpClassPrefix); + message.phpNamespace !== undefined && + (obj.phpNamespace = message.phpNamespace); + message.phpMetadataNamespace !== undefined && + (obj.phpMetadataNamespace = message.phpMetadataNamespace); + message.rubyPackage !== undefined && + (obj.rubyPackage = message.rubyPackage); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map(e => + e ? UninterpretedOption.toJSON(e) : undefined, + ); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + fromPartial(object: Partial): FileOptions { + const message = createBaseFileOptions(); + message.javaPackage = object.javaPackage ?? ''; + message.javaOuterClassname = object.javaOuterClassname ?? ''; + message.javaMultipleFiles = object.javaMultipleFiles ?? false; + message.javaGenerateEqualsAndHash = + object.javaGenerateEqualsAndHash ?? false; + message.javaStringCheckUtf8 = object.javaStringCheckUtf8 ?? false; + message.optimizeFor = object.optimizeFor ?? 1; + message.goPackage = object.goPackage ?? ''; + message.ccGenericServices = object.ccGenericServices ?? false; + message.javaGenericServices = object.javaGenericServices ?? false; + message.pyGenericServices = object.pyGenericServices ?? false; + message.phpGenericServices = object.phpGenericServices ?? 
false; + message.deprecated = object.deprecated ?? false; + message.ccEnableArenas = object.ccEnableArenas ?? false; + message.objcClassPrefix = object.objcClassPrefix ?? ''; + message.csharpNamespace = object.csharpNamespace ?? ''; + message.swiftPrefix = object.swiftPrefix ?? ''; + message.phpClassPrefix = object.phpClassPrefix ?? ''; + message.phpNamespace = object.phpNamespace ?? ''; + message.phpMetadataNamespace = object.phpMetadataNamespace ?? ''; + message.rubyPackage = object.rubyPackage ?? ''; + message.uninterpretedOption = + object.uninterpretedOption?.map(e => + UninterpretedOption.fromPartial(e), + ) || []; + return message; + }, + fromAmino(object: FileOptionsAmino): FileOptions { + const message = createBaseFileOptions(); + if (object.java_package !== undefined && object.java_package !== null) { + message.javaPackage = object.java_package; + } + if ( + object.java_outer_classname !== undefined && + object.java_outer_classname !== null + ) { + message.javaOuterClassname = object.java_outer_classname; + } + if ( + object.java_multiple_files !== undefined && + object.java_multiple_files !== null + ) { + message.javaMultipleFiles = object.java_multiple_files; + } + if ( + object.java_generate_equals_and_hash !== undefined && + object.java_generate_equals_and_hash !== null + ) { + message.javaGenerateEqualsAndHash = object.java_generate_equals_and_hash; + } + if ( + object.java_string_check_utf8 !== undefined && + object.java_string_check_utf8 !== null + ) { + message.javaStringCheckUtf8 = object.java_string_check_utf8; + } + if (object.optimize_for !== undefined && object.optimize_for !== null) { + message.optimizeFor = fileOptions_OptimizeModeFromJSON( + object.optimize_for, + ); + } + if (object.go_package !== undefined && object.go_package !== null) { + message.goPackage = object.go_package; + } + if ( + object.cc_generic_services !== undefined && + object.cc_generic_services !== null + ) { + message.ccGenericServices = object.cc_generic_services; + } + if ( + object.java_generic_services !== undefined && + object.java_generic_services !== null + ) { + message.javaGenericServices = object.java_generic_services; + } + if ( + object.py_generic_services !== undefined && + object.py_generic_services !== null + ) { + message.pyGenericServices = object.py_generic_services; + } + if ( + object.php_generic_services !== undefined && + object.php_generic_services !== null + ) { + message.phpGenericServices = object.php_generic_services; + } + if (object.deprecated !== undefined && object.deprecated !== null) { + message.deprecated = object.deprecated; + } + if ( + object.cc_enable_arenas !== undefined && + object.cc_enable_arenas !== null + ) { + message.ccEnableArenas = object.cc_enable_arenas; + } + if ( + object.objc_class_prefix !== undefined && + object.objc_class_prefix !== null + ) { + message.objcClassPrefix = object.objc_class_prefix; + } + if ( + object.csharp_namespace !== undefined && + object.csharp_namespace !== null + ) { + message.csharpNamespace = object.csharp_namespace; + } + if (object.swift_prefix !== undefined && object.swift_prefix !== null) { + message.swiftPrefix = object.swift_prefix; + } + if ( + object.php_class_prefix !== undefined && + object.php_class_prefix !== null + ) { + message.phpClassPrefix = object.php_class_prefix; + } + if (object.php_namespace !== undefined && object.php_namespace !== null) { + message.phpNamespace = object.php_namespace; + } + if ( + object.php_metadata_namespace !== undefined && + object.php_metadata_namespace !== null + ) { 
+ message.phpMetadataNamespace = object.php_metadata_namespace; + } + if (object.ruby_package !== undefined && object.ruby_package !== null) { + message.rubyPackage = object.ruby_package; + } + message.uninterpretedOption = + object.uninterpreted_option?.map(e => UninterpretedOption.fromAmino(e)) || + []; + return message; + }, + toAmino(message: FileOptions): FileOptionsAmino { + const obj: any = {}; + obj.java_package = message.javaPackage; + obj.java_outer_classname = message.javaOuterClassname; + obj.java_multiple_files = message.javaMultipleFiles; + obj.java_generate_equals_and_hash = message.javaGenerateEqualsAndHash; + obj.java_string_check_utf8 = message.javaStringCheckUtf8; + obj.optimize_for = message.optimizeFor; + obj.go_package = message.goPackage; + obj.cc_generic_services = message.ccGenericServices; + obj.java_generic_services = message.javaGenericServices; + obj.py_generic_services = message.pyGenericServices; + obj.php_generic_services = message.phpGenericServices; + obj.deprecated = message.deprecated; + obj.cc_enable_arenas = message.ccEnableArenas; + obj.objc_class_prefix = message.objcClassPrefix; + obj.csharp_namespace = message.csharpNamespace; + obj.swift_prefix = message.swiftPrefix; + obj.php_class_prefix = message.phpClassPrefix; + obj.php_namespace = message.phpNamespace; + obj.php_metadata_namespace = message.phpMetadataNamespace; + obj.ruby_package = message.rubyPackage; + if (message.uninterpretedOption) { + obj.uninterpreted_option = message.uninterpretedOption.map(e => + e ? UninterpretedOption.toAmino(e) : undefined, + ); + } else { + obj.uninterpreted_option = []; + } + return obj; + }, + fromAminoMsg(object: FileOptionsAminoMsg): FileOptions { + return FileOptions.fromAmino(object.value); + }, + fromProtoMsg(message: FileOptionsProtoMsg): FileOptions { + return FileOptions.decode(message.value); + }, + toProto(message: FileOptions): Uint8Array { + return FileOptions.encode(message).finish(); + }, + toProtoMsg(message: FileOptions): FileOptionsProtoMsg { + return { + typeUrl: '/google.protobuf.FileOptions', + value: FileOptions.encode(message).finish(), + }; + }, +}; +function createBaseMessageOptions(): MessageOptions { + return { + messageSetWireFormat: false, + noStandardDescriptorAccessor: false, + deprecated: false, + mapEntry: false, + uninterpretedOption: [], + }; +} +export const MessageOptions = { + typeUrl: '/google.protobuf.MessageOptions', + encode( + message: MessageOptions, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.messageSetWireFormat === true) { + writer.uint32(8).bool(message.messageSetWireFormat); + } + if (message.noStandardDescriptorAccessor === true) { + writer.uint32(16).bool(message.noStandardDescriptorAccessor); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.mapEntry === true) { + writer.uint32(56).bool(message.mapEntry); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): MessageOptions { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseMessageOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.messageSetWireFormat = reader.bool(); + break; + case 2: + message.noStandardDescriptorAccessor = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 7: + message.mapEntry = reader.bool(); + break; + case 999: + message.uninterpretedOption.push( + UninterpretedOption.decode(reader, reader.uint32()), + ); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): MessageOptions { + return { + messageSetWireFormat: isSet(object.messageSetWireFormat) + ? Boolean(object.messageSetWireFormat) + : false, + noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) + ? Boolean(object.noStandardDescriptorAccessor) + : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + mapEntry: isSet(object.mapEntry) ? Boolean(object.mapEntry) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => + UninterpretedOption.fromJSON(e), + ) + : [], + }; + }, + toJSON(message: MessageOptions): unknown { + const obj: any = {}; + message.messageSetWireFormat !== undefined && + (obj.messageSetWireFormat = message.messageSetWireFormat); + message.noStandardDescriptorAccessor !== undefined && + (obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.mapEntry !== undefined && (obj.mapEntry = message.mapEntry); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map(e => + e ? UninterpretedOption.toJSON(e) : undefined, + ); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + fromPartial(object: Partial): MessageOptions { + const message = createBaseMessageOptions(); + message.messageSetWireFormat = object.messageSetWireFormat ?? false; + message.noStandardDescriptorAccessor = + object.noStandardDescriptorAccessor ?? false; + message.deprecated = object.deprecated ?? false; + message.mapEntry = object.mapEntry ?? 
false; + message.uninterpretedOption = + object.uninterpretedOption?.map(e => + UninterpretedOption.fromPartial(e), + ) || []; + return message; + }, + fromAmino(object: MessageOptionsAmino): MessageOptions { + const message = createBaseMessageOptions(); + if ( + object.message_set_wire_format !== undefined && + object.message_set_wire_format !== null + ) { + message.messageSetWireFormat = object.message_set_wire_format; + } + if ( + object.no_standard_descriptor_accessor !== undefined && + object.no_standard_descriptor_accessor !== null + ) { + message.noStandardDescriptorAccessor = + object.no_standard_descriptor_accessor; + } + if (object.deprecated !== undefined && object.deprecated !== null) { + message.deprecated = object.deprecated; + } + if (object.map_entry !== undefined && object.map_entry !== null) { + message.mapEntry = object.map_entry; + } + message.uninterpretedOption = + object.uninterpreted_option?.map(e => UninterpretedOption.fromAmino(e)) || + []; + return message; + }, + toAmino(message: MessageOptions): MessageOptionsAmino { + const obj: any = {}; + obj.message_set_wire_format = message.messageSetWireFormat; + obj.no_standard_descriptor_accessor = message.noStandardDescriptorAccessor; + obj.deprecated = message.deprecated; + obj.map_entry = message.mapEntry; + if (message.uninterpretedOption) { + obj.uninterpreted_option = message.uninterpretedOption.map(e => + e ? UninterpretedOption.toAmino(e) : undefined, + ); + } else { + obj.uninterpreted_option = []; + } + return obj; + }, + fromAminoMsg(object: MessageOptionsAminoMsg): MessageOptions { + return MessageOptions.fromAmino(object.value); + }, + fromProtoMsg(message: MessageOptionsProtoMsg): MessageOptions { + return MessageOptions.decode(message.value); + }, + toProto(message: MessageOptions): Uint8Array { + return MessageOptions.encode(message).finish(); + }, + toProtoMsg(message: MessageOptions): MessageOptionsProtoMsg { + return { + typeUrl: '/google.protobuf.MessageOptions', + value: MessageOptions.encode(message).finish(), + }; + }, +}; +function createBaseFieldOptions(): FieldOptions { + return { + ctype: 1, + packed: false, + jstype: 1, + lazy: false, + deprecated: false, + weak: false, + uninterpretedOption: [], + }; +} +export const FieldOptions = { + typeUrl: '/google.protobuf.FieldOptions', + encode( + message: FieldOptions, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.ctype !== 1) { + writer.uint32(8).int32(message.ctype); + } + if (message.packed === true) { + writer.uint32(16).bool(message.packed); + } + if (message.jstype !== 1) { + writer.uint32(48).int32(message.jstype); + } + if (message.lazy === true) { + writer.uint32(40).bool(message.lazy); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + if (message.weak === true) { + writer.uint32(80).bool(message.weak); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): FieldOptions { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
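[Editorial sketch, not part of the patch.] The generated codecs above all follow the same shape: `createBaseX()` supplies proto3 defaults, `encode`/`decode` speak the binary wire format via `BinaryWriter`/`BinaryReader`, and `fromPartial` fills in any omitted fields. A minimal round trip, assuming the descriptor module is importable from the codegen output path shown in this diff (the exact import path is an assumption):

    // Sketch only: the import path is assumed from the codegen layout in this diff.
    import { MessageOptions } from './codegen/google/protobuf/descriptor.js';

    // Build a message from a partial object; unspecified fields get their defaults.
    const opts = MessageOptions.fromPartial({ deprecated: true, mapEntry: false });

    // Serialize to protobuf wire format and decode it back.
    const bytes: Uint8Array = MessageOptions.encode(opts).finish();
    const roundTripped = MessageOptions.decode(bytes);

    // toJSON/fromJSON use the camelCase (proto3 JSON) field names.
    console.log(MessageOptions.toJSON(roundTripped)); // { deprecated: true, ... }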
reader.len : reader.pos + length; + const message = createBaseFieldOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.ctype = reader.int32() as any; + break; + case 2: + message.packed = reader.bool(); + break; + case 6: + message.jstype = reader.int32() as any; + break; + case 5: + message.lazy = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 10: + message.weak = reader.bool(); + break; + case 999: + message.uninterpretedOption.push( + UninterpretedOption.decode(reader, reader.uint32()), + ); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): FieldOptions { + return { + ctype: isSet(object.ctype) + ? fieldOptions_CTypeFromJSON(object.ctype) + : -1, + packed: isSet(object.packed) ? Boolean(object.packed) : false, + jstype: isSet(object.jstype) + ? fieldOptions_JSTypeFromJSON(object.jstype) + : -1, + lazy: isSet(object.lazy) ? Boolean(object.lazy) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + weak: isSet(object.weak) ? Boolean(object.weak) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => + UninterpretedOption.fromJSON(e), + ) + : [], + }; + }, + toJSON(message: FieldOptions): unknown { + const obj: any = {}; + message.ctype !== undefined && + (obj.ctype = fieldOptions_CTypeToJSON(message.ctype)); + message.packed !== undefined && (obj.packed = message.packed); + message.jstype !== undefined && + (obj.jstype = fieldOptions_JSTypeToJSON(message.jstype)); + message.lazy !== undefined && (obj.lazy = message.lazy); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.weak !== undefined && (obj.weak = message.weak); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map(e => + e ? UninterpretedOption.toJSON(e) : undefined, + ); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + fromPartial(object: Partial): FieldOptions { + const message = createBaseFieldOptions(); + message.ctype = object.ctype ?? 1; + message.packed = object.packed ?? false; + message.jstype = object.jstype ?? 1; + message.lazy = object.lazy ?? false; + message.deprecated = object.deprecated ?? false; + message.weak = object.weak ?? 
false; + message.uninterpretedOption = + object.uninterpretedOption?.map(e => + UninterpretedOption.fromPartial(e), + ) || []; + return message; + }, + fromAmino(object: FieldOptionsAmino): FieldOptions { + const message = createBaseFieldOptions(); + if (object.ctype !== undefined && object.ctype !== null) { + message.ctype = fieldOptions_CTypeFromJSON(object.ctype); + } + if (object.packed !== undefined && object.packed !== null) { + message.packed = object.packed; + } + if (object.jstype !== undefined && object.jstype !== null) { + message.jstype = fieldOptions_JSTypeFromJSON(object.jstype); + } + if (object.lazy !== undefined && object.lazy !== null) { + message.lazy = object.lazy; + } + if (object.deprecated !== undefined && object.deprecated !== null) { + message.deprecated = object.deprecated; + } + if (object.weak !== undefined && object.weak !== null) { + message.weak = object.weak; + } + message.uninterpretedOption = + object.uninterpreted_option?.map(e => UninterpretedOption.fromAmino(e)) || + []; + return message; + }, + toAmino(message: FieldOptions): FieldOptionsAmino { + const obj: any = {}; + obj.ctype = message.ctype; + obj.packed = message.packed; + obj.jstype = message.jstype; + obj.lazy = message.lazy; + obj.deprecated = message.deprecated; + obj.weak = message.weak; + if (message.uninterpretedOption) { + obj.uninterpreted_option = message.uninterpretedOption.map(e => + e ? UninterpretedOption.toAmino(e) : undefined, + ); + } else { + obj.uninterpreted_option = []; + } + return obj; + }, + fromAminoMsg(object: FieldOptionsAminoMsg): FieldOptions { + return FieldOptions.fromAmino(object.value); + }, + fromProtoMsg(message: FieldOptionsProtoMsg): FieldOptions { + return FieldOptions.decode(message.value); + }, + toProto(message: FieldOptions): Uint8Array { + return FieldOptions.encode(message).finish(); + }, + toProtoMsg(message: FieldOptions): FieldOptionsProtoMsg { + return { + typeUrl: '/google.protobuf.FieldOptions', + value: FieldOptions.encode(message).finish(), + }; + }, +}; +function createBaseOneofOptions(): OneofOptions { + return { + uninterpretedOption: [], + }; +} +export const OneofOptions = { + typeUrl: '/google.protobuf.OneofOptions', + encode( + message: OneofOptions, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): OneofOptions { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseOneofOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 999: + message.uninterpretedOption.push( + UninterpretedOption.decode(reader, reader.uint32()), + ); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): OneofOptions { + return { + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => + UninterpretedOption.fromJSON(e), + ) + : [], + }; + }, + toJSON(message: OneofOptions): unknown { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map(e => + e ? 
UninterpretedOption.toJSON(e) : undefined, + ); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + fromPartial(object: Partial): OneofOptions { + const message = createBaseOneofOptions(); + message.uninterpretedOption = + object.uninterpretedOption?.map(e => + UninterpretedOption.fromPartial(e), + ) || []; + return message; + }, + fromAmino(object: OneofOptionsAmino): OneofOptions { + const message = createBaseOneofOptions(); + message.uninterpretedOption = + object.uninterpreted_option?.map(e => UninterpretedOption.fromAmino(e)) || + []; + return message; + }, + toAmino(message: OneofOptions): OneofOptionsAmino { + const obj: any = {}; + if (message.uninterpretedOption) { + obj.uninterpreted_option = message.uninterpretedOption.map(e => + e ? UninterpretedOption.toAmino(e) : undefined, + ); + } else { + obj.uninterpreted_option = []; + } + return obj; + }, + fromAminoMsg(object: OneofOptionsAminoMsg): OneofOptions { + return OneofOptions.fromAmino(object.value); + }, + fromProtoMsg(message: OneofOptionsProtoMsg): OneofOptions { + return OneofOptions.decode(message.value); + }, + toProto(message: OneofOptions): Uint8Array { + return OneofOptions.encode(message).finish(); + }, + toProtoMsg(message: OneofOptions): OneofOptionsProtoMsg { + return { + typeUrl: '/google.protobuf.OneofOptions', + value: OneofOptions.encode(message).finish(), + }; + }, +}; +function createBaseEnumOptions(): EnumOptions { + return { + allowAlias: false, + deprecated: false, + uninterpretedOption: [], + }; +} +export const EnumOptions = { + typeUrl: '/google.protobuf.EnumOptions', + encode( + message: EnumOptions, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.allowAlias === true) { + writer.uint32(16).bool(message.allowAlias); + } + if (message.deprecated === true) { + writer.uint32(24).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): EnumOptions { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.allowAlias = reader.bool(); + break; + case 3: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push( + UninterpretedOption.decode(reader, reader.uint32()), + ); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): EnumOptions { + return { + allowAlias: isSet(object.allowAlias) ? Boolean(object.allowAlias) : false, + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => + UninterpretedOption.fromJSON(e), + ) + : [], + }; + }, + toJSON(message: EnumOptions): unknown { + const obj: any = {}; + message.allowAlias !== undefined && (obj.allowAlias = message.allowAlias); + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map(e => + e ? 
UninterpretedOption.toJSON(e) : undefined, + ); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + fromPartial(object: Partial): EnumOptions { + const message = createBaseEnumOptions(); + message.allowAlias = object.allowAlias ?? false; + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = + object.uninterpretedOption?.map(e => + UninterpretedOption.fromPartial(e), + ) || []; + return message; + }, + fromAmino(object: EnumOptionsAmino): EnumOptions { + const message = createBaseEnumOptions(); + if (object.allow_alias !== undefined && object.allow_alias !== null) { + message.allowAlias = object.allow_alias; + } + if (object.deprecated !== undefined && object.deprecated !== null) { + message.deprecated = object.deprecated; + } + message.uninterpretedOption = + object.uninterpreted_option?.map(e => UninterpretedOption.fromAmino(e)) || + []; + return message; + }, + toAmino(message: EnumOptions): EnumOptionsAmino { + const obj: any = {}; + obj.allow_alias = message.allowAlias; + obj.deprecated = message.deprecated; + if (message.uninterpretedOption) { + obj.uninterpreted_option = message.uninterpretedOption.map(e => + e ? UninterpretedOption.toAmino(e) : undefined, + ); + } else { + obj.uninterpreted_option = []; + } + return obj; + }, + fromAminoMsg(object: EnumOptionsAminoMsg): EnumOptions { + return EnumOptions.fromAmino(object.value); + }, + fromProtoMsg(message: EnumOptionsProtoMsg): EnumOptions { + return EnumOptions.decode(message.value); + }, + toProto(message: EnumOptions): Uint8Array { + return EnumOptions.encode(message).finish(); + }, + toProtoMsg(message: EnumOptions): EnumOptionsProtoMsg { + return { + typeUrl: '/google.protobuf.EnumOptions', + value: EnumOptions.encode(message).finish(), + }; + }, +}; +function createBaseEnumValueOptions(): EnumValueOptions { + return { + deprecated: false, + uninterpretedOption: [], + }; +} +export const EnumValueOptions = { + typeUrl: '/google.protobuf.EnumValueOptions', + encode( + message: EnumValueOptions, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.deprecated === true) { + writer.uint32(8).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): EnumValueOptions { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseEnumValueOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push( + UninterpretedOption.decode(reader, reader.uint32()), + ); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): EnumValueOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => + UninterpretedOption.fromJSON(e), + ) + : [], + }; + }, + toJSON(message: EnumValueOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map(e => + e ? 
UninterpretedOption.toJSON(e) : undefined, + ); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + fromPartial(object: Partial): EnumValueOptions { + const message = createBaseEnumValueOptions(); + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = + object.uninterpretedOption?.map(e => + UninterpretedOption.fromPartial(e), + ) || []; + return message; + }, + fromAmino(object: EnumValueOptionsAmino): EnumValueOptions { + const message = createBaseEnumValueOptions(); + if (object.deprecated !== undefined && object.deprecated !== null) { + message.deprecated = object.deprecated; + } + message.uninterpretedOption = + object.uninterpreted_option?.map(e => UninterpretedOption.fromAmino(e)) || + []; + return message; + }, + toAmino(message: EnumValueOptions): EnumValueOptionsAmino { + const obj: any = {}; + obj.deprecated = message.deprecated; + if (message.uninterpretedOption) { + obj.uninterpreted_option = message.uninterpretedOption.map(e => + e ? UninterpretedOption.toAmino(e) : undefined, + ); + } else { + obj.uninterpreted_option = []; + } + return obj; + }, + fromAminoMsg(object: EnumValueOptionsAminoMsg): EnumValueOptions { + return EnumValueOptions.fromAmino(object.value); + }, + fromProtoMsg(message: EnumValueOptionsProtoMsg): EnumValueOptions { + return EnumValueOptions.decode(message.value); + }, + toProto(message: EnumValueOptions): Uint8Array { + return EnumValueOptions.encode(message).finish(); + }, + toProtoMsg(message: EnumValueOptions): EnumValueOptionsProtoMsg { + return { + typeUrl: '/google.protobuf.EnumValueOptions', + value: EnumValueOptions.encode(message).finish(), + }; + }, +}; +function createBaseServiceOptions(): ServiceOptions { + return { + deprecated: false, + uninterpretedOption: [], + }; +} +export const ServiceOptions = { + typeUrl: '/google.protobuf.ServiceOptions', + encode( + message: ServiceOptions, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): ServiceOptions { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseServiceOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 999: + message.uninterpretedOption.push( + UninterpretedOption.decode(reader, reader.uint32()), + ); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): ServiceOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? object.uninterpretedOption.map((e: any) => + UninterpretedOption.fromJSON(e), + ) + : [], + }; + }, + toJSON(message: ServiceOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map(e => + e ? 
UninterpretedOption.toJSON(e) : undefined, + ); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + fromPartial(object: Partial): ServiceOptions { + const message = createBaseServiceOptions(); + message.deprecated = object.deprecated ?? false; + message.uninterpretedOption = + object.uninterpretedOption?.map(e => + UninterpretedOption.fromPartial(e), + ) || []; + return message; + }, + fromAmino(object: ServiceOptionsAmino): ServiceOptions { + const message = createBaseServiceOptions(); + if (object.deprecated !== undefined && object.deprecated !== null) { + message.deprecated = object.deprecated; + } + message.uninterpretedOption = + object.uninterpreted_option?.map(e => UninterpretedOption.fromAmino(e)) || + []; + return message; + }, + toAmino(message: ServiceOptions): ServiceOptionsAmino { + const obj: any = {}; + obj.deprecated = message.deprecated; + if (message.uninterpretedOption) { + obj.uninterpreted_option = message.uninterpretedOption.map(e => + e ? UninterpretedOption.toAmino(e) : undefined, + ); + } else { + obj.uninterpreted_option = []; + } + return obj; + }, + fromAminoMsg(object: ServiceOptionsAminoMsg): ServiceOptions { + return ServiceOptions.fromAmino(object.value); + }, + fromProtoMsg(message: ServiceOptionsProtoMsg): ServiceOptions { + return ServiceOptions.decode(message.value); + }, + toProto(message: ServiceOptions): Uint8Array { + return ServiceOptions.encode(message).finish(); + }, + toProtoMsg(message: ServiceOptions): ServiceOptionsProtoMsg { + return { + typeUrl: '/google.protobuf.ServiceOptions', + value: ServiceOptions.encode(message).finish(), + }; + }, +}; +function createBaseMethodOptions(): MethodOptions { + return { + deprecated: false, + idempotencyLevel: 1, + uninterpretedOption: [], + }; +} +export const MethodOptions = { + typeUrl: '/google.protobuf.MethodOptions', + encode( + message: MethodOptions, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.deprecated === true) { + writer.uint32(264).bool(message.deprecated); + } + if (message.idempotencyLevel !== 1) { + writer.uint32(272).int32(message.idempotencyLevel); + } + for (const v of message.uninterpretedOption) { + UninterpretedOption.encode(v!, writer.uint32(7994).fork()).ldelim(); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): MethodOptions { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseMethodOptions(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 33: + message.deprecated = reader.bool(); + break; + case 34: + message.idempotencyLevel = reader.int32() as any; + break; + case 999: + message.uninterpretedOption.push( + UninterpretedOption.decode(reader, reader.uint32()), + ); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): MethodOptions { + return { + deprecated: isSet(object.deprecated) ? Boolean(object.deprecated) : false, + idempotencyLevel: isSet(object.idempotencyLevel) + ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) + : -1, + uninterpretedOption: Array.isArray(object?.uninterpretedOption) + ? 
object.uninterpretedOption.map((e: any) => + UninterpretedOption.fromJSON(e), + ) + : [], + }; + }, + toJSON(message: MethodOptions): unknown { + const obj: any = {}; + message.deprecated !== undefined && (obj.deprecated = message.deprecated); + message.idempotencyLevel !== undefined && + (obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON( + message.idempotencyLevel, + )); + if (message.uninterpretedOption) { + obj.uninterpretedOption = message.uninterpretedOption.map(e => + e ? UninterpretedOption.toJSON(e) : undefined, + ); + } else { + obj.uninterpretedOption = []; + } + return obj; + }, + fromPartial(object: Partial): MethodOptions { + const message = createBaseMethodOptions(); + message.deprecated = object.deprecated ?? false; + message.idempotencyLevel = object.idempotencyLevel ?? 1; + message.uninterpretedOption = + object.uninterpretedOption?.map(e => + UninterpretedOption.fromPartial(e), + ) || []; + return message; + }, + fromAmino(object: MethodOptionsAmino): MethodOptions { + const message = createBaseMethodOptions(); + if (object.deprecated !== undefined && object.deprecated !== null) { + message.deprecated = object.deprecated; + } + if ( + object.idempotency_level !== undefined && + object.idempotency_level !== null + ) { + message.idempotencyLevel = methodOptions_IdempotencyLevelFromJSON( + object.idempotency_level, + ); + } + message.uninterpretedOption = + object.uninterpreted_option?.map(e => UninterpretedOption.fromAmino(e)) || + []; + return message; + }, + toAmino(message: MethodOptions): MethodOptionsAmino { + const obj: any = {}; + obj.deprecated = message.deprecated; + obj.idempotency_level = message.idempotencyLevel; + if (message.uninterpretedOption) { + obj.uninterpreted_option = message.uninterpretedOption.map(e => + e ? 
UninterpretedOption.toAmino(e) : undefined, + ); + } else { + obj.uninterpreted_option = []; + } + return obj; + }, + fromAminoMsg(object: MethodOptionsAminoMsg): MethodOptions { + return MethodOptions.fromAmino(object.value); + }, + fromProtoMsg(message: MethodOptionsProtoMsg): MethodOptions { + return MethodOptions.decode(message.value); + }, + toProto(message: MethodOptions): Uint8Array { + return MethodOptions.encode(message).finish(); + }, + toProtoMsg(message: MethodOptions): MethodOptionsProtoMsg { + return { + typeUrl: '/google.protobuf.MethodOptions', + value: MethodOptions.encode(message).finish(), + }; + }, +}; +function createBaseUninterpretedOption(): UninterpretedOption { + return { + name: [], + identifierValue: '', + positiveIntValue: BigInt(0), + negativeIntValue: BigInt(0), + doubleValue: 0, + stringValue: new Uint8Array(), + aggregateValue: '', + }; +} +export const UninterpretedOption = { + typeUrl: '/google.protobuf.UninterpretedOption', + encode( + message: UninterpretedOption, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + for (const v of message.name) { + UninterpretedOption_NamePart.encode( + v!, + writer.uint32(18).fork(), + ).ldelim(); + } + if (message.identifierValue !== '') { + writer.uint32(26).string(message.identifierValue); + } + if (message.positiveIntValue !== BigInt(0)) { + writer.uint32(32).uint64(message.positiveIntValue); + } + if (message.negativeIntValue !== BigInt(0)) { + writer.uint32(40).int64(message.negativeIntValue); + } + if (message.doubleValue !== 0) { + writer.uint32(49).double(message.doubleValue); + } + if (message.stringValue.length !== 0) { + writer.uint32(58).bytes(message.stringValue); + } + if (message.aggregateValue !== '') { + writer.uint32(66).string(message.aggregateValue); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): UninterpretedOption { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUninterpretedOption(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 2: + message.name.push( + UninterpretedOption_NamePart.decode(reader, reader.uint32()), + ); + break; + case 3: + message.identifierValue = reader.string(); + break; + case 4: + message.positiveIntValue = reader.uint64(); + break; + case 5: + message.negativeIntValue = reader.int64(); + break; + case 6: + message.doubleValue = reader.double(); + break; + case 7: + message.stringValue = reader.bytes(); + break; + case 8: + message.aggregateValue = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): UninterpretedOption { + return { + name: Array.isArray(object?.name) + ? object.name.map((e: any) => UninterpretedOption_NamePart.fromJSON(e)) + : [], + identifierValue: isSet(object.identifierValue) + ? String(object.identifierValue) + : '', + positiveIntValue: isSet(object.positiveIntValue) + ? BigInt(object.positiveIntValue.toString()) + : BigInt(0), + negativeIntValue: isSet(object.negativeIntValue) + ? BigInt(object.negativeIntValue.toString()) + : BigInt(0), + doubleValue: isSet(object.doubleValue) ? Number(object.doubleValue) : 0, + stringValue: isSet(object.stringValue) + ? bytesFromBase64(object.stringValue) + : new Uint8Array(), + aggregateValue: isSet(object.aggregateValue) + ? 
String(object.aggregateValue) + : '', + }; + }, + toJSON(message: UninterpretedOption): unknown { + const obj: any = {}; + if (message.name) { + obj.name = message.name.map(e => + e ? UninterpretedOption_NamePart.toJSON(e) : undefined, + ); + } else { + obj.name = []; + } + message.identifierValue !== undefined && + (obj.identifierValue = message.identifierValue); + message.positiveIntValue !== undefined && + (obj.positiveIntValue = ( + message.positiveIntValue || BigInt(0) + ).toString()); + message.negativeIntValue !== undefined && + (obj.negativeIntValue = ( + message.negativeIntValue || BigInt(0) + ).toString()); + message.doubleValue !== undefined && + (obj.doubleValue = message.doubleValue); + message.stringValue !== undefined && + (obj.stringValue = base64FromBytes( + message.stringValue !== undefined + ? message.stringValue + : new Uint8Array(), + )); + message.aggregateValue !== undefined && + (obj.aggregateValue = message.aggregateValue); + return obj; + }, + fromPartial(object: Partial): UninterpretedOption { + const message = createBaseUninterpretedOption(); + message.name = + object.name?.map(e => UninterpretedOption_NamePart.fromPartial(e)) || []; + message.identifierValue = object.identifierValue ?? ''; + message.positiveIntValue = + object.positiveIntValue !== undefined && object.positiveIntValue !== null + ? BigInt(object.positiveIntValue.toString()) + : BigInt(0); + message.negativeIntValue = + object.negativeIntValue !== undefined && object.negativeIntValue !== null + ? BigInt(object.negativeIntValue.toString()) + : BigInt(0); + message.doubleValue = object.doubleValue ?? 0; + message.stringValue = object.stringValue ?? new Uint8Array(); + message.aggregateValue = object.aggregateValue ?? ''; + return message; + }, + fromAmino(object: UninterpretedOptionAmino): UninterpretedOption { + const message = createBaseUninterpretedOption(); + message.name = + object.name?.map(e => UninterpretedOption_NamePart.fromAmino(e)) || []; + if ( + object.identifier_value !== undefined && + object.identifier_value !== null + ) { + message.identifierValue = object.identifier_value; + } + if ( + object.positive_int_value !== undefined && + object.positive_int_value !== null + ) { + message.positiveIntValue = BigInt(object.positive_int_value); + } + if ( + object.negative_int_value !== undefined && + object.negative_int_value !== null + ) { + message.negativeIntValue = BigInt(object.negative_int_value); + } + if (object.double_value !== undefined && object.double_value !== null) { + message.doubleValue = object.double_value; + } + if (object.string_value !== undefined && object.string_value !== null) { + message.stringValue = bytesFromBase64(object.string_value); + } + if ( + object.aggregate_value !== undefined && + object.aggregate_value !== null + ) { + message.aggregateValue = object.aggregate_value; + } + return message; + }, + toAmino(message: UninterpretedOption): UninterpretedOptionAmino { + const obj: any = {}; + if (message.name) { + obj.name = message.name.map(e => + e ? UninterpretedOption_NamePart.toAmino(e) : undefined, + ); + } else { + obj.name = []; + } + obj.identifier_value = message.identifierValue; + obj.positive_int_value = message.positiveIntValue + ? message.positiveIntValue.toString() + : undefined; + obj.negative_int_value = message.negativeIntValue + ? message.negativeIntValue.toString() + : undefined; + obj.double_value = message.doubleValue; + obj.string_value = message.stringValue + ? 
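[Editorial sketch, not part of the patch.] The `fromAmino`/`toAmino` pair maps between the camelCase protobuf fields and the snake_case Amino JSON shape, rendering 64-bit integers as strings on the way out and parsing them back into bigints on the way in. A small sketch of that mapping using the `UninterpretedOption` codec shown above (import path assumed):

    // Sketch only: import path assumed from the codegen layout in this diff.
    import { UninterpretedOption } from './codegen/google/protobuf/descriptor.js';

    const opt = UninterpretedOption.fromPartial({
      identifierValue: 'some_identifier',
      positiveIntValue: 42n,
    });

    // toAmino emits snake_case keys and stringifies bigint values.
    const amino = UninterpretedOption.toAmino(opt);
    // amino.identifier_value === 'some_identifier'
    // amino.positive_int_value === '42'

    // fromAmino reverses the mapping, parsing the string back into a bigint.
    const back = UninterpretedOption.fromAmino(amino);
    // back.positiveIntValue === 42n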
base64FromBytes(message.stringValue) + : undefined; + obj.aggregate_value = message.aggregateValue; + return obj; + }, + fromAminoMsg(object: UninterpretedOptionAminoMsg): UninterpretedOption { + return UninterpretedOption.fromAmino(object.value); + }, + fromProtoMsg(message: UninterpretedOptionProtoMsg): UninterpretedOption { + return UninterpretedOption.decode(message.value); + }, + toProto(message: UninterpretedOption): Uint8Array { + return UninterpretedOption.encode(message).finish(); + }, + toProtoMsg(message: UninterpretedOption): UninterpretedOptionProtoMsg { + return { + typeUrl: '/google.protobuf.UninterpretedOption', + value: UninterpretedOption.encode(message).finish(), + }; + }, +}; +function createBaseUninterpretedOption_NamePart(): UninterpretedOption_NamePart { + return { + namePart: '', + isExtension: false, + }; +} +export const UninterpretedOption_NamePart = { + typeUrl: '/google.protobuf.NamePart', + encode( + message: UninterpretedOption_NamePart, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.namePart !== '') { + writer.uint32(10).string(message.namePart); + } + if (message.isExtension === true) { + writer.uint32(16).bool(message.isExtension); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): UninterpretedOption_NamePart { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUninterpretedOption_NamePart(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.namePart = reader.string(); + break; + case 2: + message.isExtension = reader.bool(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): UninterpretedOption_NamePart { + return { + namePart: isSet(object.namePart) ? String(object.namePart) : '', + isExtension: isSet(object.isExtension) + ? Boolean(object.isExtension) + : false, + }; + }, + toJSON(message: UninterpretedOption_NamePart): unknown { + const obj: any = {}; + message.namePart !== undefined && (obj.namePart = message.namePart); + message.isExtension !== undefined && + (obj.isExtension = message.isExtension); + return obj; + }, + fromPartial( + object: Partial, + ): UninterpretedOption_NamePart { + const message = createBaseUninterpretedOption_NamePart(); + message.namePart = object.namePart ?? ''; + message.isExtension = object.isExtension ?? 
false; + return message; + }, + fromAmino( + object: UninterpretedOption_NamePartAmino, + ): UninterpretedOption_NamePart { + const message = createBaseUninterpretedOption_NamePart(); + if (object.name_part !== undefined && object.name_part !== null) { + message.namePart = object.name_part; + } + if (object.is_extension !== undefined && object.is_extension !== null) { + message.isExtension = object.is_extension; + } + return message; + }, + toAmino( + message: UninterpretedOption_NamePart, + ): UninterpretedOption_NamePartAmino { + const obj: any = {}; + obj.name_part = message.namePart; + obj.is_extension = message.isExtension; + return obj; + }, + fromAminoMsg( + object: UninterpretedOption_NamePartAminoMsg, + ): UninterpretedOption_NamePart { + return UninterpretedOption_NamePart.fromAmino(object.value); + }, + fromProtoMsg( + message: UninterpretedOption_NamePartProtoMsg, + ): UninterpretedOption_NamePart { + return UninterpretedOption_NamePart.decode(message.value); + }, + toProto(message: UninterpretedOption_NamePart): Uint8Array { + return UninterpretedOption_NamePart.encode(message).finish(); + }, + toProtoMsg( + message: UninterpretedOption_NamePart, + ): UninterpretedOption_NamePartProtoMsg { + return { + typeUrl: '/google.protobuf.NamePart', + value: UninterpretedOption_NamePart.encode(message).finish(), + }; + }, +}; +function createBaseSourceCodeInfo(): SourceCodeInfo { + return { + location: [], + }; +} +export const SourceCodeInfo = { + typeUrl: '/google.protobuf.SourceCodeInfo', + encode( + message: SourceCodeInfo, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + for (const v of message.location) { + SourceCodeInfo_Location.encode(v!, writer.uint32(10).fork()).ldelim(); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): SourceCodeInfo { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.location.push( + SourceCodeInfo_Location.decode(reader, reader.uint32()), + ); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): SourceCodeInfo { + return { + location: Array.isArray(object?.location) + ? object.location.map((e: any) => SourceCodeInfo_Location.fromJSON(e)) + : [], + }; + }, + toJSON(message: SourceCodeInfo): unknown { + const obj: any = {}; + if (message.location) { + obj.location = message.location.map(e => + e ? SourceCodeInfo_Location.toJSON(e) : undefined, + ); + } else { + obj.location = []; + } + return obj; + }, + fromPartial(object: Partial): SourceCodeInfo { + const message = createBaseSourceCodeInfo(); + message.location = + object.location?.map(e => SourceCodeInfo_Location.fromPartial(e)) || []; + return message; + }, + fromAmino(object: SourceCodeInfoAmino): SourceCodeInfo { + const message = createBaseSourceCodeInfo(); + message.location = + object.location?.map(e => SourceCodeInfo_Location.fromAmino(e)) || []; + return message; + }, + toAmino(message: SourceCodeInfo): SourceCodeInfoAmino { + const obj: any = {}; + if (message.location) { + obj.location = message.location.map(e => + e ? 
SourceCodeInfo_Location.toAmino(e) : undefined, + ); + } else { + obj.location = []; + } + return obj; + }, + fromAminoMsg(object: SourceCodeInfoAminoMsg): SourceCodeInfo { + return SourceCodeInfo.fromAmino(object.value); + }, + fromProtoMsg(message: SourceCodeInfoProtoMsg): SourceCodeInfo { + return SourceCodeInfo.decode(message.value); + }, + toProto(message: SourceCodeInfo): Uint8Array { + return SourceCodeInfo.encode(message).finish(); + }, + toProtoMsg(message: SourceCodeInfo): SourceCodeInfoProtoMsg { + return { + typeUrl: '/google.protobuf.SourceCodeInfo', + value: SourceCodeInfo.encode(message).finish(), + }; + }, +}; +function createBaseSourceCodeInfo_Location(): SourceCodeInfo_Location { + return { + path: [], + span: [], + leadingComments: '', + trailingComments: '', + leadingDetachedComments: [], + }; +} +export const SourceCodeInfo_Location = { + typeUrl: '/google.protobuf.Location', + encode( + message: SourceCodeInfo_Location, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + writer.uint32(18).fork(); + for (const v of message.span) { + writer.int32(v); + } + writer.ldelim(); + if (message.leadingComments !== '') { + writer.uint32(26).string(message.leadingComments); + } + if (message.trailingComments !== '') { + writer.uint32(34).string(message.trailingComments); + } + for (const v of message.leadingDetachedComments) { + writer.uint32(50).string(v!); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): SourceCodeInfo_Location { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseSourceCodeInfo_Location(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.span.push(reader.int32()); + } + } else { + message.span.push(reader.int32()); + } + break; + case 3: + message.leadingComments = reader.string(); + break; + case 4: + message.trailingComments = reader.string(); + break; + case 6: + message.leadingDetachedComments.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): SourceCodeInfo_Location { + return { + path: Array.isArray(object?.path) + ? object.path.map((e: any) => Number(e)) + : [], + span: Array.isArray(object?.span) + ? object.span.map((e: any) => Number(e)) + : [], + leadingComments: isSet(object.leadingComments) + ? String(object.leadingComments) + : '', + trailingComments: isSet(object.trailingComments) + ? String(object.trailingComments) + : '', + leadingDetachedComments: Array.isArray(object?.leadingDetachedComments) + ? 
object.leadingDetachedComments.map((e: any) => String(e)) + : [], + }; + }, + toJSON(message: SourceCodeInfo_Location): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map(e => Math.round(e)); + } else { + obj.path = []; + } + if (message.span) { + obj.span = message.span.map(e => Math.round(e)); + } else { + obj.span = []; + } + message.leadingComments !== undefined && + (obj.leadingComments = message.leadingComments); + message.trailingComments !== undefined && + (obj.trailingComments = message.trailingComments); + if (message.leadingDetachedComments) { + obj.leadingDetachedComments = message.leadingDetachedComments.map(e => e); + } else { + obj.leadingDetachedComments = []; + } + return obj; + }, + fromPartial( + object: Partial, + ): SourceCodeInfo_Location { + const message = createBaseSourceCodeInfo_Location(); + message.path = object.path?.map(e => e) || []; + message.span = object.span?.map(e => e) || []; + message.leadingComments = object.leadingComments ?? ''; + message.trailingComments = object.trailingComments ?? ''; + message.leadingDetachedComments = + object.leadingDetachedComments?.map(e => e) || []; + return message; + }, + fromAmino(object: SourceCodeInfo_LocationAmino): SourceCodeInfo_Location { + const message = createBaseSourceCodeInfo_Location(); + message.path = object.path?.map(e => e) || []; + message.span = object.span?.map(e => e) || []; + if ( + object.leading_comments !== undefined && + object.leading_comments !== null + ) { + message.leadingComments = object.leading_comments; + } + if ( + object.trailing_comments !== undefined && + object.trailing_comments !== null + ) { + message.trailingComments = object.trailing_comments; + } + message.leadingDetachedComments = + object.leading_detached_comments?.map(e => e) || []; + return message; + }, + toAmino(message: SourceCodeInfo_Location): SourceCodeInfo_LocationAmino { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map(e => e); + } else { + obj.path = []; + } + if (message.span) { + obj.span = message.span.map(e => e); + } else { + obj.span = []; + } + obj.leading_comments = message.leadingComments; + obj.trailing_comments = message.trailingComments; + if (message.leadingDetachedComments) { + obj.leading_detached_comments = message.leadingDetachedComments.map( + e => e, + ); + } else { + obj.leading_detached_comments = []; + } + return obj; + }, + fromAminoMsg( + object: SourceCodeInfo_LocationAminoMsg, + ): SourceCodeInfo_Location { + return SourceCodeInfo_Location.fromAmino(object.value); + }, + fromProtoMsg( + message: SourceCodeInfo_LocationProtoMsg, + ): SourceCodeInfo_Location { + return SourceCodeInfo_Location.decode(message.value); + }, + toProto(message: SourceCodeInfo_Location): Uint8Array { + return SourceCodeInfo_Location.encode(message).finish(); + }, + toProtoMsg( + message: SourceCodeInfo_Location, + ): SourceCodeInfo_LocationProtoMsg { + return { + typeUrl: '/google.protobuf.Location', + value: SourceCodeInfo_Location.encode(message).finish(), + }; + }, +}; +function createBaseGeneratedCodeInfo(): GeneratedCodeInfo { + return { + annotation: [], + }; +} +export const GeneratedCodeInfo = { + typeUrl: '/google.protobuf.GeneratedCodeInfo', + encode( + message: GeneratedCodeInfo, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + for (const v of message.annotation) { + GeneratedCodeInfo_Annotation.encode( + v!, + writer.uint32(10).fork(), + ).ldelim(); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, 
length?: number): GeneratedCodeInfo { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.annotation.push( + GeneratedCodeInfo_Annotation.decode(reader, reader.uint32()), + ); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): GeneratedCodeInfo { + return { + annotation: Array.isArray(object?.annotation) + ? object.annotation.map((e: any) => + GeneratedCodeInfo_Annotation.fromJSON(e), + ) + : [], + }; + }, + toJSON(message: GeneratedCodeInfo): unknown { + const obj: any = {}; + if (message.annotation) { + obj.annotation = message.annotation.map(e => + e ? GeneratedCodeInfo_Annotation.toJSON(e) : undefined, + ); + } else { + obj.annotation = []; + } + return obj; + }, + fromPartial(object: Partial): GeneratedCodeInfo { + const message = createBaseGeneratedCodeInfo(); + message.annotation = + object.annotation?.map(e => + GeneratedCodeInfo_Annotation.fromPartial(e), + ) || []; + return message; + }, + fromAmino(object: GeneratedCodeInfoAmino): GeneratedCodeInfo { + const message = createBaseGeneratedCodeInfo(); + message.annotation = + object.annotation?.map(e => GeneratedCodeInfo_Annotation.fromAmino(e)) || + []; + return message; + }, + toAmino(message: GeneratedCodeInfo): GeneratedCodeInfoAmino { + const obj: any = {}; + if (message.annotation) { + obj.annotation = message.annotation.map(e => + e ? GeneratedCodeInfo_Annotation.toAmino(e) : undefined, + ); + } else { + obj.annotation = []; + } + return obj; + }, + fromAminoMsg(object: GeneratedCodeInfoAminoMsg): GeneratedCodeInfo { + return GeneratedCodeInfo.fromAmino(object.value); + }, + fromProtoMsg(message: GeneratedCodeInfoProtoMsg): GeneratedCodeInfo { + return GeneratedCodeInfo.decode(message.value); + }, + toProto(message: GeneratedCodeInfo): Uint8Array { + return GeneratedCodeInfo.encode(message).finish(); + }, + toProtoMsg(message: GeneratedCodeInfo): GeneratedCodeInfoProtoMsg { + return { + typeUrl: '/google.protobuf.GeneratedCodeInfo', + value: GeneratedCodeInfo.encode(message).finish(), + }; + }, +}; +function createBaseGeneratedCodeInfo_Annotation(): GeneratedCodeInfo_Annotation { + return { + path: [], + sourceFile: '', + begin: 0, + end: 0, + }; +} +export const GeneratedCodeInfo_Annotation = { + typeUrl: '/google.protobuf.Annotation', + encode( + message: GeneratedCodeInfo_Annotation, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + writer.uint32(10).fork(); + for (const v of message.path) { + writer.int32(v); + } + writer.ldelim(); + if (message.sourceFile !== '') { + writer.uint32(18).string(message.sourceFile); + } + if (message.begin !== 0) { + writer.uint32(24).int32(message.begin); + } + if (message.end !== 0) { + writer.uint32(32).int32(message.end); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): GeneratedCodeInfo_Annotation { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
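[Editorial sketch, not part of the patch.] Each codec also exposes `toProtoMsg`/`fromProtoMsg`, which pair the encoded bytes with the message's `typeUrl` so the value can travel as a self-describing, `Any`-style envelope. A brief sketch (import path assumed):

    // Sketch only: import path assumed from the codegen layout in this diff.
    import { GeneratedCodeInfo } from './codegen/google/protobuf/descriptor.js';

    const info = GeneratedCodeInfo.fromPartial({ annotation: [] });

    // toProtoMsg wraps the encoded bytes with the message's typeUrl.
    const msg = GeneratedCodeInfo.toProtoMsg(info);
    // msg.typeUrl === '/google.protobuf.GeneratedCodeInfo'
    // msg.value instanceof Uint8Array

    // fromProtoMsg is the inverse: it decodes msg.value back into a GeneratedCodeInfo.
    const decoded = GeneratedCodeInfo.fromProtoMsg(msg);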
reader.len : reader.pos + length; + const message = createBaseGeneratedCodeInfo_Annotation(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if ((tag & 7) === 2) { + const end2 = reader.uint32() + reader.pos; + while (reader.pos < end2) { + message.path.push(reader.int32()); + } + } else { + message.path.push(reader.int32()); + } + break; + case 2: + message.sourceFile = reader.string(); + break; + case 3: + message.begin = reader.int32(); + break; + case 4: + message.end = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): GeneratedCodeInfo_Annotation { + return { + path: Array.isArray(object?.path) + ? object.path.map((e: any) => Number(e)) + : [], + sourceFile: isSet(object.sourceFile) ? String(object.sourceFile) : '', + begin: isSet(object.begin) ? Number(object.begin) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + }; + }, + toJSON(message: GeneratedCodeInfo_Annotation): unknown { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map(e => Math.round(e)); + } else { + obj.path = []; + } + message.sourceFile !== undefined && (obj.sourceFile = message.sourceFile); + message.begin !== undefined && (obj.begin = Math.round(message.begin)); + message.end !== undefined && (obj.end = Math.round(message.end)); + return obj; + }, + fromPartial( + object: Partial, + ): GeneratedCodeInfo_Annotation { + const message = createBaseGeneratedCodeInfo_Annotation(); + message.path = object.path?.map(e => e) || []; + message.sourceFile = object.sourceFile ?? ''; + message.begin = object.begin ?? 0; + message.end = object.end ?? 0; + return message; + }, + fromAmino( + object: GeneratedCodeInfo_AnnotationAmino, + ): GeneratedCodeInfo_Annotation { + const message = createBaseGeneratedCodeInfo_Annotation(); + message.path = object.path?.map(e => e) || []; + if (object.source_file !== undefined && object.source_file !== null) { + message.sourceFile = object.source_file; + } + if (object.begin !== undefined && object.begin !== null) { + message.begin = object.begin; + } + if (object.end !== undefined && object.end !== null) { + message.end = object.end; + } + return message; + }, + toAmino( + message: GeneratedCodeInfo_Annotation, + ): GeneratedCodeInfo_AnnotationAmino { + const obj: any = {}; + if (message.path) { + obj.path = message.path.map(e => e); + } else { + obj.path = []; + } + obj.source_file = message.sourceFile; + obj.begin = message.begin; + obj.end = message.end; + return obj; + }, + fromAminoMsg( + object: GeneratedCodeInfo_AnnotationAminoMsg, + ): GeneratedCodeInfo_Annotation { + return GeneratedCodeInfo_Annotation.fromAmino(object.value); + }, + fromProtoMsg( + message: GeneratedCodeInfo_AnnotationProtoMsg, + ): GeneratedCodeInfo_Annotation { + return GeneratedCodeInfo_Annotation.decode(message.value); + }, + toProto(message: GeneratedCodeInfo_Annotation): Uint8Array { + return GeneratedCodeInfo_Annotation.encode(message).finish(); + }, + toProtoMsg( + message: GeneratedCodeInfo_Annotation, + ): GeneratedCodeInfo_AnnotationProtoMsg { + return { + typeUrl: '/google.protobuf.Annotation', + value: GeneratedCodeInfo_Annotation.encode(message).finish(), + }; + }, +}; diff --git a/packages/cosmic-proto/src/codegen/google/protobuf/timestamp.ts b/packages/cosmic-proto/src/codegen/google/protobuf/timestamp.ts new file mode 100644 index 00000000000..4d656537d77 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/google/protobuf/timestamp.ts @@ 
-0,0 +1,372 @@ +//@ts-nocheck +import { BinaryReader, BinaryWriter } from '../../binary.js'; +import { isSet, fromJsonTimestamp, fromTimestamp } from '../../helpers.js'; +/** + * A Timestamp represents a point in time independent of any time zone or local + * calendar, encoded as a count of seconds and fractions of seconds at + * nanosecond resolution. The count is relative to an epoch at UTC midnight on + * January 1, 1970, in the proleptic Gregorian calendar which extends the + * Gregorian calendar backwards to year one. + * + * All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap + * second table is needed for interpretation, using a [24-hour linear + * smear](https://developers.google.com/time/smear). + * + * The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By + * restricting to that range, we ensure that we can convert to and from [RFC + * 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. + * + * # Examples + * + * Example 1: Compute Timestamp from POSIX `time()`. + * + * Timestamp timestamp; + * timestamp.set_seconds(time(NULL)); + * timestamp.set_nanos(0); + * + * Example 2: Compute Timestamp from POSIX `gettimeofday()`. + * + * struct timeval tv; + * gettimeofday(&tv, NULL); + * + * Timestamp timestamp; + * timestamp.set_seconds(tv.tv_sec); + * timestamp.set_nanos(tv.tv_usec * 1000); + * + * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + * + * FILETIME ft; + * GetSystemTimeAsFileTime(&ft); + * UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; + * + * // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z + * // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. + * Timestamp timestamp; + * timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); + * timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); + * + * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. + * + * long millis = System.currentTimeMillis(); + * + * Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) + * .setNanos((int) ((millis % 1000) * 1000000)).build(); + * + * + * Example 5: Compute Timestamp from current time in Python. + * + * timestamp = Timestamp() + * timestamp.GetCurrentTime() + * + * # JSON Mapping + * + * In JSON format, the Timestamp type is encoded as a string in the + * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the + * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" + * where {year} is always expressed using four digits while {month}, {day}, + * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional + * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), + * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone + * is required. A proto3 JSON serializer should always use UTC (as indicated by + * "Z") when printing the Timestamp type and a proto3 JSON parser should be + * able to accept both UTC and other timezones (as indicated by an offset). + * + * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past + * 01:30 UTC on January 15, 2017. + * + * In JavaScript, one can convert a Date object to this format using the + * standard + * [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) + * method. 
In Python, a standard `datetime.datetime` object can be converted + * to this format using + * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with + * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use + * the Joda Time's [`ISODateTimeFormat.dateTime()`]( + * http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D + * ) to obtain a formatter capable of generating timestamps in this format. + */ +export interface Timestamp { + /** + * Represents seconds of UTC time since Unix epoch + * 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + * 9999-12-31T23:59:59Z inclusive. + */ + seconds: bigint; + /** + * Non-negative fractions of a second at nanosecond resolution. Negative + * second values with fractions must still have non-negative nanos values + * that count forward in time. Must be from 0 to 999,999,999 + * inclusive. + */ + nanos: number; +} +export interface TimestampProtoMsg { + typeUrl: '/google.protobuf.Timestamp'; + value: Uint8Array; +} +/** + * A Timestamp represents a point in time independent of any time zone or local + * calendar, encoded as a count of seconds and fractions of seconds at + * nanosecond resolution. The count is relative to an epoch at UTC midnight on + * January 1, 1970, in the proleptic Gregorian calendar which extends the + * Gregorian calendar backwards to year one. + * + * All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap + * second table is needed for interpretation, using a [24-hour linear + * smear](https://developers.google.com/time/smear). + * + * The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By + * restricting to that range, we ensure that we can convert to and from [RFC + * 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. + * + * # Examples + * + * Example 1: Compute Timestamp from POSIX `time()`. + * + * Timestamp timestamp; + * timestamp.set_seconds(time(NULL)); + * timestamp.set_nanos(0); + * + * Example 2: Compute Timestamp from POSIX `gettimeofday()`. + * + * struct timeval tv; + * gettimeofday(&tv, NULL); + * + * Timestamp timestamp; + * timestamp.set_seconds(tv.tv_sec); + * timestamp.set_nanos(tv.tv_usec * 1000); + * + * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + * + * FILETIME ft; + * GetSystemTimeAsFileTime(&ft); + * UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; + * + * // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z + * // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. + * Timestamp timestamp; + * timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); + * timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); + * + * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. + * + * long millis = System.currentTimeMillis(); + * + * Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) + * .setNanos((int) ((millis % 1000) * 1000000)).build(); + * + * + * Example 5: Compute Timestamp from current time in Python. + * + * timestamp = Timestamp() + * timestamp.GetCurrentTime() + * + * # JSON Mapping + * + * In JSON format, the Timestamp type is encoded as a string in the + * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. 
That is, the + * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" + * where {year} is always expressed using four digits while {month}, {day}, + * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional + * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), + * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone + * is required. A proto3 JSON serializer should always use UTC (as indicated by + * "Z") when printing the Timestamp type and a proto3 JSON parser should be + * able to accept both UTC and other timezones (as indicated by an offset). + * + * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past + * 01:30 UTC on January 15, 2017. + * + * In JavaScript, one can convert a Date object to this format using the + * standard + * [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) + * method. In Python, a standard `datetime.datetime` object can be converted + * to this format using + * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with + * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use + * the Joda Time's [`ISODateTimeFormat.dateTime()`]( + * http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D + * ) to obtain a formatter capable of generating timestamps in this format. + */ +export type TimestampAmino = string; +export interface TimestampAminoMsg { + type: '/google.protobuf.Timestamp'; + value: TimestampAmino; +} +/** + * A Timestamp represents a point in time independent of any time zone or local + * calendar, encoded as a count of seconds and fractions of seconds at + * nanosecond resolution. The count is relative to an epoch at UTC midnight on + * January 1, 1970, in the proleptic Gregorian calendar which extends the + * Gregorian calendar backwards to year one. + * + * All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap + * second table is needed for interpretation, using a [24-hour linear + * smear](https://developers.google.com/time/smear). + * + * The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By + * restricting to that range, we ensure that we can convert to and from [RFC + * 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. + * + * # Examples + * + * Example 1: Compute Timestamp from POSIX `time()`. + * + * Timestamp timestamp; + * timestamp.set_seconds(time(NULL)); + * timestamp.set_nanos(0); + * + * Example 2: Compute Timestamp from POSIX `gettimeofday()`. + * + * struct timeval tv; + * gettimeofday(&tv, NULL); + * + * Timestamp timestamp; + * timestamp.set_seconds(tv.tv_sec); + * timestamp.set_nanos(tv.tv_usec * 1000); + * + * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + * + * FILETIME ft; + * GetSystemTimeAsFileTime(&ft); + * UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; + * + * // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z + * // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. + * Timestamp timestamp; + * timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); + * timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); + * + * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. 
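For completeness, a TypeScript sketch of the same computation against the `Timestamp` shape generated in this file, mirroring the Java example above; the import path and variable names are illustrative and not part of the patch:

import { Timestamp } from './google/protobuf/timestamp.js'; // illustrative path

const millis = Date.now();
const timestamp: Timestamp = {
  seconds: BigInt(Math.trunc(millis / 1000)), // whole seconds since the Unix epoch
  nanos: (millis % 1000) * 1_000_000,         // leftover milliseconds as nanoseconds
};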
+ * + * long millis = System.currentTimeMillis(); + * + * Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) + * .setNanos((int) ((millis % 1000) * 1000000)).build(); + * + * + * Example 5: Compute Timestamp from current time in Python. + * + * timestamp = Timestamp() + * timestamp.GetCurrentTime() + * + * # JSON Mapping + * + * In JSON format, the Timestamp type is encoded as a string in the + * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the + * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" + * where {year} is always expressed using four digits while {month}, {day}, + * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional + * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), + * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone + * is required. A proto3 JSON serializer should always use UTC (as indicated by + * "Z") when printing the Timestamp type and a proto3 JSON parser should be + * able to accept both UTC and other timezones (as indicated by an offset). + * + * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past + * 01:30 UTC on January 15, 2017. + * + * In JavaScript, one can convert a Date object to this format using the + * standard + * [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) + * method. In Python, a standard `datetime.datetime` object can be converted + * to this format using + * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with + * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use + * the Joda Time's [`ISODateTimeFormat.dateTime()`]( + * http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D + * ) to obtain a formatter capable of generating timestamps in this format. + */ +export interface TimestampSDKType { + seconds: bigint; + nanos: number; +} +function createBaseTimestamp(): Timestamp { + return { + seconds: BigInt(0), + nanos: 0, + }; +} +export const Timestamp = { + typeUrl: '/google.protobuf.Timestamp', + encode( + message: Timestamp, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.seconds !== BigInt(0)) { + writer.uint32(8).int64(message.seconds); + } + if (message.nanos !== 0) { + writer.uint32(16).int32(message.nanos); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): Timestamp { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseTimestamp(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.seconds = reader.int64(); + break; + case 2: + message.nanos = reader.int32(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): Timestamp { + return { + seconds: isSet(object.seconds) + ? BigInt(object.seconds.toString()) + : BigInt(0), + nanos: isSet(object.nanos) ? 
Number(object.nanos) : 0, + }; + }, + toJSON(message: Timestamp): unknown { + const obj: any = {}; + message.seconds !== undefined && + (obj.seconds = (message.seconds || BigInt(0)).toString()); + message.nanos !== undefined && (obj.nanos = Math.round(message.nanos)); + return obj; + }, + fromPartial(object: Partial): Timestamp { + const message = createBaseTimestamp(); + message.seconds = + object.seconds !== undefined && object.seconds !== null + ? BigInt(object.seconds.toString()) + : BigInt(0); + message.nanos = object.nanos ?? 0; + return message; + }, + fromAmino(object: TimestampAmino): Timestamp { + return fromJsonTimestamp(object); + }, + toAmino(message: Timestamp): TimestampAmino { + return fromTimestamp(message) + .toISOString() + .replace(/\.\d+Z$/, 'Z'); + }, + fromAminoMsg(object: TimestampAminoMsg): Timestamp { + return Timestamp.fromAmino(object.value); + }, + fromProtoMsg(message: TimestampProtoMsg): Timestamp { + return Timestamp.decode(message.value); + }, + toProto(message: Timestamp): Uint8Array { + return Timestamp.encode(message).finish(); + }, + toProtoMsg(message: Timestamp): TimestampProtoMsg { + return { + typeUrl: '/google.protobuf.Timestamp', + value: Timestamp.encode(message).finish(), + }; + }, +}; diff --git a/packages/cosmic-proto/src/codegen/helpers.ts b/packages/cosmic-proto/src/codegen/helpers.ts new file mode 100644 index 00000000000..4a189d4a116 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/helpers.ts @@ -0,0 +1,250 @@ +//@ts-nocheck +/** + * This file and any referenced files were automatically generated by @cosmology/telescope@1.4.12 + * DO NOT MODIFY BY HAND. Instead, download the latest proto files for your chain + * and run the transpile command or yarn proto command to regenerate this bundle. + */ + +declare var self: any | undefined; +declare var window: any | undefined; +declare var global: any | undefined; +var globalThis: any = (() => { + if (typeof globalThis !== 'undefined') return globalThis; + if (typeof self !== 'undefined') return self; + if (typeof window !== 'undefined') return window; + if (typeof global !== 'undefined') return global; + throw 'Unable to locate global object'; +})(); + +const atob: (b64: string) => string = + globalThis.atob || + (b64 => globalThis.Buffer.from(b64, 'base64').toString('binary')); + +export function bytesFromBase64(b64: string): Uint8Array { + const bin = atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; +} + +const btoa: (bin: string) => string = + globalThis.btoa || + (bin => globalThis.Buffer.from(bin, 'binary').toString('base64')); + +export function base64FromBytes(arr: Uint8Array): string { + const bin: string[] = []; + arr.forEach(byte => { + bin.push(String.fromCharCode(byte)); + }); + return btoa(bin.join('')); +} + +export interface AminoHeight { + readonly revision_number?: string; + readonly revision_height?: string; +} + +export function omitDefault( + input: T, +): T | undefined { + if (typeof input === 'string') { + return input === '' ? undefined : input; + } + + if (typeof input === 'number') { + return input === 0 ? undefined : input; + } + + if (typeof input === 'bigint') { + return input === BigInt(0) ? undefined : input; + } + + throw new Error(`Got unsupported type ${typeof input}`); +} + +interface Duration { + /** + * Signed seconds of the span of time. Must be from -315,576,000,000 + * to +315,576,000,000 inclusive. 
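A minimal usage sketch for the `Timestamp` codec added above, assuming a relative import from this package's codegen output; the path and sample values are illustrative:

import { Timestamp } from './google/protobuf/timestamp.js'; // illustrative path

const ts = Timestamp.fromPartial({ seconds: 1700000000n, nanos: 500_000_000 });
const bytes = Timestamp.encode(ts).finish(); // Uint8Array, same bytes as Timestamp.toProto(ts)
const decoded = Timestamp.decode(bytes);     // { seconds: 1700000000n, nanos: 500000000 }

// The Amino form is the RFC 3339 string described in the comment above;
// fractional seconds are dropped on the way out.
const amino = Timestamp.toAmino(ts);             // '2023-11-14T22:13:20Z'
const roundTripped = Timestamp.fromAmino(amino); // { seconds: 1700000000n, nanos: 0 }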
Note: these bounds are computed from: + * 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + */ + seconds: bigint; + /** + * Signed fractions of a second at nanosecond resolution of the span + * of time. Durations less than one second are represented with a 0 + * `seconds` field and a positive or negative `nanos` field. For durations + * of one second or more, a non-zero value for the `nanos` field must be + * of the same sign as the `seconds` field. Must be from -999,999,999 + * to +999,999,999 inclusive. + */ + + nanos: number; +} + +export function toDuration(duration: string): Duration { + return { + seconds: BigInt(Math.floor(parseInt(duration) / 1000000000)), + nanos: parseInt(duration) % 1000000000, + }; +} + +export function fromDuration(duration: Duration): string { + return ( + parseInt(duration.seconds.toString()) * 1000000000 + + duration.nanos + ).toString(); +} + +export function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export function isObject(value: any): boolean { + return typeof value === 'object' && value !== null; +} + +export interface PageRequest { + key: Uint8Array; + offset: bigint; + limit: bigint; + countTotal: boolean; + reverse: boolean; +} + +export interface PageRequestParams { + 'pagination.key'?: string; + 'pagination.offset'?: string; + 'pagination.limit'?: string; + 'pagination.count_total'?: boolean; + 'pagination.reverse'?: boolean; +} + +export interface Params { + params: PageRequestParams; +} + +export const setPaginationParams = ( + options: Params, + pagination?: PageRequest, +) => { + if (!pagination) { + return options; + } + + if (typeof pagination?.countTotal !== 'undefined') { + options.params['pagination.count_total'] = pagination.countTotal; + } + if (typeof pagination?.key !== 'undefined') { + // String to Uint8Array + // let uint8arr = new Uint8Array(Buffer.from(data,'base64')); + + // Uint8Array to String + options.params['pagination.key'] = Buffer.from(pagination.key).toString( + 'base64', + ); + } + if (typeof pagination?.limit !== 'undefined') { + options.params['pagination.limit'] = pagination.limit.toString(); + } + if (typeof pagination?.offset !== 'undefined') { + options.params['pagination.offset'] = pagination.offset.toString(); + } + if (typeof pagination?.reverse !== 'undefined') { + options.params['pagination.reverse'] = pagination.reverse; + } + + return options; +}; + +type Builtin = + | Date + | Function + | Uint8Array + | string + | number + | bigint + | boolean + | undefined; + +export type DeepPartial = T extends Builtin + ? T + : T extends Array + ? Array> + : T extends ReadonlyArray + ? ReadonlyArray> + : T extends {} + ? { [K in keyof T]?: DeepPartial } + : Partial; + +type KeysOfUnion = T extends T ? keyof T : never; +export type Exact = P extends Builtin + ? P + : P & { [K in keyof P]: Exact } & Record< + Exclude>, + never + >; + +export interface Rpc { + request( + service: string, + method: string, + data: Uint8Array, + ): Promise; +} + +interface Timestamp { + /** + * Represents seconds of UTC time since Unix epoch + * 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + * 9999-12-31T23:59:59Z inclusive. + */ + seconds: bigint; + /** + * Non-negative fractions of a second at nanosecond resolution. Negative + * second values with fractions must still have non-negative nanos values + * that count forward in time. Must be from 0 to 999,999,999 + * inclusive. 
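A short sketch of the duration and pagination helpers defined above; the import path and the sample `PageRequest` are illustrative (the key encoding relies on Node's `Buffer`, as in the helper itself):

import {
  toDuration,
  fromDuration,
  setPaginationParams,
  type Params,
} from './helpers.js'; // illustrative path

// Durations travel as a nanosecond count in string form.
const d = toDuration('1500000000'); // { seconds: 1n, nanos: 500000000 }
fromDuration(d);                    // '1500000000'

// PageRequest fields are flattened into `pagination.*` query parameters.
const options: Params = { params: {} };
setPaginationParams(options, {
  key: new Uint8Array(),
  offset: 0n,
  limit: 100n,
  countTotal: true,
  reverse: false,
});
// options.params now contains 'pagination.limit': '100',
// 'pagination.offset': '0', 'pagination.count_total': true, etc.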
+ */ + + nanos: number; +} + +export function toTimestamp(date: Date): Timestamp { + const seconds = numberToLong(date.getTime() / 1_000); + const nanos = (date.getTime() % 1000) * 1000000; + return { + seconds, + nanos, + }; +} + +export function fromTimestamp(t: Timestamp): Date { + let millis = Number(t.seconds) * 1000; + millis += t.nanos / 1000000; + return new Date(millis); +} + +const timestampFromJSON = (object: any): Timestamp => { + return { + seconds: isSet(object.seconds) + ? BigInt(object.seconds.toString()) + : BigInt(0), + nanos: isSet(object.nanos) ? Number(object.nanos) : 0, + }; +}; + +export function fromJsonTimestamp(o: any): Timestamp { + if (o instanceof Date) { + return toTimestamp(o); + } else if (typeof o === 'string') { + return toTimestamp(new Date(o)); + } else { + return timestampFromJSON(o); + } +} + +function numberToLong(number: number) { + return BigInt(Math.trunc(number)); +} diff --git a/packages/cosmic-proto/src/codegen/ibc/bundle.ts b/packages/cosmic-proto/src/codegen/ibc/bundle.ts new file mode 100644 index 00000000000..762dbc3f062 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/ibc/bundle.ts @@ -0,0 +1,17 @@ +//@ts-nocheck +import * as _23 from './core/channel/v1/channel.js'; +import * as _24 from './core/client/v1/client.js'; +export namespace ibc { + export namespace core { + export namespace channel { + export const v1 = { + ..._23, + }; + } + export namespace client { + export const v1 = { + ..._24, + }; + } + } +} diff --git a/packages/cosmic-proto/src/codegen/ibc/core/channel/v1/channel.ts b/packages/cosmic-proto/src/codegen/ibc/core/channel/v1/channel.ts new file mode 100644 index 00000000000..d4352fbbbef --- /dev/null +++ b/packages/cosmic-proto/src/codegen/ibc/core/channel/v1/channel.ts @@ -0,0 +1,1511 @@ +//@ts-nocheck +import { Height, HeightAmino, HeightSDKType } from '../../client/v1/client.js'; +import { BinaryReader, BinaryWriter } from '../../../../binary.js'; +import { + isSet, + bytesFromBase64, + base64FromBytes, +} from '../../../../helpers.js'; +/** + * State defines if a channel is in one of the following states: + * CLOSED, INIT, TRYOPEN, OPEN or UNINITIALIZED. + */ +export enum State { + /** STATE_UNINITIALIZED_UNSPECIFIED - Default State */ + STATE_UNINITIALIZED_UNSPECIFIED = 0, + /** STATE_INIT - A channel has just started the opening handshake. */ + STATE_INIT = 1, + /** STATE_TRYOPEN - A channel has acknowledged the handshake step on the counterparty chain. */ + STATE_TRYOPEN = 2, + /** + * STATE_OPEN - A channel has completed the handshake. Open channels are + * ready to send and receive packets. + */ + STATE_OPEN = 3, + /** + * STATE_CLOSED - A channel has been closed and can no longer be used to send or receive + * packets. 
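The `toTimestamp`/`fromTimestamp` helpers above bridge JS `Date` values and the generated `Timestamp` shape; a small sketch, with an illustrative import path:

import { toTimestamp, fromTimestamp, fromJsonTimestamp } from './helpers.js'; // illustrative path

const date = new Date('2017-01-15T01:30:15.010Z');
const ts = toTimestamp(date);                  // { seconds: 1484443815n, nanos: 10000000 }
fromTimestamp(ts);                             // back to the same instant as a Date
fromJsonTimestamp('2017-01-15T01:30:15.010Z'); // RFC 3339 strings and Dates are both accepted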
+ */ + STATE_CLOSED = 4, + UNRECOGNIZED = -1, +} +export const StateSDKType = State; +export const StateAmino = State; +export function stateFromJSON(object: any): State { + switch (object) { + case 0: + case 'STATE_UNINITIALIZED_UNSPECIFIED': + return State.STATE_UNINITIALIZED_UNSPECIFIED; + case 1: + case 'STATE_INIT': + return State.STATE_INIT; + case 2: + case 'STATE_TRYOPEN': + return State.STATE_TRYOPEN; + case 3: + case 'STATE_OPEN': + return State.STATE_OPEN; + case 4: + case 'STATE_CLOSED': + return State.STATE_CLOSED; + case -1: + case 'UNRECOGNIZED': + default: + return State.UNRECOGNIZED; + } +} +export function stateToJSON(object: State): string { + switch (object) { + case State.STATE_UNINITIALIZED_UNSPECIFIED: + return 'STATE_UNINITIALIZED_UNSPECIFIED'; + case State.STATE_INIT: + return 'STATE_INIT'; + case State.STATE_TRYOPEN: + return 'STATE_TRYOPEN'; + case State.STATE_OPEN: + return 'STATE_OPEN'; + case State.STATE_CLOSED: + return 'STATE_CLOSED'; + case State.UNRECOGNIZED: + default: + return 'UNRECOGNIZED'; + } +} +/** Order defines if a channel is ORDERED or UNORDERED */ +export enum Order { + /** ORDER_NONE_UNSPECIFIED - zero-value for channel ordering */ + ORDER_NONE_UNSPECIFIED = 0, + /** + * ORDER_UNORDERED - packets can be delivered in any order, which may differ from the order in + * which they were sent. + */ + ORDER_UNORDERED = 1, + /** ORDER_ORDERED - packets are delivered exactly in the order which they were sent */ + ORDER_ORDERED = 2, + UNRECOGNIZED = -1, +} +export const OrderSDKType = Order; +export const OrderAmino = Order; +export function orderFromJSON(object: any): Order { + switch (object) { + case 0: + case 'ORDER_NONE_UNSPECIFIED': + return Order.ORDER_NONE_UNSPECIFIED; + case 1: + case 'ORDER_UNORDERED': + return Order.ORDER_UNORDERED; + case 2: + case 'ORDER_ORDERED': + return Order.ORDER_ORDERED; + case -1: + case 'UNRECOGNIZED': + default: + return Order.UNRECOGNIZED; + } +} +export function orderToJSON(object: Order): string { + switch (object) { + case Order.ORDER_NONE_UNSPECIFIED: + return 'ORDER_NONE_UNSPECIFIED'; + case Order.ORDER_UNORDERED: + return 'ORDER_UNORDERED'; + case Order.ORDER_ORDERED: + return 'ORDER_ORDERED'; + case Order.UNRECOGNIZED: + default: + return 'UNRECOGNIZED'; + } +} +/** + * Channel defines pipeline for exactly-once packet delivery between specific + * modules on separate blockchains, which has at least one end capable of + * sending packets and one end capable of receiving packets. + */ +export interface Channel { + /** current state of the channel end */ + state: State; + /** whether the channel is ordered or unordered */ + ordering: Order; + /** counterparty channel end */ + counterparty: Counterparty; + /** + * list of connection identifiers, in order, along which packets sent on + * this channel will travel + */ + connectionHops: string[]; + /** opaque channel version, which is agreed upon during the handshake */ + version: string; +} +export interface ChannelProtoMsg { + typeUrl: '/ibc.core.channel.v1.Channel'; + value: Uint8Array; +} +/** + * Channel defines pipeline for exactly-once packet delivery between specific + * modules on separate blockchains, which has at least one end capable of + * sending packets and one end capable of receiving packets. 
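A sketch of the enum converters and the `Channel` shape defined above; the import path and the port, channel, and version strings are illustrative values:

import {
  State,
  Order,
  stateFromJSON,
  stateToJSON,
  Channel,
} from './ibc/core/channel/v1/channel.js'; // illustrative path

stateFromJSON('STATE_OPEN');      // State.STATE_OPEN -- accepts names...
stateFromJSON(3);                 // State.STATE_OPEN -- ...or wire numbers
stateToJSON(State.STATE_TRYOPEN); // 'STATE_TRYOPEN'

const channel: Channel = {
  state: State.STATE_INIT,
  ordering: Order.ORDER_UNORDERED,
  counterparty: { portId: 'transfer', channelId: 'channel-0' },
  connectionHops: ['connection-0'],
  version: 'ics20-1',
};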
+ */ +export interface ChannelAmino { + /** current state of the channel end */ + state?: State; + /** whether the channel is ordered or unordered */ + ordering?: Order; + /** counterparty channel end */ + counterparty?: CounterpartyAmino; + /** + * list of connection identifiers, in order, along which packets sent on + * this channel will travel + */ + connection_hops?: string[]; + /** opaque channel version, which is agreed upon during the handshake */ + version?: string; +} +export interface ChannelAminoMsg { + type: 'cosmos-sdk/Channel'; + value: ChannelAmino; +} +/** + * Channel defines pipeline for exactly-once packet delivery between specific + * modules on separate blockchains, which has at least one end capable of + * sending packets and one end capable of receiving packets. + */ +export interface ChannelSDKType { + state: State; + ordering: Order; + counterparty: CounterpartySDKType; + connection_hops: string[]; + version: string; +} +/** + * IdentifiedChannel defines a channel with additional port and channel + * identifier fields. + */ +export interface IdentifiedChannel { + /** current state of the channel end */ + state: State; + /** whether the channel is ordered or unordered */ + ordering: Order; + /** counterparty channel end */ + counterparty: Counterparty; + /** + * list of connection identifiers, in order, along which packets sent on + * this channel will travel + */ + connectionHops: string[]; + /** opaque channel version, which is agreed upon during the handshake */ + version: string; + /** port identifier */ + portId: string; + /** channel identifier */ + channelId: string; +} +export interface IdentifiedChannelProtoMsg { + typeUrl: '/ibc.core.channel.v1.IdentifiedChannel'; + value: Uint8Array; +} +/** + * IdentifiedChannel defines a channel with additional port and channel + * identifier fields. + */ +export interface IdentifiedChannelAmino { + /** current state of the channel end */ + state?: State; + /** whether the channel is ordered or unordered */ + ordering?: Order; + /** counterparty channel end */ + counterparty?: CounterpartyAmino; + /** + * list of connection identifiers, in order, along which packets sent on + * this channel will travel + */ + connection_hops?: string[]; + /** opaque channel version, which is agreed upon during the handshake */ + version?: string; + /** port identifier */ + port_id?: string; + /** channel identifier */ + channel_id?: string; +} +export interface IdentifiedChannelAminoMsg { + type: 'cosmos-sdk/IdentifiedChannel'; + value: IdentifiedChannelAmino; +} +/** + * IdentifiedChannel defines a channel with additional port and channel + * identifier fields. + */ +export interface IdentifiedChannelSDKType { + state: State; + ordering: Order; + counterparty: CounterpartySDKType; + connection_hops: string[]; + version: string; + port_id: string; + channel_id: string; +} +/** Counterparty defines a channel end counterparty */ +export interface Counterparty { + /** port on the counterparty chain which owns the other end of the channel. */ + portId: string; + /** channel end on the counterparty chain */ + channelId: string; +} +export interface CounterpartyProtoMsg { + typeUrl: '/ibc.core.channel.v1.Counterparty'; + value: Uint8Array; +} +/** Counterparty defines a channel end counterparty */ +export interface CounterpartyAmino { + /** port on the counterparty chain which owns the other end of the channel. 
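The `*Amino` variants above differ from the base interfaces only in their snake_case keys; `toAmino`/`fromAmino` translate between the two shapes, e.g. for `Counterparty` (import path illustrative):

import { Counterparty } from './ibc/core/channel/v1/channel.js'; // illustrative path

Counterparty.toAmino({ portId: 'transfer', channelId: 'channel-0' });
// -> { port_id: 'transfer', channel_id: 'channel-0' }
Counterparty.fromAmino({ port_id: 'transfer', channel_id: 'channel-0' });
// -> { portId: 'transfer', channelId: 'channel-0' }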
*/ + port_id?: string; + /** channel end on the counterparty chain */ + channel_id?: string; +} +export interface CounterpartyAminoMsg { + type: 'cosmos-sdk/Counterparty'; + value: CounterpartyAmino; +} +/** Counterparty defines a channel end counterparty */ +export interface CounterpartySDKType { + port_id: string; + channel_id: string; +} +/** Packet defines a type that carries data across different chains through IBC */ +export interface Packet { + /** + * number corresponds to the order of sends and receives, where a Packet + * with an earlier sequence number must be sent and received before a Packet + * with a later sequence number. + */ + sequence: bigint; + /** identifies the port on the sending chain. */ + sourcePort: string; + /** identifies the channel end on the sending chain. */ + sourceChannel: string; + /** identifies the port on the receiving chain. */ + destinationPort: string; + /** identifies the channel end on the receiving chain. */ + destinationChannel: string; + /** actual opaque bytes transferred directly to the application module */ + data: Uint8Array; + /** block height after which the packet times out */ + timeoutHeight: Height; + /** block timestamp (in nanoseconds) after which the packet times out */ + timeoutTimestamp: bigint; +} +export interface PacketProtoMsg { + typeUrl: '/ibc.core.channel.v1.Packet'; + value: Uint8Array; +} +/** Packet defines a type that carries data across different chains through IBC */ +export interface PacketAmino { + /** + * number corresponds to the order of sends and receives, where a Packet + * with an earlier sequence number must be sent and received before a Packet + * with a later sequence number. + */ + sequence?: string; + /** identifies the port on the sending chain. */ + source_port?: string; + /** identifies the channel end on the sending chain. */ + source_channel?: string; + /** identifies the port on the receiving chain. */ + destination_port?: string; + /** identifies the channel end on the receiving chain. */ + destination_channel?: string; + /** actual opaque bytes transferred directly to the application module */ + data?: string; + /** block height after which the packet times out */ + timeout_height?: HeightAmino; + /** block timestamp (in nanoseconds) after which the packet times out */ + timeout_timestamp?: string; +} +export interface PacketAminoMsg { + type: 'cosmos-sdk/Packet'; + value: PacketAmino; +} +/** Packet defines a type that carries data across different chains through IBC */ +export interface PacketSDKType { + sequence: bigint; + source_port: string; + source_channel: string; + destination_port: string; + destination_channel: string; + data: Uint8Array; + timeout_height: HeightSDKType; + timeout_timestamp: bigint; +} +/** + * PacketState defines the generic type necessary to retrieve and store + * packet commitments, acknowledgements, and receipts. + * Caller is responsible for knowing the context necessary to interpret this + * state as a commitment, acknowledgement, or a receipt. + */ +export interface PacketState { + /** channel port identifier. */ + portId: string; + /** channel unique identifier. */ + channelId: string; + /** packet sequence. */ + sequence: bigint; + /** embedded data that represents packet state. */ + data: Uint8Array; +} +export interface PacketStateProtoMsg { + typeUrl: '/ibc.core.channel.v1.PacketState'; + value: Uint8Array; +} +/** + * PacketState defines the generic type necessary to retrieve and store + * packet commitments, acknowledgements, and receipts. 
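A sketch of building the `Packet` shape defined above and round-tripping it through its binary codec; the import paths and the sample port/channel identifiers are illustrative:

import { Packet } from './ibc/core/channel/v1/channel.js'; // illustrative paths
import { Height } from './ibc/core/client/v1/client.js';

const packet = Packet.fromPartial({
  sequence: 1n,
  sourcePort: 'transfer',
  sourceChannel: 'channel-0',
  destinationPort: 'transfer',
  destinationChannel: 'channel-1',
  data: new TextEncoder().encode('{"amount":"1"}'),
  timeoutHeight: Height.fromPartial({}), // zero height, as in createBasePacket
  timeoutTimestamp: 0n,
});
const bytes = Packet.encode(packet).finish();
const decoded = Packet.decode(bytes); // decoded.sequence === 1n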
+ * Caller is responsible for knowing the context necessary to interpret this + * state as a commitment, acknowledgement, or a receipt. + */ +export interface PacketStateAmino { + /** channel port identifier. */ + port_id?: string; + /** channel unique identifier. */ + channel_id?: string; + /** packet sequence. */ + sequence?: string; + /** embedded data that represents packet state. */ + data?: string; +} +export interface PacketStateAminoMsg { + type: 'cosmos-sdk/PacketState'; + value: PacketStateAmino; +} +/** + * PacketState defines the generic type necessary to retrieve and store + * packet commitments, acknowledgements, and receipts. + * Caller is responsible for knowing the context necessary to interpret this + * state as a commitment, acknowledgement, or a receipt. + */ +export interface PacketStateSDKType { + port_id: string; + channel_id: string; + sequence: bigint; + data: Uint8Array; +} +/** + * PacketId is an identifer for a unique Packet + * Source chains refer to packets by source port/channel + * Destination chains refer to packets by destination port/channel + */ +export interface PacketId { + /** channel port identifier */ + portId: string; + /** channel unique identifier */ + channelId: string; + /** packet sequence */ + sequence: bigint; +} +export interface PacketIdProtoMsg { + typeUrl: '/ibc.core.channel.v1.PacketId'; + value: Uint8Array; +} +/** + * PacketId is an identifer for a unique Packet + * Source chains refer to packets by source port/channel + * Destination chains refer to packets by destination port/channel + */ +export interface PacketIdAmino { + /** channel port identifier */ + port_id?: string; + /** channel unique identifier */ + channel_id?: string; + /** packet sequence */ + sequence?: string; +} +export interface PacketIdAminoMsg { + type: 'cosmos-sdk/PacketId'; + value: PacketIdAmino; +} +/** + * PacketId is an identifer for a unique Packet + * Source chains refer to packets by source port/channel + * Destination chains refer to packets by destination port/channel + */ +export interface PacketIdSDKType { + port_id: string; + channel_id: string; + sequence: bigint; +} +/** + * Acknowledgement is the recommended acknowledgement format to be used by + * app-specific protocols. + * NOTE: The field numbers 21 and 22 were explicitly chosen to avoid accidental + * conflicts with other protobuf message formats used for acknowledgements. + * The first byte of any message with this format will be the non-ASCII values + * `0xaa` (result) or `0xb2` (error). Implemented as defined by ICS: + * https://github.com/cosmos/ibc/tree/master/spec/core/ics-004-channel-and-packet-semantics#acknowledgement-envelope + */ +export interface Acknowledgement { + result?: Uint8Array; + error?: string; +} +export interface AcknowledgementProtoMsg { + typeUrl: '/ibc.core.channel.v1.Acknowledgement'; + value: Uint8Array; +} +/** + * Acknowledgement is the recommended acknowledgement format to be used by + * app-specific protocols. + * NOTE: The field numbers 21 and 22 were explicitly chosen to avoid accidental + * conflicts with other protobuf message formats used for acknowledgements. + * The first byte of any message with this format will be the non-ASCII values + * `0xaa` (result) or `0xb2` (error). 
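A sketch of the acknowledgement envelope described above: a success carries `result` (field 21), a failure carries `error` (field 22), and the first encoded byte is the 0xaa/0xb2 tag the comment mentions (import path illustrative):

import { Acknowledgement } from './ibc/core/channel/v1/channel.js'; // illustrative path

const ok = Acknowledgement.fromPartial({ result: new Uint8Array([1]) });
const failed = Acknowledgement.fromPartial({ error: 'packet timed out' });

Acknowledgement.encode(ok).finish()[0];     // 170 (0xaa)
Acknowledgement.encode(failed).finish()[0]; // 178 (0xb2)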
Implemented as defined by ICS: + * https://github.com/cosmos/ibc/tree/master/spec/core/ics-004-channel-and-packet-semantics#acknowledgement-envelope + */ +export interface AcknowledgementAmino { + result?: string; + error?: string; +} +export interface AcknowledgementAminoMsg { + type: 'cosmos-sdk/Acknowledgement'; + value: AcknowledgementAmino; +} +/** + * Acknowledgement is the recommended acknowledgement format to be used by + * app-specific protocols. + * NOTE: The field numbers 21 and 22 were explicitly chosen to avoid accidental + * conflicts with other protobuf message formats used for acknowledgements. + * The first byte of any message with this format will be the non-ASCII values + * `0xaa` (result) or `0xb2` (error). Implemented as defined by ICS: + * https://github.com/cosmos/ibc/tree/master/spec/core/ics-004-channel-and-packet-semantics#acknowledgement-envelope + */ +export interface AcknowledgementSDKType { + result?: Uint8Array; + error?: string; +} +function createBaseChannel(): Channel { + return { + state: 0, + ordering: 0, + counterparty: Counterparty.fromPartial({}), + connectionHops: [], + version: '', + }; +} +export const Channel = { + typeUrl: '/ibc.core.channel.v1.Channel', + encode( + message: Channel, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.state !== 0) { + writer.uint32(8).int32(message.state); + } + if (message.ordering !== 0) { + writer.uint32(16).int32(message.ordering); + } + if (message.counterparty !== undefined) { + Counterparty.encode( + message.counterparty, + writer.uint32(26).fork(), + ).ldelim(); + } + for (const v of message.connectionHops) { + writer.uint32(34).string(v!); + } + if (message.version !== '') { + writer.uint32(42).string(message.version); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): Channel { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseChannel(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.state = reader.int32() as any; + break; + case 2: + message.ordering = reader.int32() as any; + break; + case 3: + message.counterparty = Counterparty.decode(reader, reader.uint32()); + break; + case 4: + message.connectionHops.push(reader.string()); + break; + case 5: + message.version = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): Channel { + return { + state: isSet(object.state) ? stateFromJSON(object.state) : -1, + ordering: isSet(object.ordering) ? orderFromJSON(object.ordering) : -1, + counterparty: isSet(object.counterparty) + ? Counterparty.fromJSON(object.counterparty) + : undefined, + connectionHops: Array.isArray(object?.connectionHops) + ? object.connectionHops.map((e: any) => String(e)) + : [], + version: isSet(object.version) ? String(object.version) : '', + }; + }, + toJSON(message: Channel): unknown { + const obj: any = {}; + message.state !== undefined && (obj.state = stateToJSON(message.state)); + message.ordering !== undefined && + (obj.ordering = orderToJSON(message.ordering)); + message.counterparty !== undefined && + (obj.counterparty = message.counterparty + ? 
Counterparty.toJSON(message.counterparty) + : undefined); + if (message.connectionHops) { + obj.connectionHops = message.connectionHops.map(e => e); + } else { + obj.connectionHops = []; + } + message.version !== undefined && (obj.version = message.version); + return obj; + }, + fromPartial(object: Partial): Channel { + const message = createBaseChannel(); + message.state = object.state ?? 0; + message.ordering = object.ordering ?? 0; + message.counterparty = + object.counterparty !== undefined && object.counterparty !== null + ? Counterparty.fromPartial(object.counterparty) + : undefined; + message.connectionHops = object.connectionHops?.map(e => e) || []; + message.version = object.version ?? ''; + return message; + }, + fromAmino(object: ChannelAmino): Channel { + const message = createBaseChannel(); + if (object.state !== undefined && object.state !== null) { + message.state = stateFromJSON(object.state); + } + if (object.ordering !== undefined && object.ordering !== null) { + message.ordering = orderFromJSON(object.ordering); + } + if (object.counterparty !== undefined && object.counterparty !== null) { + message.counterparty = Counterparty.fromAmino(object.counterparty); + } + message.connectionHops = object.connection_hops?.map(e => e) || []; + if (object.version !== undefined && object.version !== null) { + message.version = object.version; + } + return message; + }, + toAmino(message: Channel): ChannelAmino { + const obj: any = {}; + obj.state = message.state; + obj.ordering = message.ordering; + obj.counterparty = message.counterparty + ? Counterparty.toAmino(message.counterparty) + : undefined; + if (message.connectionHops) { + obj.connection_hops = message.connectionHops.map(e => e); + } else { + obj.connection_hops = []; + } + obj.version = message.version; + return obj; + }, + fromAminoMsg(object: ChannelAminoMsg): Channel { + return Channel.fromAmino(object.value); + }, + toAminoMsg(message: Channel): ChannelAminoMsg { + return { + type: 'cosmos-sdk/Channel', + value: Channel.toAmino(message), + }; + }, + fromProtoMsg(message: ChannelProtoMsg): Channel { + return Channel.decode(message.value); + }, + toProto(message: Channel): Uint8Array { + return Channel.encode(message).finish(); + }, + toProtoMsg(message: Channel): ChannelProtoMsg { + return { + typeUrl: '/ibc.core.channel.v1.Channel', + value: Channel.encode(message).finish(), + }; + }, +}; +function createBaseIdentifiedChannel(): IdentifiedChannel { + return { + state: 0, + ordering: 0, + counterparty: Counterparty.fromPartial({}), + connectionHops: [], + version: '', + portId: '', + channelId: '', + }; +} +export const IdentifiedChannel = { + typeUrl: '/ibc.core.channel.v1.IdentifiedChannel', + encode( + message: IdentifiedChannel, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.state !== 0) { + writer.uint32(8).int32(message.state); + } + if (message.ordering !== 0) { + writer.uint32(16).int32(message.ordering); + } + if (message.counterparty !== undefined) { + Counterparty.encode( + message.counterparty, + writer.uint32(26).fork(), + ).ldelim(); + } + for (const v of message.connectionHops) { + writer.uint32(34).string(v!); + } + if (message.version !== '') { + writer.uint32(42).string(message.version); + } + if (message.portId !== '') { + writer.uint32(50).string(message.portId); + } + if (message.channelId !== '') { + writer.uint32(58).string(message.channelId); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): IdentifiedChannel { + const reader 
= + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseIdentifiedChannel(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.state = reader.int32() as any; + break; + case 2: + message.ordering = reader.int32() as any; + break; + case 3: + message.counterparty = Counterparty.decode(reader, reader.uint32()); + break; + case 4: + message.connectionHops.push(reader.string()); + break; + case 5: + message.version = reader.string(); + break; + case 6: + message.portId = reader.string(); + break; + case 7: + message.channelId = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): IdentifiedChannel { + return { + state: isSet(object.state) ? stateFromJSON(object.state) : -1, + ordering: isSet(object.ordering) ? orderFromJSON(object.ordering) : -1, + counterparty: isSet(object.counterparty) + ? Counterparty.fromJSON(object.counterparty) + : undefined, + connectionHops: Array.isArray(object?.connectionHops) + ? object.connectionHops.map((e: any) => String(e)) + : [], + version: isSet(object.version) ? String(object.version) : '', + portId: isSet(object.portId) ? String(object.portId) : '', + channelId: isSet(object.channelId) ? String(object.channelId) : '', + }; + }, + toJSON(message: IdentifiedChannel): unknown { + const obj: any = {}; + message.state !== undefined && (obj.state = stateToJSON(message.state)); + message.ordering !== undefined && + (obj.ordering = orderToJSON(message.ordering)); + message.counterparty !== undefined && + (obj.counterparty = message.counterparty + ? Counterparty.toJSON(message.counterparty) + : undefined); + if (message.connectionHops) { + obj.connectionHops = message.connectionHops.map(e => e); + } else { + obj.connectionHops = []; + } + message.version !== undefined && (obj.version = message.version); + message.portId !== undefined && (obj.portId = message.portId); + message.channelId !== undefined && (obj.channelId = message.channelId); + return obj; + }, + fromPartial(object: Partial): IdentifiedChannel { + const message = createBaseIdentifiedChannel(); + message.state = object.state ?? 0; + message.ordering = object.ordering ?? 0; + message.counterparty = + object.counterparty !== undefined && object.counterparty !== null + ? Counterparty.fromPartial(object.counterparty) + : undefined; + message.connectionHops = object.connectionHops?.map(e => e) || []; + message.version = object.version ?? ''; + message.portId = object.portId ?? ''; + message.channelId = object.channelId ?? 
''; + return message; + }, + fromAmino(object: IdentifiedChannelAmino): IdentifiedChannel { + const message = createBaseIdentifiedChannel(); + if (object.state !== undefined && object.state !== null) { + message.state = stateFromJSON(object.state); + } + if (object.ordering !== undefined && object.ordering !== null) { + message.ordering = orderFromJSON(object.ordering); + } + if (object.counterparty !== undefined && object.counterparty !== null) { + message.counterparty = Counterparty.fromAmino(object.counterparty); + } + message.connectionHops = object.connection_hops?.map(e => e) || []; + if (object.version !== undefined && object.version !== null) { + message.version = object.version; + } + if (object.port_id !== undefined && object.port_id !== null) { + message.portId = object.port_id; + } + if (object.channel_id !== undefined && object.channel_id !== null) { + message.channelId = object.channel_id; + } + return message; + }, + toAmino(message: IdentifiedChannel): IdentifiedChannelAmino { + const obj: any = {}; + obj.state = message.state; + obj.ordering = message.ordering; + obj.counterparty = message.counterparty + ? Counterparty.toAmino(message.counterparty) + : undefined; + if (message.connectionHops) { + obj.connection_hops = message.connectionHops.map(e => e); + } else { + obj.connection_hops = []; + } + obj.version = message.version; + obj.port_id = message.portId; + obj.channel_id = message.channelId; + return obj; + }, + fromAminoMsg(object: IdentifiedChannelAminoMsg): IdentifiedChannel { + return IdentifiedChannel.fromAmino(object.value); + }, + toAminoMsg(message: IdentifiedChannel): IdentifiedChannelAminoMsg { + return { + type: 'cosmos-sdk/IdentifiedChannel', + value: IdentifiedChannel.toAmino(message), + }; + }, + fromProtoMsg(message: IdentifiedChannelProtoMsg): IdentifiedChannel { + return IdentifiedChannel.decode(message.value); + }, + toProto(message: IdentifiedChannel): Uint8Array { + return IdentifiedChannel.encode(message).finish(); + }, + toProtoMsg(message: IdentifiedChannel): IdentifiedChannelProtoMsg { + return { + typeUrl: '/ibc.core.channel.v1.IdentifiedChannel', + value: IdentifiedChannel.encode(message).finish(), + }; + }, +}; +function createBaseCounterparty(): Counterparty { + return { + portId: '', + channelId: '', + }; +} +export const Counterparty = { + typeUrl: '/ibc.core.channel.v1.Counterparty', + encode( + message: Counterparty, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.portId !== '') { + writer.uint32(10).string(message.portId); + } + if (message.channelId !== '') { + writer.uint32(18).string(message.channelId); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): Counterparty { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseCounterparty(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.portId = reader.string(); + break; + case 2: + message.channelId = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): Counterparty { + return { + portId: isSet(object.portId) ? String(object.portId) : '', + channelId: isSet(object.channelId) ? 
String(object.channelId) : '', + }; + }, + toJSON(message: Counterparty): unknown { + const obj: any = {}; + message.portId !== undefined && (obj.portId = message.portId); + message.channelId !== undefined && (obj.channelId = message.channelId); + return obj; + }, + fromPartial(object: Partial): Counterparty { + const message = createBaseCounterparty(); + message.portId = object.portId ?? ''; + message.channelId = object.channelId ?? ''; + return message; + }, + fromAmino(object: CounterpartyAmino): Counterparty { + const message = createBaseCounterparty(); + if (object.port_id !== undefined && object.port_id !== null) { + message.portId = object.port_id; + } + if (object.channel_id !== undefined && object.channel_id !== null) { + message.channelId = object.channel_id; + } + return message; + }, + toAmino(message: Counterparty): CounterpartyAmino { + const obj: any = {}; + obj.port_id = message.portId; + obj.channel_id = message.channelId; + return obj; + }, + fromAminoMsg(object: CounterpartyAminoMsg): Counterparty { + return Counterparty.fromAmino(object.value); + }, + toAminoMsg(message: Counterparty): CounterpartyAminoMsg { + return { + type: 'cosmos-sdk/Counterparty', + value: Counterparty.toAmino(message), + }; + }, + fromProtoMsg(message: CounterpartyProtoMsg): Counterparty { + return Counterparty.decode(message.value); + }, + toProto(message: Counterparty): Uint8Array { + return Counterparty.encode(message).finish(); + }, + toProtoMsg(message: Counterparty): CounterpartyProtoMsg { + return { + typeUrl: '/ibc.core.channel.v1.Counterparty', + value: Counterparty.encode(message).finish(), + }; + }, +}; +function createBasePacket(): Packet { + return { + sequence: BigInt(0), + sourcePort: '', + sourceChannel: '', + destinationPort: '', + destinationChannel: '', + data: new Uint8Array(), + timeoutHeight: Height.fromPartial({}), + timeoutTimestamp: BigInt(0), + }; +} +export const Packet = { + typeUrl: '/ibc.core.channel.v1.Packet', + encode( + message: Packet, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.sequence !== BigInt(0)) { + writer.uint32(8).uint64(message.sequence); + } + if (message.sourcePort !== '') { + writer.uint32(18).string(message.sourcePort); + } + if (message.sourceChannel !== '') { + writer.uint32(26).string(message.sourceChannel); + } + if (message.destinationPort !== '') { + writer.uint32(34).string(message.destinationPort); + } + if (message.destinationChannel !== '') { + writer.uint32(42).string(message.destinationChannel); + } + if (message.data.length !== 0) { + writer.uint32(50).bytes(message.data); + } + if (message.timeoutHeight !== undefined) { + Height.encode(message.timeoutHeight, writer.uint32(58).fork()).ldelim(); + } + if (message.timeoutTimestamp !== BigInt(0)) { + writer.uint32(64).uint64(message.timeoutTimestamp); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): Packet { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBasePacket(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.sequence = reader.uint64(); + break; + case 2: + message.sourcePort = reader.string(); + break; + case 3: + message.sourceChannel = reader.string(); + break; + case 4: + message.destinationPort = reader.string(); + break; + case 5: + message.destinationChannel = reader.string(); + break; + case 6: + message.data = reader.bytes(); + break; + case 7: + message.timeoutHeight = Height.decode(reader, reader.uint32()); + break; + case 8: + message.timeoutTimestamp = reader.uint64(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): Packet { + return { + sequence: isSet(object.sequence) + ? BigInt(object.sequence.toString()) + : BigInt(0), + sourcePort: isSet(object.sourcePort) ? String(object.sourcePort) : '', + sourceChannel: isSet(object.sourceChannel) + ? String(object.sourceChannel) + : '', + destinationPort: isSet(object.destinationPort) + ? String(object.destinationPort) + : '', + destinationChannel: isSet(object.destinationChannel) + ? String(object.destinationChannel) + : '', + data: isSet(object.data) + ? bytesFromBase64(object.data) + : new Uint8Array(), + timeoutHeight: isSet(object.timeoutHeight) + ? Height.fromJSON(object.timeoutHeight) + : undefined, + timeoutTimestamp: isSet(object.timeoutTimestamp) + ? BigInt(object.timeoutTimestamp.toString()) + : BigInt(0), + }; + }, + toJSON(message: Packet): unknown { + const obj: any = {}; + message.sequence !== undefined && + (obj.sequence = (message.sequence || BigInt(0)).toString()); + message.sourcePort !== undefined && (obj.sourcePort = message.sourcePort); + message.sourceChannel !== undefined && + (obj.sourceChannel = message.sourceChannel); + message.destinationPort !== undefined && + (obj.destinationPort = message.destinationPort); + message.destinationChannel !== undefined && + (obj.destinationChannel = message.destinationChannel); + message.data !== undefined && + (obj.data = base64FromBytes( + message.data !== undefined ? message.data : new Uint8Array(), + )); + message.timeoutHeight !== undefined && + (obj.timeoutHeight = message.timeoutHeight + ? Height.toJSON(message.timeoutHeight) + : undefined); + message.timeoutTimestamp !== undefined && + (obj.timeoutTimestamp = ( + message.timeoutTimestamp || BigInt(0) + ).toString()); + return obj; + }, + fromPartial(object: Partial): Packet { + const message = createBasePacket(); + message.sequence = + object.sequence !== undefined && object.sequence !== null + ? BigInt(object.sequence.toString()) + : BigInt(0); + message.sourcePort = object.sourcePort ?? ''; + message.sourceChannel = object.sourceChannel ?? ''; + message.destinationPort = object.destinationPort ?? ''; + message.destinationChannel = object.destinationChannel ?? ''; + message.data = object.data ?? new Uint8Array(); + message.timeoutHeight = + object.timeoutHeight !== undefined && object.timeoutHeight !== null + ? Height.fromPartial(object.timeoutHeight) + : undefined; + message.timeoutTimestamp = + object.timeoutTimestamp !== undefined && object.timeoutTimestamp !== null + ? 
BigInt(object.timeoutTimestamp.toString()) + : BigInt(0); + return message; + }, + fromAmino(object: PacketAmino): Packet { + const message = createBasePacket(); + if (object.sequence !== undefined && object.sequence !== null) { + message.sequence = BigInt(object.sequence); + } + if (object.source_port !== undefined && object.source_port !== null) { + message.sourcePort = object.source_port; + } + if (object.source_channel !== undefined && object.source_channel !== null) { + message.sourceChannel = object.source_channel; + } + if ( + object.destination_port !== undefined && + object.destination_port !== null + ) { + message.destinationPort = object.destination_port; + } + if ( + object.destination_channel !== undefined && + object.destination_channel !== null + ) { + message.destinationChannel = object.destination_channel; + } + if (object.data !== undefined && object.data !== null) { + message.data = bytesFromBase64(object.data); + } + if (object.timeout_height !== undefined && object.timeout_height !== null) { + message.timeoutHeight = Height.fromAmino(object.timeout_height); + } + if ( + object.timeout_timestamp !== undefined && + object.timeout_timestamp !== null + ) { + message.timeoutTimestamp = BigInt(object.timeout_timestamp); + } + return message; + }, + toAmino(message: Packet): PacketAmino { + const obj: any = {}; + obj.sequence = message.sequence ? message.sequence.toString() : undefined; + obj.source_port = message.sourcePort; + obj.source_channel = message.sourceChannel; + obj.destination_port = message.destinationPort; + obj.destination_channel = message.destinationChannel; + obj.data = message.data ? base64FromBytes(message.data) : undefined; + obj.timeout_height = message.timeoutHeight + ? Height.toAmino(message.timeoutHeight) + : {}; + obj.timeout_timestamp = message.timeoutTimestamp + ? message.timeoutTimestamp.toString() + : undefined; + return obj; + }, + fromAminoMsg(object: PacketAminoMsg): Packet { + return Packet.fromAmino(object.value); + }, + toAminoMsg(message: Packet): PacketAminoMsg { + return { + type: 'cosmos-sdk/Packet', + value: Packet.toAmino(message), + }; + }, + fromProtoMsg(message: PacketProtoMsg): Packet { + return Packet.decode(message.value); + }, + toProto(message: Packet): Uint8Array { + return Packet.encode(message).finish(); + }, + toProtoMsg(message: Packet): PacketProtoMsg { + return { + typeUrl: '/ibc.core.channel.v1.Packet', + value: Packet.encode(message).finish(), + }; + }, +}; +function createBasePacketState(): PacketState { + return { + portId: '', + channelId: '', + sequence: BigInt(0), + data: new Uint8Array(), + }; +} +export const PacketState = { + typeUrl: '/ibc.core.channel.v1.PacketState', + encode( + message: PacketState, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.portId !== '') { + writer.uint32(10).string(message.portId); + } + if (message.channelId !== '') { + writer.uint32(18).string(message.channelId); + } + if (message.sequence !== BigInt(0)) { + writer.uint32(24).uint64(message.sequence); + } + if (message.data.length !== 0) { + writer.uint32(34).bytes(message.data); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): PacketState { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBasePacketState(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.portId = reader.string(); + break; + case 2: + message.channelId = reader.string(); + break; + case 3: + message.sequence = reader.uint64(); + break; + case 4: + message.data = reader.bytes(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): PacketState { + return { + portId: isSet(object.portId) ? String(object.portId) : '', + channelId: isSet(object.channelId) ? String(object.channelId) : '', + sequence: isSet(object.sequence) + ? BigInt(object.sequence.toString()) + : BigInt(0), + data: isSet(object.data) + ? bytesFromBase64(object.data) + : new Uint8Array(), + }; + }, + toJSON(message: PacketState): unknown { + const obj: any = {}; + message.portId !== undefined && (obj.portId = message.portId); + message.channelId !== undefined && (obj.channelId = message.channelId); + message.sequence !== undefined && + (obj.sequence = (message.sequence || BigInt(0)).toString()); + message.data !== undefined && + (obj.data = base64FromBytes( + message.data !== undefined ? message.data : new Uint8Array(), + )); + return obj; + }, + fromPartial(object: Partial): PacketState { + const message = createBasePacketState(); + message.portId = object.portId ?? ''; + message.channelId = object.channelId ?? ''; + message.sequence = + object.sequence !== undefined && object.sequence !== null + ? BigInt(object.sequence.toString()) + : BigInt(0); + message.data = object.data ?? new Uint8Array(); + return message; + }, + fromAmino(object: PacketStateAmino): PacketState { + const message = createBasePacketState(); + if (object.port_id !== undefined && object.port_id !== null) { + message.portId = object.port_id; + } + if (object.channel_id !== undefined && object.channel_id !== null) { + message.channelId = object.channel_id; + } + if (object.sequence !== undefined && object.sequence !== null) { + message.sequence = BigInt(object.sequence); + } + if (object.data !== undefined && object.data !== null) { + message.data = bytesFromBase64(object.data); + } + return message; + }, + toAmino(message: PacketState): PacketStateAmino { + const obj: any = {}; + obj.port_id = message.portId; + obj.channel_id = message.channelId; + obj.sequence = message.sequence ? message.sequence.toString() : undefined; + obj.data = message.data ? 
base64FromBytes(message.data) : undefined; + return obj; + }, + fromAminoMsg(object: PacketStateAminoMsg): PacketState { + return PacketState.fromAmino(object.value); + }, + toAminoMsg(message: PacketState): PacketStateAminoMsg { + return { + type: 'cosmos-sdk/PacketState', + value: PacketState.toAmino(message), + }; + }, + fromProtoMsg(message: PacketStateProtoMsg): PacketState { + return PacketState.decode(message.value); + }, + toProto(message: PacketState): Uint8Array { + return PacketState.encode(message).finish(); + }, + toProtoMsg(message: PacketState): PacketStateProtoMsg { + return { + typeUrl: '/ibc.core.channel.v1.PacketState', + value: PacketState.encode(message).finish(), + }; + }, +}; +function createBasePacketId(): PacketId { + return { + portId: '', + channelId: '', + sequence: BigInt(0), + }; +} +export const PacketId = { + typeUrl: '/ibc.core.channel.v1.PacketId', + encode( + message: PacketId, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.portId !== '') { + writer.uint32(10).string(message.portId); + } + if (message.channelId !== '') { + writer.uint32(18).string(message.channelId); + } + if (message.sequence !== BigInt(0)) { + writer.uint32(24).uint64(message.sequence); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): PacketId { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePacketId(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.portId = reader.string(); + break; + case 2: + message.channelId = reader.string(); + break; + case 3: + message.sequence = reader.uint64(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): PacketId { + return { + portId: isSet(object.portId) ? String(object.portId) : '', + channelId: isSet(object.channelId) ? String(object.channelId) : '', + sequence: isSet(object.sequence) + ? BigInt(object.sequence.toString()) + : BigInt(0), + }; + }, + toJSON(message: PacketId): unknown { + const obj: any = {}; + message.portId !== undefined && (obj.portId = message.portId); + message.channelId !== undefined && (obj.channelId = message.channelId); + message.sequence !== undefined && + (obj.sequence = (message.sequence || BigInt(0)).toString()); + return obj; + }, + fromPartial(object: Partial): PacketId { + const message = createBasePacketId(); + message.portId = object.portId ?? ''; + message.channelId = object.channelId ?? ''; + message.sequence = + object.sequence !== undefined && object.sequence !== null + ? BigInt(object.sequence.toString()) + : BigInt(0); + return message; + }, + fromAmino(object: PacketIdAmino): PacketId { + const message = createBasePacketId(); + if (object.port_id !== undefined && object.port_id !== null) { + message.portId = object.port_id; + } + if (object.channel_id !== undefined && object.channel_id !== null) { + message.channelId = object.channel_id; + } + if (object.sequence !== undefined && object.sequence !== null) { + message.sequence = BigInt(object.sequence); + } + return message; + }, + toAmino(message: PacketId): PacketIdAmino { + const obj: any = {}; + obj.port_id = message.portId; + obj.channel_id = message.channelId; + obj.sequence = message.sequence ? 
message.sequence.toString() : undefined; + return obj; + }, + fromAminoMsg(object: PacketIdAminoMsg): PacketId { + return PacketId.fromAmino(object.value); + }, + toAminoMsg(message: PacketId): PacketIdAminoMsg { + return { + type: 'cosmos-sdk/PacketId', + value: PacketId.toAmino(message), + }; + }, + fromProtoMsg(message: PacketIdProtoMsg): PacketId { + return PacketId.decode(message.value); + }, + toProto(message: PacketId): Uint8Array { + return PacketId.encode(message).finish(); + }, + toProtoMsg(message: PacketId): PacketIdProtoMsg { + return { + typeUrl: '/ibc.core.channel.v1.PacketId', + value: PacketId.encode(message).finish(), + }; + }, +}; +function createBaseAcknowledgement(): Acknowledgement { + return { + result: undefined, + error: undefined, + }; +} +export const Acknowledgement = { + typeUrl: '/ibc.core.channel.v1.Acknowledgement', + encode( + message: Acknowledgement, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.result !== undefined) { + writer.uint32(170).bytes(message.result); + } + if (message.error !== undefined) { + writer.uint32(178).string(message.error); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): Acknowledgement { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseAcknowledgement(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 21: + message.result = reader.bytes(); + break; + case 22: + message.error = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): Acknowledgement { + return { + result: isSet(object.result) ? bytesFromBase64(object.result) : undefined, + error: isSet(object.error) ? String(object.error) : undefined, + }; + }, + toJSON(message: Acknowledgement): unknown { + const obj: any = {}; + message.result !== undefined && + (obj.result = + message.result !== undefined + ? base64FromBytes(message.result) + : undefined); + message.error !== undefined && (obj.error = message.error); + return obj; + }, + fromPartial(object: Partial): Acknowledgement { + const message = createBaseAcknowledgement(); + message.result = object.result ?? undefined; + message.error = object.error ?? undefined; + return message; + }, + fromAmino(object: AcknowledgementAmino): Acknowledgement { + const message = createBaseAcknowledgement(); + if (object.result !== undefined && object.result !== null) { + message.result = bytesFromBase64(object.result); + } + if (object.error !== undefined && object.error !== null) { + message.error = object.error; + } + return message; + }, + toAmino(message: Acknowledgement): AcknowledgementAmino { + const obj: any = {}; + obj.result = message.result ? 
base64FromBytes(message.result) : undefined; + obj.error = message.error; + return obj; + }, + fromAminoMsg(object: AcknowledgementAminoMsg): Acknowledgement { + return Acknowledgement.fromAmino(object.value); + }, + toAminoMsg(message: Acknowledgement): AcknowledgementAminoMsg { + return { + type: 'cosmos-sdk/Acknowledgement', + value: Acknowledgement.toAmino(message), + }; + }, + fromProtoMsg(message: AcknowledgementProtoMsg): Acknowledgement { + return Acknowledgement.decode(message.value); + }, + toProto(message: Acknowledgement): Uint8Array { + return Acknowledgement.encode(message).finish(); + }, + toProtoMsg(message: Acknowledgement): AcknowledgementProtoMsg { + return { + typeUrl: '/ibc.core.channel.v1.Acknowledgement', + value: Acknowledgement.encode(message).finish(), + }; + }, +}; diff --git a/packages/cosmic-proto/src/codegen/ibc/core/client/v1/client.ts b/packages/cosmic-proto/src/codegen/ibc/core/client/v1/client.ts new file mode 100644 index 00000000000..a437967c787 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/ibc/core/client/v1/client.ts @@ -0,0 +1,1164 @@ +//@ts-nocheck +import { Any, AnyAmino, AnySDKType } from '../../../../google/protobuf/any.js'; +import { + Plan, + PlanAmino, + PlanSDKType, +} from '../../../../cosmos/upgrade/v1beta1/upgrade.js'; +import { BinaryReader, BinaryWriter } from '../../../../binary.js'; +import { isSet } from '../../../../helpers.js'; +/** + * IdentifiedClientState defines a client state with an additional client + * identifier field. + */ +export interface IdentifiedClientState { + /** client identifier */ + clientId: string; + /** client state */ + clientState?: Any; +} +export interface IdentifiedClientStateProtoMsg { + typeUrl: '/ibc.core.client.v1.IdentifiedClientState'; + value: Uint8Array; +} +/** + * IdentifiedClientState defines a client state with an additional client + * identifier field. + */ +export interface IdentifiedClientStateAmino { + /** client identifier */ + client_id?: string; + /** client state */ + client_state?: AnyAmino; +} +export interface IdentifiedClientStateAminoMsg { + type: 'cosmos-sdk/IdentifiedClientState'; + value: IdentifiedClientStateAmino; +} +/** + * IdentifiedClientState defines a client state with an additional client + * identifier field. + */ +export interface IdentifiedClientStateSDKType { + client_id: string; + client_state?: AnySDKType; +} +/** + * ConsensusStateWithHeight defines a consensus state with an additional height + * field. + */ +export interface ConsensusStateWithHeight { + /** consensus state height */ + height: Height; + /** consensus state */ + consensusState?: Any; +} +export interface ConsensusStateWithHeightProtoMsg { + typeUrl: '/ibc.core.client.v1.ConsensusStateWithHeight'; + value: Uint8Array; +} +/** + * ConsensusStateWithHeight defines a consensus state with an additional height + * field. + */ +export interface ConsensusStateWithHeightAmino { + /** consensus state height */ + height?: HeightAmino; + /** consensus state */ + consensus_state?: AnyAmino; +} +export interface ConsensusStateWithHeightAminoMsg { + type: 'cosmos-sdk/ConsensusStateWithHeight'; + value: ConsensusStateWithHeightAmino; +} +/** + * ConsensusStateWithHeight defines a consensus state with an additional height + * field. + */ +export interface ConsensusStateWithHeightSDKType { + height: HeightSDKType; + consensus_state?: AnySDKType; +} +/** + * ClientConsensusStates defines all the stored consensus states for a given + * client. 
+ */ +export interface ClientConsensusStates { + /** client identifier */ + clientId: string; + /** consensus states and their heights associated with the client */ + consensusStates: ConsensusStateWithHeight[]; +} +export interface ClientConsensusStatesProtoMsg { + typeUrl: '/ibc.core.client.v1.ClientConsensusStates'; + value: Uint8Array; +} +/** + * ClientConsensusStates defines all the stored consensus states for a given + * client. + */ +export interface ClientConsensusStatesAmino { + /** client identifier */ + client_id?: string; + /** consensus states and their heights associated with the client */ + consensus_states?: ConsensusStateWithHeightAmino[]; +} +export interface ClientConsensusStatesAminoMsg { + type: 'cosmos-sdk/ClientConsensusStates'; + value: ClientConsensusStatesAmino; +} +/** + * ClientConsensusStates defines all the stored consensus states for a given + * client. + */ +export interface ClientConsensusStatesSDKType { + client_id: string; + consensus_states: ConsensusStateWithHeightSDKType[]; +} +/** + * ClientUpdateProposal is a governance proposal. If it passes, the substitute + * client's latest consensus state is copied over to the subject client. The proposal + * handler may fail if the subject and the substitute do not match in client and + * chain parameters (with exception to latest height, frozen height, and chain-id). + */ +export interface ClientUpdateProposal { + $typeUrl?: '/ibc.core.client.v1.ClientUpdateProposal'; + /** the title of the update proposal */ + title: string; + /** the description of the proposal */ + description: string; + /** the client identifier for the client to be updated if the proposal passes */ + subjectClientId: string; + /** + * the substitute client identifier for the client standing in for the subject + * client + */ + substituteClientId: string; +} +export interface ClientUpdateProposalProtoMsg { + typeUrl: '/ibc.core.client.v1.ClientUpdateProposal'; + value: Uint8Array; +} +/** + * ClientUpdateProposal is a governance proposal. If it passes, the substitute + * client's latest consensus state is copied over to the subject client. The proposal + * handler may fail if the subject and the substitute do not match in client and + * chain parameters (with exception to latest height, frozen height, and chain-id). + */ +export interface ClientUpdateProposalAmino { + /** the title of the update proposal */ + title?: string; + /** the description of the proposal */ + description?: string; + /** the client identifier for the client to be updated if the proposal passes */ + subject_client_id?: string; + /** + * the substitute client identifier for the client standing in for the subject + * client + */ + substitute_client_id?: string; +} +export interface ClientUpdateProposalAminoMsg { + type: 'cosmos-sdk/ClientUpdateProposal'; + value: ClientUpdateProposalAmino; +} +/** + * ClientUpdateProposal is a governance proposal. If it passes, the substitute + * client's latest consensus state is copied over to the subject client. The proposal + * handler may fail if the subject and the substitute do not match in client and + * chain parameters (with exception to latest height, frozen height, and chain-id). + */ +export interface ClientUpdateProposalSDKType { + $typeUrl?: '/ibc.core.client.v1.ClientUpdateProposal'; + title: string; + description: string; + subject_client_id: string; + substitute_client_id: string; +} +/** + * UpgradeProposal is a gov Content type for initiating an IBC breaking + * upgrade. 
+ */ +export interface UpgradeProposal { + $typeUrl?: '/ibc.core.client.v1.UpgradeProposal'; + title: string; + description: string; + plan: Plan; + /** + * An UpgradedClientState must be provided to perform an IBC breaking upgrade. + * This will make the chain commit to the correct upgraded (self) client state + * before the upgrade occurs, so that connecting chains can verify that the + * new upgraded client is valid by verifying a proof on the previous version + * of the chain. This will allow IBC connections to persist smoothly across + * planned chain upgrades + */ + upgradedClientState?: Any; +} +export interface UpgradeProposalProtoMsg { + typeUrl: '/ibc.core.client.v1.UpgradeProposal'; + value: Uint8Array; +} +/** + * UpgradeProposal is a gov Content type for initiating an IBC breaking + * upgrade. + */ +export interface UpgradeProposalAmino { + title?: string; + description?: string; + plan?: PlanAmino; + /** + * An UpgradedClientState must be provided to perform an IBC breaking upgrade. + * This will make the chain commit to the correct upgraded (self) client state + * before the upgrade occurs, so that connecting chains can verify that the + * new upgraded client is valid by verifying a proof on the previous version + * of the chain. This will allow IBC connections to persist smoothly across + * planned chain upgrades + */ + upgraded_client_state?: AnyAmino; +} +export interface UpgradeProposalAminoMsg { + type: 'cosmos-sdk/UpgradeProposal'; + value: UpgradeProposalAmino; +} +/** + * UpgradeProposal is a gov Content type for initiating an IBC breaking + * upgrade. + */ +export interface UpgradeProposalSDKType { + $typeUrl?: '/ibc.core.client.v1.UpgradeProposal'; + title: string; + description: string; + plan: PlanSDKType; + upgraded_client_state?: AnySDKType; +} +/** + * Height is a monotonically increasing data type + * that can be compared against another Height for the purposes of updating and + * freezing clients + * + * Normally the RevisionHeight is incremented at each height while keeping + * RevisionNumber the same. However some consensus algorithms may choose to + * reset the height in certain conditions e.g. hard forks, state-machine + * breaking changes In these cases, the RevisionNumber is incremented so that + * height continues to be monitonically increasing even as the RevisionHeight + * gets reset + */ +export interface Height { + /** the revision that the client is currently on */ + revisionNumber: bigint; + /** the height within the given revision */ + revisionHeight: bigint; +} +export interface HeightProtoMsg { + typeUrl: '/ibc.core.client.v1.Height'; + value: Uint8Array; +} +/** + * Height is a monotonically increasing data type + * that can be compared against another Height for the purposes of updating and + * freezing clients + * + * Normally the RevisionHeight is incremented at each height while keeping + * RevisionNumber the same. However some consensus algorithms may choose to + * reset the height in certain conditions e.g. 
hard forks, state-machine + * breaking changes In these cases, the RevisionNumber is incremented so that + * height continues to be monitonically increasing even as the RevisionHeight + * gets reset + */ +export interface HeightAmino { + /** the revision that the client is currently on */ + revision_number?: string; + /** the height within the given revision */ + revision_height?: string; +} +export interface HeightAminoMsg { + type: 'cosmos-sdk/Height'; + value: HeightAmino; +} +/** + * Height is a monotonically increasing data type + * that can be compared against another Height for the purposes of updating and + * freezing clients + * + * Normally the RevisionHeight is incremented at each height while keeping + * RevisionNumber the same. However some consensus algorithms may choose to + * reset the height in certain conditions e.g. hard forks, state-machine + * breaking changes In these cases, the RevisionNumber is incremented so that + * height continues to be monitonically increasing even as the RevisionHeight + * gets reset + */ +export interface HeightSDKType { + revision_number: bigint; + revision_height: bigint; +} +/** Params defines the set of IBC light client parameters. */ +export interface Params { + /** allowed_clients defines the list of allowed client state types. */ + allowedClients: string[]; +} +export interface ParamsProtoMsg { + typeUrl: '/ibc.core.client.v1.Params'; + value: Uint8Array; +} +/** Params defines the set of IBC light client parameters. */ +export interface ParamsAmino { + /** allowed_clients defines the list of allowed client state types. */ + allowed_clients?: string[]; +} +export interface ParamsAminoMsg { + type: 'cosmos-sdk/Params'; + value: ParamsAmino; +} +/** Params defines the set of IBC light client parameters. */ +export interface ParamsSDKType { + allowed_clients: string[]; +} +function createBaseIdentifiedClientState(): IdentifiedClientState { + return { + clientId: '', + clientState: undefined, + }; +} +export const IdentifiedClientState = { + typeUrl: '/ibc.core.client.v1.IdentifiedClientState', + encode( + message: IdentifiedClientState, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.clientId !== '') { + writer.uint32(10).string(message.clientId); + } + if (message.clientState !== undefined) { + Any.encode(message.clientState, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): IdentifiedClientState { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseIdentifiedClientState(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.clientId = reader.string(); + break; + case 2: + message.clientState = Any.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): IdentifiedClientState { + return { + clientId: isSet(object.clientId) ? String(object.clientId) : '', + clientState: isSet(object.clientState) + ? Any.fromJSON(object.clientState) + : undefined, + }; + }, + toJSON(message: IdentifiedClientState): unknown { + const obj: any = {}; + message.clientId !== undefined && (obj.clientId = message.clientId); + message.clientState !== undefined && + (obj.clientState = message.clientState + ? 
Any.toJSON(message.clientState) + : undefined); + return obj; + }, + fromPartial(object: Partial): IdentifiedClientState { + const message = createBaseIdentifiedClientState(); + message.clientId = object.clientId ?? ''; + message.clientState = + object.clientState !== undefined && object.clientState !== null + ? Any.fromPartial(object.clientState) + : undefined; + return message; + }, + fromAmino(object: IdentifiedClientStateAmino): IdentifiedClientState { + const message = createBaseIdentifiedClientState(); + if (object.client_id !== undefined && object.client_id !== null) { + message.clientId = object.client_id; + } + if (object.client_state !== undefined && object.client_state !== null) { + message.clientState = Any.fromAmino(object.client_state); + } + return message; + }, + toAmino(message: IdentifiedClientState): IdentifiedClientStateAmino { + const obj: any = {}; + obj.client_id = message.clientId; + obj.client_state = message.clientState + ? Any.toAmino(message.clientState) + : undefined; + return obj; + }, + fromAminoMsg(object: IdentifiedClientStateAminoMsg): IdentifiedClientState { + return IdentifiedClientState.fromAmino(object.value); + }, + toAminoMsg(message: IdentifiedClientState): IdentifiedClientStateAminoMsg { + return { + type: 'cosmos-sdk/IdentifiedClientState', + value: IdentifiedClientState.toAmino(message), + }; + }, + fromProtoMsg(message: IdentifiedClientStateProtoMsg): IdentifiedClientState { + return IdentifiedClientState.decode(message.value); + }, + toProto(message: IdentifiedClientState): Uint8Array { + return IdentifiedClientState.encode(message).finish(); + }, + toProtoMsg(message: IdentifiedClientState): IdentifiedClientStateProtoMsg { + return { + typeUrl: '/ibc.core.client.v1.IdentifiedClientState', + value: IdentifiedClientState.encode(message).finish(), + }; + }, +}; +function createBaseConsensusStateWithHeight(): ConsensusStateWithHeight { + return { + height: Height.fromPartial({}), + consensusState: undefined, + }; +} +export const ConsensusStateWithHeight = { + typeUrl: '/ibc.core.client.v1.ConsensusStateWithHeight', + encode( + message: ConsensusStateWithHeight, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.height !== undefined) { + Height.encode(message.height, writer.uint32(10).fork()).ldelim(); + } + if (message.consensusState !== undefined) { + Any.encode(message.consensusState, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): ConsensusStateWithHeight { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseConsensusStateWithHeight(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.height = Height.decode(reader, reader.uint32()); + break; + case 2: + message.consensusState = Any.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): ConsensusStateWithHeight { + return { + height: isSet(object.height) ? Height.fromJSON(object.height) : undefined, + consensusState: isSet(object.consensusState) + ? Any.fromJSON(object.consensusState) + : undefined, + }; + }, + toJSON(message: ConsensusStateWithHeight): unknown { + const obj: any = {}; + message.height !== undefined && + (obj.height = message.height ? 
Height.toJSON(message.height) : undefined); + message.consensusState !== undefined && + (obj.consensusState = message.consensusState + ? Any.toJSON(message.consensusState) + : undefined); + return obj; + }, + fromPartial( + object: Partial, + ): ConsensusStateWithHeight { + const message = createBaseConsensusStateWithHeight(); + message.height = + object.height !== undefined && object.height !== null + ? Height.fromPartial(object.height) + : undefined; + message.consensusState = + object.consensusState !== undefined && object.consensusState !== null + ? Any.fromPartial(object.consensusState) + : undefined; + return message; + }, + fromAmino(object: ConsensusStateWithHeightAmino): ConsensusStateWithHeight { + const message = createBaseConsensusStateWithHeight(); + if (object.height !== undefined && object.height !== null) { + message.height = Height.fromAmino(object.height); + } + if ( + object.consensus_state !== undefined && + object.consensus_state !== null + ) { + message.consensusState = Any.fromAmino(object.consensus_state); + } + return message; + }, + toAmino(message: ConsensusStateWithHeight): ConsensusStateWithHeightAmino { + const obj: any = {}; + obj.height = message.height ? Height.toAmino(message.height) : undefined; + obj.consensus_state = message.consensusState + ? Any.toAmino(message.consensusState) + : undefined; + return obj; + }, + fromAminoMsg( + object: ConsensusStateWithHeightAminoMsg, + ): ConsensusStateWithHeight { + return ConsensusStateWithHeight.fromAmino(object.value); + }, + toAminoMsg( + message: ConsensusStateWithHeight, + ): ConsensusStateWithHeightAminoMsg { + return { + type: 'cosmos-sdk/ConsensusStateWithHeight', + value: ConsensusStateWithHeight.toAmino(message), + }; + }, + fromProtoMsg( + message: ConsensusStateWithHeightProtoMsg, + ): ConsensusStateWithHeight { + return ConsensusStateWithHeight.decode(message.value); + }, + toProto(message: ConsensusStateWithHeight): Uint8Array { + return ConsensusStateWithHeight.encode(message).finish(); + }, + toProtoMsg( + message: ConsensusStateWithHeight, + ): ConsensusStateWithHeightProtoMsg { + return { + typeUrl: '/ibc.core.client.v1.ConsensusStateWithHeight', + value: ConsensusStateWithHeight.encode(message).finish(), + }; + }, +}; +function createBaseClientConsensusStates(): ClientConsensusStates { + return { + clientId: '', + consensusStates: [], + }; +} +export const ClientConsensusStates = { + typeUrl: '/ibc.core.client.v1.ClientConsensusStates', + encode( + message: ClientConsensusStates, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.clientId !== '') { + writer.uint32(10).string(message.clientId); + } + for (const v of message.consensusStates) { + ConsensusStateWithHeight.encode(v!, writer.uint32(18).fork()).ldelim(); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): ClientConsensusStates { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseClientConsensusStates(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.clientId = reader.string(); + break; + case 2: + message.consensusStates.push( + ConsensusStateWithHeight.decode(reader, reader.uint32()), + ); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): ClientConsensusStates { + return { + clientId: isSet(object.clientId) ? 
String(object.clientId) : '', + consensusStates: Array.isArray(object?.consensusStates) + ? object.consensusStates.map((e: any) => + ConsensusStateWithHeight.fromJSON(e), + ) + : [], + }; + }, + toJSON(message: ClientConsensusStates): unknown { + const obj: any = {}; + message.clientId !== undefined && (obj.clientId = message.clientId); + if (message.consensusStates) { + obj.consensusStates = message.consensusStates.map(e => + e ? ConsensusStateWithHeight.toJSON(e) : undefined, + ); + } else { + obj.consensusStates = []; + } + return obj; + }, + fromPartial(object: Partial): ClientConsensusStates { + const message = createBaseClientConsensusStates(); + message.clientId = object.clientId ?? ''; + message.consensusStates = + object.consensusStates?.map(e => + ConsensusStateWithHeight.fromPartial(e), + ) || []; + return message; + }, + fromAmino(object: ClientConsensusStatesAmino): ClientConsensusStates { + const message = createBaseClientConsensusStates(); + if (object.client_id !== undefined && object.client_id !== null) { + message.clientId = object.client_id; + } + message.consensusStates = + object.consensus_states?.map(e => + ConsensusStateWithHeight.fromAmino(e), + ) || []; + return message; + }, + toAmino(message: ClientConsensusStates): ClientConsensusStatesAmino { + const obj: any = {}; + obj.client_id = message.clientId; + if (message.consensusStates) { + obj.consensus_states = message.consensusStates.map(e => + e ? ConsensusStateWithHeight.toAmino(e) : undefined, + ); + } else { + obj.consensus_states = []; + } + return obj; + }, + fromAminoMsg(object: ClientConsensusStatesAminoMsg): ClientConsensusStates { + return ClientConsensusStates.fromAmino(object.value); + }, + toAminoMsg(message: ClientConsensusStates): ClientConsensusStatesAminoMsg { + return { + type: 'cosmos-sdk/ClientConsensusStates', + value: ClientConsensusStates.toAmino(message), + }; + }, + fromProtoMsg(message: ClientConsensusStatesProtoMsg): ClientConsensusStates { + return ClientConsensusStates.decode(message.value); + }, + toProto(message: ClientConsensusStates): Uint8Array { + return ClientConsensusStates.encode(message).finish(); + }, + toProtoMsg(message: ClientConsensusStates): ClientConsensusStatesProtoMsg { + return { + typeUrl: '/ibc.core.client.v1.ClientConsensusStates', + value: ClientConsensusStates.encode(message).finish(), + }; + }, +}; +function createBaseClientUpdateProposal(): ClientUpdateProposal { + return { + $typeUrl: '/ibc.core.client.v1.ClientUpdateProposal', + title: '', + description: '', + subjectClientId: '', + substituteClientId: '', + }; +} +export const ClientUpdateProposal = { + typeUrl: '/ibc.core.client.v1.ClientUpdateProposal', + encode( + message: ClientUpdateProposal, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.title !== '') { + writer.uint32(10).string(message.title); + } + if (message.description !== '') { + writer.uint32(18).string(message.description); + } + if (message.subjectClientId !== '') { + writer.uint32(26).string(message.subjectClientId); + } + if (message.substituteClientId !== '') { + writer.uint32(34).string(message.substituteClientId); + } + return writer; + }, + decode( + input: BinaryReader | Uint8Array, + length?: number, + ): ClientUpdateProposal { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseClientUpdateProposal(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + case 3: + message.subjectClientId = reader.string(); + break; + case 4: + message.substituteClientId = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): ClientUpdateProposal { + return { + title: isSet(object.title) ? String(object.title) : '', + description: isSet(object.description) ? String(object.description) : '', + subjectClientId: isSet(object.subjectClientId) + ? String(object.subjectClientId) + : '', + substituteClientId: isSet(object.substituteClientId) + ? String(object.substituteClientId) + : '', + }; + }, + toJSON(message: ClientUpdateProposal): unknown { + const obj: any = {}; + message.title !== undefined && (obj.title = message.title); + message.description !== undefined && + (obj.description = message.description); + message.subjectClientId !== undefined && + (obj.subjectClientId = message.subjectClientId); + message.substituteClientId !== undefined && + (obj.substituteClientId = message.substituteClientId); + return obj; + }, + fromPartial(object: Partial): ClientUpdateProposal { + const message = createBaseClientUpdateProposal(); + message.title = object.title ?? ''; + message.description = object.description ?? ''; + message.subjectClientId = object.subjectClientId ?? ''; + message.substituteClientId = object.substituteClientId ?? ''; + return message; + }, + fromAmino(object: ClientUpdateProposalAmino): ClientUpdateProposal { + const message = createBaseClientUpdateProposal(); + if (object.title !== undefined && object.title !== null) { + message.title = object.title; + } + if (object.description !== undefined && object.description !== null) { + message.description = object.description; + } + if ( + object.subject_client_id !== undefined && + object.subject_client_id !== null + ) { + message.subjectClientId = object.subject_client_id; + } + if ( + object.substitute_client_id !== undefined && + object.substitute_client_id !== null + ) { + message.substituteClientId = object.substitute_client_id; + } + return message; + }, + toAmino(message: ClientUpdateProposal): ClientUpdateProposalAmino { + const obj: any = {}; + obj.title = message.title; + obj.description = message.description; + obj.subject_client_id = message.subjectClientId; + obj.substitute_client_id = message.substituteClientId; + return obj; + }, + fromAminoMsg(object: ClientUpdateProposalAminoMsg): ClientUpdateProposal { + return ClientUpdateProposal.fromAmino(object.value); + }, + toAminoMsg(message: ClientUpdateProposal): ClientUpdateProposalAminoMsg { + return { + type: 'cosmos-sdk/ClientUpdateProposal', + value: ClientUpdateProposal.toAmino(message), + }; + }, + fromProtoMsg(message: ClientUpdateProposalProtoMsg): ClientUpdateProposal { + return ClientUpdateProposal.decode(message.value); + }, + toProto(message: ClientUpdateProposal): Uint8Array { + return ClientUpdateProposal.encode(message).finish(); + }, + toProtoMsg(message: ClientUpdateProposal): ClientUpdateProposalProtoMsg { + return { + typeUrl: '/ibc.core.client.v1.ClientUpdateProposal', + value: ClientUpdateProposal.encode(message).finish(), + }; + }, +}; +function createBaseUpgradeProposal(): UpgradeProposal { + return { + $typeUrl: '/ibc.core.client.v1.UpgradeProposal', + title: '', + 
description: '', + plan: Plan.fromPartial({}), + upgradedClientState: undefined, + }; +} +export const UpgradeProposal = { + typeUrl: '/ibc.core.client.v1.UpgradeProposal', + encode( + message: UpgradeProposal, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.title !== '') { + writer.uint32(10).string(message.title); + } + if (message.description !== '') { + writer.uint32(18).string(message.description); + } + if (message.plan !== undefined) { + Plan.encode(message.plan, writer.uint32(26).fork()).ldelim(); + } + if (message.upgradedClientState !== undefined) { + Any.encode( + message.upgradedClientState, + writer.uint32(34).fork(), + ).ldelim(); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): UpgradeProposal { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseUpgradeProposal(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.title = reader.string(); + break; + case 2: + message.description = reader.string(); + break; + case 3: + message.plan = Plan.decode(reader, reader.uint32()); + break; + case 4: + message.upgradedClientState = Any.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): UpgradeProposal { + return { + title: isSet(object.title) ? String(object.title) : '', + description: isSet(object.description) ? String(object.description) : '', + plan: isSet(object.plan) ? Plan.fromJSON(object.plan) : undefined, + upgradedClientState: isSet(object.upgradedClientState) + ? Any.fromJSON(object.upgradedClientState) + : undefined, + }; + }, + toJSON(message: UpgradeProposal): unknown { + const obj: any = {}; + message.title !== undefined && (obj.title = message.title); + message.description !== undefined && + (obj.description = message.description); + message.plan !== undefined && + (obj.plan = message.plan ? Plan.toJSON(message.plan) : undefined); + message.upgradedClientState !== undefined && + (obj.upgradedClientState = message.upgradedClientState + ? Any.toJSON(message.upgradedClientState) + : undefined); + return obj; + }, + fromPartial(object: Partial): UpgradeProposal { + const message = createBaseUpgradeProposal(); + message.title = object.title ?? ''; + message.description = object.description ?? ''; + message.plan = + object.plan !== undefined && object.plan !== null + ? Plan.fromPartial(object.plan) + : undefined; + message.upgradedClientState = + object.upgradedClientState !== undefined && + object.upgradedClientState !== null + ? 
Any.fromPartial(object.upgradedClientState) + : undefined; + return message; + }, + fromAmino(object: UpgradeProposalAmino): UpgradeProposal { + const message = createBaseUpgradeProposal(); + if (object.title !== undefined && object.title !== null) { + message.title = object.title; + } + if (object.description !== undefined && object.description !== null) { + message.description = object.description; + } + if (object.plan !== undefined && object.plan !== null) { + message.plan = Plan.fromAmino(object.plan); + } + if ( + object.upgraded_client_state !== undefined && + object.upgraded_client_state !== null + ) { + message.upgradedClientState = Any.fromAmino(object.upgraded_client_state); + } + return message; + }, + toAmino(message: UpgradeProposal): UpgradeProposalAmino { + const obj: any = {}; + obj.title = message.title; + obj.description = message.description; + obj.plan = message.plan ? Plan.toAmino(message.plan) : undefined; + obj.upgraded_client_state = message.upgradedClientState + ? Any.toAmino(message.upgradedClientState) + : undefined; + return obj; + }, + fromAminoMsg(object: UpgradeProposalAminoMsg): UpgradeProposal { + return UpgradeProposal.fromAmino(object.value); + }, + toAminoMsg(message: UpgradeProposal): UpgradeProposalAminoMsg { + return { + type: 'cosmos-sdk/UpgradeProposal', + value: UpgradeProposal.toAmino(message), + }; + }, + fromProtoMsg(message: UpgradeProposalProtoMsg): UpgradeProposal { + return UpgradeProposal.decode(message.value); + }, + toProto(message: UpgradeProposal): Uint8Array { + return UpgradeProposal.encode(message).finish(); + }, + toProtoMsg(message: UpgradeProposal): UpgradeProposalProtoMsg { + return { + typeUrl: '/ibc.core.client.v1.UpgradeProposal', + value: UpgradeProposal.encode(message).finish(), + }; + }, +}; +function createBaseHeight(): Height { + return { + revisionNumber: BigInt(0), + revisionHeight: BigInt(0), + }; +} +export const Height = { + typeUrl: '/ibc.core.client.v1.Height', + encode( + message: Height, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + if (message.revisionNumber !== BigInt(0)) { + writer.uint32(8).uint64(message.revisionNumber); + } + if (message.revisionHeight !== BigInt(0)) { + writer.uint32(16).uint64(message.revisionHeight); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): Height { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseHeight(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.revisionNumber = reader.uint64(); + break; + case 2: + message.revisionHeight = reader.uint64(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): Height { + return { + revisionNumber: isSet(object.revisionNumber) + ? BigInt(object.revisionNumber.toString()) + : BigInt(0), + revisionHeight: isSet(object.revisionHeight) + ? 
BigInt(object.revisionHeight.toString()) + : BigInt(0), + }; + }, + toJSON(message: Height): unknown { + const obj: any = {}; + message.revisionNumber !== undefined && + (obj.revisionNumber = (message.revisionNumber || BigInt(0)).toString()); + message.revisionHeight !== undefined && + (obj.revisionHeight = (message.revisionHeight || BigInt(0)).toString()); + return obj; + }, + fromPartial(object: Partial): Height { + const message = createBaseHeight(); + message.revisionNumber = + object.revisionNumber !== undefined && object.revisionNumber !== null + ? BigInt(object.revisionNumber.toString()) + : BigInt(0); + message.revisionHeight = + object.revisionHeight !== undefined && object.revisionHeight !== null + ? BigInt(object.revisionHeight.toString()) + : BigInt(0); + return message; + }, + fromAmino(object: HeightAmino): Height { + return { + revisionNumber: BigInt(object.revision_number || '0'), + revisionHeight: BigInt(object.revision_height || '0'), + }; + }, + toAmino(message: Height): HeightAmino { + const obj: any = {}; + obj.revision_number = message.revisionNumber + ? message.revisionNumber.toString() + : undefined; + obj.revision_height = message.revisionHeight + ? message.revisionHeight.toString() + : undefined; + return obj; + }, + fromAminoMsg(object: HeightAminoMsg): Height { + return Height.fromAmino(object.value); + }, + toAminoMsg(message: Height): HeightAminoMsg { + return { + type: 'cosmos-sdk/Height', + value: Height.toAmino(message), + }; + }, + fromProtoMsg(message: HeightProtoMsg): Height { + return Height.decode(message.value); + }, + toProto(message: Height): Uint8Array { + return Height.encode(message).finish(); + }, + toProtoMsg(message: Height): HeightProtoMsg { + return { + typeUrl: '/ibc.core.client.v1.Height', + value: Height.encode(message).finish(), + }; + }, +}; +function createBaseParams(): Params { + return { + allowedClients: [], + }; +} +export const Params = { + typeUrl: '/ibc.core.client.v1.Params', + encode( + message: Params, + writer: BinaryWriter = BinaryWriter.create(), + ): BinaryWriter { + for (const v of message.allowedClients) { + writer.uint32(10).string(v!); + } + return writer; + }, + decode(input: BinaryReader | Uint8Array, length?: number): Params { + const reader = + input instanceof BinaryReader ? input : new BinaryReader(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseParams(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.allowedClients.push(reader.string()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }, + fromJSON(object: any): Params { + return { + allowedClients: Array.isArray(object?.allowedClients) + ? 
object.allowedClients.map((e: any) => String(e)) + : [], + }; + }, + toJSON(message: Params): unknown { + const obj: any = {}; + if (message.allowedClients) { + obj.allowedClients = message.allowedClients.map(e => e); + } else { + obj.allowedClients = []; + } + return obj; + }, + fromPartial(object: Partial): Params { + const message = createBaseParams(); + message.allowedClients = object.allowedClients?.map(e => e) || []; + return message; + }, + fromAmino(object: ParamsAmino): Params { + const message = createBaseParams(); + message.allowedClients = object.allowed_clients?.map(e => e) || []; + return message; + }, + toAmino(message: Params): ParamsAmino { + const obj: any = {}; + if (message.allowedClients) { + obj.allowed_clients = message.allowedClients.map(e => e); + } else { + obj.allowed_clients = []; + } + return obj; + }, + fromAminoMsg(object: ParamsAminoMsg): Params { + return Params.fromAmino(object.value); + }, + toAminoMsg(message: Params): ParamsAminoMsg { + return { + type: 'cosmos-sdk/Params', + value: Params.toAmino(message), + }; + }, + fromProtoMsg(message: ParamsProtoMsg): Params { + return Params.decode(message.value); + }, + toProto(message: Params): Uint8Array { + return Params.encode(message).finish(); + }, + toProtoMsg(message: Params): ParamsProtoMsg { + return { + typeUrl: '/ibc.core.client.v1.Params', + value: Params.encode(message).finish(), + }; + }, +}; diff --git a/packages/cosmic-proto/src/codegen/index.ts b/packages/cosmic-proto/src/codegen/index.ts new file mode 100644 index 00000000000..ae0a6789bb3 --- /dev/null +++ b/packages/cosmic-proto/src/codegen/index.ts @@ -0,0 +1,17 @@ +//@ts-nocheck +/** + * This file and any referenced files were automatically generated by @cosmology/telescope@1.4.12 + * DO NOT MODIFY BY HAND. Instead, download the latest proto files for your chain + * and run the transpile command or yarn proto command to regenerate this bundle. + */ + +export * from './agoric/bundle.js'; +export * from './agoric/client.js'; +export * from './cosmos_proto/bundle.js'; +export * from './cosmos/bundle.js'; +export * from './gogoproto/bundle.js'; +export * from './google/bundle.js'; +export * from './ibc/bundle.js'; +export * from './varint.js'; +export * from './utf8.js'; +export * from './binary.js'; diff --git a/packages/cosmic-proto/src/codegen/utf8.ts b/packages/cosmic-proto/src/codegen/utf8.ts new file mode 100644 index 00000000000..bf2acc47f2d --- /dev/null +++ b/packages/cosmic-proto/src/codegen/utf8.ts @@ -0,0 +1,148 @@ +//@ts-nocheck +/** + * This file and any referenced files were automatically generated by @cosmology/telescope@1.4.12 + * DO NOT MODIFY BY HAND. Instead, download the latest proto files for your chain + * and run the transpile command or yarn proto command to regenerate this bundle. + */ + +// Copyright (c) 2016, Daniel Wirtz All rights reserved. + +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: + +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution. 
+// * Neither the name of its author, nor the names of its contributors +// may be used to endorse or promote products derived from this software +// without specific prior written permission. + +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +'use strict'; + +/** + * Calculates the UTF8 byte length of a string. + * @param {string} string String + * @returns {number} Byte length + */ +export function utf8Length(str: string) { + let len = 0, + c = 0; + for (let i = 0; i < str.length; ++i) { + c = str.charCodeAt(i); + if (c < 128) len += 1; + else if (c < 2048) len += 2; + else if ( + (c & 0xfc00) === 0xd800 && + (str.charCodeAt(i + 1) & 0xfc00) === 0xdc00 + ) { + ++i; + len += 4; + } else len += 3; + } + return len; +} + +/** + * Reads UTF8 bytes as a string. + * @param {Uint8Array} buffer Source buffer + * @param {number} start Source start + * @param {number} end Source end + * @returns {string} String read + */ +export function utf8Read( + buffer: ArrayLike, + start: number, + end: number, +) { + const len = end - start; + if (len < 1) return ''; + const chunk = []; + let parts: string[] = [], + i = 0, // char offset + t; // temporary + while (start < end) { + t = buffer[start++]; + if (t < 128) chunk[i++] = t; + else if (t > 191 && t < 224) + chunk[i++] = ((t & 31) << 6) | (buffer[start++] & 63); + else if (t > 239 && t < 365) { + t = + (((t & 7) << 18) | + ((buffer[start++] & 63) << 12) | + ((buffer[start++] & 63) << 6) | + (buffer[start++] & 63)) - + 0x10000; + chunk[i++] = 0xd800 + (t >> 10); + chunk[i++] = 0xdc00 + (t & 1023); + } else + chunk[i++] = + ((t & 15) << 12) | + ((buffer[start++] & 63) << 6) | + (buffer[start++] & 63); + if (i > 8191) { + (parts || (parts = [])).push(String.fromCharCode(...chunk)); + i = 0; + } + } + if (parts) { + if (i) parts.push(String.fromCharCode(...chunk.slice(0, i))); + return parts.join(''); + } + return String.fromCharCode(...chunk.slice(0, i)); +} + +/** + * Writes a string as UTF8 bytes. 
+ * @param {string} string Source string + * @param {Uint8Array} buffer Destination buffer + * @param {number} offset Destination offset + * @returns {number} Bytes written + */ +export function utf8Write( + str: string, + buffer: Uint8Array | Array, + offset: number, +) { + const start = offset; + let c1, // character 1 + c2; // character 2 + for (let i = 0; i < str.length; ++i) { + c1 = str.charCodeAt(i); + if (c1 < 128) { + buffer[offset++] = c1; + } else if (c1 < 2048) { + buffer[offset++] = (c1 >> 6) | 192; + buffer[offset++] = (c1 & 63) | 128; + } else if ( + (c1 & 0xfc00) === 0xd800 && + ((c2 = str.charCodeAt(i + 1)) & 0xfc00) === 0xdc00 + ) { + c1 = 0x10000 + ((c1 & 0x03ff) << 10) + (c2 & 0x03ff); + ++i; + buffer[offset++] = (c1 >> 18) | 240; + buffer[offset++] = ((c1 >> 12) & 63) | 128; + buffer[offset++] = ((c1 >> 6) & 63) | 128; + buffer[offset++] = (c1 & 63) | 128; + } else { + buffer[offset++] = (c1 >> 12) | 224; + buffer[offset++] = ((c1 >> 6) & 63) | 128; + buffer[offset++] = (c1 & 63) | 128; + } + } + return offset - start; +} diff --git a/packages/cosmic-proto/src/codegen/varint.ts b/packages/cosmic-proto/src/codegen/varint.ts new file mode 100644 index 00000000000..17f9b16781e --- /dev/null +++ b/packages/cosmic-proto/src/codegen/varint.ts @@ -0,0 +1,488 @@ +//@ts-nocheck +/** + * This file and any referenced files were automatically generated by @cosmology/telescope@1.4.12 + * DO NOT MODIFY BY HAND. Instead, download the latest proto files for your chain + * and run the transpile command or yarn proto command to regenerate this bundle. + */ + +// Copyright 2008 Google Inc. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Code generated by the Protocol Buffer compiler is owned by the owner +// of the input file used when generating it. This code is not +// standalone and requires a support library to be linked with it. This +// support library is itself covered by the above license. 
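The varint helpers that follow model a 64-bit integer as a 32:32 { lo, hi } split of plain JS numbers rather than a BigInt, which is what lets the readers and writers below stay on ordinary number arithmetic. A minimal round-trip sketch, illustrative only and not part of the generated file (the relative import path is an assumption about a sibling consumer module):

    import { int64FromString, int64ToString } from './varint.js';

    // 2^32 does not fit in the low word, so it carries into the high word.
    const bits = int64FromString('4294967296');
    console.log(bits);                            // { lo: 0, hi: 1 }
    console.log(int64ToString(bits.lo, bits.hi)); // '4294967296'

    // Negative values pass through two's-complement negation and back.
    const neg = int64FromString('-1');            // { lo: -1, hi: -1 }
    console.log(int64ToString(neg.lo, neg.hi));   // '-1'
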
+ +/* eslint-disable prefer-const,@typescript-eslint/restrict-plus-operands */ + +/** + * Read a 64 bit varint as two JS numbers. + * + * Returns tuple: + * [0]: low bits + * [1]: high bits + * + * Copyright 2008 Google Inc. All rights reserved. + * + * See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/buffer_decoder.js#L175 + */ +export function varint64read(this: ReaderLike): [number, number] { + let lowBits = 0; + let highBits = 0; + + for (let shift = 0; shift < 28; shift += 7) { + let b = this.buf[this.pos++]; + lowBits |= (b & 0x7f) << shift; + if ((b & 0x80) == 0) { + this.assertBounds(); + return [lowBits, highBits]; + } + } + + let middleByte = this.buf[this.pos++]; + + // last four bits of the first 32 bit number + lowBits |= (middleByte & 0x0f) << 28; + + // 3 upper bits are part of the next 32 bit number + highBits = (middleByte & 0x70) >> 4; + + if ((middleByte & 0x80) == 0) { + this.assertBounds(); + return [lowBits, highBits]; + } + + for (let shift = 3; shift <= 31; shift += 7) { + let b = this.buf[this.pos++]; + highBits |= (b & 0x7f) << shift; + if ((b & 0x80) == 0) { + this.assertBounds(); + return [lowBits, highBits]; + } + } + + throw new Error('invalid varint'); +} + +/** + * Write a 64 bit varint, given as two JS numbers, to the given bytes array. + * + * Copyright 2008 Google Inc. All rights reserved. + * + * See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/writer.js#L344 + */ +export function varint64write(lo: number, hi: number, bytes: number[]): void { + for (let i = 0; i < 28; i = i + 7) { + const shift = lo >>> i; + const hasNext = !(shift >>> 7 == 0 && hi == 0); + const byte = (hasNext ? shift | 0x80 : shift) & 0xff; + bytes.push(byte); + if (!hasNext) { + return; + } + } + + const splitBits = ((lo >>> 28) & 0x0f) | ((hi & 0x07) << 4); + const hasMoreBits = !(hi >> 3 == 0); + bytes.push((hasMoreBits ? splitBits | 0x80 : splitBits) & 0xff); + + if (!hasMoreBits) { + return; + } + + for (let i = 3; i < 31; i = i + 7) { + const shift = hi >>> i; + const hasNext = !(shift >>> 7 == 0); + const byte = (hasNext ? shift | 0x80 : shift) & 0xff; + bytes.push(byte); + if (!hasNext) { + return; + } + } + + bytes.push((hi >>> 31) & 0x01); +} + +// constants for binary math +const TWO_PWR_32_DBL = 0x100000000; + +/** + * Parse decimal string of 64 bit integer value as two JS numbers. + * + * Copyright 2008 Google Inc. All rights reserved. + * + * See https://github.com/protocolbuffers/protobuf-javascript/blob/a428c58273abad07c66071d9753bc4d1289de426/experimental/runtime/int64.js#L10 + */ +export function int64FromString(dec: string): { lo: number; hi: number } { + // Check for minus sign. + const minus = dec[0] === '-'; + if (minus) { + dec = dec.slice(1); + } + + // Work 6 decimal digits at a time, acting like we're converting base 1e6 + // digits to binary. This is safe to do with floating point math because + // Number.isSafeInteger(ALL_32_BITS * 1e6) == true. + const base = 1e6; + let lowBits = 0; + let highBits = 0; + + function add1e6digit(begin: number, end?: number) { + // Note: Number('') is 0. 
+ const digit1e6 = Number(dec.slice(begin, end)); + highBits *= base; + lowBits = lowBits * base + digit1e6; + // Carry bits from lowBits to + if (lowBits >= TWO_PWR_32_DBL) { + highBits = highBits + ((lowBits / TWO_PWR_32_DBL) | 0); + lowBits = lowBits % TWO_PWR_32_DBL; + } + } + + add1e6digit(-24, -18); + add1e6digit(-18, -12); + add1e6digit(-12, -6); + add1e6digit(-6); + return minus ? negate(lowBits, highBits) : newBits(lowBits, highBits); +} + +/** + * Losslessly converts a 64-bit signed integer in 32:32 split representation + * into a decimal string. + * + * Copyright 2008 Google Inc. All rights reserved. + * + * See https://github.com/protocolbuffers/protobuf-javascript/blob/a428c58273abad07c66071d9753bc4d1289de426/experimental/runtime/int64.js#L10 + */ +export function int64ToString(lo: number, hi: number): string { + let bits = newBits(lo, hi); + // If we're treating the input as a signed value and the high bit is set, do + // a manual two's complement conversion before the decimal conversion. + const negative = bits.hi & 0x80000000; + if (negative) { + bits = negate(bits.lo, bits.hi); + } + const result = uInt64ToString(bits.lo, bits.hi); + return negative ? '-' + result : result; +} + +/** + * Losslessly converts a 64-bit unsigned integer in 32:32 split representation + * into a decimal string. + * + * Copyright 2008 Google Inc. All rights reserved. + * + * See https://github.com/protocolbuffers/protobuf-javascript/blob/a428c58273abad07c66071d9753bc4d1289de426/experimental/runtime/int64.js#L10 + */ +export function uInt64ToString(lo: number, hi: number): string { + ({ lo, hi } = toUnsigned(lo, hi)); + // Skip the expensive conversion if the number is small enough to use the + // built-in conversions. + // Number.MAX_SAFE_INTEGER = 0x001FFFFF FFFFFFFF, thus any number with + // highBits <= 0x1FFFFF can be safely expressed with a double and retain + // integer precision. + // Proven by: Number.isSafeInteger(0x1FFFFF * 2**32 + 0xFFFFFFFF) == true. + if (hi <= 0x1fffff) { + return String(TWO_PWR_32_DBL * hi + lo); + } + + // What this code is doing is essentially converting the input number from + // base-2 to base-1e7, which allows us to represent the 64-bit range with + // only 3 (very large) digits. Those digits are then trivial to convert to + // a base-10 string. + + // The magic numbers used here are - + // 2^24 = 16777216 = (1,6777216) in base-1e7. + // 2^48 = 281474976710656 = (2,8147497,6710656) in base-1e7. + + // Split 32:32 representation into 16:24:24 representation so our + // intermediate digits don't overflow. + const low = lo & 0xffffff; + const mid = ((lo >>> 24) | (hi << 8)) & 0xffffff; + const high = (hi >> 16) & 0xffff; + + // Assemble our three base-1e7 digits, ignoring carries. The maximum + // value in a digit at this step is representable as a 48-bit integer, which + // can be stored in a 64-bit floating point number. + let digitA = low + mid * 6777216 + high * 6710656; + let digitB = mid + high * 8147497; + let digitC = high * 2; + + // Apply carries from A to B and from B to C. + const base = 10000000; + if (digitA >= base) { + digitB += Math.floor(digitA / base); + digitA %= base; + } + + if (digitB >= base) { + digitC += Math.floor(digitB / base); + digitB %= base; + } + + // If digitC is 0, then we should have returned in the trivial code path + // at the top for non-safe integers. Given this, we can assume both digitB + // and digitA need leading zeros. 
+ return ( + digitC.toString() + + decimalFrom1e7WithLeadingZeros(digitB) + + decimalFrom1e7WithLeadingZeros(digitA) + ); +} + +function toUnsigned(lo: number, hi: number): { lo: number; hi: number } { + return { lo: lo >>> 0, hi: hi >>> 0 }; +} + +function newBits(lo: number, hi: number): { lo: number; hi: number } { + return { lo: lo | 0, hi: hi | 0 }; +} + +/** + * Returns two's compliment negation of input. + * @see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Bitwise_Operators#Signed_32-bit_integers + */ +function negate(lowBits: number, highBits: number) { + highBits = ~highBits; + if (lowBits) { + lowBits = ~lowBits + 1; + } else { + // If lowBits is 0, then bitwise-not is 0xFFFFFFFF, + // adding 1 to that, results in 0x100000000, which leaves + // the low bits 0x0 and simply adds one to the high bits. + highBits += 1; + } + return newBits(lowBits, highBits); +} + +/** + * Returns decimal representation of digit1e7 with leading zeros. + */ +const decimalFrom1e7WithLeadingZeros = (digit1e7: number) => { + const partial = String(digit1e7); + return '0000000'.slice(partial.length) + partial; +}; + +/** + * Write a 32 bit varint, signed or unsigned. Same as `varint64write(0, value, bytes)` + * + * Copyright 2008 Google Inc. All rights reserved. + * + * See https://github.com/protocolbuffers/protobuf/blob/1b18833f4f2a2f681f4e4a25cdf3b0a43115ec26/js/binary/encoder.js#L144 + */ +export function varint32write(value: number, bytes: number[]): void { + if (value >= 0) { + // write value as varint 32 + while (value > 0x7f) { + bytes.push((value & 0x7f) | 0x80); + value = value >>> 7; + } + bytes.push(value); + } else { + for (let i = 0; i < 9; i++) { + bytes.push((value & 127) | 128); + value = value >> 7; + } + bytes.push(1); + } +} + +/** + * Read an unsigned 32 bit varint. + * + * See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/buffer_decoder.js#L220 + */ +export function varint32read(this: ReaderLike): number { + let b = this.buf[this.pos++]; + let result = b & 0x7f; + if ((b & 0x80) == 0) { + this.assertBounds(); + return result; + } + + b = this.buf[this.pos++]; + result |= (b & 0x7f) << 7; + if ((b & 0x80) == 0) { + this.assertBounds(); + return result; + } + + b = this.buf[this.pos++]; + result |= (b & 0x7f) << 14; + if ((b & 0x80) == 0) { + this.assertBounds(); + return result; + } + + b = this.buf[this.pos++]; + result |= (b & 0x7f) << 21; + if ((b & 0x80) == 0) { + this.assertBounds(); + return result; + } + + // Extract only last 4 bits + b = this.buf[this.pos++]; + result |= (b & 0x0f) << 28; + + for (let readBytes = 5; (b & 0x80) !== 0 && readBytes < 10; readBytes++) + b = this.buf[this.pos++]; + + if ((b & 0x80) != 0) throw new Error('invalid varint'); + + this.assertBounds(); + + // Result can have 32 bits, convert it to unsigned + return result >>> 0; +} + +type ReaderLike = { + buf: Uint8Array; + pos: number; + len: number; + assertBounds(): void; +}; + +/** + * encode zig zag + */ +export function zzEncode(lo: number, hi: number) { + let mask = hi >> 31; + hi = (((hi << 1) | (lo >>> 31)) ^ mask) >>> 0; + lo = ((lo << 1) ^ mask) >>> 0; + return [lo, hi]; +} + +/** + * decode zig zag + */ +export function zzDecode(lo: number, hi: number) { + let mask = -(lo & 1); + lo = (((lo >>> 1) | (hi << 31)) ^ mask) >>> 0; + hi = ((hi >>> 1) ^ mask) >>> 0; + return [lo, hi]; +} + +/** + * unsigned int32 without moving pos. 
+ */ +export function readUInt32(buf: Uint8Array, pos: number) { + return ( + (buf[pos] | (buf[pos + 1] << 8) | (buf[pos + 2] << 16)) + + buf[pos + 3] * 0x1000000 + ); +} + +/** + * signed int32 without moving pos. + */ +export function readInt32(buf: Uint8Array, pos: number) { + return ( + (buf[pos] | (buf[pos + 1] << 8) | (buf[pos + 2] << 16)) + + (buf[pos + 3] << 24) + ); +} + +/** + * writing varint32 to pos + */ +export function writeVarint32( + val: number, + buf: Uint8Array | number[], + pos: number, +) { + while (val > 127) { + buf[pos++] = (val & 127) | 128; + val >>>= 7; + } + buf[pos] = val; +} + +/** + * writing varint64 to pos + */ +export function writeVarint64( + val: { lo: number; hi: number }, + buf: Uint8Array | number[], + pos: number, +) { + while (val.hi) { + buf[pos++] = (val.lo & 127) | 128; + val.lo = ((val.lo >>> 7) | (val.hi << 25)) >>> 0; + val.hi >>>= 7; + } + while (val.lo > 127) { + buf[pos++] = (val.lo & 127) | 128; + val.lo = val.lo >>> 7; + } + buf[pos++] = val.lo; +} + +export function int64Length(lo: number, hi: number) { + let part0 = lo, + part1 = ((lo >>> 28) | (hi << 4)) >>> 0, + part2 = hi >>> 24; + return part2 === 0 + ? part1 === 0 + ? part0 < 16384 + ? part0 < 128 + ? 1 + : 2 + : part0 < 2097152 + ? 3 + : 4 + : part1 < 16384 + ? part1 < 128 + ? 5 + : 6 + : part1 < 2097152 + ? 7 + : 8 + : part2 < 128 + ? 9 + : 10; +} + +export function writeFixed32( + val: number, + buf: Uint8Array | number[], + pos: number, +) { + buf[pos] = val & 255; + buf[pos + 1] = (val >>> 8) & 255; + buf[pos + 2] = (val >>> 16) & 255; + buf[pos + 3] = val >>> 24; +} + +export function writeByte( + val: number, + buf: Uint8Array | number[], + pos: number, +) { + buf[pos] = val & 255; +} From 1e8d9236a451caab2b9e5744d75d6c8e07804957 Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Tue, 27 Feb 2024 15:53:44 -0800 Subject: [PATCH 20/47] build: ignore output --- .dockerignore | 1 - .gitignore | 1 - .prettierignore | 1 - packages/cosmic-proto/.gitignore | 4 ---- 4 files changed, 7 deletions(-) diff --git a/.dockerignore b/.dockerignore index c0658b5c374..6e6aa7d3e00 100644 --- a/.dockerignore +++ b/.dockerignore @@ -14,7 +14,6 @@ packages/stat-logger **/vars.tf **/*.log **/dist -!packages/cosmic-proto/dist **/build **/bundles **/__pycache__ diff --git a/.gitignore b/.gitignore index 79937b351f2..924317ab198 100644 --- a/.gitignore +++ b/.gitignore @@ -56,7 +56,6 @@ bundles bundle-*.js compiled dist -!packages/cosmic-proto/dist api-docs # misc diff --git a/.prettierignore b/.prettierignore index 0fe320e6ea2..e8c2df8fa14 100644 --- a/.prettierignore +++ b/.prettierignore @@ -26,4 +26,3 @@ public # copied from upstream /packages/xsnap/lib/object-inspect.js - diff --git a/packages/cosmic-proto/.gitignore b/packages/cosmic-proto/.gitignore index 266a097cb75..aaf8d7cd0ae 100644 --- a/packages/cosmic-proto/.gitignore +++ b/packages/cosmic-proto/.gitignore @@ -1,7 +1,3 @@ -# TODO gitignore build output and generate it as needed -# We don't ignore the build output (dist, mjs) because our CI integration -# tests don't have hooks to build the package before consuming it. 
- # cache .tsimp From 55ae8c8d01124951587b1348c5dea5130e6b3926 Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Tue, 27 Feb 2024 15:35:18 -0800 Subject: [PATCH 21/47] build: export swingset/swingset.js --- packages/cosmic-proto/package.json | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/packages/cosmic-proto/package.json b/packages/cosmic-proto/package.json index 08375ef070d..84a1140d292 100644 --- a/packages/cosmic-proto/package.json +++ b/packages/cosmic-proto/package.json @@ -28,6 +28,10 @@ "types": "./dist/codegen/agoric/swingset/query.d.ts", "default": "./dist/codegen/agoric/swingset/query.js" }, + "./swingset/swingset.js": { + "types": "./dist/codegen/agoric/swingset/swingset.d.ts", + "default": "./dist/codegen/agoric/swingset/swingset.js" + }, "./vstorage/query.js": { "types": "./dist/codegen/agoric/vstorage/query.d.ts", "default": "./dist/codegen/agoric/vstorage/query.js" From b7bbc8516d1b749c65d1c42c8ff6018eb2991313 Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Mon, 26 Feb 2024 12:13:40 -0800 Subject: [PATCH 22/47] feat: SES compatibility --- packages/cosmic-proto/package.json | 6 +++- patches/axios+1.6.7.patch | 44 ++++++++++++++++++++++++++++++ 2 files changed, 49 insertions(+), 1 deletion(-) create mode 100644 patches/axios+1.6.7.patch diff --git a/packages/cosmic-proto/package.json b/packages/cosmic-proto/package.json index 84a1140d292..98560f5befb 100644 --- a/packages/cosmic-proto/package.json +++ b/packages/cosmic-proto/package.json @@ -75,7 +75,11 @@ "@cosmjs/proto-signing": "^0.32.2", "@cosmjs/stargate": "^0.32.2", "@cosmjs/tendermint-rpc": "^0.32.2", - "@endo/init": "^1.0.3" + "@endo/init": "^1.0.3", + "axios": "^1.6.7" + }, + "resolutions": { + "**/protobufjs": "^7.2.4" }, "ava": { "extensions": { diff --git a/patches/axios+1.6.7.patch b/patches/axios+1.6.7.patch new file mode 100644 index 00000000000..3373cf41c42 --- /dev/null +++ b/patches/axios+1.6.7.patch @@ -0,0 +1,44 @@ +diff --git a/node_modules/axios/dist/node/axios.cjs b/node_modules/axios/dist/node/axios.cjs +index 9099d87..7104f6e 100644 +--- a/node_modules/axios/dist/node/axios.cjs ++++ b/node_modules/axios/dist/node/axios.cjs +@@ -370,9 +370,9 @@ function merge(/* obj1, obj2, obj3, ... 
*/) { + const extend = (a, b, thisArg, {allOwnKeys}= {}) => { + forEach(b, (val, key) => { + if (thisArg && isFunction(val)) { +- a[key] = bind(val, thisArg); ++ Object.defineProperty(a, key, {value: bind(val, thisArg)}); + } else { +- a[key] = val; ++ Object.defineProperty(a, key, {value: val}); + } + }, {allOwnKeys}); + return a; +@@ -403,7 +403,9 @@ const stripBOM = (content) => { + */ + const inherits = (constructor, superConstructor, props, descriptors) => { + constructor.prototype = Object.create(superConstructor.prototype, descriptors); +- constructor.prototype.constructor = constructor; ++ Object.defineProperty(constructor, 'constructor', { ++ value: constructor ++ }); + Object.defineProperty(constructor, 'super', { + value: superConstructor.prototype + }); +@@ -565,12 +567,14 @@ const isRegExp = kindOfTest('RegExp'); + + const reduceDescriptors = (obj, reducer) => { + const descriptors = Object.getOwnPropertyDescriptors(obj); +- const reducedDescriptors = {}; ++ let reducedDescriptors = {}; + + forEach(descriptors, (descriptor, name) => { + let ret; + if ((ret = reducer(descriptor, name, obj)) !== false) { +- reducedDescriptors[name] = ret || descriptor; ++ reducedDescriptors = {...reducedDescriptors, ++ [name]: ret || descriptor ++ }; + } + }); + From d967a9cb0f03a49460b76b0d6890aff633e056a3 Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Thu, 15 Feb 2024 11:17:13 -0800 Subject: [PATCH 23/47] test: exports --- packages/cosmic-proto/package.json | 16 ++-- .../test/snapshots/test-exports.js.md | 87 ++++++++++++++++++ .../test/snapshots/test-exports.js.snap | Bin 0 -> 940 bytes packages/cosmic-proto/test/test-agoric.ts | 23 ----- packages/cosmic-proto/test/test-exports.js | 27 ++++++ yarn.lock | 61 +++++++++++- 6 files changed, 181 insertions(+), 33 deletions(-) create mode 100644 packages/cosmic-proto/test/snapshots/test-exports.js.md create mode 100644 packages/cosmic-proto/test/snapshots/test-exports.js.snap delete mode 100644 packages/cosmic-proto/test/test-agoric.ts create mode 100644 packages/cosmic-proto/test/test-exports.js diff --git a/packages/cosmic-proto/package.json b/packages/cosmic-proto/package.json index 98560f5befb..f192fadacd6 100644 --- a/packages/cosmic-proto/package.json +++ b/packages/cosmic-proto/package.json @@ -63,12 +63,13 @@ }, "devDependencies": { "@agoric/cosmos": "^0.34.1", + "@ava/typescript": "^4.1.0", "@cosmology/telescope": "^1.0.1", "ava": "^5.3.1", "publish-scripts": "0.1.0", "rimraf": "^5.0.0", "tsimp": "^2.0.11", - "typescript": "^5.0.4" + "typescript": "^5.3.3" }, "dependencies": { "@cosmjs/amino": "^0.32.2", @@ -82,15 +83,14 @@ "**/protobufjs": "^7.2.4" }, "ava": { - "extensions": { - "ts": "module" + "typescript": { + "rewritePaths": { + "src/": "dist/" + }, + "compile": false }, "files": [ - "test/**/test-*.ts" - ], - "nodeArguments": [ - "--import=tsimp", - "--no-warnings" + "test/**/test-*.js" ] } } diff --git a/packages/cosmic-proto/test/snapshots/test-exports.js.md b/packages/cosmic-proto/test/snapshots/test-exports.js.md new file mode 100644 index 00000000000..4333daba6f6 --- /dev/null +++ b/packages/cosmic-proto/test/snapshots/test-exports.js.md @@ -0,0 +1,87 @@ +# Snapshot report for `test/test-exports.js` + +The actual snapshot is saved in `test-exports.js.snap`. + +Generated by [AVA](https://avajs.dev). 
+ +## index + +> Snapshot 1 + + [ + 'BinaryReader', + 'BinaryWriter', + 'WireType', + 'agoric', + 'agoricAminoConverters', + 'agoricProtoRegistry', + 'cosmos', + 'cosmos_proto', + 'getSigningAgoricClient', + 'getSigningAgoricClientOptions', + 'gogoproto', + 'google', + 'ibc', + 'int64FromString', + 'int64Length', + 'int64ToString', + 'readInt32', + 'readUInt32', + 'uInt64ToString', + 'utf8Length', + 'utf8Read', + 'utf8Write', + 'varint32read', + 'varint32write', + 'varint64read', + 'varint64write', + 'writeByte', + 'writeFixed32', + 'writeVarint32', + 'writeVarint64', + 'zzDecode', + 'zzEncode', + ] + +## swingset/msgs + +> Snapshot 1 + + [ + 'MsgDeliverInbound', + 'MsgDeliverInboundResponse', + 'MsgInstallBundle', + 'MsgInstallBundleResponse', + 'MsgProvision', + 'MsgProvisionResponse', + 'MsgWalletAction', + 'MsgWalletActionResponse', + 'MsgWalletSpendAction', + 'MsgWalletSpendActionResponse', + ] + +## swingset/query + +> Snapshot 1 + + [ + 'QueryEgressRequest', + 'QueryEgressResponse', + 'QueryMailboxRequest', + 'QueryMailboxResponse', + 'QueryParamsRequest', + 'QueryParamsResponse', + ] + +## vstorage/query + +> Snapshot 1 + + [ + 'QueryCapDataRequest', + 'QueryCapDataResponse', + 'QueryChildrenRequest', + 'QueryChildrenResponse', + 'QueryDataRequest', + 'QueryDataResponse', + ] diff --git a/packages/cosmic-proto/test/snapshots/test-exports.js.snap b/packages/cosmic-proto/test/snapshots/test-exports.js.snap new file mode 100644 index 0000000000000000000000000000000000000000..ba499eff3b10658061f2dd1061f2bd4e55e40b92 GIT binary patch literal 940 zcmV;d15^A#RzV*Fd>y5T9OmYEeNh9;E? z)0~124IhgL00000000A(l{;@6M-+w6>{1U)lqs5&-*z5FTn4ZOAw`N%k_8AbVOdfv zxCn$1cQhJjIYZA3X;DBD1V|mE4UiO$n^b8MxN+?&P22=X;UD12F5CvTm*k9gmn00c zBJOwYgZB;&AMEeNs`0h?;WuW5jTsX%Vu!*=+A;^qn(rS(5w-MAVL!wak`I|CAN>yC z4*-7ycn%;Wzy$&<6Mz%oI|BSlfF}faMSxNOTnT{B1K_IwI0}HD1K_U!_$L5f20$?c zJ_&(Z2<(Qyw;}LL2s{pfe?#C^2viEd)dFy%0943inM~BQrd>kZw^FGDG6V7PE5&2KqoRw4V$UyV&ILV=oP$LNBu+e2&OkVVHky6Efn=0Z`$BTV?59n`&9EJjrzty{-2iYKz#)K#0Gm6GnU<1<4H?ON-SBm3EREykwv6IoLphsQkGG9Oh(yq-fS+8 zQn{Lg%S(Uxdu^t>BToNu>hx1z09bqbFC+l`2;ffu{{i?P?>O~j{?w;#CH<~*&5YS( z_u9-XMef2I}MTBVkbTV^#Y8nexT>OjPiW^&v*7jmjG`<(B|MhxY*SxzbX OMcx3m(u6M}2><{}48N}c literal 0 HcmV?d00001 diff --git a/packages/cosmic-proto/test/test-agoric.ts b/packages/cosmic-proto/test/test-agoric.ts deleted file mode 100644 index d2ca8ab4836..00000000000 --- a/packages/cosmic-proto/test/test-agoric.ts +++ /dev/null @@ -1,23 +0,0 @@ -/* eslint-disable @typescript-eslint/prefer-ts-expect-error */ -import test from 'ava'; -import { coins } from '@cosmjs/amino'; - -// FIXME does not work after SES init (add this back to package.json: -// "require": [ -// "@endo/init/debug.js" -// ]) -// import * as index from '../src/index.js'; - -// @ts-ignore tsc thinks Module '"../src/index.js"' has no exported member 'agoric'. 
-import { agoric } from '../src/index.js'; - -test('it loads', t => { - t.deepEqual(Object.keys(agoric), [ - 'lien', - 'swingset', - 'vbank', - 'vibc', - 'vstorage', - 'ClientFactory', - ]); -}); diff --git a/packages/cosmic-proto/test/test-exports.js b/packages/cosmic-proto/test/test-exports.js new file mode 100644 index 00000000000..e3d0f0ab61b --- /dev/null +++ b/packages/cosmic-proto/test/test-exports.js @@ -0,0 +1,27 @@ +/* eslint-disable import/no-extraneous-dependencies -- requiring the package itself to check exports map */ +/* eslint-disable import/no-unresolved -- not detecting the "exports" map */ +// @ts-nocheck tsc also not finding the "exports" map until https://github.com/Agoric/agoric-sdk/issues/9005 +import test from 'ava'; + +import '@endo/init'; + +import * as index from '@agoric/cosmic-proto'; +import * as swingsetMsgs from '@agoric/cosmic-proto/swingset/msgs.js'; +import * as swingsetQuery from '@agoric/cosmic-proto/swingset/query.js'; +import * as vstorageQuery from '@agoric/cosmic-proto/vstorage/query.js'; + +test('index', t => { + t.snapshot(Object.keys(index).sort()); +}); + +test('swingset/msgs', t => { + t.snapshot(Object.keys(swingsetMsgs).sort()); +}); + +test('swingset/query', t => { + t.snapshot(Object.keys(swingsetQuery).sort()); +}); + +test('vstorage/query', t => { + t.snapshot(Object.keys(vstorageQuery).sort()); +}); diff --git a/yarn.lock b/yarn.lock index 648495d2ec8..b4f0c0e94da 100644 --- a/yarn.lock +++ b/yarn.lock @@ -36,6 +36,14 @@ "@jridgewell/gen-mapping" "^0.3.0" "@jridgewell/trace-mapping" "^0.3.9" +"@ava/typescript@^4.1.0": + version "4.1.0" + resolved "https://registry.yarnpkg.com/@ava/typescript/-/typescript-4.1.0.tgz#0dde7b3bbcfe59c1424fb20eb289b4a2b3694418" + integrity sha512-1iWZQ/nr9iflhLK9VN8H+1oDZqe93qxNnyYUz+jTzkYPAHc5fdZXBrqmNIgIfFhWYXK5OaQ5YtC7OmLeTNhVEg== + dependencies: + escape-string-regexp "^5.0.0" + execa "^7.1.1" + "@babel/code-frame@^7.0.0", "@babel/code-frame@^7.18.6", "@babel/code-frame@^7.22.13", "@babel/code-frame@^7.23.5": version "7.23.5" resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.23.5.tgz#9009b69a8c602293476ad598ff53e4562e15c244" @@ -6448,6 +6456,21 @@ execa@^5.0.0: signal-exit "^3.0.3" strip-final-newline "^2.0.0" +execa@^7.1.1: + version "7.2.0" + resolved "https://registry.yarnpkg.com/execa/-/execa-7.2.0.tgz#657e75ba984f42a70f38928cedc87d6f2d4fe4e9" + integrity sha512-UduyVP7TLB5IcAQl+OzLyLcS/l32W/GLg+AhHJ+ow40FOk2U3SAllPwR44v4vmdFwIWqpdwxxpQbF1n5ta9seA== + dependencies: + cross-spawn "^7.0.3" + get-stream "^6.0.1" + human-signals "^4.3.0" + is-stream "^3.0.0" + merge-stream "^2.0.0" + npm-run-path "^5.1.0" + onetime "^6.0.0" + signal-exit "^3.0.7" + strip-final-newline "^3.0.0" + expand-template@^2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/expand-template/-/expand-template-2.0.3.tgz#6e14b3fcee0f3a6340ecb57d2e8918692052a47c" @@ -6948,7 +6971,7 @@ get-stdin@^8.0.0: resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-8.0.0.tgz#cbad6a73feb75f6eeb22ba9e01f89aa28aa97a53" integrity sha512-sY22aA6xchAzprjyqmSEQv4UbAAzRN0L2dQB0NlN5acTTK9Don6nhoc3eAbUnpZiCANAMfd/+40kVdKfFygohg== -get-stream@^6.0.0: +get-stream@^6.0.0, get-stream@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== @@ -7381,6 +7404,11 @@ human-signals@^2.1.0: resolved 
"https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== +human-signals@^4.3.0: + version "4.3.1" + resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-4.3.1.tgz#ab7f811e851fca97ffbd2c1fe9a958964de321b2" + integrity sha512-nZXjEF2nbo7lIw3mgYjItAfgQXog3OjJogSbKa2CQIIvSGWcKgeJnQlNXip6NglNzYH45nSRiEVimMvYL8DDqQ== + humanize-ms@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/humanize-ms/-/humanize-ms-1.2.1.tgz#c46e3159a293f6b896da29316d8b6fe8bb79bbed" @@ -7888,6 +7916,11 @@ is-stream@^2.0.0: resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== +is-stream@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-3.0.0.tgz#e6bfd7aa6bef69f4f472ce9bb681e3e57b4319ac" + integrity sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA== + is-string@^1.0.5, is-string@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" @@ -9587,6 +9620,13 @@ npm-run-path@^4.0.1: dependencies: path-key "^3.0.0" +npm-run-path@^5.1.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-5.3.0.tgz#e23353d0ebb9317f174e93417e4a4d82d0249e9f" + integrity sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ== + dependencies: + path-key "^4.0.0" + npmlog@^6.0.0, npmlog@^6.0.2: version "6.0.2" resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-6.0.2.tgz#c8166017a42f2dea92d6453168dd865186a70830" @@ -9762,6 +9802,13 @@ onetime@^5.1.0, onetime@^5.1.2: dependencies: mimic-fn "^2.1.0" +onetime@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-6.0.0.tgz#7c24c18ed1fd2e9bca4bd26806a33613c77d34b4" + integrity sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ== + dependencies: + mimic-fn "^4.0.0" + open@^7.4.2: version "7.4.2" resolved "https://registry.yarnpkg.com/open/-/open-7.4.2.tgz#b8147e26dcf3e426316c730089fd71edd29c2321" @@ -10105,6 +10152,11 @@ path-key@^3.0.0, path-key@^3.1.0: resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== +path-key@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-4.0.0.tgz#295588dc3aee64154f877adb9d780b81c554bf18" + integrity sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ== + path-parse@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" @@ -11457,6 +11509,11 @@ strip-final-newline@^2.0.0: resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== +strip-final-newline@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-3.0.0.tgz#52894c313fbff318835280aed60ff71ebf12b8fd" + integrity 
sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw== + strip-indent@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-3.0.0.tgz#c32e1cee940b6b3432c771bc2c54bcce73cd3001" @@ -11988,7 +12045,7 @@ typedoc@^0.25.7: resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.9.5.tgz#095979f9bcc0d09da324d58d03ce8f8374cbe65a" integrity sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g== -typescript@^5.0.4, typescript@^5.3.3, typescript@~5.3.3: +typescript@^5.3.3, typescript@~5.3.3: version "5.3.3" resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.3.3.tgz#b3ce6ba258e72e6305ba66f5c9b452aaee3ffe37" integrity sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw== From 7c75d76242717658c4efc7bbb57301ba73dbb68a Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Fri, 16 Feb 2024 08:30:57 -0800 Subject: [PATCH 24/47] test: integration with Emerynet --- packages/cosmic-proto/README.md | 1 - packages/cosmic-proto/package.json | 3 +- packages/cosmic-proto/scripts/test-live.js | 36 ++++++++++++++++++++++ 3 files changed, 38 insertions(+), 2 deletions(-) create mode 100755 packages/cosmic-proto/scripts/test-live.js diff --git a/packages/cosmic-proto/README.md b/packages/cosmic-proto/README.md index 91a1aac5044..ec406f85ddd 100644 --- a/packages/cosmic-proto/README.md +++ b/packages/cosmic-proto/README.md @@ -245,7 +245,6 @@ To test with a real network, ``` yarn test:live ``` -Note that requires `tsx` in the global path (`npm install -g tsx`). diff --git a/packages/cosmic-proto/package.json b/packages/cosmic-proto/package.json index f192fadacd6..0b2de11ede9 100644 --- a/packages/cosmic-proto/package.json +++ b/packages/cosmic-proto/package.json @@ -56,7 +56,8 @@ "lint-fix": "yarn lint:eslint --fix", "lint": "tsc", "test": "ava", - "test:xs": "exit 0" + "test:xs": "exit 0", + "test:live": "yarn build && scripts/test-live.js" }, "publishConfig": { "access": "public" diff --git a/packages/cosmic-proto/scripts/test-live.js b/packages/cosmic-proto/scripts/test-live.js new file mode 100755 index 00000000000..80cc7d76529 --- /dev/null +++ b/packages/cosmic-proto/scripts/test-live.js @@ -0,0 +1,36 @@ +#!/usr/bin/env node +// @ts-check +import '@endo/init'; + +import process from 'node:process'; +import assert from 'node:assert/strict'; + +import { agoric } from '../dist/codegen/index.js'; + +const rpcEndpoint = 'https://emerynet.rpc.agoric.net:443'; + +const testMain = async () => { + const client = await agoric.ClientFactory.createRPCQueryClient({ + rpcEndpoint, + }); + const { params } = await client.agoric.swingset.params(); + assert.deepEqual(Object.keys(params).sort(), [ + 'beansPerUnit', + 'bootstrapVatConfig', + 'feeUnitPrice', + 'powerFlagFees', + 'queueMax', + ]); + console.log('✅ SwingSet params query successful'); +}; + +process.exitCode = 1; +testMain().then( + () => { + process.exitCode = 0; + }, + err => { + console.error('Failed with', err); + process.exit(process.exitCode || 1); + }, +); From f65ca3e5eaaf9f913f08984d2a3fac9fd1e9ccf2 Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Mon, 26 Feb 2024 11:28:53 -0800 Subject: [PATCH 25/47] chore(deps): override protobufjs version to get https://github.com/protobufjs/protobuf.js/pull/1742 --- package.json | 3 +++ yarn.lock | 39 +-------------------------------------- 2 files changed, 4 insertions(+), 38 deletions(-) diff --git a/package.json b/package.json index 
6c340b8dcf9..0e30acf4947 100644 --- a/package.json +++ b/package.json @@ -36,6 +36,9 @@ "typedoc": "^0.25.7", "typescript": "^5.3.3" }, + "resolutions": { + "**/protobufjs": "^7.2.4" + }, "engines": { "node": "^16.13 || ^18.12 || ^20.9" }, diff --git a/yarn.lock b/yarn.lock index b4f0c0e94da..f4f191223de 100644 --- a/yarn.lock +++ b/yarn.lock @@ -10394,44 +10394,7 @@ proto-list@~1.2.1: resolved "https://registry.yarnpkg.com/proto-list/-/proto-list-1.2.4.tgz#212d5bfe1318306a420f6402b8e26ff39647a849" integrity sha1-IS1b/hMYMGpCD2QCuOJv85ZHqEk= -protobufjs@^6.8.8: - version "6.11.3" - resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-6.11.3.tgz#637a527205a35caa4f3e2a9a4a13ddffe0e7af74" - integrity sha512-xL96WDdCZYdU7Slin569tFX712BxsxslWwAfAhCYjQKGTq7dAU91Lomy6nLLhh/dyGhk/YH4TwTSRxTzhuHyZg== - dependencies: - "@protobufjs/aspromise" "^1.1.2" - "@protobufjs/base64" "^1.1.2" - "@protobufjs/codegen" "^2.0.4" - "@protobufjs/eventemitter" "^1.1.0" - "@protobufjs/fetch" "^1.1.0" - "@protobufjs/float" "^1.0.2" - "@protobufjs/inquire" "^1.1.0" - "@protobufjs/path" "^1.1.2" - "@protobufjs/pool" "^1.1.0" - "@protobufjs/utf8" "^1.1.0" - "@types/long" "^4.0.1" - "@types/node" ">=13.7.0" - long "^4.0.0" - -protobufjs@^7.0.0: - version "7.2.6" - resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-7.2.6.tgz#4a0ccd79eb292717aacf07530a07e0ed20278215" - integrity sha512-dgJaEDDL6x8ASUZ1YqWciTRrdOuYNzoOf27oHNfdyvKqHr5i0FV7FSLU+aIeFjyFgVxrpTOtQUi0BLLBymZaBw== - dependencies: - "@protobufjs/aspromise" "^1.1.2" - "@protobufjs/base64" "^1.1.2" - "@protobufjs/codegen" "^2.0.4" - "@protobufjs/eventemitter" "^1.1.0" - "@protobufjs/fetch" "^1.1.0" - "@protobufjs/float" "^1.0.2" - "@protobufjs/inquire" "^1.1.0" - "@protobufjs/path" "^1.1.2" - "@protobufjs/pool" "^1.1.0" - "@protobufjs/utf8" "^1.1.0" - "@types/node" ">=13.7.0" - long "^5.0.0" - -protobufjs@^7.2.4: +protobufjs@^6.8.8, protobufjs@^7.0.0, protobufjs@^7.2.4: version "7.2.4" resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-7.2.4.tgz#3fc1ec0cdc89dd91aef9ba6037ba07408485c3ae" integrity sha512-AT+RJgD2sH8phPmCf7OUZR8xGdcJRga4+1cOaXJ64hvcSkVhNcRHOwIxUatPH15+nj59WAGTDv3LSGZPEQbJaQ== From 66955c1e70b63e6525ea10bc946f7c1a84e1e869 Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Tue, 27 Feb 2024 16:25:26 -0800 Subject: [PATCH 26/47] fix(sim-params): power_flag casing --- packages/cosmic-swingset/src/sim-params.js | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/cosmic-swingset/src/sim-params.js b/packages/cosmic-swingset/src/sim-params.js index 50c2da3a7d5..f3c1abad15d 100644 --- a/packages/cosmic-swingset/src/sim-params.js +++ b/packages/cosmic-swingset/src/sim-params.js @@ -1,9 +1,9 @@ // @jessie-check +// @ts-check import { Nat } from '@endo/nat'; const makeStringBeans = (key, beans) => ({ key, beans: `${Nat(beans)}` }); -const makePowerFlagFee = (powerFlag, fee) => ({ powerFlag, fee }); const makeCoin = (denom, amount) => ({ denom, amount: `${Nat(amount)}` }); /** * @param {string} key @@ -63,8 +63,9 @@ export const defaultBeansPerUnit = [ const defaultBootstrapVatConfig = '@agoric/vm-config/decentral-demo-config.json'; +/** @type {import('@agoric/cosmic-proto/dist/codegen/agoric/swingset/swingset.js').PowerFlagFeeSDKType[]} */ export const defaultPowerFlagFees = [ - makePowerFlagFee('SMART_WALLET', [makeCoin('ubld', 10_000_000n)]), + { power_flag: 'SMART_WALLET', fee: [makeCoin('ubld', 10_000_000n)] }, ]; export const QueueInbound = 'inbound'; From 659831938918f445f220057f211e96df9f54c461 
Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Fri, 16 Feb 2024 16:30:27 -0800 Subject: [PATCH 27/47] feat: use HTTP to fetch SwingSet Params --- packages/agoric-cli/src/commands/wallet.js | 11 +++++---- packages/agoric-cli/src/lib/chain.js | 28 +++++++++++----------- 2 files changed, 20 insertions(+), 19 deletions(-) diff --git a/packages/agoric-cli/src/commands/wallet.js b/packages/agoric-cli/src/commands/wallet.js index 10bed0a0ee3..99496958a58 100644 --- a/packages/agoric-cli/src/commands/wallet.js +++ b/packages/agoric-cli/src/commands/wallet.js @@ -50,10 +50,11 @@ export const makeWalletCommand = async command => { ) .option('--spend', 'confirm you want to spend') .option('--nickname ', 'nickname to use', 'my-wallet') - .action(function (opts) { + .action(async function (opts) { const { account, nickname, spend } = opts; const { home, keyringBackend: backend } = wallet.opts(); const tx = ['provision-one', nickname, account, 'SMART_WALLET']; + await null; if (spend) { execSwingsetTransaction(tx, { from: account, @@ -61,12 +62,12 @@ export const makeWalletCommand = async command => { ...networkConfig, }); } else { - const params = fetchSwingsetParams(networkConfig); + const params = await fetchSwingsetParams(networkConfig); assert( - params.power_flag_fees.length === 1, - 'multiple power_flag_fees not supported', + params.powerFlagFees.length === 1, + 'multiple powerFlagFees not supported', ); - const { fee: fees } = params.power_flag_fees[0]; + const { fee: fees } = params.powerFlagFees[0]; const nf = new Intl.NumberFormat('en-US'); const costs = fees .map(f => `${nf.format(Number(f.amount))} ${f.denom}`) diff --git a/packages/agoric-cli/src/lib/chain.js b/packages/agoric-cli/src/lib/chain.js index 667cba70009..159c90451c9 100644 --- a/packages/agoric-cli/src/lib/chain.js +++ b/packages/agoric-cli/src/lib/chain.js @@ -1,5 +1,6 @@ // @ts-check /* global process */ +import { agoric } from '@agoric/cosmic-proto'; import { normalizeBech32 } from '@cosmjs/encoding'; import { execFileSync as execFileSyncAmbient } from 'child_process'; @@ -92,20 +93,19 @@ export const execSwingsetTransaction = (swingsetArgs, opts) => { }; harden(execSwingsetTransaction); -// xxx rpc should be able to query this by HTTP without shelling out -export const fetchSwingsetParams = net => { - const { chainName, rpcAddrs, execFileSync = execFileSyncAmbient } = net; - const cmd = [ - `--node=${rpcAddrs[0]}`, - `--chain-id=${chainName}`, - 'query', - 'swingset', - 'params', - '--output', - '--json', - ]; - const buffer = execFileSync(agdBinary, cmd); - return JSON.parse(buffer.toString()); +/** + * + * @param {import('./rpc.js').MinimalNetworkConfig} net + * @returns {Promise} + */ +export const fetchSwingsetParams = async net => { + const { rpcAddrs } = net; + const rpcEndpoint = rpcAddrs[0]; + const client = await agoric.ClientFactory.createRPCQueryClient({ + rpcEndpoint, + }); + const { params } = await client.agoric.swingset.params(); + return params; }; harden(fetchSwingsetParams); From 5f19df9f17343764538f32f2f0c00b47a7cbc05c Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Fri, 16 Feb 2024 15:42:58 -0800 Subject: [PATCH 28/47] refactor: use new cosmic-proto --- packages/agoric-cli/package.json | 2 +- packages/agoric-cli/src/publish.js | 13 +- packages/boot/package.json | 1 + packages/boot/tools/supports.ts | 3 +- packages/casting/package.json | 2 +- .../casting/test/test-interpose-net-access.js | 17 +- packages/cosmic-swingset/src/params.js | 5 +- packages/cosmic-swingset/src/sim-params.js | 3 + 
packages/smart-wallet/package.json | 2 +- packages/smart-wallet/src/types.d.ts | 6 +- packages/vats/src/core/chain-behaviors.js | 8 +- yarn.lock | 204 +++--------------- 12 files changed, 60 insertions(+), 206 deletions(-) diff --git a/packages/agoric-cli/package.json b/packages/agoric-cli/package.json index 0e78d869d0a..f1d1bf57714 100644 --- a/packages/agoric-cli/package.json +++ b/packages/agoric-cli/package.json @@ -40,7 +40,7 @@ "@agoric/assert": "^0.6.0", "@agoric/cache": "^0.3.2", "@agoric/casting": "^0.4.2", - "@agoric/cosmic-proto": "^0.3.0", + "@agoric/cosmic-proto": "^0.4.0", "@agoric/ertp": "^0.16.2", "@agoric/governance": "^0.10.3", "@agoric/inter-protocol": "^0.16.1", diff --git a/packages/agoric-cli/src/publish.js b/packages/agoric-cli/src/publish.js index 79072fa550f..3235302652f 100644 --- a/packages/agoric-cli/src/publish.js +++ b/packages/agoric-cli/src/publish.js @@ -9,14 +9,12 @@ import { makeLeaderFromRpcAddresses, makeCastingSpec, } from '@agoric/casting'; -import { DirectSecp256k1HdWallet, Registry } from '@cosmjs/proto-signing'; -import { defaultRegistryTypes } from '@cosmjs/stargate'; +import { getSigningAgoricClientOptions } from '@agoric/cosmic-proto'; +import { DirectSecp256k1HdWallet } from '@cosmjs/proto-signing'; import { stringToPath } from '@cosmjs/crypto'; import { Decimal } from '@cosmjs/math'; import { fromBech32 } from '@cosmjs/encoding'; -import { MsgInstallBundle } from '@agoric/cosmic-proto/swingset/msgs.js'; - // https://github.com/Agoric/agoric-sdk/blob/master/golang/cosmos/daemon/main.go const Agoric = { Bech32MainPrefix: 'agoric', @@ -38,11 +36,6 @@ const Agoric = { const hdPath = (coinType = 118, account = 0) => stringToPath(`m/44'/${coinType}'/${account}'/0/0`); -const registry = new Registry([ - ...defaultRegistryTypes, - [Agoric.proto.swingset.InstallBundle.typeUrl, MsgInstallBundle], -]); - /** * @typedef {object} JsonHttpRequest * @property {string} hostname @@ -290,8 +283,8 @@ export const makeCosmosBundlePublisher = ({ // AWAIT const stargateClient = await connectWithSigner(endpoint, wallet, { + ...getSigningAgoricClientOptions(), gasPrice: Agoric.gasPrice, - registry, }); // AWAIT diff --git a/packages/boot/package.json b/packages/boot/package.json index 2bbdf561261..40487498a9a 100644 --- a/packages/boot/package.json +++ b/packages/boot/package.json @@ -19,6 +19,7 @@ "license": "Apache-2.0", "dependencies": { "@agoric/assert": "^0.6.0", + "@agoric/cosmic-proto": "^0.4.0", "@agoric/builders": "^0.1.0", "@agoric/cosmic-swingset": "^0.41.3", "@agoric/ertp": "^0.16.2", diff --git a/packages/boot/tools/supports.ts b/packages/boot/tools/supports.ts index 680db4a9ad4..828c18c599d 100644 --- a/packages/boot/tools/supports.ts +++ b/packages/boot/tools/supports.ts @@ -26,6 +26,7 @@ import { import type { ExecutionContext as AvaT } from 'ava'; import { makeRunUtils } from '@agoric/swingset-vat/tools/run-utils.js'; +import type { CoreEvalSDKType } from '@agoric/cosmic-proto/dist/codegen/agoric/swingset/swingset'; const trace = makeTracer('BSTSupport', false); @@ -172,7 +173,7 @@ export const makeProposalExtractor = ({ childProcess, fs }: Powers) => { loadAndRmPkgFile(permit), loadAndRmPkgFile(script), ]); - return { json_permits: permits, js_code: code }; + return { json_permits: permits, js_code: code } as CoreEvalSDKType; }), ); diff --git a/packages/casting/package.json b/packages/casting/package.json index c9406f44f79..e7359ec7a20 100644 --- a/packages/casting/package.json +++ b/packages/casting/package.json @@ -38,7 +38,7 @@ "node-fetch": 
"^2.6.0" }, "devDependencies": { - "@agoric/cosmic-proto": "^0.3.0", + "@agoric/cosmic-proto": "^0.4.0", "@endo/ses-ava": "^1.1.2", "@types/node-fetch": "^2.6.2", "ava": "^5.3.0", diff --git a/packages/casting/test/test-interpose-net-access.js b/packages/casting/test/test-interpose-net-access.js index bcf54166c78..5cd205b40d4 100644 --- a/packages/casting/test/test-interpose-net-access.js +++ b/packages/casting/test/test-interpose-net-access.js @@ -1,14 +1,10 @@ // @ts-check /* global globalThis */ -import anyTest from 'ava'; -import { - createProtobufRpcClient, - QueryClient, - setupBankExtension, -} from '@cosmjs/stargate'; +import { QueryClient, setupBankExtension } from '@cosmjs/stargate'; import { Tendermint34Client } from '@cosmjs/tendermint-rpc'; -import { QueryClientImpl } from '@agoric/cosmic-proto/vstorage/query.js'; +import anyTest from 'ava'; +import { agoric } from '@agoric/cosmic-proto'; import { makeHttpClient } from '../src/makeHttpClient.js'; import { captureIO, replayIO, web1, web2 } from './net-access-fixture.js'; @@ -81,12 +77,13 @@ test(`vstorage query: Children (RECORDING: ${RECORDING})`, async t => { : { fetch: replayIO(web2), web: new Map() }; const rpcClient = makeHttpClient(scenario2.endpoint, fetchMock); + t.is(agoric.vstorage.Children.typeUrl, '/agoric.vstorage.Children'); + const tmClient = await Tendermint34Client.create(rpcClient); const qClient = new QueryClient(tmClient); - const rpc = createProtobufRpcClient(qClient); - const queryService = new QueryClientImpl(rpc); + const queryService = agoric.vstorage.createRpcQueryExtension(qClient); - const children = await queryService.Children({ path: '' }); + const children = await queryService.children({ path: '' }); if (io.recording) { t.snapshot(web); } diff --git a/packages/cosmic-swingset/src/params.js b/packages/cosmic-swingset/src/params.js index 155664a3c39..ddaad34b223 100644 --- a/packages/cosmic-swingset/src/params.js +++ b/packages/cosmic-swingset/src/params.js @@ -29,7 +29,10 @@ export const encodeQueueSizes = queueSizes => return { key, size }; }); -// Map the SwingSet parameters to a deterministic data structure. +/** + * Map the SwingSet parameters to a deterministic data structure. + * @param {import('@agoric/cosmic-proto/dist/codegen/agoric/swingset/swingset.js').ParamsSDKType} params + */ export const parseParams = params => { const { beans_per_unit: rawBeansPerUnit, diff --git a/packages/cosmic-swingset/src/sim-params.js b/packages/cosmic-swingset/src/sim-params.js index f3c1abad15d..f550c17f831 100644 --- a/packages/cosmic-swingset/src/sim-params.js +++ b/packages/cosmic-swingset/src/sim-params.js @@ -76,6 +76,9 @@ export const defaultQueueMax = [ makeQueueSize(QueueInbound, defaultInboundQueueMax), ]; +/** + * @type {import('@agoric/cosmic-proto/dist/codegen/agoric/swingset/swingset.js').ParamsSDKType} + */ export const DEFAULT_SIM_SWINGSET_PARAMS = { beans_per_unit: defaultBeansPerUnit, fee_unit_price: defaultFeeUnitPrice, diff --git a/packages/smart-wallet/package.json b/packages/smart-wallet/package.json index 43f387b84d4..dd5b351c3a4 100644 --- a/packages/smart-wallet/package.json +++ b/packages/smart-wallet/package.json @@ -16,7 +16,7 @@ "lint:eslint": "eslint ." 
}, "devDependencies": { - "@agoric/cosmic-proto": "^0.3.0", + "@agoric/cosmic-proto": "^0.4.0", "@agoric/swingset-vat": "^0.32.2", "@endo/bundle-source": "^3.1.0", "@endo/captp": "^4.0.4", diff --git a/packages/smart-wallet/src/types.d.ts b/packages/smart-wallet/src/types.d.ts index f0ad92b739f..f12822df490 100644 --- a/packages/smart-wallet/src/types.d.ts +++ b/packages/smart-wallet/src/types.d.ts @@ -7,7 +7,7 @@ import type { ERef } from '@endo/far'; import type { CapData } from '@endo/marshal'; -import type { MsgWalletSpendAction } from '@agoric/cosmic-proto/swingset/msgs'; +import type { agoric } from '@agoric/cosmic-proto'; import type { AgoricNamesRemotes } from '@agoric/vats/tools/board-utils.js'; import type { OfferSpec } from './offers.js'; @@ -40,7 +40,7 @@ export type BridgeActionCapData = WalletCapData< /** * Defined by walletAction struct in msg_server.go * - * @see {MsgWalletSpendAction} and walletSpendAction in msg_server.go + * @see {agoric.swingset.MsgWalletAction} and walletSpendAction in msg_server.go */ export type WalletActionMsg = { type: 'WALLET_ACTION'; @@ -55,7 +55,7 @@ export type WalletActionMsg = { /** * Defined by walletSpendAction struct in msg_server.go * - * @see {MsgWalletSpendAction} and walletSpendAction in msg_server.go + * @see {agoric.swingset.MsgWalletSpendAction} and walletSpendAction in msg_server.go */ export type WalletSpendActionMsg = { type: 'WALLET_SPEND_ACTION'; diff --git a/packages/vats/src/core/chain-behaviors.js b/packages/vats/src/core/chain-behaviors.js index 6ed04ee409d..3d21d781fc3 100644 --- a/packages/vats/src/core/chain-behaviors.js +++ b/packages/vats/src/core/chain-behaviors.js @@ -57,13 +57,7 @@ export const bridgeCoreEval = async allPowers => { async fromBridge(obj) { switch (obj.type) { case 'CORE_EVAL': { - /** - * Type defined by - * `agoric-sdk/golang/cosmos/proto/agoric/swingset/swingset.proto` - * CoreEval. 
- * - * @type {{ evals: { json_permits: string; js_code: string }[] }} - */ + /** @type {import('@agoric/cosmic-proto/dist/codegen/agoric/swingset/swingset.d.ts').CoreEvalProposalSDKType} */ const { evals } = obj; return Promise.all( evals.map(({ json_permits: jsonPermit, js_code: code }) => diff --git a/yarn.lock b/yarn.lock index f4f191223de..d9da2cfdede 100644 --- a/yarn.lock +++ b/yarn.lock @@ -16,13 +16,6 @@ jsesc "^2.5.1" source-map "^0.5.0" -"@agoric/cosmic-proto@^0.3.0": - version "0.3.0" - resolved "https://registry.yarnpkg.com/@agoric/cosmic-proto/-/cosmic-proto-0.3.0.tgz#c9d31d3946c91fbb1630f89d8ba63a662bcdacc5" - integrity sha512-cIunby6gs53sGkHx3ALraREbfVQXvsIcObMjQQ0/tZt5HVqwoS7Y1Qj1Xl0ZZvqE8B1Zyk7QMDj829mbTII+9g== - dependencies: - protobufjs "^7.0.0" - "@agoric/wallet-ui@0.1.3-solo.0": version "0.1.3-solo.0" resolved "https://registry.yarnpkg.com/@agoric/wallet-ui/-/wallet-ui-0.1.3-solo.0.tgz#5f05c3dd2820d4f1efcbccbd2dc1292847ecbd2b" @@ -2152,12 +2145,12 @@ resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== -"@jridgewell/sourcemap-codec@1.4.14", "@jridgewell/sourcemap-codec@^1.4.10": +"@jridgewell/sourcemap-codec@1.4.14": version "1.4.14" resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== -"@jridgewell/sourcemap-codec@^1.4.14": +"@jridgewell/sourcemap-codec@^1.4.10", "@jridgewell/sourcemap-codec@^1.4.14": version "1.4.15" resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32" integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg== @@ -4202,14 +4195,6 @@ aria-query@^5.1.3, aria-query@^5.3.0: dependencies: dequal "^2.0.3" -array-buffer-byte-length@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz#fabe8bc193fea865f317fe7807085ee0dee5aead" - integrity sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A== - dependencies: - call-bind "^1.0.2" - is-array-buffer "^3.0.1" - array-buffer-byte-length@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz#1e5583ec16763540a27ae52eed99ff899223568f" @@ -5858,47 +5843,7 @@ error-ex@^1.3.1: dependencies: is-arrayish "^0.2.1" -es-abstract@^1.19.0, es-abstract@^1.20.4: - version "1.21.2" - resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.21.2.tgz#a56b9695322c8a185dc25975aa3b8ec31d0e7eff" - integrity sha512-y/B5POM2iBnIxCiernH1G7rC9qQoM77lLIMQLuob0zhp8C56Po81+2Nj0WFKnd0pNReDTnkYryc+zhOzpEIROg== - dependencies: - array-buffer-byte-length "^1.0.0" - available-typed-arrays "^1.0.5" - call-bind "^1.0.2" - es-set-tostringtag "^2.0.1" - es-to-primitive "^1.2.1" - function.prototype.name "^1.1.5" - get-intrinsic "^1.2.0" - get-symbol-description "^1.0.0" - globalthis "^1.0.3" - gopd "^1.0.1" - has "^1.0.3" - has-property-descriptors "^1.0.0" - has-proto "^1.0.1" - has-symbols "^1.0.3" - internal-slot "^1.0.5" - is-array-buffer "^3.0.2" - is-callable "^1.2.7" - is-negative-zero "^2.0.2" - is-regex "^1.1.4" - is-shared-array-buffer "^1.0.2" - 
is-string "^1.0.7" - is-typed-array "^1.1.10" - is-weakref "^1.0.2" - object-inspect "^1.12.3" - object-keys "^1.1.1" - object.assign "^4.1.4" - regexp.prototype.flags "^1.4.3" - safe-regex-test "^1.0.0" - string.prototype.trim "^1.2.7" - string.prototype.trimend "^1.0.6" - string.prototype.trimstart "^1.0.6" - typed-array-length "^1.0.4" - unbox-primitive "^1.0.2" - which-typed-array "^1.1.9" - -es-abstract@^1.22.1, es-abstract@^1.22.3: +es-abstract@^1.20.4, es-abstract@^1.22.1, es-abstract@^1.22.3: version "1.22.4" resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.22.4.tgz#26eb2e7538c3271141f5754d31aabfdb215f27bf" integrity sha512-vZYJlk2u6qHYxBOTjAeg7qUxHdNfih64Uu2J8QqWgXZ2cri0ZpJAkzDUK/q593+mvKwlxyaxr6F1Q+3LKoQRgg== @@ -5957,15 +5902,6 @@ es-errors@^1.2.1, es-errors@^1.3.0: resolved "https://registry.yarnpkg.com/es-errors/-/es-errors-1.3.0.tgz#05f75a25dab98e4fb1dcd5e1472c0546d5057c8f" integrity sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw== -es-set-tostringtag@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz#338d502f6f674301d710b80c8592de8a15f09cd8" - integrity sha512-g3OMbtlwY3QewlqAiMLI47KywjWZoEytKr8pf6iTC8uJq5bIAH52Z9pnQ8pVL6whrCto53JZDuUIsifGeLorTg== - dependencies: - get-intrinsic "^1.1.3" - has "^1.0.3" - has-tostringtag "^1.0.0" - es-set-tostringtag@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/es-set-tostringtag/-/es-set-tostringtag-2.0.2.tgz#11f7cc9f63376930a5f20be4915834f4bc74f9c9" @@ -6743,16 +6679,16 @@ fn.name@1.x.x: resolved "https://registry.yarnpkg.com/fn.name/-/fn.name-1.1.0.tgz#26cad8017967aea8731bc42961d04a3d5988accc" integrity sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw== -follow-redirects@^1.0.0, follow-redirects@^1.15.0: - version "1.15.2" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13" - integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA== - -follow-redirects@^1.15.4: +follow-redirects@^1.0.0, follow-redirects@^1.15.4: version "1.15.5" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.5.tgz#54d4d6d062c0fa7d9d17feb008461550e3ba8020" integrity sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw== +follow-redirects@^1.15.0: + version "1.15.2" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13" + integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA== + for-each@^0.3.3: version "0.3.3" resolved "https://registry.yarnpkg.com/for-each/-/for-each-0.3.3.tgz#69b447e88a0a5d32c3e7084f3f1710034b21376e" @@ -6867,16 +6803,6 @@ function-bind@^1.1.2: resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== -function.prototype.name@^1.1.5: - version "1.1.5" - resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.5.tgz#cce0505fe1ffb80503e6f9e46cc64e46a12a9621" - integrity sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract 
"^1.19.0" - functions-have-names "^1.2.2" - function.prototype.name@^1.1.6: version "1.1.6" resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.6.tgz#cdf315b7d90ee77a4c6ee216c3c3362da07533fd" @@ -6887,11 +6813,6 @@ function.prototype.name@^1.1.6: es-abstract "^1.22.1" functions-have-names "^1.2.3" -functions-have-names@^1.2.2: - version "1.2.2" - resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.2.tgz#98d93991c39da9361f8e50b337c4f6e41f120e21" - integrity sha512-bLgc3asbWdwPbx2mNk2S49kmJCuQeu0nfmaOgbs8WIyzzkw3r4htszdIi9Q9EMezDPTYuJx2wvjZ/EwgAthpnA== - functions-have-names@^1.2.3: version "1.2.3" resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" @@ -6926,16 +6847,7 @@ get-caller-file@^2.0.5: resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== -get-intrinsic@^1.0.2, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3, get-intrinsic@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.0.tgz#7ad1dc0535f3a2904bba075772763e5051f6d05f" - integrity sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q== - dependencies: - function-bind "^1.1.1" - has "^1.0.3" - has-symbols "^1.0.3" - -get-intrinsic@^1.2.1, get-intrinsic@^1.2.2, get-intrinsic@^1.2.3, get-intrinsic@^1.2.4: +get-intrinsic@^1.0.2, get-intrinsic@^1.1.3, get-intrinsic@^1.2.1, get-intrinsic@^1.2.2, get-intrinsic@^1.2.3, get-intrinsic@^1.2.4: version "1.2.4" resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.4.tgz#e385f5a4b5227d449c3eabbad05494ef0abbeadd" integrity sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ== @@ -6946,6 +6858,15 @@ get-intrinsic@^1.2.1, get-intrinsic@^1.2.2, get-intrinsic@^1.2.3, get-intrinsic@ has-symbols "^1.0.3" hasown "^2.0.0" +get-intrinsic@^1.1.1: + version "1.2.0" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.0.tgz#7ad1dc0535f3a2904bba075772763e5051f6d05f" + integrity sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q== + dependencies: + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.3" + get-package-type@^0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" @@ -6976,14 +6897,6 @@ get-stream@^6.0.0, get-stream@^6.0.1: resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== -get-symbol-description@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6" - integrity sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw== - dependencies: - call-bind "^1.0.2" - get-intrinsic "^1.1.1" - get-symbol-description@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/get-symbol-description/-/get-symbol-description-1.0.2.tgz#533744d5aa20aca4e079c8e5daf7fd44202821f5" @@ -7631,15 +7544,6 @@ int64-buffer@^0.1.9: resolved 
"https://registry.yarnpkg.com/int64-buffer/-/int64-buffer-0.1.10.tgz#277b228a87d95ad777d07c13832022406a473423" integrity sha512-v7cSY1J8ydZ0GyjUHqF+1bshJ6cnEVLo9EnjB8p+4HDRPZc9N5jjmvUV7NvEsqQOKyH0pmIBFWXVQbiS0+OBbA== -internal-slot@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.5.tgz#f2a2ee21f668f8627a4667f309dc0f4fb6674986" - integrity sha512-Y+R5hJrzs52QCG2laLn4udYVnxsfny9CpOhNhUvk/SSSVyF6T27FzRbF0sroPidSu3X8oEAkOn2K804mjpt6UQ== - dependencies: - get-intrinsic "^1.2.0" - has "^1.0.3" - side-channel "^1.0.4" - internal-slot@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.7.tgz#c06dcca3ed874249881007b0a5523b172a190802" @@ -7677,15 +7581,6 @@ irregular-plurals@^3.2.0, irregular-plurals@^3.3.0: resolved "https://registry.yarnpkg.com/irregular-plurals/-/irregular-plurals-3.3.0.tgz#67d0715d4361a60d9fd9ee80af3881c631a31ee2" integrity sha512-MVBLKUTangM3EfRPFROhmWQQKRDsrgI83J8GS3jXy+OwYqiR2/aoWndYQ5416jLE3uaGgLH7ncme3X9y09gZ3g== -is-array-buffer@^3.0.1, is-array-buffer@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/is-array-buffer/-/is-array-buffer-3.0.2.tgz#f2653ced8412081638ecb0ebbd0c41c6e0aecbbe" - integrity sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w== - dependencies: - call-bind "^1.0.2" - get-intrinsic "^1.2.0" - is-typed-array "^1.1.10" - is-array-buffer@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/is-array-buffer/-/is-array-buffer-3.0.4.tgz#7a1f92b3d61edd2bc65d24f130530ea93d7fae98" @@ -7745,20 +7640,20 @@ is-ci@^2.0.0: dependencies: ci-info "^2.0.0" -is-core-module@^2.11.0, is-core-module@^2.5.0, is-core-module@^2.8.1, is-core-module@^2.9.0: - version "2.12.1" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.12.1.tgz#0c0b6885b6f80011c71541ce15c8d66cf5a4f9fd" - integrity sha512-Q4ZuBAe2FUsKtyQJoQHlvP8OvBERxO3jEmy1I7hcRXcJBGGHFh/aJBswbXuS9sgrDH2QUO8ilkwNPHvHMd8clg== - dependencies: - has "^1.0.3" - -is-core-module@^2.13.0: +is-core-module@^2.11.0, is-core-module@^2.13.0, is-core-module@^2.5.0, is-core-module@^2.8.1: version "2.13.1" resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.13.1.tgz#ad0d7532c6fea9da1ebdc82742d74525c6273384" integrity sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw== dependencies: hasown "^2.0.0" +is-core-module@^2.9.0: + version "2.12.1" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.12.1.tgz#0c0b6885b6f80011c71541ce15c8d66cf5a4f9fd" + integrity sha512-Q4ZuBAe2FUsKtyQJoQHlvP8OvBERxO3jEmy1I7hcRXcJBGGHFh/aJBswbXuS9sgrDH2QUO8ilkwNPHvHMd8clg== + dependencies: + has "^1.0.3" + is-date-object@^1.0.1: version "1.0.5" resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f" @@ -9693,11 +9588,6 @@ object-assign@^4.0.1: resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== -object-inspect@^1.12.3: - version "1.12.3" - resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.3.tgz#ba62dffd67ee256c8c086dfae69e016cd1f198b9" - integrity sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g== - object-inspect@^1.13.1, object-inspect@^1.9.0: version "1.13.1" resolved 
"https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.13.1.tgz#b96c6109324ccfef6b12216a956ca4dc2ff94bc2" @@ -9708,17 +9598,7 @@ object-keys@^1.1.1: resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== -object.assign@^4.1.2, object.assign@^4.1.3, object.assign@^4.1.4: - version "4.1.4" - resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.4.tgz#9673c7c7c351ab8c4d0b516f4343ebf4dfb7799f" - integrity sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.4" - has-symbols "^1.0.3" - object-keys "^1.1.1" - -object.assign@^4.1.5: +object.assign@^4.1.2, object.assign@^4.1.3, object.assign@^4.1.5: version "4.1.5" resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.5.tgz#3a833f9ab7fdb80fc9e8d2300c803d216d8fdbb0" integrity sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ== @@ -10394,7 +10274,7 @@ proto-list@~1.2.1: resolved "https://registry.yarnpkg.com/proto-list/-/proto-list-1.2.4.tgz#212d5bfe1318306a420f6402b8e26ff39647a849" integrity sha1-IS1b/hMYMGpCD2QCuOJv85ZHqEk= -protobufjs@^6.8.8, protobufjs@^7.0.0, protobufjs@^7.2.4: +protobufjs@^6.8.8, protobufjs@^7.2.4: version "7.2.4" resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-7.2.4.tgz#3fc1ec0cdc89dd91aef9ba6037ba07408485c3ae" integrity sha512-AT+RJgD2sH8phPmCf7OUZR8xGdcJRga4+1cOaXJ64hvcSkVhNcRHOwIxUatPH15+nj59WAGTDv3LSGZPEQbJaQ== @@ -10672,15 +10552,6 @@ regenerator-transform@^0.15.2: dependencies: "@babel/runtime" "^7.8.4" -regexp.prototype.flags@^1.4.3: - version "1.4.3" - resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz#87cab30f80f66660181a3bb7bf5981a872b367ac" - integrity sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - functions-have-names "^1.2.2" - regexp.prototype.flags@^1.5.2: version "1.5.2" resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz#138f644a3350f981a858c44f6bb1a61ff59be334" @@ -10915,15 +10786,6 @@ safe-buffer@5.2.1, safe-buffer@^5.0.1, safe-buffer@~5.2.0: resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== -safe-regex-test@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/safe-regex-test/-/safe-regex-test-1.0.0.tgz#793b874d524eb3640d1873aad03596db2d4f2295" - integrity sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA== - dependencies: - call-bind "^1.0.2" - get-intrinsic "^1.1.3" - is-regex "^1.1.4" - safe-regex-test@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/safe-regex-test/-/safe-regex-test-1.0.3.tgz#a5b4c0f06e0ab50ea2c395c14d8371232924c377" @@ -11381,7 +11243,7 @@ string.prototype.padend@^3.0.0: define-properties "^1.2.0" es-abstract "^1.22.1" -string.prototype.trim@^1.2.7, string.prototype.trim@^1.2.8: +string.prototype.trim@^1.2.8: version "1.2.8" resolved "https://registry.yarnpkg.com/string.prototype.trim/-/string.prototype.trim-1.2.8.tgz#f9ac6f8af4bd55ddfa8895e6aea92a96395393bd" integrity 
sha512-lfjY4HcixfQXOfaqCvcBuOIapyaroTXhbkfJN3gcB1OtyupngWK4sEET9Knd0cXd28kTUqu/kHoV4HKSJdnjiQ== @@ -11390,7 +11252,7 @@ string.prototype.trim@^1.2.7, string.prototype.trim@^1.2.8: define-properties "^1.2.0" es-abstract "^1.22.1" -string.prototype.trimend@^1.0.6, string.prototype.trimend@^1.0.7: +string.prototype.trimend@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.7.tgz#1bb3afc5008661d73e2dc015cd4853732d6c471e" integrity sha512-Ni79DqeB72ZFq1uH/L6zJ+DKZTkOtPIHovb3YZHQViE+HDouuU4mBrLOLDn5Dde3RF8qw5qVETEjhu9locMLvA== @@ -11399,7 +11261,7 @@ string.prototype.trimend@^1.0.6, string.prototype.trimend@^1.0.7: define-properties "^1.2.0" es-abstract "^1.22.1" -string.prototype.trimstart@^1.0.6, string.prototype.trimstart@^1.0.7: +string.prototype.trimstart@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.7.tgz#d4cdb44b83a4737ffbac2d406e405d43d0184298" integrity sha512-NGhtDFu3jCEm7B4Fy0DpLewdJQOZcQ0rGbwQ/+stjnrp2i+rlKeCvos9hOIeCmqwratM47OBxY7uFZzjxHXmrg== @@ -12274,7 +12136,7 @@ which-boxed-primitive@^1.0.2: is-string "^1.0.5" is-symbol "^1.0.3" -which-typed-array@^1.1.14, which-typed-array@^1.1.9: +which-typed-array@^1.1.14: version "1.1.14" resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.14.tgz#1f78a111aee1e131ca66164d8bdc3ab062c95a06" integrity sha512-VnXFiIW8yNn9kIHN88xvZ4yOWchftKDsRJ8fEPacX/wl1lOvBrhsJ/OeJCXq7B0AaijRuqgzSKalJoPk+D8MPg== From db6cdef70566e2196495d90f02fb583ab9a1d267 Mon Sep 17 00:00:00 2001 From: Turadg Aleahmad Date: Wed, 28 Feb 2024 16:57:26 -0800 Subject: [PATCH 29/47] docs: document only what works in tests --- packages/cosmic-proto/README.md | 59 +---------------------- packages/cosmic-proto/test/test-readme.js | 22 +++++++++ 2 files changed, 23 insertions(+), 58 deletions(-) create mode 100644 packages/cosmic-proto/test/test-readme.js diff --git a/packages/cosmic-proto/README.md b/packages/cosmic-proto/README.md index ec406f85ddd..57da42b9d3e 100644 --- a/packages/cosmic-proto/README.md +++ b/packages/cosmic-proto/README.md @@ -16,10 +16,6 @@ npm install @agoric/cosmic-proto - [Usage](#usage) - [RPC Clients](#rpc-clients) - [Composing Messages](#composing-messages) - - Cosmos, CosmWasm, and IBC - - [CosmWasm](#cosmwasm-messages) - - [IBC](#ibc-messages) - - [Cosmos](#cosmos-messages) - [Wallets and Signers](#connecting-with-wallets-and-signing-messages) - [Stargate Client](#initializing-the-stargate-client) - [Creating Signers](#creating-signers) @@ -37,11 +33,6 @@ import { agoric } from '@agoric/cosmic-proto'; const { createRPCQueryClient } = agoric.ClientFactory; const client = await createRPCQueryClient({ rpcEndpoint: RPC_ENDPOINT }); -// now you can query the cosmos modules -const balance = await client.cosmos.bank.v1beta1 - .allBalances({ address: 'agoric1addresshere' }); - -// you can also query the agoric modules const swingsetParams = await client.agoric.swingset.params() ``` @@ -57,49 +48,6 @@ const { } = agoric.exchange.v1beta1.MessageComposer.withTypeUrl; ``` -#### IBC Messages - -```js -import { ibc } from '@agoric/cosmic-proto'; - -const { - transfer -} = ibc.applications.transfer.v1.MessageComposer.withTypeUrl -``` - -#### Cosmos Messages - -```js -import { cosmos } from '@agoric/cosmic-proto'; - -const { - fundCommunityPool, - setWithdrawAddress, - withdrawDelegatorReward, - withdrawValidatorCommission -} = cosmos.distribution.v1beta1.MessageComposer.fromPartial; - -const { - 
multiSend, - send -} = cosmos.bank.v1beta1.MessageComposer.fromPartial; - -const { - beginRedelegate, - createValidator, - delegate, - editValidator, - undelegate -} = cosmos.staking.v1beta1.MessageComposer.fromPartial; - -const { - deposit, - submitProposal, - vote, - voteWeighted -} = cosmos.gov.v1beta1.MessageComposer.fromPartial; -``` - ## Connecting with Wallets and Signing Messages ⚡️ For web interfaces, we recommend using [cosmos-kit](https://github.com/cosmology-tech/cosmos-kit). Continue below to see how to manually construct signers and clients. @@ -193,8 +141,6 @@ import { AminoTypes, SigningStargateClient } from "@cosmjs/stargate"; import { cosmosAminoConverters, cosmosProtoRegistry, - cosmwasmAminoConverters, - cosmwasmProtoRegistry, ibcProtoRegistry, ibcAminoConverters, agoricAminoConverters, @@ -206,14 +152,12 @@ const rpcEndpint = 'https://rpc.cosmos.directory/agoric'; // or another URL const protoRegistry: ReadonlyArray<[string, GeneratedType]> = [ ...cosmosProtoRegistry, - ...cosmwasmProtoRegistry, ...ibcProtoRegistry, ...agoricProtoRegistry ]; const aminoConverters = { ...cosmosAminoConverters, - ...cosmwasmAminoConverters, ...ibcAminoConverters, ...agoricAminoConverters }; @@ -251,7 +195,7 @@ yarn test:live ### Codegen -Contract schemas live in `./contracts`, and protos in `./proto`. Look inside of `scripts/codegen.cjs` and configure the settings for bundling your SDK and contracts into `@agoric/cosmic-proto`: +Protos live in `./proto`. Look inside of `scripts/codegen.cjs` and configure the settings for bundling your SDK into `@agoric/cosmic-proto`: ``` yarn codegen @@ -271,7 +215,6 @@ yarn publish Checkout these related projects: * [@cosmology/telescope](https://github.com/cosmology-tech/telescope) Your Frontend Companion for Building with TypeScript with Cosmos SDK Modules. -* [@cosmwasm/ts-codegen](https://github.com/CosmWasm/ts-codegen) Convert your CosmWasm smart contracts into dev-friendly TypeScript classes. * [chain-registry](https://github.com/cosmology-tech/chain-registry) Everything from token symbols, logos, and IBC denominations for all assets you want to support in your application. * [cosmos-kit](https://github.com/cosmology-tech/cosmos-kit) Experience the convenience of connecting with a variety of web3 wallets through a single, streamlined interface. * [create-cosmos-app](https://github.com/cosmology-tech/create-cosmos-app) Set up a modern Cosmos app by running one command. 
diff --git a/packages/cosmic-proto/test/test-readme.js b/packages/cosmic-proto/test/test-readme.js new file mode 100644 index 00000000000..2c00ab56b1c --- /dev/null +++ b/packages/cosmic-proto/test/test-readme.js @@ -0,0 +1,22 @@ +/** @file snippets from the README */ +/* eslint-disable import/no-extraneous-dependencies -- requiring the package itself to check exports map */ +/* eslint-disable import/no-unresolved -- not detecting the "exports" map */ +import test from 'ava'; + +import { agoric } from '../dist/index.js'; + +const RPC_ENDPOINT = ''; + +// Skip because we don't have a real endpoint, still tests the types +test.skip('RPC Clients', async t => { + const { createRPCQueryClient } = agoric.ClientFactory; + const client = await createRPCQueryClient({ rpcEndpoint: RPC_ENDPOINT }); + + const swingsetParams = await client.agoric.swingset.params(); + t.truthy(swingsetParams); +}); + +test('Composing Messages', t => { + const { sendPacket } = agoric.vibc.MessageComposer.withTypeUrl; + t.truthy(sendPacket); +}); From f33b371a650cfc7d182f9e7ab9c0becc72472de7 Mon Sep 17 00:00:00 2001 From: Chris Hibbert Date: Thu, 29 Feb 2024 15:57:31 -0800 Subject: [PATCH 30/47] 8868 gen submissions (#9009) * feat: move specification of generated submission files to proposals * refactor: generate without invoking scripts from proposals * fixup! refactor: generate without invoking scripts from proposals * fixup! fixup! refactor: generate without invoking scripts from proposals --------- Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- a3p-integration/README.md | 9 +++++++- a3p-integration/package.json | 2 +- .../proposals/a:upgrade-next/package.json | 1 + .../proposals/b:localchain/README.md | 4 +++- .../proposals/b:localchain/package.json | 3 ++- scripts/generate-a3p-submission-dir.sh | 12 ++++++++++ scripts/generate-a3p-submission.sh | 22 ++++++++----------- 7 files changed, 36 insertions(+), 17 deletions(-) create mode 100755 scripts/generate-a3p-submission-dir.sh diff --git a/a3p-integration/README.md b/a3p-integration/README.md index b09fe7e926d..660c72ad36e 100644 --- a/a3p-integration/README.md +++ b/a3p-integration/README.md @@ -97,7 +97,14 @@ make -C ../packages/deployment docker-build-sdk ## Generating core-eval submissions -Some core-eval proposals `submission` content are generated from the `agoric-sdk` code, and must be rebuilt every time there is a change. The `/scripts/generate-a3p-submission.sh` script contains commands to generate the core-eval content and move it to the expected proposal package's `submission` folder. It is executed as part of `a3p-integration`'s `build:submission` step. +Some core-eval proposals `submission` content are generated from the `agoric-sdk` +code, and must be rebuilt every time there is a change. The +`/scripts/generate-a3p-submission.sh` script contains commands to generate the +core-eval content and move it to the expected proposal package's submission +directory. It is executed as part of `a3p-integration`'s `build:submission` step. +Each proposal that requires such a build step should add a `submission` entry in +the `agoricProposal` section of package.json. This value specifies the name of +another entry which gives parameters to be passed to `generate-a3p-submission.sh`. 
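For orientation, the build step described above boils down to roughly the following — an illustrative sketch only (it assumes a recent Node and that it is run from the `a3p-integration` directory); the authoritative logic is in the `generate-a3p-submission-dir.sh` and `generate-a3p-submission.sh` scripts changed below.

```js
// Sketch of the generation loop; the real implementation is the shell scripts
// scripts/generate-a3p-submission-dir.sh + scripts/generate-a3p-submission.sh.
import { readdirSync, readFileSync } from 'node:fs';
import { execFileSync } from 'node:child_process';

// proposal directories are named like `a:upgrade-next`, `b:localchain`, ...
const proposals = readdirSync('./proposals').filter(d => /^[a-z]:/.test(d));

for (const dir of proposals) {
  const pkg = JSON.parse(
    readFileSync(`./proposals/${dir}/package.json`, 'utf8'),
  );
  // e.g. "sdk-generate": ["probe-zcf-bundle probeZcfBundle probe-submission"]
  const entry = pkg.agoricProposal?.['sdk-generate']?.[0];
  if (!entry) continue; // this proposal has no generated submission
  execFileSync(
    '../scripts/generate-a3p-submission.sh',
    [`proposals/${dir}`, ...entry.split(' ')],
    { stdio: 'inherit' },
  );
}
```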
## Building synthetic-chain images diff --git a/a3p-integration/package.json b/a3p-integration/package.json index 05983f3e4a3..c3437a2f2c6 100644 --- a/a3p-integration/package.json +++ b/a3p-integration/package.json @@ -6,7 +6,7 @@ "scripts": { "build": "yarn run build:sdk && yarn run build:submission && yarn run build:synthetic-chain", "build:sdk": "make -C ../packages/deployment docker-build-sdk", - "build:submission": "../scripts/generate-a3p-submission.sh", + "build:submission": "../scripts/generate-a3p-submission-dir.sh", "build:synthetic-chain": "yarn synthetic-chain build", "test": "yarn synthetic-chain test", "doctor": "yarn synthetic-chain doctor" diff --git a/a3p-integration/proposals/a:upgrade-next/package.json b/a3p-integration/proposals/a:upgrade-next/package.json index 28a9ca2ad9f..067a04a3d5d 100644 --- a/a3p-integration/proposals/a:upgrade-next/package.json +++ b/a3p-integration/proposals/a:upgrade-next/package.json @@ -6,6 +6,7 @@ "upgradeInfo": { "coreProposals": [] }, + "sdk-generate": [ "probe-zcf-bundle probeZcfBundle probe-submission" ], "type": "Software Upgrade Proposal" }, "type": "module", diff --git a/a3p-integration/proposals/b:localchain/README.md b/a3p-integration/proposals/b:localchain/README.md index 453ab68c103..eef1bc4acfa 100644 --- a/a3p-integration/proposals/b:localchain/README.md +++ b/a3p-integration/proposals/b:localchain/README.md @@ -1,3 +1,5 @@ CoreEvalProposal to install vat-localchain -The `submission` for the proposal is automatically generated during `yarn build` in `a3p-integration` using the code in agoric-sdk through `script/generate-a3p-submission.sh`. +The `submission` for the proposal is automatically generated during `yarn build` +in `a3p-integration` using the code in agoric-sdk through +`script/generate-a3p-submission-dirs.sh`. and `script/generate-a3p-submission.sh` diff --git a/a3p-integration/proposals/b:localchain/package.json b/a3p-integration/proposals/b:localchain/package.json index 4f969d9eb32..057136a23d2 100644 --- a/a3p-integration/proposals/b:localchain/package.json +++ b/a3p-integration/proposals/b:localchain/package.json @@ -1,7 +1,8 @@ { "agoricProposal": { "type": "/agoric.swingset.CoreEvalProposal", - "source": "subdir" + "source": "subdir", + "sdk-generate": [ "test-localchain" ] }, "type": "module", "license": "Apache-2.0", diff --git a/scripts/generate-a3p-submission-dir.sh b/scripts/generate-a3p-submission-dir.sh new file mode 100755 index 00000000000..c5636bb7056 --- /dev/null +++ b/scripts/generate-a3p-submission-dir.sh @@ -0,0 +1,12 @@ +#!/bin/bash +set -ueo pipefail + +SCRIPT_DIR=$( cd ${0%/*} && pwd -P ) + +for proposal in ./proposals/?:* +do + cd $proposal + args=`jq -r < package.json '.agoricProposal["sdk-generate"][0]'` + $SCRIPT_DIR/generate-a3p-submission.sh $proposal $args + cd - +done diff --git a/scripts/generate-a3p-submission.sh b/scripts/generate-a3p-submission.sh index 1abfd40e732..afc91f2d586 100755 --- a/scripts/generate-a3p-submission.sh +++ b/scripts/generate-a3p-submission.sh @@ -5,20 +5,16 @@ sdkroot=$(cd -- "$(dirname "$0")/.." 
>/dev/null && pwd) cd "$sdkroot" -buildSubmission() { - proposalName=$1 - a3pProposal=$2 - output=${3:-$proposalName} - submissionName=${4:-submission} +proposalDir=$1 +proposalName=$2 +outfileBase=${3:-$proposalName} +submissionDirName=${4:-submission} - yarn agoric run "packages/builders/scripts/vats/$proposalName.js" +yarn agoric run "packages/builders/scripts/vats/$proposalName.js" +submissionDir="./a3p-integration/$proposalDir/$submissionDirName" - submissionDir="a3p-integration/proposals/$a3pProposal/$submissionName" - mkdir -p "$submissionDir" - cp $(grep -oh '/.*b1-.*.json' "$output"*) "$submissionDir" - mv "$output"* "$submissionDir" -} +mkdir -p "$submissionDir" +cp $(grep -oh '/.*b1-.*.json' "$outfileBase"*) "$submissionDir" -buildSubmission probe-zcf-bundle "a:upgrade-next" probeZcfBundle probe-submission -buildSubmission test-localchain "b:localchain" +mv "$outfileBase"* "$submissionDir" From 5500554cb6e27237ba0dc33e3fcbd76fa79eb1a9 Mon Sep 17 00:00:00 2001 From: Chris Hibbert Date: Wed, 28 Feb 2024 11:06:15 -0800 Subject: [PATCH 31/47] feat: move specification of generated submission files to proposals --- a3p-integration/README.md | 6 +++--- a3p-integration/proposals/a:upgrade-next/package.json | 3 ++- a3p-integration/proposals/b:localchain/package.json | 3 +++ scripts/generate-a3p-submission-dirs.sh | 9 +++++++++ 4 files changed, 17 insertions(+), 4 deletions(-) create mode 100755 scripts/generate-a3p-submission-dirs.sh diff --git a/a3p-integration/README.md b/a3p-integration/README.md index 660c72ad36e..8442a58b8a8 100644 --- a/a3p-integration/README.md +++ b/a3p-integration/README.md @@ -102,9 +102,9 @@ code, and must be rebuilt every time there is a change. The `/scripts/generate-a3p-submission.sh` script contains commands to generate the core-eval content and move it to the expected proposal package's submission directory. It is executed as part of `a3p-integration`'s `build:submission` step. -Each proposal that requires such a build step should add a `submission` entry in -the `agoricProposal` section of package.json. This value specifies the name of -another entry which gives parameters to be passed to `generate-a3p-submission.sh`. +Each proposal that requires such a build step should add a `build:submission` +rule in its package.json to specify the details of proposals that require a +build step. 
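Concretely, a proposal that needs a generated `submission` ends up with package.json entries along these lines — rendered here as a JavaScript object literal purely for illustration (the real file is plain JSON; the values are the ones the `b:localchain` proposal gains in this patch).

```js
// Illustration only: the shape of a proposal's package.json entries.
const proposalPackageJson = {
  agoricProposal: {
    type: '/agoric.swingset.CoreEvalProposal',
    source: 'subdir',
  },
  scripts: {
    'build:submission':
      '../../../scripts/generate-a3p-submission.sh test-localchain b:localchain',
  },
};
```

The `a:upgrade-next` proposal adds an analogous `build:submission` entry pointing at `probe-zcf-bundle`.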
## Building synthetic-chain images diff --git a/a3p-integration/proposals/a:upgrade-next/package.json b/a3p-integration/proposals/a:upgrade-next/package.json index 067a04a3d5d..3222bead2b6 100644 --- a/a3p-integration/proposals/a:upgrade-next/package.json +++ b/a3p-integration/proposals/a:upgrade-next/package.json @@ -24,7 +24,8 @@ ] }, "scripts": { - "agops": "yarn --cwd /usr/src/agoric-sdk/ --silent agops" + "agops": "yarn --cwd /usr/src/agoric-sdk/ --silent agops", + "build:submission": "../../../scripts/generate-a3p-submission.sh probe-zcf-bundle a:upgrade-next probeZcfBundle probe-submission" }, "packageManager": "yarn@4.1.0" } diff --git a/a3p-integration/proposals/b:localchain/package.json b/a3p-integration/proposals/b:localchain/package.json index 057136a23d2..97a94ab3ba1 100644 --- a/a3p-integration/proposals/b:localchain/package.json +++ b/a3p-integration/proposals/b:localchain/package.json @@ -17,5 +17,8 @@ "!submission" ] }, + "scripts": { + "build:submission": "../../../scripts/generate-a3p-submission.sh test-localchain b:localchain" + }, "packageManager": "yarn@4.1.0" } diff --git a/scripts/generate-a3p-submission-dirs.sh b/scripts/generate-a3p-submission-dirs.sh new file mode 100755 index 00000000000..6ed9d6bf3df --- /dev/null +++ b/scripts/generate-a3p-submission-dirs.sh @@ -0,0 +1,9 @@ +#!/bin/bash +set -ueo pipefail + +for proposal in ./proposals/?:* +do + cd $proposal + yarn run build:submission + cd - +done From e6b53e7e9300171cc6c3a32083f278c23c29fee2 Mon Sep 17 00:00:00 2001 From: Chris Hibbert Date: Wed, 20 Dec 2023 11:08:11 -0800 Subject: [PATCH 32/47] feat: refactor ZoeSeat to drop cyclic structure that blocked GC --- .../zoe/src/zoeService/originalZoeSeat.js | 346 ++++++++++++++++++ packages/zoe/src/zoeService/zoeSeat.js | 305 ++++++++++----- packages/zoe/test/unitTests/zcf/test-zcf.js | 2 +- 3 files changed, 562 insertions(+), 91 deletions(-) create mode 100644 packages/zoe/src/zoeService/originalZoeSeat.js diff --git a/packages/zoe/src/zoeService/originalZoeSeat.js b/packages/zoe/src/zoeService/originalZoeSeat.js new file mode 100644 index 00000000000..968dcf3412f --- /dev/null +++ b/packages/zoe/src/zoeService/originalZoeSeat.js @@ -0,0 +1,346 @@ +/* eslint @typescript-eslint/no-floating-promises: "warn" */ +import { SubscriberShape } from '@agoric/notifier'; +import { E } from '@endo/eventual-send'; +import { M, prepareExoClassKit } from '@agoric/vat-data'; +import { deeplyFulfilled } from '@endo/marshal'; +import { makePromiseKit } from '@endo/promise-kit'; + +import { satisfiesWant } from '../contractFacet/offerSafety.js'; +import '../types.js'; +import '../internal-types.js'; +import { + AmountKeywordRecordShape, + KeywordShape, + ExitObjectShape, + PaymentPKeywordRecordShape, +} from '../typeGuards.js'; + +const { Fail } = assert; + +const OriginalZoeSeatIKit = harden({ + zoeSeatAdmin: M.interface('ZoeSeatAdmin', { + replaceAllocation: M.call(AmountKeywordRecordShape).returns(), + exit: M.call(M.any()).returns(), + fail: M.call(M.any()).returns(), + resolveExitAndResult: M.call({ + offerResultPromise: M.promise(), + exitObj: ExitObjectShape, + }).returns(), + getExitSubscriber: M.call().returns(SubscriberShape), + // The return promise is empty, but doExit relies on settlement as a signal + // that the payouts have settled. The exit publisher is notified after that. 
+ finalPayouts: M.call(M.eref(PaymentPKeywordRecordShape)).returns( + M.promise(), + ), + }), + userSeat: M.interface('UserSeat', { + getProposal: M.call().returns(M.promise()), + getPayouts: M.call().returns(M.promise()), + getPayout: M.call(KeywordShape).returns(M.promise()), + getOfferResult: M.call().returns(M.promise()), + hasExited: M.call().returns(M.promise()), + tryExit: M.call().returns(M.promise()), + numWantsSatisfied: M.call().returns(M.promise()), + getFinalAllocation: M.call().returns(M.promise()), + getExitSubscriber: M.call().returns(M.any()), + }), +}); + +const assertHasNotExited = (c, msg) => { + !c.state.instanceAdminHelper.hasExited(c.facets.zoeSeatAdmin) || + assert(!c.state.instanceAdminHelper.hasExited(c.facets.zoeSeatAdmin), msg); +}; + +/** + * declareOldZoeSeatAdminKind declares an exo for the original kind of ZoeSeatKit. + * This version creates a reference cycle that garbage collection can't remove + * because it goes through weakMaps in two different Vats. We've defined a new + * Kind that doesn't have this problem, but we won't upgrade the existing + * objects, so the Kind must continue to be defined, but we don't return the + * maker function. + * + * The original ZoeSeatKit is an object that manages the state + * of a seat participating in a Zoe contract and return its two facets. + * + * The UserSeat is suitable to be handed to an agent outside zoe and the + * contract and allows them to query or monitor the current state, access the + * payouts and result, and call exit() if that's allowed for this seat. + * + * The zoeSeatAdmin is passed by Zoe to the ContractFacet (zcf), to allow zcf to + * query or update the allocation or exit the seat cleanly. + * + * @param {import('@agoric/vat-data').Baggage} baggage + * @param {() => PublishKit} makeDurablePublishKit + */ +export const declareOldZoeSeatAdminKind = (baggage, makeDurablePublishKit) => { + const doExit = ( + zoeSeatAdmin, + currentAllocation, + withdrawFacet, + instanceAdminHelper, + ) => { + /** @type {PaymentPKeywordRecord} */ + const payouts = withdrawFacet.withdrawPayments(currentAllocation); + return E.when( + zoeSeatAdmin.finalPayouts(payouts), + () => instanceAdminHelper.exitZoeSeatAdmin(zoeSeatAdmin), + () => instanceAdminHelper.exitZoeSeatAdmin(zoeSeatAdmin), + ); + }; + + // There is a race between resolveExitAndResult() and getOfferResult() that + // can be limited to when the adminFactory is paged in. If state.offerResult + // is defined, getOfferResult will return it. If it's not defined when + // getOfferResult is called, create a promiseKit, return the promise and store + // the kit here. When resolveExitAndResult() is called, it saves + // state.offerResult and resolves the promise if it exists, then removes the + // table entry. + /** + * @typedef {WeakMap} + */ + const ephemeralOfferResultStore = new WeakMap(); + + // notice that this returns a maker function which we drop on the floor. 
+ prepareExoClassKit( + baggage, + 'ZoeSeatKit', + OriginalZoeSeatIKit, + /** + * + * @param {Allocation} initialAllocation + * @param {ProposalRecord} proposal + * @param {InstanceAdminHelper} instanceAdminHelper + * @param {WithdrawFacet} withdrawFacet + * @param {ERef} [exitObj] + * @param {boolean} [offerResultIsUndefined] + */ + ( + initialAllocation, + proposal, + instanceAdminHelper, + withdrawFacet, + exitObj = undefined, + // emptySeatKits start with offerResult validly undefined; others can set + // it to anything (including undefined) in resolveExitAndResult() + offerResultIsUndefined = false, + ) => { + const { publisher, subscriber } = makeDurablePublishKit(); + return { + currentAllocation: initialAllocation, + proposal, + exitObj, + offerResult: undefined, + offerResultStored: offerResultIsUndefined, + instanceAdminHelper, + withdrawFacet, + publisher, + subscriber, + payouts: harden({}), + exiting: false, + }; + }, + { + zoeSeatAdmin: { + replaceAllocation(replacementAllocation) { + const { state } = this; + assertHasNotExited( + this, + 'Cannot replace allocation. Seat has already exited', + ); + harden(replacementAllocation); + // Merging happens in ZCF, so replacementAllocation can + // replace the old allocation entirely. + + state.currentAllocation = replacementAllocation; + }, + exit(completion) { + const { state, facets } = this; + // Since this method doesn't wait, we could re-enter via exitAllSeats. + // If that happens, we shouldn't re-do any of the work. + if (state.exiting) { + return; + } + assertHasNotExited(this, 'Cannot exit seat. Seat has already exited'); + + state.exiting = true; + E.when( + doExit( + facets.zoeSeatAdmin, + state.currentAllocation, + state.withdrawFacet, + state.instanceAdminHelper, + ), + () => state.publisher.finish(completion), + ); + }, + fail(reason) { + const { state, facets } = this; + // Since this method doesn't wait, we could re-enter via failAllSeats. + // If that happens, we shouldn't re-do any of the work. + if (state.exiting) { + return; + } + + assertHasNotExited(this, 'Cannot fail seat. Seat has already exited'); + + state.exiting = true; + E.when( + doExit( + facets.zoeSeatAdmin, + state.currentAllocation, + state.withdrawFacet, + state.instanceAdminHelper, + ), + () => state.publisher.fail(reason), + () => state.publisher.fail(reason), + ); + }, + // called only for seats resulting from offers. + /** @param {HandleOfferResult} result */ + resolveExitAndResult({ offerResultPromise, exitObj }) { + const { state, facets } = this; + + !state.offerResultStored || + Fail`offerResultStored before offerResultPromise`; + + if (!ephemeralOfferResultStore.has(facets.userSeat)) { + // this was called before getOfferResult + const kit = makePromiseKit(); + kit.resolve(offerResultPromise); + ephemeralOfferResultStore.set(facets.userSeat, kit); + } + + const pKit = ephemeralOfferResultStore.get(facets.userSeat); + E.when( + offerResultPromise, + offerResult => { + // Resolve the ephemeral promise for offerResult + pKit.resolve(offerResult); + // Now we want to store the offerResult in `state` to get it off the heap, + // but we need to handle three cases: + // 1. already durable. (This includes being a remote presence.) + // 2. has promises for durable objects. + // 3. not durable even after resolving promises. + // For #1 we can assign directly, but we deeply await to also handle #2. + void E.when( + deeplyFulfilled(offerResult), + fulfilledOfferResult => { + try { + // In cases 1-2 this assignment will succeed. 
+ state.offerResult = fulfilledOfferResult; + // If it doesn't, then these lines won't be reached so the + // flag will stay false and the promise will stay in the heap + state.offerResultStored = true; + ephemeralOfferResultStore.delete(facets.userSeat); + } catch (err) { + console.warn( + `non-durable offer result will be lost upon zoe vat termination: ${offerResult}`, + ); + } + }, + // no rejection handler because an offer result containing promises that reject + // is within spec + ); + }, + e => { + pKit.reject(e); + // NB: leave the rejected promise in the ephemeralOfferResultStore + // because it can't go in durable state + }, + ); + + state.exitObj = exitObj; + }, + getExitSubscriber() { + const { state } = this; + return state.subscriber; + }, + async finalPayouts(payments) { + const { state } = this; + + const settledPayouts = await deeplyFulfilled(payments); + state.payouts = settledPayouts; + }, + }, + userSeat: { + async getProposal() { + const { state } = this; + return state.proposal; + }, + async getPayouts() { + const { state } = this; + + return E.when( + state.subscriber.subscribeAfter(), + () => state.payouts, + () => state.payouts, + ); + }, + async getPayout(keyword) { + const { state } = this; + + // subscriber.subscribeAfter() only triggers after publisher.finish() + // in exit() or publisher.fail() in fail(). Both of those wait for + // doExit(), which ensures that finalPayouts() has set state.payouts. + return E.when( + state.subscriber.subscribeAfter(), + () => state.payouts[keyword], + () => state.payouts[keyword], + ); + }, + + async getOfferResult() { + const { state, facets } = this; + + if (state.offerResultStored) { + return state.offerResult; + } + + if (ephemeralOfferResultStore.has(facets.userSeat)) { + return ephemeralOfferResultStore.get(facets.userSeat).promise; + } + + const kit = makePromiseKit(); + ephemeralOfferResultStore.set(facets.userSeat, kit); + return kit.promise; + }, + async hasExited() { + const { state, facets } = this; + + return ( + state.exiting || + state.instanceAdminHelper.hasExited(facets.zoeSeatAdmin) + ); + }, + async tryExit() { + const { state } = this; + if (!state.exitObj) + throw Fail`exitObj must be initialized before use`; + assertHasNotExited(this, 'Cannot exit; seat has already exited'); + + return E(state.exitObj).exit(); + }, + async numWantsSatisfied() { + const { state } = this; + return E.when( + state.subscriber.subscribeAfter(), + () => satisfiesWant(state.proposal, state.currentAllocation), + () => satisfiesWant(state.proposal, state.currentAllocation), + ); + }, + getExitSubscriber() { + const { state } = this; + return state.subscriber; + }, + getFinalAllocation() { + const { state } = this; + return E.when( + state.subscriber.subscribeAfter(), + () => state.currentAllocation, + () => state.currentAllocation, + ); + }, + }, + }, + ); +}; diff --git a/packages/zoe/src/zoeService/zoeSeat.js b/packages/zoe/src/zoeService/zoeSeat.js index f58200e164e..f76849f0b8c 100644 --- a/packages/zoe/src/zoeService/zoeSeat.js +++ b/packages/zoe/src/zoeService/zoeSeat.js @@ -10,14 +10,33 @@ import '../types.js'; import '../internal-types.js'; import { AmountKeywordRecordShape, - KeywordShape, ExitObjectShape, + KeywordShape, PaymentPKeywordRecordShape, } from '../typeGuards.js'; +import { declareOldZoeSeatAdminKind } from './originalZoeSeat.js'; const { Fail } = assert; -const ZoeSeatIKit = harden({ +// ZoeSeatAdmin has the implementation of these methods, but ZoeUserSeat is the +// facet shared with users. 
The latter transparently forwards to the former. +const coreUserSeatMethods = harden({ + getProposal: M.call().returns(M.promise()), + getPayouts: M.call().returns(M.promise()), + getPayout: M.call(KeywordShape).returns(M.promise()), + getOfferResult: M.call().returns(M.promise()), + hasExited: M.call().returns(M.promise()), + numWantsSatisfied: M.call().returns(M.promise()), + getFinalAllocation: M.call().returns(M.promise()), + getExitSubscriber: M.call().returns(M.any()), +}); + +const ZoeSeatAdmin = harden({ + userSeatAccess: M.interface('UserSeatAccess', { + ...coreUserSeatMethods, + initExitObjectSetter: M.call(M.any()).returns(), + assertHasNotExited: M.call(M.string()).returns(), + }), zoeSeatAdmin: M.interface('ZoeSeatAdmin', { replaceAllocation: M.call(AmountKeywordRecordShape).returns(), exit: M.call(M.any()).returns(), @@ -33,24 +52,18 @@ const ZoeSeatIKit = harden({ M.promise(), ), }), +}); + +const ZoeUserSeat = harden({ userSeat: M.interface('UserSeat', { - getProposal: M.call().returns(M.promise()), - getPayouts: M.call().returns(M.promise()), - getPayout: M.call(KeywordShape).returns(M.promise()), - getOfferResult: M.call().returns(M.promise()), - hasExited: M.call().returns(M.promise()), + ...coreUserSeatMethods, tryExit: M.call().returns(M.promise()), - numWantsSatisfied: M.call().returns(M.promise()), - getFinalAllocation: M.call().returns(M.promise()), - getExitSubscriber: M.call().returns(M.any()), + }), + exitObjSetter: M.interface('exitObjSetter', { + setExitObject: M.call(M.or(M.remotable(), M.undefined())).returns(), }), }); -const assertHasNotExited = (c, msg) => { - !c.state.instanceAdminHelper.hasExited(c.facets.zoeSeatAdmin) || - assert(!c.state.instanceAdminHelper.hasExited(c.facets.zoeSeatAdmin), msg); -}; - /** * makeZoeSeatAdminFactory returns a maker for an object that manages the state * of a seat participating in a Zoe contract and return its two facets. 
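Reduced to a toy sketch (names such as `makeSeatPair` and the simplified method bodies are invented here; no zones or durable exo machinery; it assumes the SES `harden` global that Agoric code relies on): the user-facing seat only forwards reads to the admin-side `userSeatAccess` facet, and the one strong reference it keeps — the exit object — can be dropped through a small setter facet once the seat exits, which is what lets the cross-vat cycle be collected.

```js
// Toy sketch of the cycle-breaking shape; not the prepared exo classes below.
const makeSeatPair = (userSeatAccess, exitObj) => {
  let heldExitObj = exitObj; // only strong link from the user side back to zcf
  const userSeat = harden({
    // reads simply forward to the admin-side facet
    getProposal: () => userSeatAccess.getProposal(),
    hasExited: () => userSeatAccess.hasExited(),
    tryExit: () => {
      if (!heldExitObj) throw Error('seat already exited');
      const result = heldExitObj.exit();
      heldExitObj = undefined; // sever the link
      return result;
    },
  });
  const exitObjSetter = harden({
    // zoeSeatAdmin.exit()/fail() call this with `undefined` to drop the reference
    setExitObject: obj => {
      heldExitObj = obj;
    },
  });
  return harden({ userSeat, exitObjSetter });
};
```

Note also that in the diff that follows, the ephemeral offer-result store is keyed by the `zoeSeatAdmin` facet rather than the `userSeat`, since the two facets no longer live in the same exo kit.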
@@ -70,6 +83,8 @@ export const makeZoeSeatAdminFactory = baggage => { 'zoe Seat publisher', ); + declareOldZoeSeatAdminKind(baggage, makeDurablePublishKit); + const doExit = ( zoeSeatAdmin, currentAllocation, @@ -97,17 +112,15 @@ export const makeZoeSeatAdminFactory = baggage => { */ const ephemeralOfferResultStore = new WeakMap(); - return prepareExoClassKit( + const makeZoeSeatAdmin = prepareExoClassKit( baggage, - 'ZoeSeatKit', - ZoeSeatIKit, + 'ZoeSeatAdmin', + ZoeSeatAdmin, /** - * * @param {Allocation} initialAllocation * @param {ProposalRecord} proposal * @param {InstanceAdminHelper} instanceAdminHelper * @param {WithdrawFacet} withdrawFacet - * @param {ERef} [exitObj] * @param {boolean} [offerResultIsUndefined] */ ( @@ -115,7 +128,6 @@ export const makeZoeSeatAdminFactory = baggage => { proposal, instanceAdminHelper, withdrawFacet, - exitObj = undefined, // emptySeatKits start with offerResult validly undefined; others can set // it to anything (including undefined) in resolveExitAndResult() offerResultIsUndefined = false, @@ -124,7 +136,6 @@ export const makeZoeSeatAdminFactory = baggage => { return { currentAllocation: initialAllocation, proposal, - exitObj, offerResult: undefined, offerResultStored: offerResultIsUndefined, instanceAdminHelper, @@ -133,14 +144,97 @@ export const makeZoeSeatAdminFactory = baggage => { subscriber, payouts: harden({}), exiting: false, + /** @type {{ setExitObject: (exitObj: ExitObj | undefined) => void} | undefined} */ + exitObjectSetter: undefined, }; }, { + // methods for userSeat to call + userSeatAccess: { + async getProposal() { + const { state } = this; + return state.proposal; + }, + async getPayouts() { + const { state } = this; + + return E.when( + state.subscriber.subscribeAfter(), + () => state.payouts, + () => state.payouts, + ); + }, + async getPayout(keyword) { + const { state } = this; + + // subscriber.subscribeAfter() only triggers after publisher.finish() + // in exit() or publisher.fail() in fail(). Both of those wait for + // doExit(), which ensures that finalPayouts() has set state.payouts. 
+ return E.when( + state.subscriber.subscribeAfter(), + () => state.payouts[keyword], + () => state.payouts[keyword], + ); + }, + + async getOfferResult() { + const { state, facets } = this; + + if (state.offerResultStored) { + return state.offerResult; + } + + if (ephemeralOfferResultStore.has(facets.zoeSeatAdmin)) { + return ephemeralOfferResultStore.get(facets.zoeSeatAdmin).promise; + } + + const kit = makePromiseKit(); + ephemeralOfferResultStore.set(facets.zoeSeatAdmin, kit); + return kit.promise; + }, + async hasExited() { + const { state, facets } = this; + + return ( + state.exiting || + state.instanceAdminHelper.hasExited(facets.zoeSeatAdmin) + ); + }, + async numWantsSatisfied() { + const { state } = this; + return E.when( + state.subscriber.subscribeAfter(), + () => satisfiesWant(state.proposal, state.currentAllocation), + () => satisfiesWant(state.proposal, state.currentAllocation), + ); + }, + getExitSubscriber() { + const { state } = this; + return state.subscriber; + }, + getFinalAllocation() { + const { state } = this; + return E.when( + state.subscriber.subscribeAfter(), + () => state.currentAllocation, + () => state.currentAllocation, + ); + }, + initExitObjectSetter(setter) { + this.state.exitObjectSetter = setter; + }, + assertHasNotExited(msg) { + const { state, facets } = this; + const { instanceAdminHelper } = state; + const hasExited1 = instanceAdminHelper.hasExited(facets.zoeSeatAdmin); + + !hasExited1 || assert(!hasExited1, msg); + }, + }, zoeSeatAdmin: { replaceAllocation(replacementAllocation) { - const { state } = this; - assertHasNotExited( - this, + const { state, facets } = this; + facets.userSeatAccess.assertHasNotExited( 'Cannot replace allocation. Seat has already exited', ); harden(replacementAllocation); @@ -156,7 +250,9 @@ export const makeZoeSeatAdminFactory = baggage => { if (state.exiting) { return; } - assertHasNotExited(this, 'Cannot exit seat. Seat has already exited'); + facets.userSeatAccess.assertHasNotExited( + 'Cannot exit seat. Seat has already exited', + ); state.exiting = true; E.when( @@ -166,7 +262,13 @@ export const makeZoeSeatAdminFactory = baggage => { state.withdrawFacet, state.instanceAdminHelper, ), - () => state.publisher.finish(completion), + () => { + if (state.exitObjectSetter) { + state.exitObjectSetter.setExitObject(undefined); + state.exitObjectSetter = undefined; + } + return state.publisher.finish(completion); + }, ); }, fail(reason) { @@ -177,10 +279,12 @@ export const makeZoeSeatAdminFactory = baggage => { return; } - assertHasNotExited(this, 'Cannot fail seat. Seat has already exited'); + facets.userSeatAccess.assertHasNotExited( + 'Cannot fail seat. Seat has already exited', + ); state.exiting = true; - E.when( + void E.when( doExit( facets.zoeSeatAdmin, state.currentAllocation, @@ -190,6 +294,11 @@ export const makeZoeSeatAdminFactory = baggage => { () => state.publisher.fail(reason), () => state.publisher.fail(reason), ); + + if (state.exitObjectSetter) { + state.exitObjectSetter.setExitObject(undefined); + state.exitObjectSetter = undefined; + } }, // called only for seats resulting from offers. 
/** @param {HandleOfferResult} result */ @@ -199,15 +308,15 @@ export const makeZoeSeatAdminFactory = baggage => { !state.offerResultStored || Fail`offerResultStored before offerResultPromise`; - if (!ephemeralOfferResultStore.has(facets.userSeat)) { + if (!ephemeralOfferResultStore.has(facets.zoeSeatAdmin)) { // this was called before getOfferResult const kit = makePromiseKit(); kit.resolve(offerResultPromise); - ephemeralOfferResultStore.set(facets.userSeat, kit); + ephemeralOfferResultStore.set(facets.zoeSeatAdmin, kit); } - const pKit = ephemeralOfferResultStore.get(facets.userSeat); - E.when( + const pKit = ephemeralOfferResultStore.get(facets.zoeSeatAdmin); + void E.when( offerResultPromise, offerResult => { // Resolve the ephemeral promise for offerResult @@ -227,7 +336,7 @@ export const makeZoeSeatAdminFactory = baggage => { // If it doesn't, then these lines won't be reached so the // flag will stay false and the promise will stay in the heap state.offerResultStored = true; - ephemeralOfferResultStore.delete(facets.userSeat); + ephemeralOfferResultStore.delete(facets.zoeSeatAdmin); } catch (err) { console.warn( `non-durable offer result will be lost upon zoe vat termination: ${offerResult}`, @@ -245,7 +354,8 @@ export const makeZoeSeatAdminFactory = baggage => { }, ); - state.exitObj = exitObj; + // @ts-expect-error exitObjectSetter is set at birth. + state.exitObjectSetter.setExitObject(exitObj); }, getExitSubscriber() { const { state } = this; @@ -258,85 +368,100 @@ export const makeZoeSeatAdminFactory = baggage => { state.payouts = settledPayouts; }, }, + }, + ); + + const makeUserSeat = prepareExoClassKit( + baggage, + 'ZoeUserSeat', + ZoeUserSeat, + (userSeatAccess, exitObj) => { + return { + userSeatAccess, + exitObj, + }; + }, + { userSeat: { async getProposal() { - const { state } = this; - return state.proposal; + return this.state.userSeatAccess.getProposal(); }, async getPayouts() { - const { state } = this; - - return E.when( - state.subscriber.subscribeAfter(), - () => state.payouts, - () => state.payouts, - ); + return this.state.userSeatAccess.getPayouts(); }, async getPayout(keyword) { - const { state } = this; - - // subscriber.subscribeAfter() only triggers after publisher.finish() - // in exit() or publisher.fail() in fail(). Both of those wait for - // doExit(), which ensures that finalPayouts() has set state.payouts. 
- return E.when( - state.subscriber.subscribeAfter(), - () => state.payouts[keyword], - () => state.payouts[keyword], - ); + return this.state.userSeatAccess.getPayout(keyword); }, async getOfferResult() { - const { state, facets } = this; - - if (state.offerResultStored) { - return state.offerResult; - } - - if (ephemeralOfferResultStore.has(facets.userSeat)) { - return ephemeralOfferResultStore.get(facets.userSeat).promise; - } - - const kit = makePromiseKit(); - ephemeralOfferResultStore.set(facets.userSeat, kit); - return kit.promise; + return this.state.userSeatAccess.getOfferResult(); }, async hasExited() { - const { state, facets } = this; - - return ( - state.exiting || - state.instanceAdminHelper.hasExited(facets.zoeSeatAdmin) - ); + return this.state.userSeatAccess.hasExited(); }, async tryExit() { const { state } = this; + + state.userSeatAccess.assertHasNotExited( + 'Cannot exit; seat has already exited', + ); if (!state.exitObj) - throw Fail`exitObj must be initialized before use`; - assertHasNotExited(this, 'Cannot exit; seat has already exited'); + throw Fail`exitObj not initialized or already nullified`; - return E(state.exitObj).exit(); + const exitResult = E(state.exitObj).exit(); + + // unlink an un-collectible cycle. + state.exitObj = undefined; + + return exitResult; }, async numWantsSatisfied() { - const { state } = this; - return E.when( - state.subscriber.subscribeAfter(), - () => satisfiesWant(state.proposal, state.currentAllocation), - () => satisfiesWant(state.proposal, state.currentAllocation), - ); + return this.state.userSeatAccess.numWantsSatisfied(); }, getExitSubscriber() { - const { state } = this; - return state.subscriber; + return this.state.userSeatAccess.getExitSubscriber(); }, getFinalAllocation() { - const { state } = this; - return E.when( - state.subscriber.subscribeAfter(), - () => state.currentAllocation, - () => state.currentAllocation, - ); + return this.state.userSeatAccess.getFinalAllocation(); + }, + }, + exitObjSetter: { + setExitObject(exitObject) { + this.state.exitObj = exitObject; }, }, }, ); + + /** + * @param {Allocation} initialAllocation + * @param {ProposalRecord} proposal + * @param {InstanceAdminHelper} instanceAdminHelper + * @param {WithdrawFacet} withdrawFacet + * @param {ERef} [exitObj] + * @param {boolean} [offerResultIsUndefined] + */ + const makeZoeSeatAdminKit = ( + initialAllocation, + proposal, + instanceAdminHelper, + withdrawFacet, + exitObj = undefined, + offerResultIsUndefined = false, + ) => { + const { zoeSeatAdmin, userSeatAccess } = makeZoeSeatAdmin( + initialAllocation, + proposal, + instanceAdminHelper, + withdrawFacet, + offerResultIsUndefined, + ); + const { userSeat, exitObjSetter } = makeUserSeat(userSeatAccess, exitObj); + userSeatAccess.initExitObjectSetter(exitObjSetter); + + // The original makeZoeSeatAdminKit returned two facets of the same kind. + // This is returning two independent facets. 
+ return { userSeat, zoeSeatAdmin }; + }; + return makeZoeSeatAdminKit; }; diff --git a/packages/zoe/test/unitTests/zcf/test-zcf.js b/packages/zoe/test/unitTests/zcf/test-zcf.js index a8e68a05d4f..9859a0500f7 100644 --- a/packages/zoe/test/unitTests/zcf/test-zcf.js +++ b/packages/zoe/test/unitTests/zcf/test-zcf.js @@ -992,7 +992,7 @@ test(`userSeat.getPayout() should throw from zcf.makeEmptySeatKit`, async t => { // @ts-expect-error deliberate invalid arguments for testing await t.throwsAsync(() => E(userSeat).getPayout(), { message: - 'In "getPayout" method of (ZoeSeatKit userSeat): Expected at least 1 arguments: []', + 'In "getPayout" method of (ZoeUserSeat userSeat): Expected at least 1 arguments: []', }); }); From bdf582ebfd9a7190ad21f70766d43f7c03a8f480 Mon Sep 17 00:00:00 2001 From: Chris Hibbert Date: Wed, 27 Dec 2023 16:20:11 -0800 Subject: [PATCH 33/47] chore: deduplicate Interface guards for zoeSeat and originalZoeSeat --- .../zoe/src/zoeService/originalZoeSeat.js | 30 +++++++----- packages/zoe/src/zoeService/zoeSeat.js | 49 +++++-------------- 2 files changed, 29 insertions(+), 50 deletions(-) diff --git a/packages/zoe/src/zoeService/originalZoeSeat.js b/packages/zoe/src/zoeService/originalZoeSeat.js index 968dcf3412f..0187d07dcd3 100644 --- a/packages/zoe/src/zoeService/originalZoeSeat.js +++ b/packages/zoe/src/zoeService/originalZoeSeat.js @@ -17,7 +17,23 @@ import { const { Fail } = assert; -const OriginalZoeSeatIKit = harden({ +export const coreUserSeatMethods = harden({ + getProposal: M.call().returns(M.promise()), + getPayouts: M.call().returns(M.promise()), + getPayout: M.call(KeywordShape).returns(M.promise()), + getOfferResult: M.call().returns(M.promise()), + hasExited: M.call().returns(M.promise()), + numWantsSatisfied: M.call().returns(M.promise()), + getFinalAllocation: M.call().returns(M.promise()), + getExitSubscriber: M.call().returns(M.any()), +}); + +export const ZoeUserSeatShape = M.interface('UserSeat', { + ...coreUserSeatMethods, + tryExit: M.call().returns(M.promise()), +}); + +export const OriginalZoeSeatIKit = harden({ zoeSeatAdmin: M.interface('ZoeSeatAdmin', { replaceAllocation: M.call(AmountKeywordRecordShape).returns(), exit: M.call(M.any()).returns(), @@ -33,17 +49,7 @@ const OriginalZoeSeatIKit = harden({ M.promise(), ), }), - userSeat: M.interface('UserSeat', { - getProposal: M.call().returns(M.promise()), - getPayouts: M.call().returns(M.promise()), - getPayout: M.call(KeywordShape).returns(M.promise()), - getOfferResult: M.call().returns(M.promise()), - hasExited: M.call().returns(M.promise()), - tryExit: M.call().returns(M.promise()), - numWantsSatisfied: M.call().returns(M.promise()), - getFinalAllocation: M.call().returns(M.promise()), - getExitSubscriber: M.call().returns(M.any()), - }), + userSeat: ZoeUserSeatShape, }); const assertHasNotExited = (c, msg) => { diff --git a/packages/zoe/src/zoeService/zoeSeat.js b/packages/zoe/src/zoeService/zoeSeat.js index f76849f0b8c..363932fb89b 100644 --- a/packages/zoe/src/zoeService/zoeSeat.js +++ b/packages/zoe/src/zoeService/zoeSeat.js @@ -1,5 +1,5 @@ /* eslint @typescript-eslint/no-floating-promises: "warn" */ -import { prepareDurablePublishKit, SubscriberShape } from '@agoric/notifier'; +import { prepareDurablePublishKit } from '@agoric/notifier'; import { E } from '@endo/eventual-send'; import { M, prepareExoClassKit } from '@agoric/vat-data'; import { deeplyFulfilled } from '@endo/marshal'; @@ -9,27 +9,17 @@ import { satisfiesWant } from '../contractFacet/offerSafety.js'; import '../types.js'; 
import '../internal-types.js'; import { - AmountKeywordRecordShape, - ExitObjectShape, - KeywordShape, - PaymentPKeywordRecordShape, -} from '../typeGuards.js'; -import { declareOldZoeSeatAdminKind } from './originalZoeSeat.js'; + declareOldZoeSeatAdminKind, + OriginalZoeSeatIKit, + ZoeUserSeatShape, + coreUserSeatMethods, +} from './originalZoeSeat.js'; const { Fail } = assert; -// ZoeSeatAdmin has the implementation of these methods, but ZoeUserSeat is the -// facet shared with users. The latter transparently forwards to the former. -const coreUserSeatMethods = harden({ - getProposal: M.call().returns(M.promise()), - getPayouts: M.call().returns(M.promise()), - getPayout: M.call(KeywordShape).returns(M.promise()), - getOfferResult: M.call().returns(M.promise()), - hasExited: M.call().returns(M.promise()), - numWantsSatisfied: M.call().returns(M.promise()), - getFinalAllocation: M.call().returns(M.promise()), - getExitSubscriber: M.call().returns(M.any()), -}); +// ZoeSeatAdmin has the implementation of coreUserSeatMethods, but ZoeUserSeat +// is the facet shared with users. The latter transparently forwards to the +// former. const ZoeSeatAdmin = harden({ userSeatAccess: M.interface('UserSeatAccess', { @@ -37,28 +27,11 @@ const ZoeSeatAdmin = harden({ initExitObjectSetter: M.call(M.any()).returns(), assertHasNotExited: M.call(M.string()).returns(), }), - zoeSeatAdmin: M.interface('ZoeSeatAdmin', { - replaceAllocation: M.call(AmountKeywordRecordShape).returns(), - exit: M.call(M.any()).returns(), - fail: M.call(M.any()).returns(), - resolveExitAndResult: M.call({ - offerResultPromise: M.promise(), - exitObj: ExitObjectShape, - }).returns(), - getExitSubscriber: M.call().returns(SubscriberShape), - // The return promise is empty, but doExit relies on settlement as a signal - // that the payouts have settled. The exit publisher is notified after that. 
- finalPayouts: M.call(M.eref(PaymentPKeywordRecordShape)).returns( - M.promise(), - ), - }), + zoeSeatAdmin: OriginalZoeSeatIKit.zoeSeatAdmin, }); const ZoeUserSeat = harden({ - userSeat: M.interface('UserSeat', { - ...coreUserSeatMethods, - tryExit: M.call().returns(M.promise()), - }), + userSeat: ZoeUserSeatShape, exitObjSetter: M.interface('exitObjSetter', { setExitObject: M.call(M.or(M.remotable(), M.undefined())).returns(), }), From 5d5722f1d78fcb102ab743121ac3e05a2e5f3460 Mon Sep 17 00:00:00 2001 From: Chris Hibbert Date: Tue, 2 Jan 2024 09:30:12 -0800 Subject: [PATCH 34/47] feat: when zcfSeats exit or fail, delete objects holding cycles (#8697) in zcfSeat.js: zcfSeatToSeatHandle.delete(self) in exit.js: state.zccfSeat = undefined --- packages/zoe/src/contractFacet/exit.js | 1 + packages/zoe/src/contractFacet/zcfSeat.js | 2 ++ 2 files changed, 3 insertions(+) diff --git a/packages/zoe/src/contractFacet/exit.js b/packages/zoe/src/contractFacet/exit.js index 2d541ba2806..ac1cd92791e 100644 --- a/packages/zoe/src/contractFacet/exit.js +++ b/packages/zoe/src/contractFacet/exit.js @@ -35,6 +35,7 @@ export const makeMakeExiter = baggage => { exit() { const { state } = this; state.zcfSeat.exit(); + state.zcfSeat = undefined; }, }, { diff --git a/packages/zoe/src/contractFacet/zcfSeat.js b/packages/zoe/src/contractFacet/zcfSeat.js index 2ba5f7e4796..15cc5403d12 100644 --- a/packages/zoe/src/contractFacet/zcfSeat.js +++ b/packages/zoe/src/contractFacet/zcfSeat.js @@ -159,6 +159,7 @@ export const createSeatManager = ( assertNoStagedAllocation(self); doExitSeat(self); E(zoeInstanceAdmin).exitSeat(zcfSeatToSeatHandle.get(self), completion); + zcfSeatToSeatHandle.delete(self); }, fail( reason = Error( @@ -179,6 +180,7 @@ export const createSeatManager = ( zcfSeatToSeatHandle.get(self), harden(reason), ); + zcfSeatToSeatHandle.delete(self); } return reason; }, From 26bbc2ff40771ac2d554d9efdde2d21260ccab7d Mon Sep 17 00:00:00 2001 From: Chris Hibbert Date: Wed, 28 Feb 2024 17:02:30 -0800 Subject: [PATCH 35/47] feat: allow creators/revivors of Issuers to decline recoverySets --- packages/ERTP/src/issuerKit.js | 79 ++++++++++++++++++++++++++---- packages/ERTP/src/paymentLedger.js | 27 +++++++--- packages/ERTP/src/purse.js | 55 +++++++++++++++++++-- packages/ERTP/src/types-ambient.js | 31 ++++++++++-- 4 files changed, 165 insertions(+), 27 deletions(-) diff --git a/packages/ERTP/src/issuerKit.js b/packages/ERTP/src/issuerKit.js index 65cce4993c5..3aaa0ddfbe0 100644 --- a/packages/ERTP/src/issuerKit.js +++ b/packages/ERTP/src/issuerKit.js @@ -1,6 +1,6 @@ // @jessie-check -import { assert } from '@agoric/assert'; +import { assert, Fail } from '@agoric/assert'; import { assertPattern } from '@agoric/store'; import { makeScalarBigMapStore } from '@agoric/vat-data'; import { makeDurableZone } from '@agoric/zone/durable.js'; @@ -26,6 +26,8 @@ import './types-ambient.js'; * @template {AssetKind} K * @param {IssuerRecord} issuerRecord * @param {import('@agoric/zone').Zone} issuerZone + * @param {RecoverySetsOption} recoverySetsState Omitted from issuerRecord + * because it was added in an upgrade. * @param {ShutdownWithFailure} [optShutdownWithFailure] If this issuer fails in * the middle of an atomic action (which btw should never happen), it * potentially leaves its ledger in a corrupted state. 
If this function was @@ -38,6 +40,7 @@ import './types-ambient.js'; const setupIssuerKit = ( { name, assetKind, displayInfo, elementShape }, issuerZone, + recoverySetsState, optShutdownWithFailure = undefined, ) => { assert.typeof(name, 'string'); @@ -62,6 +65,7 @@ const setupIssuerKit = ( assetKind, cleanDisplayInfo, elementShape, + recoverySetsState, optShutdownWithFailure, ); @@ -77,6 +81,12 @@ harden(setupIssuerKit); /** The key at which the issuer record is stored. */ const INSTANCE_KEY = 'issuer'; +/** + * The key at which the issuerKit's `RecoverySetsOption` state is stored. + * Introduced by an upgrade, so may be absent on a predecessor incarnation. See + * `RecoverySetsOption` for defaulting behavior. + */ +const RECOVERY_SETS_STATE = 'recoverySetsState'; /** * Used _only_ to upgrade a predecessor issuerKit. Use `makeDurableIssuerKit` to @@ -91,15 +101,39 @@ const INSTANCE_KEY = 'issuer'; * unit of computation, like the enclosing vat, can be shutdown before * anything else is corrupted by that corrupted state. See * https://github.com/Agoric/agoric-sdk/issues/3434 + * @param {RecoverySetsOption} [recoverySetsOption] Added in upgrade, so last + * and optional. See `RecoverySetsOption` for defaulting behavior. * @returns {IssuerKit} */ export const upgradeIssuerKit = ( issuerBaggage, optShutdownWithFailure = undefined, + recoverySetsOption = undefined, ) => { const issuerRecord = issuerBaggage.get(INSTANCE_KEY); const issuerZone = makeDurableZone(issuerBaggage); - return setupIssuerKit(issuerRecord, issuerZone, optShutdownWithFailure); + const oldRecoverySetsState = issuerBaggage.has(RECOVERY_SETS_STATE) + ? issuerBaggage.get(RECOVERY_SETS_STATE) + : 'hasRecoverySets'; + if ( + oldRecoverySetsState === 'noRecoverySets' && + recoverySetsOption === 'hasRecoverySets' + ) { + Fail`Cannot (yet?) upgrade from 'noRecoverySets' to 'hasRecoverySets'`; + } + if ( + oldRecoverySetsState === 'hasRecoverySets' && + recoverySetsOption === 'noRecoverySets' + ) { + Fail`Cannot (yet?) upgrade from 'hasRecoverySets' to 'noRecoverySets'`; + } + const recoverySetsState = recoverySetsOption || oldRecoverySetsState; + return setupIssuerKit( + issuerRecord, + issuerZone, + recoverySetsState, + optShutdownWithFailure, + ); }; harden(upgradeIssuerKit); @@ -119,8 +153,14 @@ export const hasIssuer = baggage => baggage.has(INSTANCE_KEY); * typically, the amount of an invitation payment is a singleton set. Such a * payment is often referred to in the singular as "an invitation".) * + * `recoverySetsOption` added in upgrade. Note that `IssuerOptionsRecord` is + * never stored, so we never need to worry about inheriting one from a + * predecessor predating the introduction of recovery sets. See + * `RecoverySetsOption` for defaulting behavior. 
+ * * @typedef {Partial<{ * elementShape: Pattern; + * recoverySetsOption: RecoverySetsOption; * }>} IssuerOptionsRecord */ @@ -161,12 +201,24 @@ export const makeDurableIssuerKit = ( assetKind = AssetKind.NAT, displayInfo = harden({}), optShutdownWithFailure = undefined, - { elementShape = undefined } = {}, + { elementShape = undefined, recoverySetsOption = undefined } = {}, ) => { - const issuerData = harden({ name, assetKind, displayInfo, elementShape }); + const issuerData = harden({ + name, + assetKind, + displayInfo, + elementShape, + }); issuerBaggage.init(INSTANCE_KEY, issuerData); const issuerZone = makeDurableZone(issuerBaggage); - return setupIssuerKit(issuerData, issuerZone, optShutdownWithFailure); + const recoverySetsState = recoverySetsOption || 'hasRecoverySets'; + issuerBaggage.init(RECOVERY_SETS_STATE, recoverySetsState); + return setupIssuerKit( + issuerData, + issuerZone, + recoverySetsState, + optShutdownWithFailure, + ); }; harden(makeDurableIssuerKit); @@ -210,12 +262,19 @@ export const prepareIssuerKit = ( options = {}, ) => { if (hasIssuer(issuerBaggage)) { - const { elementShape: _ = undefined } = options; - const issuerKit = upgradeIssuerKit(issuerBaggage, optShutdownWithFailure); + const { elementShape: _ = undefined, recoverySetsOption = undefined } = + options; + const issuerKit = upgradeIssuerKit( + issuerBaggage, + optShutdownWithFailure, + recoverySetsOption, + ); // TODO check consistency with name, assetKind, displayInfo, elementShape. // Consistency either means that these are the same, or that they differ - // in a direction we are prepared to upgrade. + // in a direction we are prepared to upgrade. Note that it is the + // responsibility of `upgradeIssuerKit` to check consistency of + // `recoverySetsOption`, so continue to not do that here. // @ts-expect-error Type parameter confusion. return issuerKit; @@ -273,7 +332,7 @@ export const makeIssuerKit = ( assetKind = AssetKind.NAT, displayInfo = harden({}), optShutdownWithFailure = undefined, - { elementShape = undefined } = {}, + { elementShape = undefined, recoverySetsOption = undefined } = {}, ) => makeDurableIssuerKit( makeScalarBigMapStore('dropped issuer kit', { durable: true }), @@ -281,6 +340,6 @@ export const makeIssuerKit = ( assetKind, displayInfo, optShutdownWithFailure, - { elementShape }, + { elementShape, recoverySetsOption }, ); harden(makeIssuerKit); diff --git a/packages/ERTP/src/paymentLedger.js b/packages/ERTP/src/paymentLedger.js index bfa57b74f53..cf20111f35e 100644 --- a/packages/ERTP/src/paymentLedger.js +++ b/packages/ERTP/src/paymentLedger.js @@ -72,6 +72,7 @@ const amountShapeFromElementShape = (brand, assetKind, elementShape) => { * @param {K} assetKind * @param {DisplayInfo} displayInfo * @param {Pattern} elementShape + * @param {RecoverySetsOption} recoverySetsState * @param {ShutdownWithFailure} [optShutdownWithFailure] * @returns {PaymentLedger} */ @@ -81,6 +82,7 @@ export const preparePaymentLedger = ( assetKind, displayInfo, elementShape, + recoverySetsState, optShutdownWithFailure = undefined, ) => { /** @type {Brand} */ @@ -141,11 +143,11 @@ export const preparePaymentLedger = ( }); /** - * A withdrawn live payment is associated with the recovery set of the purse - * it was withdrawn from. Let's call these "recoverable" payments. All - * recoverable payments are live, but not all live payments are recoverable. - * We do the bookkeeping for payment recovery with this weakmap from - * recoverable payments to the recovery set they are in. 
A bunch of + * A (non-empty) withdrawn live payment is associated with the recovery set of + * the purse it was withdrawn from. Let's call these "recoverable" payments. + * All recoverable payments are live, but not all live payments are + * recoverable. We do the bookkeeping for payment recovery with this weakmap + * from recoverable payments to the recovery set they are in. A bunch of * interesting invariants here: * * - Every payment that is a key in the outer `paymentRecoverySets` weakMap is @@ -157,6 +159,9 @@ export const preparePaymentLedger = ( * - A purse's recovery set only contains payments withdrawn from that purse and * not yet consumed. * + * If `recoverySetsState === 'noRecoverySets'`, then nothing should ever be + * added to this WeakStore. + * * @type {WeakMapStore>} */ const paymentRecoverySets = issuerZone.weakMapStore('paymentRecoverySets'); @@ -170,7 +175,11 @@ export const preparePaymentLedger = ( * @param {SetStore} [optRecoverySet] */ const initPayment = (payment, amount, optRecoverySet = undefined) => { - if (optRecoverySet !== undefined) { + if (recoverySetsState === 'noRecoverySets') { + optRecoverySet === undefined || + Fail`when recoverSetsState === 'noRecoverySets', optRecoverySet must be empty`; + } + if (optRecoverySet !== undefined && !AmountMath.isEmpty(amount)) { optRecoverySet.add(payment); paymentRecoverySets.init(payment, optRecoverySet); } @@ -263,10 +272,10 @@ export const preparePaymentLedger = ( * * @param {import('./amountStore.js').AmountStore} balanceStore * @param {Amount} amount - the amount to be withdrawn - * @param {SetStore} recoverySet + * @param {SetStore} [recoverySet] * @returns {Payment} */ - const withdrawInternal = (balanceStore, amount, recoverySet) => { + const withdrawInternal = (balanceStore, amount, recoverySet = undefined) => { amount = coerce(amount); const payment = makePayment(); // COMMIT POINT Move the withdrawn assets from this purse into @@ -294,6 +303,8 @@ export const preparePaymentLedger = ( depositInternal, withdrawInternal, }), + recoverySetsState, + paymentRecoverySets, ); /** @type {Issuer} */ diff --git a/packages/ERTP/src/purse.js b/packages/ERTP/src/purse.js index e8cafa50fa5..11dda7655af 100644 --- a/packages/ERTP/src/purse.js +++ b/packages/ERTP/src/purse.js @@ -1,10 +1,12 @@ -import { M } from '@agoric/store'; +import { M, makeCopySet } from '@agoric/store'; import { AmountMath } from './amountMath.js'; import { makeTransientNotifierKit } from './transientNotifier.js'; import { makeAmountStore } from './amountStore.js'; const { Fail } = assert; +const EMPTY_COPY_SET = makeCopySet([]); + // TODO Type InterfaceGuard better than InterfaceGuard /** * @param {import('@agoric/zone').Zone} issuerZone @@ -19,6 +21,8 @@ const { Fail } = assert; * depositInternal: any; * withdrawInternal: any; * }} purseMethods + * @param {RecoverySetsOption} recoverySetsState + * @param {WeakMapStore>} paymentRecoverySets */ export const preparePurseKind = ( issuerZone, @@ -27,6 +31,8 @@ export const preparePurseKind = ( brand, PurseIKit, purseMethods, + recoverySetsState, + paymentRecoverySets, ) => { const amountShape = brand.getAmountShape(); @@ -35,6 +41,34 @@ export const preparePurseKind = ( // TODO propagate zonifying to notifiers, maybe? const { provideNotifier, update: updateBalance } = makeTransientNotifierKit(); + /** + * If `recoverySetsState === 'hasRecoverySets'` (the normal state), then just + * return `state.recoverySet`. + * + * If `recoverySetsState === 'noRecoverySets'`, return `undefined`. 
Callers + * must be aware that the `undefined` return happens iff `recoverySetsState + * === 'noRecoverySets'`, and to avoid storing or retrieving anything from the + * actual recovery set. + * + * @param {{ recoverySet: SetStore }} state + * @returns {SetStore | undefined} + */ + const maybeRecoverySet = state => { + const { recoverySet } = state; + if (recoverySetsState === 'hasRecoverySets') { + return recoverySet; + } else { + recoverySetsState === 'noRecoverySets' || + Fail`recoverSetsState must be noRecoverySets if it isn't hasRecoverSets`; + paymentRecoverySets !== undefined || + Fail`paymentRecoverySets must always be defined`; + recoverySet.getSize() === 0 || + Fail`With noRecoverySets, recoverySet must be empty`; + + return undefined; + } + }; + // - This kind is a pair of purse and depositFacet that have a 1:1 // correspondence. // - They are virtualized together to share a single state record. @@ -76,12 +110,14 @@ export const preparePurseKind = ( withdraw(amount) { const { state } = this; const { purse } = this.facets; + + const optRecoverySet = maybeRecoverySet(state); const balanceStore = makeAmountStore(state, 'currentBalance'); // Note COMMIT POINT within withdraw. const payment = withdrawInternal( balanceStore, amount, - state.recoverySet, + optRecoverySet, ); updateBalance(purse, balanceStore.getAmount()); return payment; @@ -103,18 +139,27 @@ export const preparePurseKind = ( }, getRecoverySet() { - return this.state.recoverySet.snapshot(); + const { state } = this; + const optRecoverySet = maybeRecoverySet(state); + if (optRecoverySet === undefined) { + return EMPTY_COPY_SET; + } + return optRecoverySet.snapshot(); }, recoverAll() { const { state, facets } = this; let amount = AmountMath.makeEmpty(brand, assetKind); - for (const payment of state.recoverySet.keys()) { + const optRecoverySet = maybeRecoverySet(state); + if (optRecoverySet === undefined) { + return amount; // empty at this time + } + for (const payment of optRecoverySet.keys()) { // This does cause deletions from the set while iterating, // but this special case is allowed. const delta = facets.purse.deposit(payment); amount = AmountMath.add(amount, delta, brand); } - state.recoverySet.getSize() === 0 || + optRecoverySet.getSize() === 0 || Fail`internal: Remaining unrecovered payments: ${facets.purse.getRecoverySet()}`; return amount; }, diff --git a/packages/ERTP/src/types-ambient.js b/packages/ERTP/src/types-ambient.js index 59c4388b7d0..24e065469ad 100644 --- a/packages/ERTP/src/types-ambient.js +++ b/packages/ERTP/src/types-ambient.js @@ -173,8 +173,9 @@ * @template {AssetKind} [K=AssetKind] * @typedef {object} PaymentLedger * @property {Mint} mint - * @property {Purse} mintRecoveryPurse Useful only to get the recovery set - * associated with minted payments that are still live. + * @property {Purse} mintRecoveryPurse Externally useful only if this issuer + * uses recovery sets. Can be used to get the recovery set associated with + * minted payments that are still live. * @property {Issuer} issuer * @property {Brand} brand */ @@ -183,8 +184,9 @@ * @template {AssetKind} [K=AssetKind] * @typedef {object} IssuerKit * @property {Mint} mint - * @property {Purse} mintRecoveryPurse Useful only to get the recovery set - * associated with minted payments that are still live. + * @property {Purse} mintRecoveryPurse Externally useful only if this issuer + * uses recovery sets. Can be used to get the recovery set associated with + * minted payments that are still live. 
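// A minimal illustrative sketch (not taken from this patch) of how a caller
// might opt out of recovery sets via the new options bag plumbed through
// makeIssuerKit above. The 'Ticket' name is hypothetical; the results of
// getRecoverySet()/recoverAll() follow the purse docs in this diff.
import { AssetKind, makeIssuerKit } from '@agoric/ertp';

const { issuer, brand } = makeIssuerKit(
  'Ticket', // hypothetical asset name
  AssetKind.SET,
  undefined, // displayInfo
  undefined, // optShutdownWithFailure
  { recoverySetsOption: 'noRecoverySets' },
);

const purse = issuer.makeEmptyPurse();
purse.getRecoverySet(); // empty CopySet when recovery sets are disabled
purse.recoverAll(); // empty amount of `brand`; nothing is tracked for recovery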
* @property {Issuer} issuer * @property {Brand} brand * @property {DisplayInfo} displayInfo @@ -217,6 +219,23 @@ // /////////////////////////// Purse / Payment ///////////////////////////////// +/** + * Issuers first became durable with mandatory recovery sets. Later they were + * made optional, but there is no support for converting from one state to the + * other. Thus, absence of a `RecoverySetsOption` state is equivalent to + * `'hasRecoverySets'`. In the absence of a `recoverySetsOption` parameter, + * upgradeIssuerKit defaults to the predecessor's `RecoverySetsOption` state, or + * `'hasRecoverySets'` if none. + * + * At this time, a `'noRecoverySets'` predecessor cannot be upgraded to a + * `'hasRecoverySets'` successor. If it turns out this transition is needed, it + * can likely be supported in a future upgrade. + * + * @typedef {'hasRecoverySets' | 'noRecoverySets'} RecoverySetsOption + */ + +// /////////////////////////// Purse / Payment ///////////////////////////////// + /** * @callback DepositFacetReceive * @param {Payment} payment @@ -276,10 +295,14 @@ * can spend the assets at stake on other things. Afterwards, if the recipient * of the original check finally gets around to depositing it, their deposit * fails. + * + * Returns an empty set if this issuer does not support recovery sets. * @property {() => Amount} recoverAll For use in emergencies, such as coming * back from a traumatic crash and upgrade. This deposits all the payments in * this purse's recovery set into the purse itself, returning the total amount * of assets recovered. + * + * Returns an empty amount if this issuer does not support recovery sets. */ /** From 9eb20a478f3a59bf7c0fb8ca8923f5825fa82113 Mon Sep 17 00:00:00 2001 From: Chris Hibbert Date: Wed, 28 Feb 2024 17:07:40 -0800 Subject: [PATCH 36/47] feat: disable recoverSets in priceAuthority and fluxAggregator --- packages/inter-protocol/src/price/fluxAggregatorContract.js | 1 + packages/zoe/src/contractSupport/priceAuthorityQuoteMint.js | 1 + 2 files changed, 2 insertions(+) diff --git a/packages/inter-protocol/src/price/fluxAggregatorContract.js b/packages/inter-protocol/src/price/fluxAggregatorContract.js index d8050c26a18..84f6b8eb17d 100644 --- a/packages/inter-protocol/src/price/fluxAggregatorContract.js +++ b/packages/inter-protocol/src/price/fluxAggregatorContract.js @@ -74,6 +74,7 @@ export const start = async (zcf, privateArgs, baggage) => { 'set', undefined, undefined, + { recoverySetsOption: 'noRecoverySets' }, ); const { diff --git a/packages/zoe/src/contractSupport/priceAuthorityQuoteMint.js b/packages/zoe/src/contractSupport/priceAuthorityQuoteMint.js index 769b08643ac..46531ffc125 100644 --- a/packages/zoe/src/contractSupport/priceAuthorityQuoteMint.js +++ b/packages/zoe/src/contractSupport/priceAuthorityQuoteMint.js @@ -17,6 +17,7 @@ export const provideQuoteMint = baggage => { AssetKind.SET, undefined, undefined, + { recoverySetsOption: 'noRecoverySets' }, ); return issuerKit.mint; }; From aa4b7dbe1db5f5a8cdd0d9c74f776c164dd456db Mon Sep 17 00:00:00 2001 From: Chris Hibbert Date: Thu, 29 Feb 2024 16:48:31 -0800 Subject: [PATCH 37/47] docs: clarification on adding the ability to convert issuers --- packages/ERTP/src/types-ambient.js | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/ERTP/src/types-ambient.js b/packages/ERTP/src/types-ambient.js index 24e065469ad..46b1ea472aa 100644 --- a/packages/ERTP/src/types-ambient.js +++ b/packages/ERTP/src/types-ambient.js @@ -227,9 +227,10 @@ * upgradeIssuerKit 
defaults to the predecessor's `RecoverySetsOption` state, or * `'hasRecoverySets'` if none. * - * At this time, a `'noRecoverySets'` predecessor cannot be upgraded to a - * `'hasRecoverySets'` successor. If it turns out this transition is needed, it - * can likely be supported in a future upgrade. + * At this time, issuers started in one of the states (`'noRecoverySets'`, or + * `'hasRecoverySets'`) cannot be converted to the other on upgrade. If this + * transition is needed, it can likely be supported in a future upgrade. File an + * issue on github and explain what you need and why. * * @typedef {'hasRecoverySets' | 'noRecoverySets'} RecoverySetsOption */ From 35e62c45dc09717457426033a1283e681d961e21 Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Fri, 23 Feb 2024 17:14:01 -0600 Subject: [PATCH 38/47] docs(network): scrub TODOs --- packages/network/src/network.js | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/network/src/network.js b/packages/network/src/network.js index 9061aa235be..6bfca90bd7b 100644 --- a/packages/network/src/network.js +++ b/packages/network/src/network.js @@ -484,7 +484,7 @@ const preparePort = (zone, { when }) => { listening.init(localAddr, harden([this.self, listenHandler])); } - // TODO: Check that the listener defines onAccept. + // ASSUME: that the listener defines onAccept. await when( E(protocolHandler).onListen( @@ -945,14 +945,14 @@ export function prepareLoopbackProtocolHandler(zone, { when }) { }, { async onCreate(_impl, _protocolHandler) { - // TODO + // noop }, async generatePortID(_localAddr, _protocolHandler) { this.state.portNonce += 1n; return `port${this.state.portNonce}`; }, async onBind(_port, _localAddr, _protocolHandler) { - // TODO: Maybe handle a bind? + // noop, for now; Maybe handle a bind? }, async onConnect( _port, @@ -988,11 +988,11 @@ export function prepareLoopbackProtocolHandler(zone, { when }) { async onListen(port, localAddr, listenHandler, _protocolHandler) { const { listeners } = this.state; - // TODO: Implement other listener replacement policies. + // This implementation has a simple last-one-wins replacement policy. + // Other handlers might use different policies. if (listeners.has(localAddr)) { const lhandler = listeners.get(localAddr)[1]; if (lhandler !== listenHandler) { - // Last-one-wins. listeners.set(localAddr, [port, listenHandler]); } } else { @@ -1014,7 +1014,7 @@ export function prepareLoopbackProtocolHandler(zone, { when }) { listeners.delete(localAddr); }, async onRevoke(_port, _localAddr, _protocolHandler) { - // TODO: maybe clean up? + // This is an opportunity to clean up resources. }, }, ); From 6136a641d47453b71ef94ff5baba88e72564868d Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Fri, 23 Feb 2024 17:24:42 -0600 Subject: [PATCH 39/47] docs(network): networkVat powers are to be shared via Ports --- .../vats/src/proposals/network-proposal.js | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/packages/vats/src/proposals/network-proposal.js b/packages/vats/src/proposals/network-proposal.js index 729abbf033d..817d785d6ca 100644 --- a/packages/vats/src/proposals/network-proposal.js +++ b/packages/vats/src/proposals/network-proposal.js @@ -1,3 +1,7 @@ +/** + * @file CoreEval module to set up network, IBC vats. 
+ * @see {setupNetworkProtocols} + */ import { E } from '@endo/far'; import { BridgeId as BRIDGE_ID } from '@agoric/internal'; import { prepareVowTools } from '@agoric/vat-data/vow.js'; @@ -47,6 +51,24 @@ export const registerNetworkProtocols = async (vats, dibcBridgeManager) => { }; /** + * Create the network and IBC vats; produce `networkVat` in the core / bootstrap + * space. + * + * The `networkVat` is CLOSELY HELD in the core space, where later, we claim + * ports using `E(networkVat).bind(_path_)`. As discussed in `ProtocolHandler` + * docs, _path_ is: + * + * - /ibc-port/NAME for an IBC port with a known name or, + * - /ibc-port/ for an IBC port with a fresh name. + * + * Contracts are expected to use the services of the network and IBC vats by way + * of such ports. + * + * Testing facilities include: + * + * - loopback ports: `E(networkVat).bind('/local/')` + * - an echo port: `E(vats.network).bind('/ibc-port/echo')` + * * @param {BootstrapPowers & { * consume: { loadCriticalVat: VatLoader }; * produce: { networkVat: Producer }; From 6efe80ac79a579003afd8704616c3d6abe94e225 Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Sun, 3 Mar 2024 20:01:49 -0600 Subject: [PATCH 40/47] feat(ibc): support async IBC version negotiation when present --- packages/vats/src/ibc.js | 53 +++++++++++++++++++++++++++++++++++++--- 1 file changed, 49 insertions(+), 4 deletions(-) diff --git a/packages/vats/src/ibc.js b/packages/vats/src/ibc.js index dcec6802d5f..c0097b39c92 100644 --- a/packages/vats/src/ibc.js +++ b/packages/vats/src/ibc.js @@ -345,7 +345,7 @@ export const prepareIBCProtocol = (zone, { makeVowKit, watch, when }) => { srcPortToOutbounds.init(portID, harden([ob])); } - // Initialise the channel, which automatic relayers should pick up. + // Initialise the channel, which a listening relayer should pick up. const packet = { source_port: portID, destination_port: rPortID, @@ -397,6 +397,37 @@ export const prepareIBCProtocol = (zone, { makeVowKit, watch, when }) => { console.info('IBC fromBridge', obj); await null; switch (obj.event) { + case 'channelOpenInit': { + const { + channelID, + portID, + counterparty: { port_id: rPortID, channel_id: rChannelID }, + connectionHops: hops, + order, + version, + asyncVersions, + } = obj; + if (!asyncVersions) { + // Synchronous versions have already been negotiated. + break; + } + + // We have async version negotiation, so we must call back before the + // channel can make progress. + // We just use the provided version without failing. + await util.downcall('initOpenExecuted', { + packet: { + source_port: portID, + source_channel: channelID, + destination_port: rPortID, + destination_channel: rChannelID, + }, + order, + version, + hops, + }); + break; + } case 'channelOpenTry': { // They're (more or less politely) asking if we are listening, so make an attempt. const { @@ -407,6 +438,7 @@ export const prepareIBCProtocol = (zone, { makeVowKit, watch, when }) => { order, version, counterpartyVersion: rVersion, + asyncVersions, } = obj; const localAddr = `/ibc-port/${portID}/${order.toLowerCase()}/${version}`; @@ -448,10 +480,23 @@ export const prepareIBCProtocol = (zone, { makeVowKit, watch, when }) => { channelKeyToInfo.init(channelKey, obj); try { - if (negotiatedVersion !== version) { - // Too late; the relayer gave us a version we didn't like. + if (asyncVersions) { + // We have async version negotiation, so we must call back now. 
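// For orientation, a hypothetical core-eval fragment tying together the
// `networkVat` port docs in network-proposal.js above. The `addListener` call
// and the `makeEchoListener` helper are assumptions for illustration only and
// are not part of this patch.
//
//   const echoPort = await E(networkVat).bind('/ibc-port/echo');
//   const localPort = await E(networkVat).bind('/local/');
//   await E(echoPort).addListener(makeEchoListener());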
+ await util.downcall('tryOpenExecuted', { + packet: { + source_port: rPortID, + source_channel: rChannelID, + destination_port: portID, + destination_channel: channelID, + }, + order, + version: negotiatedVersion, + hops, + }); + } else if (negotiatedVersion !== version) { + // Too late; the other side gave us a version we didn't like. throw Error( - `${channelKey}: negotiated version was ${negotiatedVersion}; rejecting ${version}`, + `${channelKey}: async negotiated version was ${negotiatedVersion} but synchronous version was ${version}`, ); } } catch (e) { From 739c5092123f72a881cea2178b99dfe473c72adc Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Sun, 3 Mar 2024 20:21:14 -0600 Subject: [PATCH 41/47] fix(vibc): propagate `Relayer` account address --- golang/cosmos/x/vibc/ibc.go | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/golang/cosmos/x/vibc/ibc.go b/golang/cosmos/x/vibc/ibc.go index 1d0533247b9..4d0ee0891aa 100644 --- a/golang/cosmos/x/vibc/ibc.go +++ b/golang/cosmos/x/vibc/ibc.go @@ -317,6 +317,7 @@ type receivePacketEvent struct { *vm.ActionHeader `actionType:"IBC_EVENT"` Event string `json:"event" default:"receivePacket"` Packet channeltypes.Packet `json:"packet"` + Relayer sdk.AccAddress `json:"relayer"` } func (im IBCModule) OnRecvPacket( @@ -333,7 +334,8 @@ func (im IBCModule) OnRecvPacket( // the same packets. event := receivePacketEvent{ - Packet: packet, + Packet: packet, + Relayer: relayer, } err := im.PushAction(ctx, event) @@ -349,6 +351,7 @@ type acknowledgementPacketEvent struct { Event string `json:"event" default:"acknowledgementPacket"` Packet channeltypes.Packet `json:"packet"` Acknowledgement []byte `json:"acknowledgement"` + Relayer sdk.AccAddress `json:"relayer"` } func (im IBCModule) OnAcknowledgementPacket( @@ -360,6 +363,7 @@ func (im IBCModule) OnAcknowledgementPacket( event := acknowledgementPacketEvent{ Packet: packet, Acknowledgement: acknowledgement, + Relayer: relayer, } err := im.PushAction(ctx, event) @@ -374,6 +378,7 @@ type timeoutPacketEvent struct { *vm.ActionHeader `actionType:"IBC_EVENT"` Event string `json:"event" default:"timeoutPacket"` Packet channeltypes.Packet `json:"packet"` + Relayer sdk.AccAddress `json:"relayer"` } func (im IBCModule) OnTimeoutPacket( @@ -382,7 +387,8 @@ func (im IBCModule) OnTimeoutPacket( relayer sdk.AccAddress, ) error { event := timeoutPacketEvent{ - Packet: packet, + Packet: packet, + Relayer: relayer, } err := im.PushAction(ctx, event) From fff392b9a30af2d60a6dffe3427eb5bb5ecdde7b Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Sun, 3 Mar 2024 20:30:05 -0600 Subject: [PATCH 42/47] build(cosmos): add `ibc-go` fork and `go mod tidy` --- golang/cosmos/go.mod | 19 +++++++++++++------ golang/cosmos/go.sum | 4 ++-- 2 files changed, 15 insertions(+), 8 deletions(-) diff --git a/golang/cosmos/go.mod b/golang/cosmos/go.mod index 663a9122195..5ef54f5f2d8 100644 --- a/golang/cosmos/go.mod +++ b/golang/cosmos/go.mod @@ -3,6 +3,7 @@ module github.com/Agoric/agoric-sdk/golang/cosmos go 1.20 require ( + cosmossdk.io/errors v1.0.0-beta.7 cosmossdk.io/math v1.0.0-rc.0 github.com/armon/go-metrics v0.4.1 github.com/cosmos/cosmos-sdk v0.46.16 @@ -31,7 +32,6 @@ require ( cloud.google.com/go/compute/metadata v0.2.3 // indirect cloud.google.com/go/iam v1.1.1 // indirect cloud.google.com/go/storage v1.30.1 // indirect - cosmossdk.io/errors v1.0.0-beta.7 // indirect filippo.io/edwards25519 v1.0.0-rc.1 // indirect github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect 
github.com/99designs/keyring v1.2.1 // indirect @@ -169,9 +169,6 @@ replace ( github.com/confio/ics23/go => github.com/agoric-labs/cosmos-sdk/ics23/go v0.8.0-alpha.agoric.1 - // We need a fork of cosmos-sdk until all of the differences are merged. - github.com/cosmos/cosmos-sdk => github.com/agoric-labs/cosmos-sdk v0.46.16-alpha.agoric.2 - // https://pkg.go.dev/vuln/GO-2023-2409 github.com/dvsekhvalnov/jose2go => github.com/dvsekhvalnov/jose2go v1.5.1-0.20231206184617-48ba0b76bc88 @@ -185,13 +182,23 @@ replace ( // replace broken goleveldb. github.com/syndtr/goleveldb => github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 +) + +// Agoric-specific replacements: +replace ( + // We need a fork of cosmos-sdk until all of the differences are merged. + github.com/cosmos/cosmos-sdk => github.com/agoric-labs/cosmos-sdk v0.46.16-alpha.agoric.2 + + // Async version negotiation + github.com/cosmos/ibc-go/v6 => github.com/agoric-labs/ibc-go/v6 v6.2.1-alpha.agoric.3 // use cometbft // Use our fork at least until post-v0.34.14 is released with // https://github.com/tendermint/tendermint/issue/6899 resolved. github.com/tendermint/tendermint => github.com/agoric-labs/cometbft v0.34.30-alpha.agoric.1 -// For testing against a local cosmos-sdk or tendermint +// For testing against a local cosmos-sdk, ibc-go, or cometbft // github.com/cosmos/cosmos-sdk => ../../../forks/cosmos-sdk -// github.com/tendermint/tendermint => ../../../forks/tendermint +// github.com/cosmos/ibc-go/v6 => ../../../forks/ibc-go/v6 +// github.com/tendermint/tendermint => ../../../forks/cometbft ) diff --git a/golang/cosmos/go.sum b/golang/cosmos/go.sum index c6d0bd1aa86..7562b04991a 100644 --- a/golang/cosmos/go.sum +++ b/golang/cosmos/go.sum @@ -236,6 +236,8 @@ github.com/agoric-labs/cosmos-sdk v0.46.16-alpha.agoric.2 h1:iHHqpYC0JzMbH4UYnQr github.com/agoric-labs/cosmos-sdk v0.46.16-alpha.agoric.2/go.mod h1:zUe5lsg/X7SeSO1nGkzOh9EGKO295szfrxIxYmeLYic= github.com/agoric-labs/cosmos-sdk/ics23/go v0.8.0-alpha.agoric.1 h1:2jvHI/2d+psWAZy6FQ0vXJCHUtfU3ZbbW+pQFL04arQ= github.com/agoric-labs/cosmos-sdk/ics23/go v0.8.0-alpha.agoric.1/go.mod h1:E45NqnlpxGnpfTWL/xauN7MRwEE28T4Dd4uraToOaKg= +github.com/agoric-labs/ibc-go/v6 v6.2.1-alpha.agoric.3 h1:YqvVwK+Lg/ZsuwyVm9UbPs8K55fg00R3Y9KnmaTBdgc= +github.com/agoric-labs/ibc-go/v6 v6.2.1-alpha.agoric.3/go.mod h1:V9NOCRS9RPkSJNJQIPRAjZn/lo2mCAAKOSv3/83ISDY= github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af/go.mod h1:K08gAheRH3/J6wwsYMMT4xOr94bZjxIelGM0+d/wbFw= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= @@ -380,8 +382,6 @@ github.com/cosmos/iavl v0.19.6 h1:XY78yEeNPrEYyNCKlqr9chrwoeSDJ0bV2VjocTk//OU= github.com/cosmos/iavl v0.19.6/go.mod h1:X9PKD3J0iFxdmgNLa7b2LYWdsGd90ToV5cAONApkEPw= github.com/cosmos/ibc-apps/middleware/packet-forward-middleware/v6 v6.1.1 h1:2geCtV4PoNPeRnVc0HMAcRcv+7W3Mvk2nmASkGkOdzE= github.com/cosmos/ibc-apps/middleware/packet-forward-middleware/v6 v6.1.1/go.mod h1:ovYRGX7P7Vq0D54JIVlIm/47STEKgWJfw9frvL0AWGQ= -github.com/cosmos/ibc-go/v6 v6.2.1 h1:NiaDXTRhKwf3n9kELD4VRIe5zby1yk1jBvaz9tXTQ6k= -github.com/cosmos/ibc-go/v6 v6.2.1/go.mod h1:XLsARy4Y7+GtAqzMcxNdlQf6lx+ti1e8KcMGv5NIK7A= github.com/cosmos/keyring v1.2.0 h1:8C1lBP9xhImmIabyXW4c3vFjjLiBdGCmfLUfeZlV1Yo= github.com/cosmos/keyring v1.2.0/go.mod h1:fc+wB5KTk9wQ9sDx0kFXB3A0MaeGHM9AwRStKOQ5vOA= 
github.com/cosmos/ledger-cosmos-go v0.12.4 h1:drvWt+GJP7Aiw550yeb3ON/zsrgW0jgh5saFCr7pDnw= From ca5933cc41075dfba30c44cb3a987d9c721cd97d Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Sun, 3 Mar 2024 20:30:46 -0600 Subject: [PATCH 43/47] feat(vibc): add `AsyncVersions` flag anticipating `ibc-go` --- golang/cosmos/x/vibc/ibc.go | 55 +++++++++++++++++++++++++++++++++---- 1 file changed, 50 insertions(+), 5 deletions(-) diff --git a/golang/cosmos/x/vibc/ibc.go b/golang/cosmos/x/vibc/ibc.go index 4d0ee0891aa..7ac2fd5118c 100644 --- a/golang/cosmos/x/vibc/ibc.go +++ b/golang/cosmos/x/vibc/ibc.go @@ -11,12 +11,21 @@ import ( channeltypes "github.com/cosmos/ibc-go/v6/modules/core/04-channel/types" porttypes "github.com/cosmos/ibc-go/v6/modules/core/05-port/types" host "github.com/cosmos/ibc-go/v6/modules/core/24-host" + ibckeeper "github.com/cosmos/ibc-go/v6/modules/core/keeper" "github.com/cosmos/ibc-go/v6/modules/core/exported" sdk "github.com/cosmos/cosmos-sdk/types" ) +const ( + // AsyncVersions is a flag that indicates whether the IBC module supports + // asynchronous versions. If it does, then the VM must supply an empty + // version string to indicate that the VM explicitly (possibly async) + // performs the Write* method. + AsyncVersions = ibckeeper.AsyncVersionNegotiation +) + var ( _ porttypes.IBCModule = IBCModule{} ) @@ -158,6 +167,18 @@ func (im IBCModule) PushAction(ctx sdk.Context, action vm.Action) error { // fmt.Println("ibc.go upcall reply", reply, err) } +type channelOpenInitEvent struct { + *vm.ActionHeader `actionType:"IBC_EVENT"` + Event string `json:"event" default:"channelOpenInit"` + Order string `json:"order"` + ConnectionHops []string `json:"connectionHops"` + PortID string `json:"portID"` + ChannelID string `json:"channelID"` + Counterparty channeltypes.Counterparty `json:"counterparty"` + Version string `json:"version"` + AsyncVersions bool `json:"asyncVersions"` +} + // Implement IBCModule callbacks func (im IBCModule) OnChanOpenInit( ctx sdk.Context, @@ -169,10 +190,32 @@ func (im IBCModule) OnChanOpenInit( counterparty channeltypes.Counterparty, version string, ) (string, error) { - return "", sdkioerrors.Wrap( - channeltypes.ErrChannelNotFound, - fmt.Sprintf("vibc does not allow synthetic channelOpenInit for port %s", portID), - ) + event := channelOpenInitEvent{ + Order: orderToString(order), + ConnectionHops: connectionHops, + PortID: portID, + ChannelID: channelID, + Counterparty: counterparty, + Version: version, + AsyncVersions: AsyncVersions, + } + + err := im.PushAction(ctx, event) + if err != nil { + return "", err + } + + // Claim channel capability passed back by IBC module + if err := im.keeper.ClaimCapability(ctx, channelCap, host.ChannelCapabilityPath(portID, channelID)); err != nil { + return "", err + } + + if !event.AsyncVersions { + // We have to supply a synchronous version, so just echo back the one they sent. 
+ return event.Version, nil + } + + return "", nil } type channelOpenTryEvent struct { @@ -184,6 +227,7 @@ type channelOpenTryEvent struct { ChannelID string `json:"channelID"` Counterparty channeltypes.Counterparty `json:"counterparty"` Version string `json:"version"` + AsyncVersions bool `json:"asyncVersions"` } func (im IBCModule) OnChanOpenTry( @@ -202,7 +246,8 @@ func (im IBCModule) OnChanOpenTry( PortID: portID, ChannelID: channelID, Counterparty: counterparty, - Version: counterpartyVersion, // TODO: don't just use the counterparty version + Version: counterpartyVersion, + AsyncVersions: AsyncVersions, } err := im.PushAction(ctx, event) From 80e450a9afe0441406fa3e020d94ef0af6366851 Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Fri, 15 Dec 2023 12:04:07 -0600 Subject: [PATCH 44/47] refactor(vibc): split into more granular objects --- golang/cosmos/app/app.go | 11 +- golang/cosmos/x/vibc/alias.go | 3 + golang/cosmos/x/vibc/handler.go | 16 +- golang/cosmos/x/vibc/keeper/keeper.go | 157 +++++++++------ golang/cosmos/x/vibc/module.go | 6 +- .../cosmos/x/vibc/types/expected_keepers.go | 13 ++ .../x/vibc/{ibc.go => types/ibc_module.go} | 178 +++--------------- golang/cosmos/x/vibc/types/receiver.go | 160 ++++++++++++++++ 8 files changed, 322 insertions(+), 222 deletions(-) rename golang/cosmos/x/vibc/{ibc.go => types/ibc_module.go} (66%) create mode 100644 golang/cosmos/x/vibc/types/receiver.go diff --git a/golang/cosmos/app/app.go b/golang/cosmos/app/app.go index 2fad1cc93ef..8b96c8fe76d 100644 --- a/golang/cosmos/app/app.go +++ b/golang/cosmos/app/app.go @@ -502,16 +502,13 @@ func NewAgoricApp( ) app.VibcKeeper = vibc.NewKeeper( - appCodec, keys[vibc.StoreKey], + appCodec, app.IBCKeeper.ChannelKeeper, &app.IBCKeeper.PortKeeper, - app.BankKeeper, - scopedVibcKeeper, - app.SwingSetKeeper.PushAction, - ) + ).WithScope(keys[vibc.StoreKey], scopedVibcKeeper, app.SwingSetKeeper.PushAction) - vibcModule := vibc.NewAppModule(app.VibcKeeper) + vibcModule := vibc.NewAppModule(app.VibcKeeper, app.BankKeeper) vibcIBCModule := vibc.NewIBCModule(app.VibcKeeper) - app.vibcPort = vm.RegisterPortHandler("vibc", vibcIBCModule) + app.vibcPort = vm.RegisterPortHandler("vibc", vibc.NewReceiver(app.VibcKeeper)) app.VbankKeeper = vbank.NewKeeper( appCodec, keys[vbank.StoreKey], app.GetSubspace(vbank.ModuleName), diff --git a/golang/cosmos/x/vibc/alias.go b/golang/cosmos/x/vibc/alias.go index bb31d675d40..f2241dbd16b 100644 --- a/golang/cosmos/x/vibc/alias.go +++ b/golang/cosmos/x/vibc/alias.go @@ -14,11 +14,14 @@ const ( var ( NewKeeper = keeper.NewKeeper NewMsgSendPacket = types.NewMsgSendPacket + NewReceiver = types.NewReceiver + NewIBCModule = types.NewIBCModule ModuleCdc = types.ModuleCdc RegisterCodec = types.RegisterCodec ) type ( Keeper = keeper.Keeper + ScopedKeeper = types.ScopedKeeper MsgSendPacket = types.MsgSendPacket ) diff --git a/golang/cosmos/x/vibc/handler.go b/golang/cosmos/x/vibc/handler.go index 524ba8cd5b9..9ca5e6d1b05 100644 --- a/golang/cosmos/x/vibc/handler.go +++ b/golang/cosmos/x/vibc/handler.go @@ -5,16 +5,17 @@ import ( sdkioerrors "cosmossdk.io/errors" "github.com/Agoric/agoric-sdk/golang/cosmos/vm" + "github.com/Agoric/agoric-sdk/golang/cosmos/x/vibc/types" sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" ) // NewHandler returns a handler for "vibc" type messages. 
-func NewHandler(keeper Keeper) sdk.Handler { +func NewHandler(keeper Keeper, bankKeeper types.BankKeeper) sdk.Handler { return func(ctx sdk.Context, msg sdk.Msg) (*sdk.Result, error) { switch msg := msg.(type) { case *MsgSendPacket: - return handleMsgSendPacket(ctx, keeper, msg) + return handleMsgSendPacket(ctx, keeper, bankKeeper, msg) default: errMsg := fmt.Sprintf("Unrecognized vibc Msg type: %T", msg) @@ -24,14 +25,19 @@ func NewHandler(keeper Keeper) sdk.Handler { } type sendPacketAction struct { - *MsgSendPacket vm.ActionHeader `actionType:"IBC_EVENT"` Event string `json:"event" default:"sendPacket"` + *MsgSendPacket } -func handleMsgSendPacket(ctx sdk.Context, keeper Keeper, msg *MsgSendPacket) (*sdk.Result, error) { +func handleMsgSendPacket( + ctx sdk.Context, + keeper Keeper, + bankKeeper types.BankKeeper, + msg *MsgSendPacket, +) (*sdk.Result, error) { onePass := sdk.NewInt64Coin("sendpacketpass", 1) - balance := keeper.GetBalance(ctx, msg.Sender, onePass.Denom) + balance := bankKeeper.GetBalance(ctx, msg.Sender, onePass.Denom) if balance.IsLT(onePass) { return nil, sdkioerrors.Wrap( sdkerrors.ErrInsufficientFee, diff --git a/golang/cosmos/x/vibc/keeper/keeper.go b/golang/cosmos/x/vibc/keeper/keeper.go index 5b1d8c5696d..4d5587012ef 100644 --- a/golang/cosmos/x/vibc/keeper/keeper.go +++ b/golang/cosmos/x/vibc/keeper/keeper.go @@ -8,7 +8,6 @@ import ( sdk "github.com/cosmos/cosmos-sdk/types" sdkioerrors "cosmossdk.io/errors" - capabilitykeeper "github.com/cosmos/cosmos-sdk/x/capability/keeper" capability "github.com/cosmos/cosmos-sdk/x/capability/types" clienttypes "github.com/cosmos/ibc-go/v6/modules/core/02-client/types" channeltypes "github.com/cosmos/ibc-go/v6/modules/core/04-channel/types" @@ -16,47 +15,66 @@ import ( host "github.com/cosmos/ibc-go/v6/modules/core/24-host" ibcexported "github.com/cosmos/ibc-go/v6/modules/core/exported" - bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper" - - vm "github.com/Agoric/agoric-sdk/golang/cosmos/vm" + "github.com/Agoric/agoric-sdk/golang/cosmos/vm" "github.com/Agoric/agoric-sdk/golang/cosmos/x/vibc/types" ) +var ( + _ porttypes.ICS4Wrapper = Keeper{} + _ types.IBCModuleImpl = Keeper{} + _ types.ReceiverImpl = Keeper{} +) + // Keeper maintains the link to data storage and exposes getter/setter methods for the various parts of the state machine type Keeper struct { - storeKey storetypes.StoreKey - cdc codec.Codec + cdc codec.Codec channelKeeper types.ChannelKeeper portKeeper types.PortKeeper - scopedKeeper capabilitykeeper.ScopedKeeper - bankKeeper bankkeeper.Keeper - PushAction vm.ActionPusher + // Filled out by `WithScope` + scopedKeeper types.ScopedKeeper + storeKey storetypes.StoreKey + pushAction vm.ActionPusher } -// NewKeeper creates a new dIBC Keeper instance +// NewKeeper creates a new vibc Keeper instance func NewKeeper( - cdc codec.Codec, key storetypes.StoreKey, - channelKeeper types.ChannelKeeper, portKeeper types.PortKeeper, - bankKeeper bankkeeper.Keeper, - scopedKeeper capabilitykeeper.ScopedKeeper, - pushAction vm.ActionPusher, + cdc codec.Codec, + channelKeeper types.ChannelKeeper, + portKeeper types.PortKeeper, ) Keeper { return Keeper{ - storeKey: key, cdc: cdc, - bankKeeper: bankKeeper, channelKeeper: channelKeeper, portKeeper: portKeeper, - scopedKeeper: scopedKeeper, - PushAction: pushAction, } } -func (k Keeper) GetBalance(ctx sdk.Context, addr sdk.AccAddress, denom string) sdk.Coin { - return k.bankKeeper.GetBalance(ctx, addr, denom) +// WithScope returns a new Keeper copied from the receiver, but with 
the given +// store key, scoped keeper, and push action. +func (k Keeper) WithScope(storeKey storetypes.StoreKey, scopedKeeper types.ScopedKeeper, pushAction vm.ActionPusher) Keeper { + k.storeKey = storeKey + k.scopedKeeper = scopedKeeper + k.pushAction = pushAction + return k +} + +// PushAction sends a vm.Action to the VM controller. +func (k Keeper) PushAction(ctx sdk.Context, action vm.Action) error { + return k.pushAction(ctx, action) +} + +// GetICS4Wrapper returns the ICS4Wrapper interface for the keeper. +func (k Keeper) GetICS4Wrapper() porttypes.ICS4Wrapper { + return k +} + +// GetAppVersion defines a wrapper function for the channel Keeper's function +// in order to expose it to the vibc IBC handler. +func (k Keeper) GetAppVersion(ctx sdk.Context, portID, channelID string) (string, bool) { + return k.channelKeeper.GetAppVersion(ctx, portID, channelID) } // GetChannel defines a wrapper function for the channel Keeper's function @@ -65,9 +83,8 @@ func (k Keeper) GetChannel(ctx sdk.Context, portID, channelID string) (channelty return k.channelKeeper.GetChannel(ctx, portID, channelID) } -// ChanOpenInit defines a wrapper function for the channel Keeper's function -// in order to expose it to the vibc IBC handler. -func (k Keeper) ChanOpenInit(ctx sdk.Context, order channeltypes.Order, connectionHops []string, +// ReceiveChanOpenInit wraps the keeper's ChanOpenInit function. +func (k Keeper) ReceiveChanOpenInit(ctx sdk.Context, order channeltypes.Order, connectionHops []string, portID, rPortID, version string, ) error { capName := host.PortPath(portID) @@ -92,41 +109,41 @@ func (k Keeper) ChanOpenInit(ctx sdk.Context, order channeltypes.Order, connecti return nil } +// ReceiveSendPacket wraps the keeper's SendPacket function. +func (k Keeper) ReceiveSendPacket(ctx sdk.Context, packet ibcexported.PacketI) (uint64, error) { + sourcePort := packet.GetSourcePort() + sourceChannel := packet.GetSourceChannel() + timeoutHeight := packet.GetTimeoutHeight() + timeoutRevisionNumber := timeoutHeight.GetRevisionNumber() + timeoutRevisionHeight := timeoutHeight.GetRevisionHeight() + clientTimeoutHeight := clienttypes.NewHeight(timeoutRevisionNumber, timeoutRevisionHeight) + timeoutTimestamp := packet.GetTimeoutTimestamp() + data := packet.GetData() + + capName := host.ChannelCapabilityPath(sourcePort, sourceChannel) + chanCap, ok := k.GetCapability(ctx, capName) + if !ok { + return 0, sdkioerrors.Wrapf(channeltypes.ErrChannelCapabilityNotFound, "could not retrieve channel capability at: %s", capName) + } + return k.SendPacket(ctx, chanCap, sourcePort, sourceChannel, clientTimeoutHeight, timeoutTimestamp, data) +} + // SendPacket defines a wrapper function for the channel Keeper's function // in order to expose it to the vibc IBC handler. 
func (k Keeper) SendPacket( ctx sdk.Context, + chanCap *capability.Capability, sourcePort string, sourceChannel string, timeoutHeight clienttypes.Height, timeoutTimestamp uint64, data []byte, ) (uint64, error) { - capName := host.ChannelCapabilityPath(sourcePort, sourceChannel) - chanCap, ok := k.GetCapability(ctx, capName) - if !ok { - return 0, sdkioerrors.Wrapf(channeltypes.ErrChannelCapabilityNotFound, "could not retrieve channel capability at: %s", capName) - } return k.channelKeeper.SendPacket(ctx, chanCap, sourcePort, sourceChannel, timeoutHeight, timeoutTimestamp, data) } -var _ ibcexported.Acknowledgement = (*rawAcknowledgement)(nil) - -type rawAcknowledgement struct { - data []byte -} - -func (r rawAcknowledgement) Acknowledgement() []byte { - return r.data -} - -func (r rawAcknowledgement) Success() bool { - return true -} - -// WriteAcknowledgement defines a wrapper function for the channel Keeper's function -// in order to expose it to the vibc IBC handler. -func (k Keeper) WriteAcknowledgement(ctx sdk.Context, packet ibcexported.PacketI, acknowledgement []byte) error { +// ReceiveWriteAcknowledgement wraps the keeper's WriteAcknowledgment function. +func (k Keeper) ReceiveWriteAcknowledgement(ctx sdk.Context, packet ibcexported.PacketI, ack ibcexported.Acknowledgement) error { portID := packet.GetDestPort() channelID := packet.GetDestChannel() capName := host.ChannelCapabilityPath(portID, channelID) @@ -134,15 +151,33 @@ func (k Keeper) WriteAcknowledgement(ctx sdk.Context, packet ibcexported.PacketI if !ok { return sdkioerrors.Wrapf(channeltypes.ErrChannelCapabilityNotFound, "could not retrieve channel capability at: %s", capName) } - ack := rawAcknowledgement{ - data: acknowledgement, - } + return k.WriteAcknowledgement(ctx, chanCap, packet, ack) +} + +// WriteAcknowledgement defines a wrapper function for the channel Keeper's function +// in order to expose it to the vibc IBC handler. +func (k Keeper) WriteAcknowledgement(ctx sdk.Context, chanCap *capability.Capability, packet ibcexported.PacketI, ack ibcexported.Acknowledgement) error { return k.channelKeeper.WriteAcknowledgement(ctx, chanCap, packet, ack) } -// ChanCloseInit defines a wrapper function for the channel Keeper's function +// ReceiveWriteOpenTryChannel wraps the keeper's WriteOpenTryChannel function. +func (k Keeper) ReceiveWriteOpenTryChannel(ctx sdk.Context, packet ibcexported.PacketI, order channeltypes.Order, connectionHops []string, version string) error { + portID := packet.GetDestPort() + channelID := packet.GetDestChannel() + counterparty := channeltypes.NewCounterparty(packet.GetSourcePort(), packet.GetSourceChannel()) + k.WriteOpenTryChannel(ctx, portID, channelID, order, connectionHops, counterparty, version) + return nil +} + +// WriteOpenTryChannel is a wrapper function for the channel Keeper's function +func (k Keeper) WriteOpenTryChannel(ctx sdk.Context, portID, channelID string, order channeltypes.Order, + connectionHops []string, counterparty channeltypes.Counterparty, version string) { + k.channelKeeper.WriteOpenTryChannel(ctx, portID, channelID, order, connectionHops, counterparty, version) +} + +// ReceiveChanCloseInit is a wrapper function for the channel Keeper's function // in order to expose it to the vibc IBC handler. 
-func (k Keeper) ChanCloseInit(ctx sdk.Context, portID, channelID string) error { +func (k Keeper) ReceiveChanCloseInit(ctx sdk.Context, portID, channelID string) error { capName := host.ChannelCapabilityPath(portID, channelID) chanCap, ok := k.GetCapability(ctx, capName) if !ok { @@ -155,20 +190,21 @@ func (k Keeper) ChanCloseInit(ctx sdk.Context, portID, channelID string) error { return nil } -// BindPort defines a wrapper function for the port Keeper's function in -// order to expose it to the vibc IBC handler. -func (k Keeper) BindPort(ctx sdk.Context, portID string) error { - _, ok := k.scopedKeeper.GetCapability(ctx, host.PortPath(portID)) +// ReceiveBindPort is a wrapper function for the port Keeper's function in order +// to expose it to the vibc IBC handler. +func (k Keeper) ReceiveBindPort(ctx sdk.Context, portID string) error { + portPath := host.PortPath(portID) + _, ok := k.GetCapability(ctx, portPath) if ok { return fmt.Errorf("port %s is already bound", portID) } cap := k.portKeeper.BindPort(ctx, portID) - return k.ClaimCapability(ctx, cap, host.PortPath(portID)) + return k.ClaimCapability(ctx, cap, portPath) } -// TimeoutExecuted defines a wrapper function for the channel Keeper's function -// in order to expose it to the vibc IBC handler. -func (k Keeper) TimeoutExecuted(ctx sdk.Context, packet ibcexported.PacketI) error { +// ReceiveTimeoutExecuted is a wrapper function for the channel Keeper's +// function in order to expose it to the vibc IBC handler. +func (k Keeper) ReceiveTimeoutExecuted(ctx sdk.Context, packet ibcexported.PacketI) error { portID := packet.GetSourcePort() channelID := packet.GetSourceChannel() capName := host.ChannelCapabilityPath(portID, channelID) @@ -180,11 +216,12 @@ func (k Keeper) TimeoutExecuted(ctx sdk.Context, packet ibcexported.PacketI) err } // ClaimCapability allows the vibc module to claim a capability that IBC module -// passes to it +// passes to it. func (k Keeper) ClaimCapability(ctx sdk.Context, cap *capability.Capability, name string) error { return k.scopedKeeper.ClaimCapability(ctx, cap, name) } +// GetCapability allows the vibc module to retrieve a capability. 
func (k Keeper) GetCapability(ctx sdk.Context, name string) (*capability.Capability, bool) { return k.scopedKeeper.GetCapability(ctx, name) } diff --git a/golang/cosmos/x/vibc/module.go b/golang/cosmos/x/vibc/module.go index e1027c7b213..715f8f68042 100644 --- a/golang/cosmos/x/vibc/module.go +++ b/golang/cosmos/x/vibc/module.go @@ -71,13 +71,15 @@ func (AppModuleBasic) GetQueryCmd() *cobra.Command { type AppModule struct { AppModuleBasic keeper Keeper + bankKeeper types.BankKeeper } // NewAppModule creates a new AppModule Object -func NewAppModule(k Keeper) AppModule { +func NewAppModule(k Keeper, bankKeeper types.BankKeeper) AppModule { am := AppModule{ AppModuleBasic: AppModuleBasic{}, keeper: k, + bankKeeper: bankKeeper, } return am } @@ -104,7 +106,7 @@ func (AppModule) RegisterInvariants(ir sdk.InvariantRegistry) { // Route implements the AppModule interface func (am AppModule) Route() sdk.Route { - return sdk.NewRoute(RouterKey, NewHandler(am.keeper)) + return sdk.NewRoute(RouterKey, NewHandler(am.keeper, am.bankKeeper)) } // QuerierRoute implements the AppModule interface diff --git a/golang/cosmos/x/vibc/types/expected_keepers.go b/golang/cosmos/x/vibc/types/expected_keepers.go index b94c33c0445..dacf3d45947 100644 --- a/golang/cosmos/x/vibc/types/expected_keepers.go +++ b/golang/cosmos/x/vibc/types/expected_keepers.go @@ -9,8 +9,13 @@ import ( ibcexported "github.com/cosmos/ibc-go/v6/modules/core/exported" ) +type BankKeeper interface { + GetBalance(ctx sdk.Context, addr sdk.AccAddress, denom string) sdk.Coin +} + // ChannelKeeper defines the expected IBC channel keeper type ChannelKeeper interface { + GetAppVersion(ctx sdk.Context, portID, channelID string) (string, bool) GetChannel(ctx sdk.Context, srcPort, srcChan string) (channel channel.Channel, found bool) SendPacket( ctx sdk.Context, @@ -26,6 +31,8 @@ type ChannelKeeper interface { portCap *capability.Capability, counterparty channel.Counterparty, version string) (string, *capability.Capability, error) WriteOpenInitChannel(ctx sdk.Context, portID, channelID string, order channel.Order, connectionHops []string, counterparty channel.Counterparty, version string) + WriteOpenTryChannel(ctx sdk.Context, portID, channelID string, order channel.Order, + connectionHops []string, counterparty channel.Counterparty, version string) ChanCloseInit(ctx sdk.Context, portID, channelID string, chanCap *capability.Capability) error TimeoutExecuted(ctx sdk.Context, channelCap *capability.Capability, packet ibcexported.PacketI) error } @@ -44,3 +51,9 @@ type ConnectionKeeper interface { type PortKeeper interface { BindPort(ctx sdk.Context, portID string) *capability.Capability } + +// ScopedKeeper defines the expected scoped capability keeper +type ScopedKeeper interface { + ClaimCapability(ctx sdk.Context, cap *capability.Capability, name string) error + GetCapability(ctx sdk.Context, name string) (*capability.Capability, bool) +} diff --git a/golang/cosmos/x/vibc/ibc.go b/golang/cosmos/x/vibc/types/ibc_module.go similarity index 66% rename from golang/cosmos/x/vibc/ibc.go rename to golang/cosmos/x/vibc/types/ibc_module.go index 7ac2fd5118c..0d62115b549 100644 --- a/golang/cosmos/x/vibc/ibc.go +++ b/golang/cosmos/x/vibc/types/ibc_module.go @@ -1,10 +1,6 @@ -package vibc +package types import ( - "context" - "encoding/json" - "fmt" - sdkioerrors "cosmossdk.io/errors" "github.com/Agoric/agoric-sdk/golang/cosmos/vm" capability "github.com/cosmos/cosmos-sdk/x/capability/types" @@ -27,146 +23,25 @@ const ( ) var ( - _ porttypes.IBCModule = 
IBCModule{} + _ porttypes.IBCModule = (*IBCModule)(nil) ) -type IBCModule struct { - keeper Keeper -} - -type portMessage struct { // comes from swingset's IBC handler - Type string `json:"type"` // IBC_METHOD - Method string `json:"method"` - Packet channeltypes.Packet `json:"packet"` - RelativeTimeoutNs uint64 `json:"relativeTimeoutNs,string"` - Order string `json:"order"` - Hops []string `json:"hops"` - Version string `json:"version"` - Ack []byte `json:"ack"` -} - -func stringToOrder(order string) channeltypes.Order { - switch order { - case "ORDERED": - return channeltypes.ORDERED - case "UNORDERED": - return channeltypes.UNORDERED - default: - return channeltypes.NONE - } +type IBCModuleImpl interface { + ClaimCapability(ctx sdk.Context, channelCap *capability.Capability, path string) error + GetChannel(ctx sdk.Context, portID, channelID string) (channeltypes.Channel, bool) + PushAction(ctx sdk.Context, action vm.Action) error } -func orderToString(order channeltypes.Order) string { - switch order { - case channeltypes.ORDERED: - return "ORDERED" - case channeltypes.UNORDERED: - return "UNORDERED" - default: - return "NONE" - } +type IBCModule struct { + impl IBCModuleImpl } -func NewIBCModule(keeper Keeper) IBCModule { +func NewIBCModule(impl IBCModuleImpl) IBCModule { return IBCModule{ - keeper: keeper, + impl: impl, } } -func (ch IBCModule) Receive(cctx context.Context, str string) (ret string, err error) { - // fmt.Println("ibc.go downcall", str) - ctx := sdk.UnwrapSDKContext(cctx) - keeper := ch.keeper - - msg := new(portMessage) - err = json.Unmarshal([]byte(str), &msg) - if err != nil { - return ret, err - } - - if msg.Type != "IBC_METHOD" { - return "", fmt.Errorf(`channel handler only accepts messages of "type": "IBC_METHOD"`) - } - - switch msg.Method { - case "sendPacket": - timeoutTimestamp := msg.Packet.TimeoutTimestamp - if msg.Packet.TimeoutHeight.IsZero() && msg.Packet.TimeoutTimestamp == 0 { - // Use the relative timeout if no absolute timeout is specifiied. 
- timeoutTimestamp = uint64(ctx.BlockTime().UnixNano()) + msg.RelativeTimeoutNs - } - - seq, err := keeper.SendPacket( - ctx, - msg.Packet.SourcePort, - msg.Packet.SourceChannel, - msg.Packet.TimeoutHeight, - timeoutTimestamp, - msg.Packet.Data, - ) - if err == nil { - // synthesize the sent packet - packet := channeltypes.NewPacket( - msg.Packet.Data, seq, - msg.Packet.SourcePort, msg.Packet.SourceChannel, - msg.Packet.DestinationPort, msg.Packet.DestinationChannel, - msg.Packet.TimeoutHeight, timeoutTimestamp, - ) - bytes, err := json.Marshal(&packet) - if err == nil { - ret = string(bytes) - } - } - - case "receiveExecuted": - err = keeper.WriteAcknowledgement(ctx, msg.Packet, msg.Ack) - if err == nil { - ret = "true" - } - - case "startChannelOpenInit": - err = keeper.ChanOpenInit( - ctx, stringToOrder(msg.Order), msg.Hops, - msg.Packet.SourcePort, - msg.Packet.DestinationPort, - msg.Version, - ) - if err == nil { - ret = "true" - } - - case "startChannelCloseInit": - err = keeper.ChanCloseInit(ctx, msg.Packet.SourcePort, msg.Packet.SourceChannel) - if err == nil { - ret = "true" - } - - case "bindPort": - err = keeper.BindPort(ctx, msg.Packet.SourcePort) - if err == nil { - ret = "true" - } - - case "timeoutExecuted": - err = keeper.TimeoutExecuted(ctx, msg.Packet) - if err == nil { - ret = "true" - } - - default: - err = fmt.Errorf("unrecognized method %s", msg.Method) - } - - // fmt.Println("ibc.go downcall reply", ret, err) - return -} - -func (im IBCModule) PushAction(ctx sdk.Context, action vm.Action) error { - // fmt.Println("ibc.go upcall", send) - return im.keeper.PushAction(ctx, action) - // fmt.Println("ibc.go upcall reply", reply, err) -} - type channelOpenInitEvent struct { *vm.ActionHeader `actionType:"IBC_EVENT"` Event string `json:"event" default:"channelOpenInit"` @@ -200,13 +75,13 @@ func (im IBCModule) OnChanOpenInit( AsyncVersions: AsyncVersions, } - err := im.PushAction(ctx, event) + err := im.impl.PushAction(ctx, event) if err != nil { return "", err } // Claim channel capability passed back by IBC module - if err := im.keeper.ClaimCapability(ctx, channelCap, host.ChannelCapabilityPath(portID, channelID)); err != nil { + if err := im.impl.ClaimCapability(ctx, channelCap, host.ChannelCapabilityPath(portID, channelID)); err != nil { return "", err } @@ -250,17 +125,24 @@ func (im IBCModule) OnChanOpenTry( AsyncVersions: AsyncVersions, } - err := im.PushAction(ctx, event) + err := im.impl.PushAction(ctx, event) if err != nil { return "", err } // Claim channel capability passed back by IBC module - if err = im.keeper.ClaimCapability(ctx, channelCap, host.ChannelCapabilityPath(portID, channelID)); err != nil { + if err = im.impl.ClaimCapability(ctx, channelCap, host.ChannelCapabilityPath(portID, channelID)); err != nil { return "", sdkioerrors.Wrap(channeltypes.ErrChannelCapabilityNotFound, err.Error()) } - return event.Version, err + if !event.AsyncVersions { + // We have to supply a synchronous version, so just echo back the one they sent. + return event.Version, nil + } + + // Use an empty version string to indicate that the VM explicitly (possibly + // async) performs the WriteOpenTryChannel. + return "", nil } type channelOpenAckEvent struct { @@ -282,7 +164,7 @@ func (im IBCModule) OnChanOpenAck( ) error { // We don't care if the channel was found. If it wasn't then GetChannel // returns an empty channel object that we can still use without crashing. 
- channel, _ := im.keeper.GetChannel(ctx, portID, channelID) + channel, _ := im.impl.GetChannel(ctx, portID, channelID) channel.Counterparty.ChannelId = counterpartyChannelID event := channelOpenAckEvent{ @@ -293,7 +175,7 @@ func (im IBCModule) OnChanOpenAck( ConnectionHops: channel.ConnectionHops, } - return im.PushAction(ctx, event) + return im.impl.PushAction(ctx, event) } type channelOpenConfirmEvent struct { @@ -313,7 +195,7 @@ func (im IBCModule) OnChanOpenConfirm( ChannelID: channelID, } - return im.PushAction(ctx, event) + return im.impl.PushAction(ctx, event) } type channelCloseInitEvent struct { @@ -333,7 +215,7 @@ func (im IBCModule) OnChanCloseInit( ChannelID: channelID, } - err := im.PushAction(ctx, event) + err := im.impl.PushAction(ctx, event) return err } @@ -354,7 +236,7 @@ func (im IBCModule) OnChanCloseConfirm( ChannelID: channelID, } - err := im.PushAction(ctx, event) + err := im.impl.PushAction(ctx, event) return err } @@ -383,7 +265,7 @@ func (im IBCModule) OnRecvPacket( Relayer: relayer, } - err := im.PushAction(ctx, event) + err := im.impl.PushAction(ctx, event) if err != nil { return channeltypes.NewErrorAcknowledgement(err) } @@ -411,7 +293,7 @@ func (im IBCModule) OnAcknowledgementPacket( Relayer: relayer, } - err := im.PushAction(ctx, event) + err := im.impl.PushAction(ctx, event) if err != nil { return err } @@ -436,7 +318,7 @@ func (im IBCModule) OnTimeoutPacket( Relayer: relayer, } - err := im.PushAction(ctx, event) + err := im.impl.PushAction(ctx, event) if err != nil { return err } diff --git a/golang/cosmos/x/vibc/types/receiver.go b/golang/cosmos/x/vibc/types/receiver.go new file mode 100644 index 00000000000..9dfd43b0957 --- /dev/null +++ b/golang/cosmos/x/vibc/types/receiver.go @@ -0,0 +1,160 @@ +package types + +import ( + "context" + "encoding/json" + "fmt" + + "github.com/Agoric/agoric-sdk/golang/cosmos/vm" + channeltypes "github.com/cosmos/ibc-go/v6/modules/core/04-channel/types" + + "github.com/cosmos/ibc-go/v6/modules/core/exported" + + sdk "github.com/cosmos/cosmos-sdk/types" +) + +var ( + _ vm.PortHandler = Receiver{} + _ exported.Acknowledgement = (*rawAcknowledgement)(nil) +) + +type ReceiverImpl interface { + ReceiveSendPacket(ctx sdk.Context, packet exported.PacketI) (uint64, error) + ReceiveWriteAcknowledgement(ctx sdk.Context, packet exported.PacketI, ack exported.Acknowledgement) error + ReceiveChanOpenInit(ctx sdk.Context, order channeltypes.Order, hops []string, sourcePort, destinationPort, version string) error + ReceiveWriteOpenTryChannel(ctx sdk.Context, packet exported.PacketI, order channeltypes.Order, connectionHops []string, version string) error + ReceiveChanCloseInit(ctx sdk.Context, sourcePort, sourceChannel string) error + ReceiveBindPort(ctx sdk.Context, sourcePort string) error + ReceiveTimeoutExecuted(ctx sdk.Context, packet exported.PacketI) error +} + +type Receiver struct { + impl ReceiverImpl +} + +func NewReceiver(impl ReceiverImpl) Receiver { + return Receiver{ + impl: impl, + } +} + +type portMessage struct { // comes from swingset's IBC handler + Type string `json:"type"` // IBC_METHOD + Method string `json:"method"` + Packet channeltypes.Packet `json:"packet"` + RelativeTimeoutNs uint64 `json:"relativeTimeoutNs,string"` + Order string `json:"order"` + Hops []string `json:"hops"` + Version string `json:"version"` + Ack []byte `json:"ack"` +} + +func stringToOrder(order string) channeltypes.Order { + switch order { + case "ORDERED": + return channeltypes.ORDERED + case "UNORDERED": + return channeltypes.UNORDERED + 
default: + return channeltypes.NONE + } +} + +func orderToString(order channeltypes.Order) string { + switch order { + case channeltypes.ORDERED: + return "ORDERED" + case channeltypes.UNORDERED: + return "UNORDERED" + default: + return "NONE" + } +} + +type rawAcknowledgement struct { + data []byte +} + +func (r rawAcknowledgement) Acknowledgement() []byte { + return r.data +} + +func (r rawAcknowledgement) Success() bool { + return true +} + +func (ir Receiver) Receive(cctx context.Context, str string) (ret string, err error) { + ctx := sdk.UnwrapSDKContext(cctx) + impl := ir.impl + + msg := new(portMessage) + err = json.Unmarshal([]byte(str), &msg) + if err != nil { + return "", err + } + + if msg.Type != "IBC_METHOD" { + return "", fmt.Errorf(`channel handler only accepts messages of "type": "IBC_METHOD"`) + } + + switch msg.Method { + case "sendPacket": + timeoutTimestamp := msg.Packet.TimeoutTimestamp + if msg.Packet.TimeoutHeight.IsZero() && timeoutTimestamp == 0 { + // Use the relative timeout if no absolute timeout is specifiied. + timeoutTimestamp = uint64(ctx.BlockTime().UnixNano()) + msg.RelativeTimeoutNs + } + + packet := channeltypes.NewPacket( + msg.Packet.Data, 0, + msg.Packet.SourcePort, msg.Packet.SourceChannel, + msg.Packet.DestinationPort, msg.Packet.DestinationChannel, + msg.Packet.TimeoutHeight, timeoutTimestamp, + ) + seq, err := impl.ReceiveSendPacket(ctx, packet) + if err == nil { + packet.Sequence = seq + bytes, err := json.Marshal(&packet) + if err == nil { + ret = string(bytes) + } + } + + case "tryOpenExecuted": + err = impl.ReceiveWriteOpenTryChannel( + ctx, msg.Packet, + stringToOrder(msg.Order), msg.Hops, msg.Version, + ) + + case "receiveExecuted": + ack := rawAcknowledgement{ + data: msg.Ack, + } + err = impl.ReceiveWriteAcknowledgement(ctx, msg.Packet, ack) + + case "startChannelOpenInit": + err = impl.ReceiveChanOpenInit( + ctx, stringToOrder(msg.Order), msg.Hops, + msg.Packet.SourcePort, + msg.Packet.DestinationPort, + msg.Version, + ) + + case "startChannelCloseInit": + err = impl.ReceiveChanCloseInit(ctx, msg.Packet.SourcePort, msg.Packet.SourceChannel) + + case "bindPort": + err = impl.ReceiveBindPort(ctx, msg.Packet.SourcePort) + + case "timeoutExecuted": + err = impl.ReceiveTimeoutExecuted(ctx, msg.Packet) + + default: + err = fmt.Errorf("unrecognized method %s", msg.Method) + } + + if ret == "" && err == nil { + ret = "true" + } + return +} From b89aaca2afd7d0901fc06d7a6bab27aab38d389b Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Mon, 1 Jan 2024 17:24:12 -0600 Subject: [PATCH 45/47] feat(vibc): use triggers to raise targeted events --- golang/cosmos/x/vibc/keeper/triggers.go | 101 +++++++++++++++++++++++ golang/cosmos/x/vibc/types/ibc_module.go | 15 ++-- 2 files changed, 110 insertions(+), 6 deletions(-) create mode 100644 golang/cosmos/x/vibc/keeper/triggers.go diff --git a/golang/cosmos/x/vibc/keeper/triggers.go b/golang/cosmos/x/vibc/keeper/triggers.go new file mode 100644 index 00000000000..10416d0cade --- /dev/null +++ b/golang/cosmos/x/vibc/keeper/triggers.go @@ -0,0 +1,101 @@ +package keeper + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + + clienttypes "github.com/cosmos/ibc-go/v6/modules/core/02-client/types" + channeltypes "github.com/cosmos/ibc-go/v6/modules/core/04-channel/types" + ibcexported "github.com/cosmos/ibc-go/v6/modules/core/exported" + + "github.com/Agoric/agoric-sdk/golang/cosmos/vm" + "github.com/Agoric/agoric-sdk/golang/cosmos/x/vibc/types" +) + +func reifyPacket(packet ibcexported.PacketI) channeltypes.Packet 
{ + height := packet.GetTimeoutHeight() + ctHeight := clienttypes.Height{ + RevisionHeight: height.GetRevisionHeight(), + RevisionNumber: height.GetRevisionNumber(), + } + return channeltypes.Packet{ + Sequence: packet.GetSequence(), + SourcePort: packet.GetSourcePort(), + SourceChannel: packet.GetSourceChannel(), + DestinationPort: packet.GetDestPort(), + DestinationChannel: packet.GetDestChannel(), + Data: packet.GetData(), + TimeoutHeight: ctHeight, + TimeoutTimestamp: packet.GetTimeoutTimestamp(), + } +} + +type WriteAcknowledgementEvent struct { + *vm.ActionHeader `actionType:"IBC_EVENT"` + Event string `json:"event" default:"writeAcknowledgement"` + Target string `json:"target"` + Packet channeltypes.Packet `json:"packet"` + Acknowledgement []byte `json:"acknowledgement"` + Relayer sdk.AccAddress `json:"relayer"` +} + +func (k Keeper) TriggerWriteAcknowledgement( + ctx sdk.Context, + target string, + packet ibcexported.PacketI, + acknowledgement ibcexported.Acknowledgement, +) error { + event := WriteAcknowledgementEvent{ + Target: target, + Packet: reifyPacket(packet), + Acknowledgement: acknowledgement.Acknowledgement(), + } + + err := k.PushAction(ctx, event) + if err != nil { + return err + } + + return nil +} + +func (k Keeper) TriggerOnAcknowledgementPacket( + ctx sdk.Context, + target string, + packet ibcexported.PacketI, + acknowledgement []byte, + relayer sdk.AccAddress, +) error { + event := types.AcknowledgementPacketEvent{ + Target: target, + Packet: reifyPacket(packet), + Acknowledgement: acknowledgement, + Relayer: relayer, + } + + err := k.PushAction(ctx, event) + if err != nil { + return err + } + + return nil +} + +func (k Keeper) TriggerOnTimeoutPacket( + ctx sdk.Context, + target string, + packet ibcexported.PacketI, + relayer sdk.AccAddress, +) error { + event := types.TimeoutPacketEvent{ + Target: target, + Packet: reifyPacket(packet), + Relayer: relayer, + } + + err := k.PushAction(ctx, event) + if err != nil { + return err + } + + return nil +} diff --git a/golang/cosmos/x/vibc/types/ibc_module.go b/golang/cosmos/x/vibc/types/ibc_module.go index 0d62115b549..ab5ee67de8e 100644 --- a/golang/cosmos/x/vibc/types/ibc_module.go +++ b/golang/cosmos/x/vibc/types/ibc_module.go @@ -240,9 +240,10 @@ func (im IBCModule) OnChanCloseConfirm( return err } -type receivePacketEvent struct { +type ReceivePacketEvent struct { *vm.ActionHeader `actionType:"IBC_EVENT"` Event string `json:"event" default:"receivePacket"` + Target string `json:"target"` Packet channeltypes.Packet `json:"packet"` Relayer sdk.AccAddress `json:"relayer"` } @@ -260,7 +261,7 @@ func (im IBCModule) OnRecvPacket( // and also "rly tx xfer"-- they both are trying to relay // the same packets. 
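// For orientation (illustrative, not part of this diff): given the portMessage
// struct in receiver.go above, a "sendPacket" downcall from the JS side is
// JSON of roughly this shape; all concrete values below are made up.
//
//   {
//     "type": "IBC_METHOD",
//     "method": "sendPacket",
//     "packet": {
//       "source_port": "port-1",
//       "source_channel": "channel-0",
//       "destination_port": "transfer",
//       "destination_channel": "channel-1",
//       "data": "<base64 bytes>"
//     },
//     "relativeTimeoutNs": "600000000000"
//   }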
- event := receivePacketEvent{ + event := ReceivePacketEvent{ Packet: packet, Relayer: relayer, } @@ -273,9 +274,10 @@ func (im IBCModule) OnRecvPacket( return nil } -type acknowledgementPacketEvent struct { +type AcknowledgementPacketEvent struct { *vm.ActionHeader `actionType:"IBC_EVENT"` Event string `json:"event" default:"acknowledgementPacket"` + Target string `json:"target"` Packet channeltypes.Packet `json:"packet"` Acknowledgement []byte `json:"acknowledgement"` Relayer sdk.AccAddress `json:"relayer"` @@ -287,7 +289,7 @@ func (im IBCModule) OnAcknowledgementPacket( acknowledgement []byte, relayer sdk.AccAddress, ) error { - event := acknowledgementPacketEvent{ + event := AcknowledgementPacketEvent{ Packet: packet, Acknowledgement: acknowledgement, Relayer: relayer, @@ -301,9 +303,10 @@ func (im IBCModule) OnAcknowledgementPacket( return nil } -type timeoutPacketEvent struct { +type TimeoutPacketEvent struct { *vm.ActionHeader `actionType:"IBC_EVENT"` Event string `json:"event" default:"timeoutPacket"` + Target string `json:"target"` Packet channeltypes.Packet `json:"packet"` Relayer sdk.AccAddress `json:"relayer"` } @@ -313,7 +316,7 @@ func (im IBCModule) OnTimeoutPacket( packet channeltypes.Packet, relayer sdk.AccAddress, ) error { - event := timeoutPacketEvent{ + event := TimeoutPacketEvent{ Packet: packet, Relayer: relayer, } From c2b36dc1357ee6db77218ff796f8e53423d83a0b Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Sat, 6 Jan 2024 16:41:23 -0600 Subject: [PATCH 46/47] chore(cosmos): make `ActionHeader` a mutable pointer target --- golang/cosmos/x/swingset/abci.go | 20 +++++------ golang/cosmos/x/swingset/keeper/msg_server.go | 36 +++++++++---------- golang/cosmos/x/swingset/keeper/proposal.go | 6 ++-- golang/cosmos/x/vibc/handler.go | 6 ++-- golang/cosmos/x/vibc/types/ibc_module.go | 35 ++++++++++-------- 5 files changed, 54 insertions(+), 49 deletions(-) diff --git a/golang/cosmos/x/swingset/abci.go b/golang/cosmos/x/swingset/abci.go index df6e360c472..2afb78457f5 100644 --- a/golang/cosmos/x/swingset/abci.go +++ b/golang/cosmos/x/swingset/abci.go @@ -15,27 +15,27 @@ import ( ) type beginBlockAction struct { - vm.ActionHeader `actionType:"BEGIN_BLOCK"` - ChainID string `json:"chainID"` - Params types.Params `json:"params"` + *vm.ActionHeader `actionType:"BEGIN_BLOCK"` + ChainID string `json:"chainID"` + Params types.Params `json:"params"` } type endBlockAction struct { - vm.ActionHeader `actionType:"END_BLOCK"` + *vm.ActionHeader `actionType:"END_BLOCK"` } type commitBlockAction struct { - vm.ActionHeader `actionType:"COMMIT_BLOCK"` + *vm.ActionHeader `actionType:"COMMIT_BLOCK"` } type afterCommitBlockAction struct { - vm.ActionHeader `actionType:"AFTER_COMMIT_BLOCK"` + *vm.ActionHeader `actionType:"AFTER_COMMIT_BLOCK"` } func BeginBlock(ctx sdk.Context, req abci.RequestBeginBlock, keeper Keeper) error { defer telemetry.ModuleMeasureSince(types.ModuleName, time.Now(), telemetry.MetricKeyBeginBlocker) - action := &beginBlockAction{ + action := beginBlockAction{ ChainID: ctx.ChainID(), Params: keeper.GetParams(ctx), } @@ -56,7 +56,7 @@ var endBlockTime int64 func EndBlock(ctx sdk.Context, req abci.RequestEndBlock, keeper Keeper) ([]abci.ValidatorUpdate, error) { defer telemetry.ModuleMeasureSince(types.ModuleName, time.Now(), telemetry.MetricKeyEndBlocker) - action := &endBlockAction{} + action := endBlockAction{} _, err := keeper.BlockingSend(ctx, action) // fmt.Fprintf(os.Stderr, "END_BLOCK Returned from SwingSet: %s, %v\n", out, err) @@ -83,7 +83,7 @@ func 
getEndBlockContext() sdk.Context { func CommitBlock(keeper Keeper) error { defer telemetry.ModuleMeasureSince(types.ModuleName, time.Now(), "commit_blocker") - action := &commitBlockAction{} + action := commitBlockAction{} _, err := keeper.BlockingSend(getEndBlockContext(), action) // fmt.Fprintf(os.Stderr, "COMMIT_BLOCK Returned from SwingSet: %s, %v\n", out, err) @@ -98,7 +98,7 @@ func CommitBlock(keeper Keeper) error { func AfterCommitBlock(keeper Keeper) error { // defer telemetry.ModuleMeasureSince(types.ModuleName, time.Now(), "commit_blocker") - action := &afterCommitBlockAction{} + action := afterCommitBlockAction{} _, err := keeper.BlockingSend(getEndBlockContext(), action) // fmt.Fprintf(os.Stderr, "AFTER_COMMIT_BLOCK Returned from SwingSet: %s, %v\n", out, err) diff --git a/golang/cosmos/x/swingset/keeper/msg_server.go b/golang/cosmos/x/swingset/keeper/msg_server.go index 138a6d6803f..c40aa6a0baa 100644 --- a/golang/cosmos/x/swingset/keeper/msg_server.go +++ b/golang/cosmos/x/swingset/keeper/msg_server.go @@ -21,10 +21,10 @@ func NewMsgServerImpl(keeper Keeper) types.MsgServer { var _ types.MsgServer = msgServer{} type deliverInboundAction struct { - vm.ActionHeader `actionType:"DELIVER_INBOUND"` - Peer string `json:"peer"` - Messages [][]interface{} `json:"messages"` - Ack uint64 `json:"ack"` + *vm.ActionHeader `actionType:"DELIVER_INBOUND"` + Peer string `json:"peer"` + Messages [][]interface{} `json:"messages"` + Ack uint64 `json:"ack"` } func (keeper msgServer) routeAction(ctx sdk.Context, msg vm.ControllerAdmissionMsg, action vm.Action) error { @@ -48,7 +48,7 @@ func (keeper msgServer) DeliverInbound(goCtx context.Context, msg *types.MsgDeli for i, message := range msg.Messages { messages[i] = []interface{}{msg.Nums[i], message} } - action := &deliverInboundAction{ + action := deliverInboundAction{ Peer: msg.Submitter.String(), Messages: messages, Ack: msg.Ack, @@ -63,9 +63,9 @@ func (keeper msgServer) DeliverInbound(goCtx context.Context, msg *types.MsgDeli } type walletAction struct { - vm.ActionHeader `actionType:"WALLET_ACTION"` - Owner string `json:"owner"` - Action string `json:"action"` + *vm.ActionHeader `actionType:"WALLET_ACTION"` + Owner string `json:"owner"` + Action string `json:"action"` } func (keeper msgServer) WalletAction(goCtx context.Context, msg *types.MsgWalletAction) (*types.MsgWalletActionResponse, error) { @@ -76,7 +76,7 @@ func (keeper msgServer) WalletAction(goCtx context.Context, msg *types.MsgWallet return nil, err } - action := &walletAction{ + action := walletAction{ Owner: msg.Owner.String(), Action: msg.Action, } @@ -91,9 +91,9 @@ func (keeper msgServer) WalletAction(goCtx context.Context, msg *types.MsgWallet } type walletSpendAction struct { - vm.ActionHeader `actionType:"WALLET_SPEND_ACTION"` - Owner string `json:"owner"` - SpendAction string `json:"spendAction"` + *vm.ActionHeader `actionType:"WALLET_SPEND_ACTION"` + Owner string `json:"owner"` + SpendAction string `json:"spendAction"` } func (keeper msgServer) WalletSpendAction(goCtx context.Context, msg *types.MsgWalletSpendAction) (*types.MsgWalletSpendActionResponse, error) { @@ -104,7 +104,7 @@ func (keeper msgServer) WalletSpendAction(goCtx context.Context, msg *types.MsgW return nil, err } - action := &walletSpendAction{ + action := walletSpendAction{ Owner: msg.Owner.String(), SpendAction: msg.SpendAction, } @@ -117,7 +117,7 @@ func (keeper msgServer) WalletSpendAction(goCtx context.Context, msg *types.MsgW } type provisionAction struct { - vm.ActionHeader 
`actionType:"PLEASE_PROVISION"` + *vm.ActionHeader `actionType:"PLEASE_PROVISION"` *types.MsgProvision AutoProvision bool `json:"autoProvision"` } @@ -141,7 +141,7 @@ func (keeper msgServer) provisionIfNeeded(ctx sdk.Context, owner sdk.AccAddress) PowerFlags: []string{types.PowerFlagSmartWallet}, } - action := &provisionAction{ + action := provisionAction{ MsgProvision: msg, AutoProvision: true, } @@ -163,7 +163,7 @@ func (keeper msgServer) Provision(goCtx context.Context, msg *types.MsgProvision return nil, err } - action := &provisionAction{ + action := provisionAction{ MsgProvision: msg, } @@ -184,7 +184,7 @@ func (keeper msgServer) Provision(goCtx context.Context, msg *types.MsgProvision } type installBundleAction struct { - vm.ActionHeader `actionType:"INSTALL_BUNDLE"` + *vm.ActionHeader `actionType:"INSTALL_BUNDLE"` *types.MsgInstallBundle } @@ -195,7 +195,7 @@ func (keeper msgServer) InstallBundle(goCtx context.Context, msg *types.MsgInsta if err != nil { return nil, err } - action := &installBundleAction{ + action := installBundleAction{ MsgInstallBundle: msg, } diff --git a/golang/cosmos/x/swingset/keeper/proposal.go b/golang/cosmos/x/swingset/keeper/proposal.go index dd84fa0287a..079dfbe82e6 100644 --- a/golang/cosmos/x/swingset/keeper/proposal.go +++ b/golang/cosmos/x/swingset/keeper/proposal.go @@ -11,13 +11,13 @@ import ( ) type coreEvalAction struct { - vm.ActionHeader `actionType:"CORE_EVAL"` - Evals []types.CoreEval `json:"evals"` + *vm.ActionHeader `actionType:"CORE_EVAL"` + Evals []types.CoreEval `json:"evals"` } // CoreEvalProposal tells SwingSet to evaluate the given JS code. func (k Keeper) CoreEvalProposal(ctx sdk.Context, p *types.CoreEvalProposal) error { - action := &coreEvalAction{ + action := coreEvalAction{ Evals: p.Evals, } diff --git a/golang/cosmos/x/vibc/handler.go b/golang/cosmos/x/vibc/handler.go index 9ca5e6d1b05..60cfb1f602a 100644 --- a/golang/cosmos/x/vibc/handler.go +++ b/golang/cosmos/x/vibc/handler.go @@ -25,8 +25,8 @@ func NewHandler(keeper Keeper, bankKeeper types.BankKeeper) sdk.Handler { } type sendPacketAction struct { - vm.ActionHeader `actionType:"IBC_EVENT"` - Event string `json:"event" default:"sendPacket"` + *vm.ActionHeader `actionType:"IBC_EVENT"` + Event string `json:"event" default:"sendPacket"` *MsgSendPacket } @@ -45,7 +45,7 @@ func handleMsgSendPacket( ) } - action := &sendPacketAction{ + action := sendPacketAction{ MsgSendPacket: msg, } // fmt.Fprintf(os.Stderr, "Context is %+v\n", ctx) diff --git a/golang/cosmos/x/vibc/types/ibc_module.go b/golang/cosmos/x/vibc/types/ibc_module.go index ab5ee67de8e..dd7d1be4b2a 100644 --- a/golang/cosmos/x/vibc/types/ibc_module.go +++ b/golang/cosmos/x/vibc/types/ibc_module.go @@ -42,9 +42,10 @@ func NewIBCModule(impl IBCModuleImpl) IBCModule { } } -type channelOpenInitEvent struct { +type ChannelOpenInitEvent struct { *vm.ActionHeader `actionType:"IBC_EVENT"` Event string `json:"event" default:"channelOpenInit"` + Target string `json:"target,omitempty"` Order string `json:"order"` ConnectionHops []string `json:"connectionHops"` PortID string `json:"portID"` @@ -65,7 +66,7 @@ func (im IBCModule) OnChanOpenInit( counterparty channeltypes.Counterparty, version string, ) (string, error) { - event := channelOpenInitEvent{ + event := ChannelOpenInitEvent{ Order: orderToString(order), ConnectionHops: connectionHops, PortID: portID, @@ -93,9 +94,10 @@ func (im IBCModule) OnChanOpenInit( return "", nil } -type channelOpenTryEvent struct { +type ChannelOpenTryEvent struct { *vm.ActionHeader 
`actionType:"IBC_EVENT"` Event string `json:"event" default:"channelOpenTry"` + Target string `json:"target,omitempty"` Order string `json:"order"` ConnectionHops []string `json:"connectionHops"` PortID string `json:"portID"` @@ -115,7 +117,7 @@ func (im IBCModule) OnChanOpenTry( counterparty channeltypes.Counterparty, counterpartyVersion string, ) (string, error) { - event := channelOpenTryEvent{ + event := ChannelOpenTryEvent{ Order: orderToString(order), ConnectionHops: connectionHops, PortID: portID, @@ -145,7 +147,7 @@ func (im IBCModule) OnChanOpenTry( return "", nil } -type channelOpenAckEvent struct { +type ChannelOpenAckEvent struct { *vm.ActionHeader `actionType:"IBC_EVENT"` Event string `json:"event" default:"channelOpenAck"` PortID string `json:"portID"` @@ -167,7 +169,7 @@ func (im IBCModule) OnChanOpenAck( channel, _ := im.impl.GetChannel(ctx, portID, channelID) channel.Counterparty.ChannelId = counterpartyChannelID - event := channelOpenAckEvent{ + event := ChannelOpenAckEvent{ PortID: portID, ChannelID: channelID, CounterpartyVersion: counterpartyVersion, @@ -178,9 +180,10 @@ func (im IBCModule) OnChanOpenAck( return im.impl.PushAction(ctx, event) } -type channelOpenConfirmEvent struct { +type ChannelOpenConfirmEvent struct { *vm.ActionHeader `actionType:"IBC_EVENT"` Event string `json:"event" default:"channelOpenConfirm"` + Target string `json:"target,omitempty"` PortID string `json:"portID"` ChannelID string `json:"channelID"` } @@ -190,7 +193,7 @@ func (im IBCModule) OnChanOpenConfirm( portID, channelID string, ) error { - event := channelOpenConfirmEvent{ + event := ChannelOpenConfirmEvent{ PortID: portID, ChannelID: channelID, } @@ -198,9 +201,10 @@ func (im IBCModule) OnChanOpenConfirm( return im.impl.PushAction(ctx, event) } -type channelCloseInitEvent struct { +type ChannelCloseInitEvent struct { *vm.ActionHeader `actionType:"IBC_EVENT"` Event string `json:"event" default:"channelCloseInit"` + Target string `json:"target,omitempty"` PortID string `json:"portID"` ChannelID string `json:"channelID"` } @@ -210,7 +214,7 @@ func (im IBCModule) OnChanCloseInit( portID, channelID string, ) error { - event := channelCloseInitEvent{ + event := ChannelCloseInitEvent{ PortID: portID, ChannelID: channelID, } @@ -219,9 +223,10 @@ func (im IBCModule) OnChanCloseInit( return err } -type channelCloseConfirmEvent struct { +type ChannelCloseConfirmEvent struct { *vm.ActionHeader `actionType:"IBC_EVENT"` Event string `json:"event" default:"channelCloseConfirm"` + Target string `json:"target,omitempty"` PortID string `json:"portID"` ChannelID string `json:"channelID"` } @@ -231,7 +236,7 @@ func (im IBCModule) OnChanCloseConfirm( portID, channelID string, ) error { - event := channelCloseConfirmEvent{ + event := ChannelCloseConfirmEvent{ PortID: portID, ChannelID: channelID, } @@ -243,7 +248,7 @@ func (im IBCModule) OnChanCloseConfirm( type ReceivePacketEvent struct { *vm.ActionHeader `actionType:"IBC_EVENT"` Event string `json:"event" default:"receivePacket"` - Target string `json:"target"` + Target string `json:"target,omitempty"` Packet channeltypes.Packet `json:"packet"` Relayer sdk.AccAddress `json:"relayer"` } @@ -277,7 +282,7 @@ func (im IBCModule) OnRecvPacket( type AcknowledgementPacketEvent struct { *vm.ActionHeader `actionType:"IBC_EVENT"` Event string `json:"event" default:"acknowledgementPacket"` - Target string `json:"target"` + Target string `json:"target,omitempty"` Packet channeltypes.Packet `json:"packet"` Acknowledgement []byte `json:"acknowledgement"` Relayer 
sdk.AccAddress `json:"relayer"` @@ -306,7 +311,7 @@ func (im IBCModule) OnAcknowledgementPacket( type TimeoutPacketEvent struct { *vm.ActionHeader `actionType:"IBC_EVENT"` Event string `json:"event" default:"timeoutPacket"` - Target string `json:"target"` + Target string `json:"target,omitempty"` Packet channeltypes.Packet `json:"packet"` Relayer sdk.AccAddress `json:"relayer"` } From c268687666f43e63456b39f77af34e3573ce81e2 Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Sun, 3 Mar 2024 16:01:54 -0600 Subject: [PATCH 47/47] ci(after-merge): handle uncommitted changes and report on failures --- .github/workflows/after-merge.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/after-merge.yml b/.github/workflows/after-merge.yml index 24910a6d7ef..2c3f494e97d 100644 --- a/.github/workflows/after-merge.yml +++ b/.github/workflows/after-merge.yml @@ -74,10 +74,14 @@ jobs: TAG=other-dev ;; esac + # Prevent `lerna publish` from failing due to uncommitted changes. + git stash # without concurrency until https://github.com/Agoric/agoric-sdk/issues/8091 yarn lerna publish --concurrency 1 --conventional-prerelease --canary --exact \ --dist-tag=$TAG --preid=$TAG-$(git rev-parse --short=7 HEAD) \ --no-push --no-verify-access --yes + # restore the stashed changes for caching + git stash apply - name: notify on failure if: failure() uses: ./.github/actions/notify-status @@ -85,6 +89,7 @@ jobs: from: ${{ secrets.NOTIFY_EMAIL_FROM }} to: ${{ secrets.NOTIFY_EMAIL_TO }} password: ${{ secrets.NOTIFY_EMAIL_PASSWORD }} + webhook: ${{ secrets.SLACK_WEBHOOK_URL }} coverage: needs: build