Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: APP-3098 - Refactor IPFS pinning and fetching to use Pinata #1346

Merged
merged 19 commits into from
May 14, 2024
Merged
Show file tree
Hide file tree
Changes from 8 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .github/workflows/pull_request_webapp_preview.yml
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,9 @@ jobs:
VITE_WALLET_CONNECT_PROJECT_ID: ${{secrets.VITE_WALLET_CONNECT_PROJECT_ID}}
VITE_COVALENT_API_KEY: ${{secrets.VITE_COVALENT_API_KEY}}
VITE_SENTRY_DNS: ${{secrets.VITE_SENTRY_DNS}}
VITE_PINATA_JWT_API_KEY: ${{secrets.VITE_PINATA_JWT_API_KEY}}
VITE_PINATA_GATEWAY: ${{secrets.VITE_PINATA_GATEWAY}}
VITE_PINATA_CID_VERSION: ${{secrets.VITE_PINATA_CID_VERSION}}
- name: Sentry sourcemap
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
Expand Down
3 changes: 3 additions & 0 deletions .github/workflows/webapp-deploy.yml
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,9 @@ jobs:
VITE_GATEWAY_RPC_API_KEY: ${{secrets.VITE_GATEWAY_RPC_API_KEY}}
VITE_COVALENT_API_KEY: ${{secrets.VITE_COVALENT_API_KEY}}
VITE_SENTRY_DNS: ${{secrets.VITE_SENTRY_DNS}}
VITE_PINATA_JWT_API_KEY: ${{secrets.VITE_PINATA_JWT_API_KEY}}
VITE_PINATA_GATEWAY: ${{secrets.VITE_PINATA_GATEWAY}}
VITE_PINATA_CID_VERSION: ${{secrets.VITE_PINATA_CID_VERSION}}

- name: Sentry sourcemap
env:
Expand Down
3 changes: 3 additions & 0 deletions .github/workflows/webapp-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -32,3 +32,6 @@ jobs:
VITE_REACT_APP_ANALYTICS_KEY: ${{ secrets.VITE_REACT_APP_ANALYTICS_KEY }}
VITE_GATEWAY_IPFS_API_KEY: ${{secrets.VITE_GATEWAY_IPFS_API_KEY}}
VITE_ETHERSCAN_API_KEY: ${{secrets.VITE_ETHERSCAN_API_KEY}}
VITE_PINATA_JWT_API_KEY: ${{secrets.VITE_PINATA_JWT_API_KEY}}
VITE_PINATA_GATEWAY: ${{secrets.VITE_PINATA_GATEWAY}}
VITE_PINATA_CID_VERSION: ${{secrets.VITE_PINATA_CID_VERSION}}
109 changes: 99 additions & 10 deletions src/hooks/useDaoDetails.tsx
Original file line number Diff line number Diff line change
@@ -1,4 +1,9 @@
import {Client, DaoDetails} from '@aragon/sdk-client';
import {
Client,
DaoDetails,
DaoMetadata,
InstalledPluginListItem,
} from '@aragon/sdk-client';
import {JsonRpcProvider} from '@ethersproject/providers';
import {useQuery} from '@tanstack/react-query';
import {isAddress} from 'ethers/lib/utils';
Expand All @@ -7,11 +12,75 @@ import {useLocation, useNavigate, useParams} from 'react-router-dom';

import {useNetwork} from 'context/network';
import {useProviders} from 'context/providers';
import {CHAIN_METADATA} from 'utils/constants';
import {
CHAIN_METADATA,
SUBGRAPH_API_URL,
SupportedNetworks,
} from 'utils/constants';
import {toDisplayEns} from 'utils/library';
import {NotFound} from 'utils/paths';
import {useClient} from './useClient';
import {resolveIpfsCid} from '@aragon/sdk-client-common';
import request, {gql} from 'graphql-request';
import {SubgraphDao, SubgraphPluginListItem} from 'utils/types';
import {ipfsService} from 'services/ipfs/ipfsService';
import {isEnsDomain} from '@aragon/ods-old';

/**
 * Subgraph query fetching a single DAO by its address (the subgraph entity id).
 * Returns the DAO's subdomain, IPFS metadata pointer, creation timestamp and,
 * for each plugin, only the "applied" preparation/repo/version fields — the
 * consumer filters out plugins where these are still null (not yet applied).
 */
export const QueryDao = gql`
query Dao($address: ID!) {
dao(id: $address) {
id
subdomain
metadata
createdAt
plugins {
appliedPreparation {
pluginAddress
}
appliedPluginRepo {
subdomain
}
appliedVersion {
build
release {
release
}
}
}
}
}
`;

/**
 * Maps a raw subgraph DAO entity plus its resolved IPFS metadata into the
 * SDK `DaoDetails` shape the rest of the app consumes.
 *
 * Only plugins whose installation has been fully applied (preparation, repo
 * and version all present) are included in the result.
 *
 * @param dao - DAO entity as returned by the `QueryDao` subgraph query.
 * @param metadata - DAO metadata fetched from IPFS; optional chaining is kept
 *   because the IPFS fetch may yield a nullish value at runtime.
 * @returns The mapped `DaoDetails` object.
 */
function toDaoDetails(dao: SubgraphDao, metadata: DaoMetadata): DaoDetails {
  return {
    address: dao.id,
    ensDomain: dao.subdomain + '.dao.eth',
    metadata: {
      name: metadata?.name,
      description: metadata?.description,
      // Normalize empty-string avatars to undefined for downstream checks.
      avatar: metadata?.avatar || undefined,
      links: metadata?.links,
    },
    // Subgraph timestamps are unix seconds encoded as decimal strings.
    creationDate: new Date(Number(dao.createdAt) * 1000),
    // flatMap filters and maps in one pass: plugins that are not fully
    // applied yield [], so no non-null assertions are needed after the check.
    plugins: dao.plugins.flatMap(
      (plugin: SubgraphPluginListItem): InstalledPluginListItem[] => {
        const {appliedPreparation, appliedVersion, appliedPluginRepo} = plugin;
        if (!appliedPreparation || !appliedVersion || !appliedPluginRepo) {
          return [];
        }
        return [
          {
            id: `${appliedPluginRepo.subdomain}.plugin.dao.eth`,
            release: appliedVersion.release.release,
            build: appliedVersion.build,
            instanceAddress: appliedPreparation.pluginAddress,
          },
        ];
      }
    ),
  };
}

/**
* Fetches DAO data for a given DAO address or ENS name using a given client.
Expand All @@ -25,33 +94,45 @@ async function fetchDaoDetails(
daoAddressOrEns: string | undefined,
provider: JsonRpcProvider,
isL2NetworkEns: boolean,
network: SupportedNetworks,
redirectDaoToAddress: (address: string | null) => void
): Promise<DaoDetails | null> {
if (!daoAddressOrEns)
return Promise.reject(new Error('daoAddressOrEns must be defined'));

if (!client) return Promise.reject(new Error('client must be defined'));

const address = isEnsDomain(daoAddressOrEns)
? await provider.resolveName(daoAddressOrEns as string)
: daoAddressOrEns;

// if network is l2 and has ens name, resolve to address
if (isL2NetworkEns) {
const address = await provider.resolveName(daoAddressOrEns as string);
redirectDaoToAddress(address);
}

// Note: SDK doesn't support ens names in L2 chains so we need to resolve the address first
const daoDetails = await client.methods.getDao(daoAddressOrEns.toLowerCase());
const {dao} = await request<{dao: SubgraphDao}>(
SUBGRAPH_API_URL[network]!,
QueryDao,
{
address: address?.toLowerCase() ?? daoAddressOrEns?.toLowerCase(),
}
);

const metadata = await ipfsService.getData(dao.metadata);
const daoDetails = toDaoDetails(dao, metadata);

const avatar = daoDetails?.metadata.avatar;
if (avatar)
if (typeof avatar !== 'string') {
daoDetails.metadata.avatar = URL.createObjectURL(avatar);
} else if (/^ipfs/.test(avatar) && client) {
try {
const cid = resolveIpfsCid(avatar);
const ipfsClient = client.ipfs.getClient();
const imageBytes = await ipfsClient.cat(cid); // Uint8Array
const imageBlob = new Blob([imageBytes] as unknown as BlobPart[]);

daoDetails.metadata.avatar = URL.createObjectURL(imageBlob);
daoDetails.metadata.avatar = `${
import.meta.env.VITE_PINATA_GATEWAY
}/${cid}`;
} catch (err) {
console.warn('Error resolving DAO avatar IPFS Cid', err);
}
Expand Down Expand Up @@ -132,9 +213,17 @@ export const useDaoQuery = (
daoAddressOrEns,
provider,
isL2NetworkEns,
network,
redirectDaoToAddress
);
}, [client, daoAddressOrEns, isL2NetworkEns, provider, redirectDaoToAddress]);
}, [
client,
daoAddressOrEns,
isL2NetworkEns,
network,
provider,
redirectDaoToAddress,
]);

return useQuery<DaoDetails | null>({
queryKey: ['daoDetails', daoAddressOrEns, queryNetwork],
Expand Down
55 changes: 17 additions & 38 deletions src/hooks/useUploadIpfsData.tsx
Original file line number Diff line number Diff line change
@@ -1,9 +1,8 @@
import {useCallback} from 'react';
import {useAddData} from 'services/ipfs/mutations/useAddData';
import {usePinData} from 'services/ipfs/mutations/usePinData';
import {useClient} from './useClient';
import {IAddDataProps} from 'services/ipfs/ipfsService.api';
import {ILoggerErrorContext, logger} from 'services/logger';
import {IPinDataProps, IPinDataResult} from 'services/ipfs/ipfsService.api';

interface IUseUploadIpfsDataParams {
/**
Expand All @@ -20,66 +19,46 @@ interface IUseUploadIpfsDataParams {
onError?: (error: unknown) => void;
}

export enum UploadIpfsDataStep {
ADD_DATA = 'ADD_DATA',
PIN_DATA = 'PIN_DATA',
}

export const useUploadIpfsData = (params: IUseUploadIpfsDataParams = {}) => {
const {onSuccess, onError, logContext} = params;

const {client} = useClient();

const handleUploadIpfsError =
(step: UploadIpfsDataStep) => (error: unknown) => {
if (logContext) {
const {stack, data} = logContext;
logger.error(error, {stack, step, data});
}
const handleUploadIpfsError = (error: unknown) => {
if (logContext) {
const {stack, data} = logContext;
logger.error(error, {stack, step: 'PIN_DATA', data});
}

onError?.(error);
};
onError?.(error);
};

const {
isPending: isPinDataLoading,
isError: isPinDataError,
isPending,
isError,
isSuccess,
mutate: pinData,
reset: resetPinData,
} = usePinData({
onSuccess: (_data, params) => onSuccess?.(params.cid),
onError: handleUploadIpfsError(UploadIpfsDataStep.PIN_DATA),
});

const handleAddDataSuccess = (cid: string) => pinData({client: client!, cid});

const {
isPending: isAddDataLoading,
isError: isAddDataError,
mutate: addData,
reset: resetAddData,
} = useAddData({
onSuccess: handleAddDataSuccess,
onError: handleUploadIpfsError(UploadIpfsDataStep.ADD_DATA),
onSuccess: (_data: IPinDataResult) => {
onSuccess?.(_data.IpfsHash);
},
onError: handleUploadIpfsError,
});

const uploadIpfsData = useCallback(
(data: IAddDataProps['data']) => {
(data: IPinDataProps) => {
if (client == null) {
return;
}

// Reset previous states in case of retries
resetAddData();
resetPinData();

addData({client, data});
pinData(data);
},
[addData, resetAddData, resetPinData, client]
[client, resetPinData, pinData]
);

const isPending = isPinDataLoading || isAddDataLoading;
const isError = isPinDataError || isAddDataError;

return {uploadIpfsData, isPending, isSuccess, isError};
};
Loading
Loading