diff --git a/packages/api/package.json b/packages/api/package.json index 69820553a6..9bf3572264 100644 --- a/packages/api/package.json +++ b/packages/api/package.json @@ -14,6 +14,7 @@ "publish": "wrangler publish --env $(whoami)", "build": "WEBPACK_CLI_FORCE_LOAD_ESM_CONFIG=true webpack", "test": "npm-run-all -p -r mock:cluster mock:backup test:e2e -s test:size", + "test:debug": "npm-run-all -p -r mock:cluster mock:backup test:e2e:debug -s test:size", "test:e2e": "playwright-test \"test/**/*.spec.js\" --sw src/index.js -b webkit", "test:e2e:debug": "npm run test:e2e -- --debug", "test:size": "bundlesize", diff --git a/packages/api/src/car.js b/packages/api/src/car.js index a93f68f0dd..b65e3749b0 100644 --- a/packages/api/src/car.js +++ b/packages/api/src/car.js @@ -10,7 +10,7 @@ import * as pb from '@ipld/dag-pb' import retry from 'p-retry' import { GATEWAY, LOCAL_ADD_THRESHOLD, MAX_BLOCK_SIZE } from './constants.js' import { JSONResponse } from './utils/json-response.js' -import { toPinStatusEnum } from './utils/pin.js' +import { getPins, PIN_OK_STATUS, waitAndUpdateOkPins } from './utils/pin.js' import { normalizeCid } from './utils/normalize-cid.js' /** @@ -19,12 +19,6 @@ import { normalizeCid } from './utils/normalize-cid.js' const decoders = [pb, raw, cbor] -// Duration between status check polls in ms. const PIN_STATUS_CHECK_INTERVAL = 5000 -// Max time in ms to spend polling for an OK status. -const MAX_PIN_STATUS_CHECK_TIME = 30000 -// Pin statuses considered OK. -const PIN_OK_STATUS = ['Pinned', 'Pinning', 'PinQueued'] // Times to retry the transaction after the first failure. const CREATE_UPLOAD_RETRIES = 4 // Time in ms before starting the first retry. @@ -166,25 +160,12 @@ export async function handleCarUpload (request, env, ctx, car, uploadType = 'Car // Keep querying Cluster until one of the nodes reports something other than // Unpinned i.e. PinQueued or Pinning or Pinned. if (!pins.some(p => PIN_OK_STATUS.includes(p.status))) { - tasks.push(async () => { - const start = Date.now() - while (Date.now() - start > MAX_PIN_STATUS_CHECK_TIME) { - await new Promise(resolve => setTimeout(resolve, PIN_STATUS_CHECK_INTERVAL)) - const { peerMap } = await env.cluster.status(cid) - const pins = toPins(peerMap) - if (!pins.length) { // should not happen - throw new Error('not pinning on any node') - } - - const okPins = pins.filter(p => PIN_OK_STATUS.includes(p.status)) - if (!okPins.length) continue - - for (const pin of okPins) { - await env.db.upsertPin(normalizedCid, pin) - } - return - } - }) + tasks.push(waitAndUpdateOkPins.bind( + null, + normalizedCid, + env.cluster, + env.db) + ) } if (ctx.waitUntil) { @@ -230,12 +211,7 @@ async function addToCluster (car, env) { // will be done async by bitswap instead. local: car.size > LOCAL_ADD_THRESHOLD }) - - const { peerMap } = await env.cluster.status(cid) - const pins = toPins(peerMap) - if (!pins.length) { // should not happen - throw new Error('not pinning on any node') - } + const pins = await getPins(cid, env.cluster) return { cid, pins } } @@ -245,7 +221,7 @@ async function addToCluster (car, env) { * @param {Blob} blob * @param {CID} rootCid * @param {string} userId - * @param {import('../env').Env} env + * @param {import('./env').Env} env */ async function backup (blob, rootCid, userId, env) { if (!env.s3Client) { @@ -338,13 +314,3 @@ function cumulativeSize (pbNodeBytes, pbNode) { // This logic is the same as used by go/js-ipfs to display the cumulative size of a dag-pb dag. 
return pbNodeBytes.byteLength + pbNode.Links.reduce((acc, curr) => acc + (curr.Tsize || 0), 0) } - -/** - * @param {import('@nftstorage/ipfs-cluster').StatusResponse['peerMap']} peerMap - */ -function toPins (peerMap) { - return Object.entries(peerMap).map(([peerId, { peerName, status }]) => ({ - status: toPinStatusEnum(status), - location: { peerId, peerName } - })) -} diff --git a/packages/api/src/pins.js b/packages/api/src/pins.js index 444179591d..485882e7bd 100644 --- a/packages/api/src/pins.js +++ b/packages/api/src/pins.js @@ -1,6 +1,6 @@ import { JSONResponse, notFound } from './utils/json-response.js' import { normalizeCid } from './utils/normalize-cid.js' -import { waitToGetOkPins } from './utils/pin.js' +import { getPins, PIN_OK_STATUS, waitAndUpdateOkPins } from './utils/pin.js' /** * @typedef {'queued' | 'pinning' | 'failed' | 'pinned'} apiPinStatus @@ -9,7 +9,7 @@ import { waitToGetOkPins } from './utils/pin.js' /** * * Service API Pin object definition - * @typedef {Object} ServiceApiPin + * @typedef {Object} PsaPin * @property {string} cid * @property {string} [name] * @property {Array.} [origins] @@ -19,14 +19,14 @@ import { waitToGetOkPins } from './utils/pin.js' /** * * Service API Pin Status definition - * @typedef {Object} ServiceApiPinStatus + * @typedef {Object} PsaPinStatusResponse * @property {string} requestId * @property {apiPinStatus} status * @property {string} created * @property {Array} delegates * @property {string} [info] * - * @property {ServiceApiPin} pin + * @property {PsaPin} pin */ /** @@ -38,7 +38,7 @@ import { waitToGetOkPins } from './utils/pin.js' * @param {import('../../db/db-client-types.js').PinItemOutput[]} pins * @return {apiPinStatus} status */ -export const getPinningAPIStatus = (pins) => { +export const getEffectivePinStatus = (pins) => { const pinStatuses = pins.map((p) => p.status) // TODO what happens with Sharded? I'd assumed is pinned? 
@@ -56,11 +56,8 @@ export const getPinningAPIStatus = (pins) => { return 'queued' } - if (pinStatuses.length === 0) { - return 'queued' - // TODO after some time if there are no pins we should give up and return a failed - // status instead - } + // TODO after some time if there are no pins we should give up and return a failed + // status instead return 'failed' } @@ -106,7 +103,7 @@ export async function pinPost (request, env, ctx) { // Validate cid try { - normalizeCid(cid) + pinData.normalizedCid = normalizeCid(cid) } catch (err) { return new JSONResponse( { error: { reason: ERROR_STATUS, details: INVALID_CID } }, @@ -165,61 +162,46 @@ export async function pinPost (request, env, ctx) { * @return {Promise} */ async function createPin (pinData, authToken, env, ctx) { - const { cid, name, origins, meta } = pinData - const normalizedCid = normalizeCid(cid) - - const pinRequestData = { - requestedCid: cid, - cid: normalizedCid, - authKey: authToken - } - const pinOptions = {} + const { cid, origins, meta, normalizedCid } = pinData - if (name) { - pinRequestData.name = name - pinOptions.name = name - } + const pinName = pinData.name || undefined // deal with empty strings - if (origins) { - pinOptions.origins = origins - } + await env.cluster.pin(cid, { + name: pinName, + origins, + metadata: meta + }) + const pins = await getPins(cid, env.cluster) - if (meta) { - pinOptions.meta = meta + const pinRequestData = { + sourceCid: cid, + contentCid: normalizedCid, + authKey: authToken, + name: pinName, + pins } - // Pin CID to Cluster - // TODO: Look into when the returned Promised is resolved to understand if we should be awaiting this call. - env.cluster.pin(normalizedCid, pinOptions) + const pinRequest = await env.db.createPsaPinRequest(pinRequestData) - // Create Pin request in db (not creating any content at this stage if it doesn't already exists) - const pinRequest = await env.db.createPAPinRequest(pinRequestData) - - /** @type {ServiceApiPinStatus} */ + /** @type {PsaPinStatusResponse} */ const pinStatus = getPinStatus(pinRequest) /** @type {(() => Promise)[]} */ const tasks = [] - // If we're pinning content that is currently not in the cluster, it might take a while to - // get the cid from the network. We check pinning status asyncrounosly. - if (pinRequest.pins.length === 0) { - tasks.push(async () => { - const okPins = await waitToGetOkPins(cid, env.cluster) - // Create the content row - // TODO: Get dagSize - env.db.createContent({ cid: normalizedCid, pins: okPins }) - for (const pin of okPins) { - await env.db.upsertPin(normalizedCid, pin) - } - }) + if (!pins.some(p => PIN_OK_STATUS.includes(p.status))) { + tasks.push( + waitAndUpdateOkPins.bind( + null, + normalizedCid, + env.cluster, + env.db) + ) } - // TODO: Backups. At the moment backups are related to uploads so + // TODO(https://github.com/web3-storage/web3.storage/issues/794) + // Backups. At the moment backups are related to uploads so // they' re currently not taken care of in respect of a pin request. - // We should look into this. There's an argument where backups should be related to content rather than upload, at the moment we're - backing up content multiple times if uploaded multiple times. - If we refactor that it will naturally work for merge requests as well. 
if (ctx.waitUntil) { tasks.forEach(t => ctx.waitUntil(t())) @@ -234,7 +216,7 @@ async function createPin (pinData, authToken, env, ctx) { * @param {import('./index').Ctx} ctx */ export async function pinGet (request, env, ctx) { - // Check if requestId contains other charachers than digits + // Ensure requestId only contains digits if (!(/^\d+$/.test(request.params.requestId))) { return new JSONResponse( { error: { reason: ERROR_STATUS, details: INVALID_REQUEST_ID } }, @@ -243,10 +225,12 @@ export async function pinGet (request, env, ctx) { } const requestId = parseInt(request.params.requestId, 10) + + /** @type { import('../../db/db-client-types.js').PsaPinRequestUpsertOutput } */ let pinRequest try { - pinRequest = await env.db.getPAPinRequest(requestId) + pinRequest = await env.db.getPsaPinRequest(requestId) } catch (e) { console.error(e) // TODO catch different exceptions @@ -254,7 +238,7 @@ export async function pinGet (request, env, ctx) { return notFound() } - /** @type { ServiceApiPinStatus } */ + /** @type { PsaPinStatusResponse } */ const pin = getPinStatus(pinRequest) return new JSONResponse(pin) } @@ -281,7 +265,7 @@ export async function pinsGet (request, env, ctx) { const opts = result.data try { - pinRequests = await env.db.listPAPinRequests(request.auth.authToken._id, opts) + pinRequests = await env.db.listPsaPinRequests(request.auth.authToken._id, opts) } catch (e) { console.error(e) return notFound() @@ -407,19 +391,20 @@ function parseSearchParams (params) { * Transform a PinRequest into a PinStatus * * @param { Object } pinRequest - * @returns { ServiceApiPinStatus } + * @returns { PsaPinStatusResponse } */ function getPinStatus (pinRequest) { return { - requestId: pinRequest._id, - status: getPinningAPIStatus(pinRequest.pins), + requestId: pinRequest._id.toString(), + status: getEffectivePinStatus(pinRequest.pins), created: pinRequest.created, pin: { - cid: pinRequest.requestedCid, + cid: pinRequest.sourceCid, name: pinRequest.name, origins: [], meta: {} }, + // TODO(https://github.com/web3-storage/web3.storage/issues/792) delegates: [] } } @@ -453,7 +438,7 @@ export async function pinDelete (request, env, ctx) { let res try { // Update deleted_at (and updated_at) timestamp for the pin request. 
- res = await env.db.deletePAPinRequest(requestId, authToken._id) + res = await env.db.deletePsaPinRequest(requestId, authToken._id) } catch (e) { console.error(e) // TODO catch different exceptions @@ -461,7 +446,7 @@ return notFound() } - return new JSONResponse(res) + return new JSONResponse({}, { status: 202 }) } /** @@ -474,12 +459,12 @@ async function replacePin (newPinData, requestId, authToken, env, ctx) { let existingPinRequest try { - existingPinRequest = await env.db.getPAPinRequest(requestId) + existingPinRequest = await env.db.getPsaPinRequest(requestId) } catch (e) { return notFound() } - const existingCid = existingPinRequest.requestedCid + const existingCid = existingPinRequest.sourceCid if (newPinData.cid === existingCid) { return new JSONResponse( { error: { reason: ERROR_STATUS, details: INVALID_REPLACE } }, @@ -498,7 +483,7 @@ async function replacePin (newPinData, requestId, authToken, env, ctx) { } try { - await env.db.deletePAPinRequest(requestId, authToken) + await env.db.deletePsaPinRequest(requestId, authToken) } catch (e) { return new JSONResponse( { error: { reason: `DB Error: ${e}` } }, diff --git a/packages/api/src/utils/pin.js b/packages/api/src/utils/pin.js index 76f988df1d..90fb9549f6 100644 --- a/packages/api/src/utils/pin.js +++ b/packages/api/src/utils/pin.js @@ -37,7 +37,7 @@ const PIN_STATUS_CHECK_INTERVAL = 5000 const MAX_PIN_STATUS_CHECK_TIME = 30000 // Pin statuses considered OK. -const PIN_OK_STATUS = ['Pinned', 'Pinning', 'PinQueued'] +export const PIN_OK_STATUS = ['Pinned', 'Pinning', 'PinQueued'] /** * Converts from cluster status string to DB pin status enum string. @@ -55,25 +55,37 @@ export function toPinStatusEnum (trackerStatus) { } /** - * Function that returns list of pins considered ok. - * TODO: refactor car upload to use this instead. + * Function that returns list of pins for given CID. * * @param {string} cid cid to be looked for * @param {import('@nftstorage/ipfs-cluster').Cluster} cluster * @param {import('@nftstorage/ipfs-cluster').StatusResponse['peerMap']} [peerMap] Optional list of peers, if not provided the fuctions queries the cluster. * @return {Promise.} */ -export async function getOKpins (cid, cluster, peerMap) { +export async function getPins (cid, cluster, peerMap) { if (!peerMap) { - const status = await cluster.status(cid) - peerMap = status.peerMap + peerMap = (await cluster.status(cid)).peerMap } + const pins = toPins(peerMap) - // TODO To validate: I expect in case of a pinning request it's acceptable to have no pins here? - // if (!pins.length) { // should not happen - // throw new Error('not pinning on any node') - // } + if (!pins.length) { + throw new Error('not pinning on any node') + } + + return pins +} + +/** + * Function that returns list of pins with a PIN_OK_STATUS. + * + * @param {string} cid cid to be looked for + * @param {import('@nftstorage/ipfs-cluster').Cluster} cluster + * @param {import('@nftstorage/ipfs-cluster').StatusResponse['peerMap']} [peerMap] Optional list of peers, if not provided the function queries the cluster. 
+ * @return {Promise.} + */ +export async function getOKpins (cid, cluster, peerMap) { + const pins = await getPins(cid, cluster, peerMap) return pins.filter(p => PIN_OK_STATUS.includes(p.status)) } @@ -90,6 +102,11 @@ export function toPins (peerMap) { } /** + * + * waitOkPins checks the status of the given CID on the cluster + * every given `checkInterval` until at least one pin has a PIN_OK_STATUS. + * + * After a given maximum `waitTime`, if no OK pins are found the promise is resolved with an empty array. * * @param {string} cid * @param {import('@nftstorage/ipfs-cluster').Cluster} cluster @@ -97,9 +114,9 @@ export function toPins (peerMap) { * @param {number} checkInterval * @return {Promise.} */ -export async function waitToGetOkPins (cid, cluster, waitTime = MAX_PIN_STATUS_CHECK_TIME, checkInterval = PIN_STATUS_CHECK_INTERVAL) { +export async function waitOkPins (cid, cluster, waitTime = MAX_PIN_STATUS_CHECK_TIME, checkInterval = PIN_STATUS_CHECK_INTERVAL) { const start = Date.now() - while (Date.now() - start > waitTime) { + while (Date.now() - start < waitTime) { await new Promise(resolve => setTimeout(resolve, checkInterval)) const okPins = await getOKpins(cid, cluster) if (!okPins.length) continue @@ -108,3 +125,22 @@ export async function waitToGetOkPins (cid, cluster, waitTime = MAX_PIN_STATUS_C } return [] } + +/** + * Used to async wait for pins for the provided cid to have an + * OK_STATUS and update them in the db. + * + * @param {string} cid + * @param {import('@nftstorage/ipfs-cluster').Cluster} cluster + * @param {import('@web3-storage/db').DBClient} db + * @param {number} waitTime + * @param {number} checkInterval + * @return {Promise.} + */ +export async function waitAndUpdateOkPins (cid, cluster, db, waitTime = MAX_PIN_STATUS_CHECK_TIME, checkInterval = PIN_STATUS_CHECK_INTERVAL) { + const okPins = await waitOkPins(cid, cluster, waitTime, checkInterval) + for (const pin of okPins) { + await db.upsertPin(cid, pin) + } + return okPins +} diff --git a/packages/api/test/fixtures/init-data.sql b/packages/api/test/fixtures/init-data.sql index 94d15e0aa8..1176eec0a3 100644 --- a/packages/api/test/fixtures/init-data.sql +++ b/packages/api/test/fixtures/init-data.sql @@ -39,8 +39,8 @@ VALUES (3, 'bafybeifnfkzjeohjf2dch2iqqpef3bfjylwxlcjws2msvdfyze5bvdprfm', 'bafyb (3, 'bafybeica6klnrhlrbx6z24icefykpbwyypouglnypvnwb5esdm6yzcie3q', 'bafybeica6klnrhlrbx6z24icefykpbwyypouglnypvnwb5esdm6yzcie3q', 'Car', '2021-07-14T19:27:14.934572Z', '2021-07-14T19:27:14.934572Z'), (3, 'bafybeiaiipiibr7aletbbrzmpklw4l5go6sodl22xs6qtcqo3lqogfogy4', 'bafybeiaiipiibr7aletbbrzmpklw4l5go6sodl22xs6qtcqo3lqogfogy4', 'Car', '2021-07-14T19:27:14.934572Z', '2021-07-14T19:27:14.934572Z'); -INSERT INTO pin_location (id, peer_id, peer_name, region) -VALUES (1, '12D3KooWR1Js', 'who?', 'where?'); +INSERT INTO pin_location (peer_id, peer_name, region) +VALUES ('12D3KooWR1Js', 'who?', 'where?'); INSERT INTO pin (status, content_cid, pin_location_id, inserted_at, updated_at) VALUES ('Pinned', 'bafybeifnfkzjeohjf2dch2iqqpef3bfjylwxlcjws2msvdfyze5bvdprfm', 1, '2021-07-14T19:27:14.934572+00:00', '2021-07-14T19:27:14.934572+00:00'), @@ -70,12 +70,23 @@ VALUES ( ); -- /pins route testing -INSERT INTO pa_pin_request (auth_key_id, content_cid, requested_cid, name, inserted_at, updated_at) -VALUES (3, NULL , 'bafybeifnfkzjeohjf2dch2iqqpef3bfjylwxlcjws2msvdfyze5bvdprfo', 'ReportDoc.pdf', '2021-07-14T19:27:14.934572Z', '2021-07-14T19:27:14.934572Z'), - (3, NULL , 
'bafybeifnfkzjeohjf2dch2iqqpef3bfjylwxlcjws2msvdfyze5bvdprfm', 'reportdoc.pdf', '2021-01-01T19:27:14.934572Z', '2021-07-14T19:27:14.934572Z'), - (3, NULL, 'bafybeica6klnrhlrbx6z24icefykpbwyypouglnypvnwb5esdm6yzcie3q', 'Data', '2021-07-14T19:27:14.934572Z', '2021-07-14T19:27:14.934572Z'), - (3, NULL, 'bafybeiaiipiibr7aletbbrzmpklw4l5go6sodl22xs6qtcqo3lqogfoga4', 'Image.jpeg', '2021-07-14T19:27:14.934572Z', '2021-07-14T19:27:14.934572Z'), - (3, NULL, 'bafybeiaiipiibr7aletbbrzmpklw4l5go6sodl22xs6qtcqo3lqogfogy5', 'Image.png', '2021-07-14T19:27:14.934572Z', '2021-07-14T19:27:14.934572Z'), - (3, NULL, 'bafybeiaiipiibr7aletbbrzmpklw4l5go6sodl22xs6qtcqo3lqogfogy6', 'Image.jpg', '2021-07-20T19:27:14.934572Z', '2021-07-14T19:27:14.934572Z'), - (2, NULL, 'bafybeiaiipiibr7aletbbrzmpklw4l5go6sodl22xs6qtcqo3lqogfogy9', 'Image.jpg', '2021-07-20T19:27:14.934572Z', '2021-07-14T19:27:14.934572Z'), - (2, NULL, 'bafybeiaiipiibr7aletbbrzmpklw4l5go6sodl22xs6qtcqo3lqogfogy4', 'Image.jpg', '2021-07-14T19:27:14.934572Z', '2021-07-14T19:27:14.934572Z'); +INSERT INTO content (cid) +VALUES ('bafybeid46f7zggioxjm5p2ze2l6s6wbqvoo4gzbdzfjtdosthmfyxdign4'), + ('bafybeig7yvw6a4uhio4pmg5gahyd2xumowkfljdukad7pmdsv5uk5zcseu'), + ('bafybeia45bscvzxngto555xsel4gwoclb5fxd7zpxige7rl3maoleznswu'), + ('bafybeidw7pc6nvm7u4rfhpctac4qgtpmwxapw4duugvsl3ppivvzibdlgy'), + ('bafybeidrzt6t4k25qjeasydgi3fyh6ejos5x4d6tk2pdzxkb66bkomezy4'), + ('bafybeifsrhq2qtkcgjt4gzi7rkafrv2gaai24ptt6rohe2ebqzydkz47sm'), + ('bafybeiaqu6ijhfhwzjipwesbqf4myz6uczyigahib5joqbo5jw2xmjczfa'), + ('bafybeidqts3rbwkprggjojbvcxy4jzpgzgcvs4a73y3gx2jjxphjeerbcy'); + + +INSERT INTO psa_pin_request (auth_key_id, content_cid, source_cid, name, inserted_at, updated_at) +VALUES (3, 'bafybeid46f7zggioxjm5p2ze2l6s6wbqvoo4gzbdzfjtdosthmfyxdign4', 'bafybeid46f7zggioxjm5p2ze2l6s6wbqvoo4gzbdzfjtdosthmfyxdign4', 'ReportDoc.pdf', '2021-07-14T19:27:14.934572Z', '2021-07-14T19:27:14.934572Z'), + (3, 'bafybeig7yvw6a4uhio4pmg5gahyd2xumowkfljdukad7pmdsv5uk5zcseu', 'bafybeig7yvw6a4uhio4pmg5gahyd2xumowkfljdukad7pmdsv5uk5zcseu', 'reportdoc.pdf', '2021-01-01T19:27:14.934572Z', '2021-07-14T19:27:14.934572Z'), + (3, 'bafybeia45bscvzxngto555xsel4gwoclb5fxd7zpxige7rl3maoleznswu', 'bafybeia45bscvzxngto555xsel4gwoclb5fxd7zpxige7rl3maoleznswu', 'Data', '2021-07-14T19:27:14.934572Z', '2021-07-14T19:27:14.934572Z'), + (3, 'bafybeidw7pc6nvm7u4rfhpctac4qgtpmwxapw4duugvsl3ppivvzibdlgy', 'bafybeidw7pc6nvm7u4rfhpctac4qgtpmwxapw4duugvsl3ppivvzibdlgy', 'Image.jpeg', '2021-07-14T19:27:14.934572Z', '2021-07-14T19:27:14.934572Z'), + (3, 'bafybeidrzt6t4k25qjeasydgi3fyh6ejos5x4d6tk2pdzxkb66bkomezy4', 'bafybeidrzt6t4k25qjeasydgi3fyh6ejos5x4d6tk2pdzxkb66bkomezy4', 'Image.png', '2021-07-14T19:27:14.934572Z', '2021-07-14T19:27:14.934572Z'), + (3, 'bafybeifsrhq2qtkcgjt4gzi7rkafrv2gaai24ptt6rohe2ebqzydkz47sm', 'bafybeifsrhq2qtkcgjt4gzi7rkafrv2gaai24ptt6rohe2ebqzydkz47sm', 'Image.jpg', '2021-07-20T19:27:14.934572Z', '2021-07-14T19:27:14.934572Z'), + (2, 'bafybeiaqu6ijhfhwzjipwesbqf4myz6uczyigahib5joqbo5jw2xmjczfa', 'bafybeiaqu6ijhfhwzjipwesbqf4myz6uczyigahib5joqbo5jw2xmjczfa', 'Image.jpg', '2021-07-20T19:27:14.934572Z', '2021-07-14T19:27:14.934572Z'), + (2, 'bafybeidqts3rbwkprggjojbvcxy4jzpgzgcvs4a73y3gx2jjxphjeerbcy', 'bafybeidqts3rbwkprggjojbvcxy4jzpgzgcvs4a73y3gx2jjxphjeerbcy', 'Image.jpg', '2021-07-14T19:27:14.934572Z', '2021-07-14T19:27:14.934572Z'); diff --git a/packages/api/test/mocks/cluster/get_pins#@cid.js b/packages/api/test/mocks/cluster/get_pins#@cid.js index fbaf54bb78..429b43a70e 100644 --- 
a/packages/api/test/mocks/cluster/get_pins#@cid.js +++ b/packages/api/test/mocks/cluster/get_pins#@cid.js @@ -10,7 +10,7 @@ module.exports = async ({ params }) => { cid: { '/': params.cid }, name: 'test-pin-name', peer_map: { - 'test-peer-id': { + [`test-peer-id-${Math.floor(Math.random() * 10000)}`]: { peername: 'test-peer-name', status: 'pinned', timestamp: new Date().toISOString() diff --git a/packages/api/test/mocks/cluster/post_pins#@cid.js b/packages/api/test/mocks/cluster/post_pins#@cid.js new file mode 100644 index 0000000000..b93a74af59 --- /dev/null +++ b/packages/api/test/mocks/cluster/post_pins#@cid.js @@ -0,0 +1,32 @@ +/** + * @param {any} data + */ +async function run (data) { + const { query, params } = data + + return { + statusCode: 200, + headers: {}, + body: { + allocations: [], + cid: { + '/': params.cid + }, + expire_at: '0001-01-01T00:00:00Z', + max_depth: -1, + metadata: null, + mode: 'recursive', + name: query.name, + origins: [], + pin_update: null, + reference: null, + replication_factor_max: -1, + replication_factor_min: -1, + shard_size: 0, + type: 2, + user_allocations: null + } + } +} + +module.exports = run diff --git a/packages/api/test/mocks/pgrest/patch_pa_pin_request.js b/packages/api/test/mocks/pgrest/patch_pa_pin_request.js deleted file mode 100644 index 5478a284f3..0000000000 --- a/packages/api/test/mocks/pgrest/patch_pa_pin_request.js +++ /dev/null @@ -1,121 +0,0 @@ -/** - * https://github.com/sinedied/smoke#javascript-mocks - * @param {Request} request - */ -module.exports = ({ query }) => { - const id = query.id && query.id.split('eq.')[1] - const date = new Date(1, 1, 1) - - if (!id) { - // Return a list - const pinRequests = [ - { - _id: 1, - contentCid: null, - requestedCid: 'something', - authKey: 'something', - name: 'something', - created: date.toISOString(), - updated: date.toISOString(), - content: { - cid: 'bafybeigc4fntpegrqzgzhxyc7hzu25ykqqai7nzllov2jn55wvzjju7pwu', - dagSize: null, - pins: [{ - status: 'Pinned', - updated: date.toISOString(), - location: {} - }] - } - }, - { - _id: 2, - contentCid: null, - requestedCid: 'something', - authKey: 'something', - name: 'something', - created: date.toISOString(), - updated: date.toISOString(), - content: { - cid: 'bafybeigc4fntpegrqzgzhxyc7hzu25ykqqai7nzllov2jn55wvzjju7pwu', - dagSize: null, - pins: [{ - status: 'Pinned', - updated: date.toISOString(), - location: {} - }] - } - }, - { - _id: 3, - contentCid: null, - requestedCid: 'something', - authKey: 'something', - name: 'something', - created: date.toISOString(), - updated: date.toISOString(), - content: { - cid: 'bafybeigc4fntpegrqzgzhxyc7hzu25ykqqai7nzllov2jn55wvzjju7pwu', - dagSize: null, - pins: [{ - status: 'Pinned', - updated: date.toISOString(), - location: {} - }] - } - } - ] - return { - statusCode: 200, - body: JSON.stringify(pinRequests) - } - } - - if (id === '1') { - const pinRequest = { - _id: id, - contentCid: null, - requestedCid: 'something', - authKey: 'something', - name: 'something', - created: date.toISOString(), - updated: date.toISOString() - } - return { - statusCode: 200, - body: JSON.stringify(pinRequest) - } - } - - if (id === '2') { - const pinRequest = { - _id: id, - contentCid: null, - requestedCid: 'something', - authKey: 'something', - name: 'something', - created: date.toISOString(), - updated: date.toISOString(), - content: { - cid: 'bafybeigc4fntpegrqzgzhxyc7hzu25ykqqai7nzllov2jn55wvzjju7pwu', - dagSize: null, - pins: [{ - status: 'Pinned', - updated: date.toISOString(), - location: {} - }] - } - } - 
return { - statusCode: 200, - body: JSON.stringify(pinRequest) - } - } - - return { - statusCode: 404, - body: JSON.stringify({ - details: 'Results contain 0 rows, application/vnd.pgrst.object+json requires 1 row', - message: 'JSON object requested, multiple (or no) rows returned' - }) - } -} diff --git a/packages/api/test/pin.spec.js b/packages/api/test/pin.spec.js index d1f413464c..ffdb0af9c5 100644 --- a/packages/api/test/pin.spec.js +++ b/packages/api/test/pin.spec.js @@ -5,7 +5,7 @@ import { getTestJWT } from './scripts/helpers.js' import { ERROR_CODE, ERROR_STATUS, - getPinningAPIStatus, + getEffectivePinStatus, INVALID_CID, INVALID_META, INVALID_NAME, @@ -66,6 +66,22 @@ const assertCorrectPinResponse = (data) => { } } +/** + * + * @param {string} cid + * @param {string} token + * @return {Promise} + */ +const createPinRequest = async (cid, token) => { + return await (await fetch(new URL('pins', endpoint).toString(), { + method: 'POST', + headers: { + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ cid }) + })).json() +} + describe('Pinning APIs endpoints', () => { let token = null @@ -75,11 +91,9 @@ describe('Pinning APIs endpoints', () => { describe('GET /pins', () => { let baseUrl - let token before(async () => { baseUrl = new URL('pins', endpoint).toString() - token = await getTestJWT('test-upload', 'test-upload') }) it('validates filter values', async () => { @@ -104,13 +118,13 @@ describe('Pinning APIs endpoints', () => { }) it('validates CID values passed as filter', async () => { - const cids = ` -bafybeiaiipiibr7aletbbrzmpklw4l5go6sodl22xs6qtcqo3lqogfogy4, -bafybeica6klnrhlrbx6z24icefykpbwyypouglnypvnwb5esdm6yzcie3q, -bafybeifnfkzjeohjf2dch2iqqpef3bfjylwxlcjws2msvdfyze5bvdprfo, -` + const cids = [ + 'notAValidCID', + 'bafybeia45bscvzxngto555xsel4gwoclb5fxd7zpxige7rl3maoleznswu', + 'bafybeid46f7zggioxjm5p2ze2l6s6wbqvoo4gzbdzfjtdosthmfyxdign4' + ] - const url = new URL(`${baseUrl}?cid=${cids}`).toString() + const url = new URL(`${baseUrl}?cid=${cids.join(',')}`).toString() const res = await fetch( url, { method: 'GET', @@ -143,13 +157,13 @@ bafybeifnfkzjeohjf2dch2iqqpef3bfjylwxlcjws2msvdfyze5bvdprfo, }) it('filters pins on CID, for this user', async () => { - const cids = ` -bafybeifnfkzjeohjf2dch2iqqpef3bfjylwxlcjws2msvdfyze5bvdprfm, -bafybeica6klnrhlrbx6z24icefykpbwyypouglnypvnwb5esdm6yzcie3q, -bafybeiaiipiibr7aletbbrzmpklw4l5go6sodl22xs6qtcqo3lqogfogy4 -` + const cids = [ + 'bafybeig7yvw6a4uhio4pmg5gahyd2xumowkfljdukad7pmdsv5uk5zcseu', + 'bafybeia45bscvzxngto555xsel4gwoclb5fxd7zpxige7rl3maoleznswu', + 'bafybeiaiipiibr7aletbbrzmpklw4l5go6sodl22xs6qtcqo3lqogfogy4' // Not exists + ] - const url = new URL(`${baseUrl}?cid=${cids}`).toString() + const url = new URL(`${baseUrl}?cid=${cids.join(',')}`).toString() const res = await fetch( url, { method: 'GET', @@ -435,10 +449,17 @@ bafybeiaiipiibr7aletbbrzmpklw4l5go6sodl22xs6qtcqo3lqogfogy4 }) describe('GET /pins/:requestId', () => { - let token = null + let pinRequest + before(async () => { - // Create token - token = await getTestJWT() + const cid = 'bafybeihy6bymmfcdjdrkhaha2srphnhrewimtkdxdmcama2dpgvpyx4efu' + pinRequest = await (await fetch(new URL('pins', endpoint).toString(), { + method: 'POST', + headers: { + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ cid }) + })).json() }) it('returns unauthorized if no token', async () => { @@ -486,8 +507,7 @@ bafybeiaiipiibr7aletbbrzmpklw4l5go6sodl22xs6qtcqo3lqogfogy4 }) it('returns the pin request', async () => { - const requestId = 1 - const res = 
await fetch(new URL(`pins/${requestId}`, endpoint).toString(), { + const res = await fetch(new URL(`pins/${pinRequest.requestId}`, endpoint).toString(), { method: 'GET', headers: { Authorization: `Bearer ${token}`, @@ -499,8 +519,8 @@ bafybeiaiipiibr7aletbbrzmpklw4l5go6sodl22xs6qtcqo3lqogfogy4 assert(res, 'Server responded') assert(res.ok, 'Server response is ok') assertCorrectPinResponse(data) - assert.deepEqual(data.requestId, requestId) - assert.deepEqual(data.status, 'queued') + assert.deepEqual(data.requestId, pinRequest.requestId) + assert.deepEqual(data.status, 'pinned') }) it('returns the pin request with specified name', async () => { @@ -523,7 +543,7 @@ bafybeiaiipiibr7aletbbrzmpklw4l5go6sodl22xs6qtcqo3lqogfogy4 }) }) - describe('getPinningAPIStatus', () => { + describe('getEffectivePinStatus', () => { it('should return pinned if at it is pinned in at least a node', () => { /** @type {import('../../db/db-client-types.js').PinItemOutput[]} */ const pins = [ @@ -531,12 +551,12 @@ bafybeiaiipiibr7aletbbrzmpklw4l5go6sodl22xs6qtcqo3lqogfogy4 createPinWithStatus('PinQueued'), createPinWithStatus('PinError') ] - assert.strictEqual(getPinningAPIStatus(pins), 'pinned') + assert.strictEqual(getEffectivePinStatus(pins), 'pinned') }) it('should return queued if there are no pins yet', () => { const pins = [] - assert.strictEqual(getPinningAPIStatus(pins), 'queued') + assert.strictEqual(getEffectivePinStatus(pins), 'failed') }) it('should return "queued" at least 1 pin has it queued', () => { @@ -545,7 +565,7 @@ bafybeiaiipiibr7aletbbrzmpklw4l5go6sodl22xs6qtcqo3lqogfogy4 createPinWithStatus('PinError'), createPinWithStatus('PinQueued') ] - assert.strictEqual(getPinningAPIStatus(pins), 'queued') + assert.strictEqual(getEffectivePinStatus(pins), 'queued') }) it('should return "queued" at least 1 pin has remote status', () => { @@ -555,7 +575,7 @@ bafybeiaiipiibr7aletbbrzmpklw4l5go6sodl22xs6qtcqo3lqogfogy4 createPinWithStatus('PinQueued') ] - assert.strictEqual(getPinningAPIStatus(pins), 'queued') + assert.strictEqual(getEffectivePinStatus(pins), 'queued') }) it('should return failed pins have statuses other than Pinned, Pinning, PinQueued or Remote', () => { @@ -564,16 +584,11 @@ bafybeiaiipiibr7aletbbrzmpklw4l5go6sodl22xs6qtcqo3lqogfogy4 createPinWithStatus('PinError') ] - assert.strictEqual(getPinningAPIStatus(pins), 'failed') + assert.strictEqual(getEffectivePinStatus(pins), 'failed') }) }) describe('DELETE /pins/:requestId', () => { - let token = null - before(async () => { - token = await getTestJWT('test-upload', 'test-upload') - }) - it('fails to delete if there is no user token', async () => { const res = await fetch(new URL('pins/1', endpoint).toString(), { method: 'DELETE' @@ -613,6 +628,38 @@ bafybeiaiipiibr7aletbbrzmpklw4l5go6sodl22xs6qtcqo3lqogfogy4 assert.deepEqual(res.status, 404) }) + it('deletes the pin request', async () => { + const cid = 'bafybeifzequu4ial7i4jdw4gxabi5xyx2qeci2o4scc65s2st5o7fsynqu' + const pinRequest = await createPinRequest(cid, token) + + const r = await fetch(new URL(`pins/${pinRequest.requestId}`, endpoint).toString(), { + method: 'GET', + headers: { + Authorization: `Bearer ${token}` + } + }) + + assert(r.ok, 'It did not create the request in the first place') + + const resD = await fetch(new URL(`pins/${pinRequest.requestId}`, endpoint).toString(), { + method: 'DELETE', + headers: { + Authorization: `Bearer ${token}` + } + }) + + assert.equal(resD.status, 202, 'Delete request was not successful') + + const res = await fetch(new 
URL(`pins/${pinRequest.requestId}`, endpoint).toString(), { + method: 'GET', + headers: { + Authorization: `Bearer ${token}` + } + }) + + assert.equal(res.status, 404) + }) + it('returns the pin request id that has been deleted', async () => { const requestId = 1 const res = await fetch(new URL(`pins/${requestId}`, endpoint).toString(), { @@ -623,18 +670,11 @@ bafybeiaiipiibr7aletbbrzmpklw4l5go6sodl22xs6qtcqo3lqogfogy4 }) assert(res.ok, 'Server responded') - assert.equal(res.status, 200) - const { _id } = await res.json() - assert.strictEqual(_id, requestId) + assert.equal(res.status, 202) }) }) describe('POST /pins/:requestId', () => { - let token = null - before(async () => { - token = await getTestJWT('test-upload', 'test-upload') - }) - it('should not replace a pin request that doesn\'t exist', async () => { const res = await fetch(new URL('pins/100', endpoint).toString(), { method: 'POST', @@ -652,14 +692,48 @@ bafybeiaiipiibr7aletbbrzmpklw4l5go6sodl22xs6qtcqo3lqogfogy4 assert.equal(message, 'Not Found') }) - it.skip('should not replace the same pin request', async () => { - const res = await fetch(new URL('pins/3', endpoint).toString(), { + it('should delete the pin request and replace it', async () => { + const cid = 'bafybeid3ka3b3f443kv2je3mfm4byk6qps3wipr7wzu5uli6tdo57crcke' + const newCid = 'bafybeid4f2r3zpnkjqrglkng265ttqg6zbdr75dpbiwellvlpcxq7pggjy' + + // Creates pin Requests + const pinRequest = await createPinRequest(cid, token) + + // It replaces it + const resR = await fetch(new URL(`pins/${pinRequest.requestId}`, endpoint).toString(), { + method: 'POST', + headers: { + Authorization: `Bearer ${token}` + }, + body: JSON.stringify({ + cid: newCid + }) + }) + + assert(resR, 'Replace request did not respond') + assert(resR.ok, 'Replace request was not successful') + + const resG = await fetch(new URL(`pins/${pinRequest.requestId}`, endpoint).toString(), { + method: 'GET', + headers: { + Authorization: `Bearer ${token}` + } + }) + + assert(resG, 'Get request did not respond') + assert.strictEqual(resG.status, 404, 'Pin request was not deleted') + }) + + it('should not replace the same pin request', async () => { + const cid = 'bafybeieppxukl4i4acnjcdj2fueoa5oppuaciseggv2cvplj2iu6d7kx2e' + const pinRequest = await createPinRequest(cid, token) + const res = await fetch(new URL(`pins/${pinRequest.requestId}`, endpoint).toString(), { method: 'POST', headers: { Authorization: `Bearer ${token}` }, body: JSON.stringify({ - cid: 'bafybeihgrtet4vowd4t4iqaspzclxajrwwsesur7zllkahrbhcymfh7kyi' + cid }) }) diff --git a/packages/db/db-client-types.ts b/packages/db/db-client-types.ts index 4f8b08b8d0..2e24782114 100644 --- a/packages/db/db-client-types.ts +++ b/packages/db/db-client-types.ts @@ -62,7 +62,7 @@ export type AuthKeyItemOutput = { // Pin export type PinUpsertInput = { - id?: definitions['pin']['id'] + id?: definitions['pin']['id'], status: definitions['pin']['status'], location: Location, } @@ -248,33 +248,37 @@ export type ListUploadsOptions = { // Pinninng // PinRequest -export type PAPinRequestUpsertInput = { +export type PsaPinRequestUpsertInput = { id?: string, - name?: definitions['pa_pin_request']['name'], + name?: definitions['psa_pin_request']['name'], authKey: string, - requestedCid: definitions['pa_pin_request']['requested_cid'], - cid: definitions['pa_pin_request']['requested_cid'], + sourceCid: definitions['psa_pin_request']['source_cid'], + contentCid: definitions['upload']['content_cid'], + dagSize?: definitions['content']['dag_size'], + pins: Array, + created?: 
definitions['upload']['inserted_at'], + updated?: definitions['upload']['updated_at'], } -export type PAPinRequestItem = PAPinRequestUpsertInput & { +export type PsaPinRequestItem = PsaPinRequestUpsertInput & { _id: string, - contentCid: definitions['pa_pin_request']['content_cid'] + contentCid: definitions['psa_pin_request']['content_cid'] created: definitions['upload']['inserted_at'] updated: definitions['upload']['updated_at'] deleted?: definitions['upload']['deleted_at'] content: ContentItem } -export type PAPinRequestUpsertOutput = PAPinRequestUpsertInput & { +export type PsaPinRequestUpsertOutput = PsaPinRequestUpsertInput & { _id: string, - contentCid: definitions['pa_pin_request']['content_cid'] - created: definitions['pa_pin_request']['inserted_at'] - updated: definitions['pa_pin_request']['updated_at'] - deleted?: definitions['pa_pin_request']['deleted_at'] + contentCid: definitions['psa_pin_request']['content_cid'] + created: definitions['psa_pin_request']['inserted_at'] + updated: definitions['psa_pin_request']['updated_at'] + deleted?: definitions['psa_pin_request']['deleted_at'] pins: Array } -export type ListPAPinRequestOptions = { +export type ListPsaPinRequestOptions = { /** * Comma-separated list of CIDs to match */ @@ -309,9 +313,9 @@ export type ListPAPinRequestOptions = { meta?: unknown, } -export type ListPAPinRequestResults = { +export type ListPsaPinRequestResults = { count: number, - results: Array + results: Array } export type NameItem = { diff --git a/packages/db/index.d.ts b/packages/db/index.d.ts index 73b4fcd309..2be082b871 100644 --- a/packages/db/index.d.ts +++ b/packages/db/index.d.ts @@ -21,12 +21,12 @@ import type { PinSyncRequestOutput, PinUpsertInput, BackupOutput, - PAPinRequestItem, - PAPinRequestUpsertOutput, - PAPinRequestUpsertInput, + PsaPinRequestItem, + PsaPinRequestUpsertOutput, + PsaPinRequestUpsertInput, ContentInput, - ListPAPinRequestOptions, - ListPAPinRequestResults, + ListPsaPinRequestOptions, + ListPsaPinRequestResults, } from './db-client-types' export { gql } @@ -57,10 +57,10 @@ export class DBClient { createKey (key: CreateAuthKeyInput): Promise getKey (issuer: string, secret: string): Promise listKeys (userId: number): Promise> - createPAPinRequest (pinRequest: PAPinRequestUpsertInput): Promise - getPAPinRequest (pinRequestId: number) : Promise - listPAPinRequests(authKey: string, opts?: ListPAPinRequestOptions ) : Promise - deletePAPinRequest (pinRequestId: number, authKey: string) : Promise + createPsaPinRequest (pinRequest: PsaPinRequestUpsertInput): Promise + getPsaPinRequest (pinRequestId: number) : Promise + listPsaPinRequests(authKey: string, opts?: ListPsaPinRequestOptions ) : Promise + deletePsaPinRequest (pinRequestId: number, authKey: string) : Promise createContent (content: ContentInput, opt?: {updatePinRequests?: boolean}) : Promise deleteKey (id: number): Promise query(document: RequestDocument, variables: V): Promise diff --git a/packages/db/index.js b/packages/db/index.js index ee319313ce..f765da5ac5 100644 --- a/packages/db/index.js +++ b/packages/db/index.js @@ -1,6 +1,6 @@ import { PostgrestClient } from '@supabase/postgrest-js' -import { normalizeUpload, normalizeContent, normalizePins, normalizeDeals, normalizePaPinRequest } from './utils.js' +import { normalizeUpload, normalizeContent, normalizePins, normalizeDeals, normalizePsaPinRequest } from './utils.js' import { DBError } from './errors.js' import { getUserMetrics, @@ -20,17 +20,17 @@ const uploadQuery = ` content(cid, dagSize:dag_size, pins:pin(status, 
updated:updated_at, location:pin_location(_id:id, peerId:peer_id, peerName:peer_name, region))) ` -const PAPinRequestTableName = 'pa_pin_request' +const psaPinRequestTableName = 'psa_pin_request' const pinRequestSelect = ` _id:id::text, - requestedCid:requested_cid, + sourceCid:source_cid, contentCid:content_cid, - authKey:auth_key_id, + authKey:auth_key_id::text, name, deleted:deleted_at, created:inserted_at, updated:updated_at, - content(cid, dagSize:dag_size, pins:pin(status, updated:updated_at, location:pin_location(_id:id, peerId:peer_id, peerName:peer_name, region))) ` + content(cid, dagSize:dag_size, pins:pin(status, updated:updated_at, location:pin_location(_id:id::text, peerId:peer_id, peerName:peer_name, region))) ` /** * @typedef {import('./postgres/pg-rest-api-types').definitions} definitions @@ -763,56 +763,53 @@ export class DBClient { /** * Creates a Pin Request. * - * @param {import('./db-client-types').PAPinRequestUpsertInput} pinRequest - * @return {Promise} + * @param {import('./db-client-types').PsaPinRequestUpsertInput} pinRequestData + * @return {Promise} */ - async createPAPinRequest (pinRequest) { - /** @type { import('./db-client-types').PAPinRequestUpsertOutput } */ - - // TODO is there a better way to avoid 2 queries? - // ie. before insert trigger https://dba.stackexchange.com/questions/27178/handling-error-of-foreign-key - /** @type {{data: {cid: string}}} */ - const { data: content } = await this._client - .from('content') - .select('cid') - .eq('cid', pinRequest.cid) - .single() - - const toInsert = { - requested_cid: pinRequest.requestedCid, - auth_key_id: pinRequest.authKey, - name: pinRequest.name - } - - // If content already exists updated foreigh key - if (content?.cid) { - toInsert.content_cid = content.cid - } + async createPsaPinRequest (pinRequestData) { + const now = new Date().toISOString() - /** @type {{data: import('./db-client-types').PAPinRequestItem, error: PostgrestError }} */ - const { data, error } = await this._client - .from(PAPinRequestTableName) - .insert(toInsert) - .select(pinRequestSelect) - .single() + /** @type {{ data: string, error: PostgrestError }} */ + const { data: pinRequestId, error } = await this._client.rpc('create_psa_pin_request', { + data: { + auth_key_id: pinRequestData.authKey, + content_cid: pinRequestData.contentCid, + source_cid: pinRequestData.sourceCid, + name: pinRequestData.name, + dag_size: pinRequestData.dagSize, + inserted_at: pinRequestData.created || now, + updated_at: pinRequestData.updated || now, + pins: pinRequestData.pins.map(pin => ({ + status: pin.status, + location: { + peer_id: pin.location.peerId, + peer_name: pin.location.peerName, + region: pin.location.region + } + })) + } + }).single() if (error) { throw new DBError(error) } - return normalizePaPinRequest(data) + // TODO: this second request could be avoided by returning the right data + // from create_psa_pin_request remote procedure. 
(But to keep this DRY we need to refactor + // this a bit) + return await this.getPsaPinRequest(parseInt(pinRequestId, 10)) } /** * Get a Pin Request by id * * @param {number} pinRequestId - * @return {Promise} + * @return {Promise} */ - async getPAPinRequest (pinRequestId) { - /** @type {{data: import('./db-client-types').PAPinRequestItem, error: PostgrestError }} */ + async getPsaPinRequest (pinRequestId) { + /** @type {{data: import('./db-client-types').PsaPinRequestItem, error: PostgrestError }} */ const { data, error } = await this._client - .from(PAPinRequestTableName) + .from(psaPinRequestTableName) .select(pinRequestSelect) .eq('id', pinRequestId) .is('deleted_at', null) @@ -822,80 +819,22 @@ export class DBClient { throw new DBError(error) } - return normalizePaPinRequest(data) - } - - /** - * Creates some content and relative pins, pin_sync_request and pin_requests - * - * Once the content is created through this function, cron jobs will run to check the - * pin status and update them in our db. - * At the same time a pin_request is created to duplicate the content on Pinata - * - * @param {import('./db-client-types').ContentInput} content - * @param {object} [opt] - * @param {boolean} [opt.updatePinRequests] If provided - * @return {Promise} The content cid - */ - async createContent (content, { - updatePinRequests = false - } = {}) { - const now = new Date().toISOString() - - /** @type {{data: string, error: PostgrestError }} */ - const { data: cid, error: createError } = await this._client - .rpc('create_content', { - data: { - content_cid: content.cid, - dag_size: content.dagSize, - inserted_at: now, - updated_at: now, - pins: content.pins.map(pin => ({ - status: pin.status, - location: { - peer_id: pin.location.peerId, - peer_name: pin.location.peerName, - region: pin.location.region - } - })) - } - }).single() - - if (createError) { - throw new DBError(createError) - } - - // Update Pin Request FK - if (updatePinRequests) { - /** @type {{data: string, error: PostgrestError }} */ - const { error: updateError } = (await this._client - .from(PAPinRequestTableName) - .update({ content_cid: content.cid }) - .match({ - requested_cid: content.cid - })) - - if (updateError) { - throw new DBError(updateError) - } - } - - return cid + return normalizePsaPinRequest(data) } /** * Get a filtered list of pin requests for a user * * @param {string} authKey - * @param {import('./db-client-types').ListPAPinRequestOptions} opts - * @return {Promise }> } + * @param {import('./db-client-types').ListPsaPinRequestOptions} opts + * @return {Promise }> } */ - async listPAPinRequests (authKey, opts = {}) { + async listPsaPinRequests (authKey, opts = {}) { const match = opts?.match || 'exact' const limit = opts?.limit || 10 let query = this._client - .from(PAPinRequestTableName) + .from(psaPinRequestTableName) .select(pinRequestSelect) .eq('auth_key_id', authKey) .order('inserted_at', { ascending: false }) @@ -905,7 +844,7 @@ export class DBClient { } if (opts.cid) { - query = query.in('requested_cid', opts.cid) + query = query.in('source_cid', opts.cid) } if (opts.name && match === 'exact') { @@ -934,7 +873,7 @@ export class DBClient { // TODO(https://github.com/web3-storage/web3.storage/issues/798): filter by meta is missing - /** @type {{ data: Array, error: Error }} */ + /** @type {{ data: Array, error: Error }} */ const { data, error } = (await query) if (error) { @@ -946,7 +885,7 @@ export class DBClient { // TODO(https://github.com/web3-storage/web3.storage/issues/804): Not limiting the query 
might cause // performance issues if a user created lots of requests with a token. We should improve this. const pinRequests = data.slice(0, limit) - const pins = pinRequests.map(pinRequest => normalizePaPinRequest(pinRequest)) + const pins = pinRequests.map(pinRequest => normalizePsaPinRequest(pinRequest)) return { count, @@ -960,11 +899,11 @@ export class DBClient { * @param {number} requestId * @param {string} authKey */ - async deletePAPinRequest (requestId, authKey) { + async deletePsaPinRequest (requestId, authKey) { const date = new Date().toISOString() - /** @type {{ data: import('./db-client-types').PAPinRequestItem, error: PostgrestError }} */ + /** @type {{ data: import('./db-client-types').PsaPinRequestItem, error: PostgrestError }} */ const { data, error } = await this._client - .from(PAPinRequestTableName) + .from(psaPinRequestTableName) .update({ deleted_at: date, updated_at: date diff --git a/packages/db/postgres/functions.sql b/packages/db/postgres/functions.sql index 4df79300b8..1ff12f9048 100644 --- a/packages/db/postgres/functions.sql +++ b/packages/db/postgres/functions.sql @@ -109,7 +109,7 @@ BEGIN END IF; end loop; - return (inserted_cid)::TEXT; + return (inserted_cid); END $$; @@ -170,6 +170,44 @@ BEGIN END $$; +-- Creates a pin request with relative content, pins, pin_requests and backups. +CREATE OR REPLACE FUNCTION create_psa_pin_request(data json) RETURNS TEXT + LANGUAGE plpgsql + volatile + PARALLEL UNSAFE +AS +$$ +DECLARE + inserted_pin_request_id BIGINT; +BEGIN + -- Set timeout as imposed by heroku + SET LOCAL statement_timeout = '30s'; + + PERFORM create_content(data); + + insert into psa_pin_request ( + auth_key_id, + content_cid, + source_cid, + name, + inserted_at, + updated_at + ) + values ( + (data ->> 'auth_key_id')::BIGINT, + data ->> 'content_cid', + data ->> 'source_cid', + data ->> 'name', + (data ->> 'inserted_at')::timestamptz, + (data ->> 'updated_at')::timestamptz + ) + + returning id into inserted_pin_request_id; + + return (inserted_pin_request_id)::TEXT; +END +$$; + CREATE OR REPLACE FUNCTION upsert_pin(data json) RETURNS TEXT LANGUAGE plpgsql volatile diff --git a/packages/db/postgres/pg-rest-api-types.d.ts b/packages/db/postgres/pg-rest-api-types.d.ts index a74e0b3be5..244dd6fa9a 100644 --- a/packages/db/postgres/pg-rest-api-types.d.ts +++ b/packages/db/postgres/pg-rest-api-types.d.ts @@ -12,79 +12,6 @@ export interface paths { }; }; }; - "/aggregate": { - get: { - parameters: { - query: { - aggregate_cid?: parameters["rowFilter.aggregate.aggregate_cid"]; - piece_cid?: parameters["rowFilter.aggregate.piece_cid"]; - sha256hex?: parameters["rowFilter.aggregate.sha256hex"]; - export_size?: parameters["rowFilter.aggregate.export_size"]; - metadata?: parameters["rowFilter.aggregate.metadata"]; - entry_created?: parameters["rowFilter.aggregate.entry_created"]; - /** Filtering Columns */ - select?: parameters["select"]; - /** Ordering */ - order?: parameters["order"]; - /** Limiting and Pagination */ - offset?: parameters["offset"]; - /** Limiting and Pagination */ - limit?: parameters["limit"]; - }; - header: { - /** Limiting and Pagination */ - Range?: parameters["range"]; - /** Limiting and Pagination */ - "Range-Unit"?: parameters["rangeUnit"]; - /** Preference */ - Prefer?: parameters["preferCount"]; - }; - }; - responses: { - /** OK */ - 200: { - schema: definitions["aggregate"][]; - }; - /** Partial Content */ - 206: unknown; - }; - }; - }; - "/aggregate_entry": { - get: { - parameters: { - query: { - aggregate_cid?: 
parameters["rowFilter.aggregate_entry.aggregate_cid"]; - cid_v1?: parameters["rowFilter.aggregate_entry.cid_v1"]; - datamodel_selector?: parameters["rowFilter.aggregate_entry.datamodel_selector"]; - /** Filtering Columns */ - select?: parameters["select"]; - /** Ordering */ - order?: parameters["order"]; - /** Limiting and Pagination */ - offset?: parameters["offset"]; - /** Limiting and Pagination */ - limit?: parameters["limit"]; - }; - header: { - /** Limiting and Pagination */ - Range?: parameters["range"]; - /** Limiting and Pagination */ - "Range-Unit"?: parameters["rangeUnit"]; - /** Preference */ - Prefer?: parameters["preferCount"]; - }; - }; - responses: { - /** OK */ - 200: { - schema: definitions["aggregate_entry"][]; - }; - /** Partial Content */ - 206: unknown; - }; - }; - }; "/auth_key": { get: { parameters: { @@ -382,52 +309,6 @@ export interface paths { }; }; }; - "/deal": { - get: { - parameters: { - query: { - deal_id?: parameters["rowFilter.deal.deal_id"]; - aggregate_cid?: parameters["rowFilter.deal.aggregate_cid"]; - client?: parameters["rowFilter.deal.client"]; - provider?: parameters["rowFilter.deal.provider"]; - status?: parameters["rowFilter.deal.status"]; - start_epoch?: parameters["rowFilter.deal.start_epoch"]; - end_epoch?: parameters["rowFilter.deal.end_epoch"]; - entry_created?: parameters["rowFilter.deal.entry_created"]; - entry_last_updated?: parameters["rowFilter.deal.entry_last_updated"]; - status_meta?: parameters["rowFilter.deal.status_meta"]; - start_time?: parameters["rowFilter.deal.start_time"]; - sector_start_epoch?: parameters["rowFilter.deal.sector_start_epoch"]; - sector_start_time?: parameters["rowFilter.deal.sector_start_time"]; - end_time?: parameters["rowFilter.deal.end_time"]; - /** Filtering Columns */ - select?: parameters["select"]; - /** Ordering */ - order?: parameters["order"]; - /** Limiting and Pagination */ - offset?: parameters["offset"]; - /** Limiting and Pagination */ - limit?: parameters["limit"]; - }; - header: { - /** Limiting and Pagination */ - Range?: parameters["range"]; - /** Limiting and Pagination */ - "Range-Unit"?: parameters["rangeUnit"]; - /** Preference */ - Prefer?: parameters["preferCount"]; - }; - }; - responses: { - /** OK */ - 200: { - schema: definitions["deal"][]; - }; - /** Partial Content */ - 206: unknown; - }; - }; - }; "/migration_tracker": { get: { parameters: { @@ -635,114 +516,6 @@ export interface paths { }; }; }; - "/pa_pin_request": { - get: { - parameters: { - query: { - id?: parameters["rowFilter.pa_pin_request.id"]; - content_cid?: parameters["rowFilter.pa_pin_request.content_cid"]; - auth_key_id?: parameters["rowFilter.pa_pin_request.auth_key_id"]; - requested_cid?: parameters["rowFilter.pa_pin_request.requested_cid"]; - name?: parameters["rowFilter.pa_pin_request.name"]; - deleted_at?: parameters["rowFilter.pa_pin_request.deleted_at"]; - inserted_at?: parameters["rowFilter.pa_pin_request.inserted_at"]; - updated_at?: parameters["rowFilter.pa_pin_request.updated_at"]; - /** Filtering Columns */ - select?: parameters["select"]; - /** Ordering */ - order?: parameters["order"]; - /** Limiting and Pagination */ - offset?: parameters["offset"]; - /** Limiting and Pagination */ - limit?: parameters["limit"]; - }; - header: { - /** Limiting and Pagination */ - Range?: parameters["range"]; - /** Limiting and Pagination */ - "Range-Unit"?: parameters["rangeUnit"]; - /** Preference */ - Prefer?: parameters["preferCount"]; - }; - }; - responses: { - /** OK */ - 200: { - schema: 
definitions["pa_pin_request"][]; - }; - /** Partial Content */ - 206: unknown; - }; - }; - post: { - parameters: { - body: { - /** pa_pin_request */ - pa_pin_request?: definitions["pa_pin_request"]; - }; - query: { - /** Filtering Columns */ - select?: parameters["select"]; - }; - header: { - /** Preference */ - Prefer?: parameters["preferReturn"]; - }; - }; - responses: { - /** Created */ - 201: unknown; - }; - }; - delete: { - parameters: { - query: { - id?: parameters["rowFilter.pa_pin_request.id"]; - content_cid?: parameters["rowFilter.pa_pin_request.content_cid"]; - auth_key_id?: parameters["rowFilter.pa_pin_request.auth_key_id"]; - requested_cid?: parameters["rowFilter.pa_pin_request.requested_cid"]; - name?: parameters["rowFilter.pa_pin_request.name"]; - deleted_at?: parameters["rowFilter.pa_pin_request.deleted_at"]; - inserted_at?: parameters["rowFilter.pa_pin_request.inserted_at"]; - updated_at?: parameters["rowFilter.pa_pin_request.updated_at"]; - }; - header: { - /** Preference */ - Prefer?: parameters["preferReturn"]; - }; - }; - responses: { - /** No Content */ - 204: never; - }; - }; - patch: { - parameters: { - query: { - id?: parameters["rowFilter.pa_pin_request.id"]; - content_cid?: parameters["rowFilter.pa_pin_request.content_cid"]; - auth_key_id?: parameters["rowFilter.pa_pin_request.auth_key_id"]; - requested_cid?: parameters["rowFilter.pa_pin_request.requested_cid"]; - name?: parameters["rowFilter.pa_pin_request.name"]; - deleted_at?: parameters["rowFilter.pa_pin_request.deleted_at"]; - inserted_at?: parameters["rowFilter.pa_pin_request.inserted_at"]; - updated_at?: parameters["rowFilter.pa_pin_request.updated_at"]; - }; - body: { - /** pa_pin_request */ - pa_pin_request?: definitions["pa_pin_request"]; - }; - header: { - /** Preference */ - Prefer?: parameters["preferReturn"]; - }; - }; - responses: { - /** No Content */ - 204: never; - }; - }; - }; "/pin": { get: { parameters: { @@ -1133,6 +906,114 @@ export interface paths { }; }; }; + "/psa_pin_request": { + get: { + parameters: { + query: { + id?: parameters["rowFilter.psa_pin_request.id"]; + auth_key_id?: parameters["rowFilter.psa_pin_request.auth_key_id"]; + content_cid?: parameters["rowFilter.psa_pin_request.content_cid"]; + source_cid?: parameters["rowFilter.psa_pin_request.source_cid"]; + name?: parameters["rowFilter.psa_pin_request.name"]; + deleted_at?: parameters["rowFilter.psa_pin_request.deleted_at"]; + inserted_at?: parameters["rowFilter.psa_pin_request.inserted_at"]; + updated_at?: parameters["rowFilter.psa_pin_request.updated_at"]; + /** Filtering Columns */ + select?: parameters["select"]; + /** Ordering */ + order?: parameters["order"]; + /** Limiting and Pagination */ + offset?: parameters["offset"]; + /** Limiting and Pagination */ + limit?: parameters["limit"]; + }; + header: { + /** Limiting and Pagination */ + Range?: parameters["range"]; + /** Limiting and Pagination */ + "Range-Unit"?: parameters["rangeUnit"]; + /** Preference */ + Prefer?: parameters["preferCount"]; + }; + }; + responses: { + /** OK */ + 200: { + schema: definitions["psa_pin_request"][]; + }; + /** Partial Content */ + 206: unknown; + }; + }; + post: { + parameters: { + body: { + /** psa_pin_request */ + psa_pin_request?: definitions["psa_pin_request"]; + }; + query: { + /** Filtering Columns */ + select?: parameters["select"]; + }; + header: { + /** Preference */ + Prefer?: parameters["preferReturn"]; + }; + }; + responses: { + /** Created */ + 201: unknown; + }; + }; + delete: { + parameters: { + query: { + id?: 
parameters["rowFilter.psa_pin_request.id"]; + auth_key_id?: parameters["rowFilter.psa_pin_request.auth_key_id"]; + content_cid?: parameters["rowFilter.psa_pin_request.content_cid"]; + source_cid?: parameters["rowFilter.psa_pin_request.source_cid"]; + name?: parameters["rowFilter.psa_pin_request.name"]; + deleted_at?: parameters["rowFilter.psa_pin_request.deleted_at"]; + inserted_at?: parameters["rowFilter.psa_pin_request.inserted_at"]; + updated_at?: parameters["rowFilter.psa_pin_request.updated_at"]; + }; + header: { + /** Preference */ + Prefer?: parameters["preferReturn"]; + }; + }; + responses: { + /** No Content */ + 204: never; + }; + }; + patch: { + parameters: { + query: { + id?: parameters["rowFilter.psa_pin_request.id"]; + auth_key_id?: parameters["rowFilter.psa_pin_request.auth_key_id"]; + content_cid?: parameters["rowFilter.psa_pin_request.content_cid"]; + source_cid?: parameters["rowFilter.psa_pin_request.source_cid"]; + name?: parameters["rowFilter.psa_pin_request.name"]; + deleted_at?: parameters["rowFilter.psa_pin_request.deleted_at"]; + inserted_at?: parameters["rowFilter.psa_pin_request.inserted_at"]; + updated_at?: parameters["rowFilter.psa_pin_request.updated_at"]; + }; + body: { + /** psa_pin_request */ + psa_pin_request?: definitions["psa_pin_request"]; + }; + header: { + /** Preference */ + Prefer?: parameters["preferReturn"]; + }; + }; + responses: { + /** No Content */ + 204: never; + }; + }; + }; "/upload": { get: { parameters: { @@ -1375,23 +1256,6 @@ export interface paths { }; }; }; - "/rpc/postgres_fdw_handler": { - post: { - parameters: { - body: { - args: { [key: string]: unknown }; - }; - header: { - /** Preference */ - Prefer?: parameters["preferParams"]; - }; - }; - responses: { - /** OK */ - 200: unknown; - }; - }; - }; "/rpc/pgrst_watch": { post: { parameters: { @@ -1447,25 +1311,6 @@ export interface paths { }; }; }; - "/rpc/postgres_fdw_validator": { - post: { - parameters: { - body: { - args: { - "": string; - }; - }; - header: { - /** Preference */ - Prefer?: parameters["preferParams"]; - }; - }; - responses: { - /** OK */ - 200: unknown; - }; - }; - }; "/rpc/pin_from_status_total": { post: { parameters: { @@ -1616,6 +1461,25 @@ export interface paths { }; }; }; + "/rpc/create_psa_pin_request": { + post: { + parameters: { + body: { + args: { + data: string; + }; + }; + header: { + /** Preference */ + Prefer?: parameters["preferParams"]; + }; + }; + responses: { + /** OK */ + 200: unknown; + }; + }; + }; "/rpc/user_used_storage": { post: { parameters: { @@ -1657,19 +1521,6 @@ export interface paths { } export interface definitions { - aggregate: { - aggregate_cid?: string; - piece_cid?: string; - sha256hex?: string; - export_size?: number; - metadata?: string; - entry_created?: string; - }; - aggregate_entry: { - aggregate_cid?: string; - cid_v1?: string; - datamodel_selector?: string; - }; auth_key: { /** * Note: @@ -1711,22 +1562,6 @@ export interface definitions { inserted_at: string; updated_at: string; }; - deal: { - deal_id?: number; - aggregate_cid?: string; - client?: string; - provider?: string; - status?: string; - start_epoch?: number; - end_epoch?: number; - entry_created?: string; - entry_last_updated?: string; - status_meta?: string; - start_time?: string; - sector_start_epoch?: number; - sector_start_time?: string; - end_time?: string; - }; migration_tracker: { /** * Note: @@ -1752,28 +1587,6 @@ export interface definitions { inserted_at: string; updated_at: string; }; - pa_pin_request: { - /** - * Note: - * This is a Primary Key. 
- */ - id: number; - /** - * Note: - * This is a Foreign Key to `content.cid`. - */ - content_cid?: string; - /** - * Note: - * This is a Foreign Key to `user.id`. - */ - auth_key_id: number; - requested_cid: string; - name?: string; - deleted_at?: string; - inserted_at: string; - updated_at: string; - }; pin: { /** * Note: @@ -1844,6 +1657,28 @@ export interface definitions { pin_id: number; inserted_at: string; }; + psa_pin_request: { + /** + * Note: + * This is a Primary Key. + */ + id: number; + /** + * Note: + * This is a Foreign Key to `auth_key.id`. + */ + auth_key_id: number; + /** + * Note: + * This is a Foreign Key to `content.cid`. + */ + content_cid: string; + source_cid: string; + name?: string; + deleted_at?: string; + inserted_at: string; + updated_at: string; + }; upload: { /** * Note: @@ -1910,19 +1745,6 @@ export interface parameters { offset: string; /** Limiting and Pagination */ limit: string; - /** aggregate */ - "body.aggregate": definitions["aggregate"]; - "rowFilter.aggregate.aggregate_cid": string; - "rowFilter.aggregate.piece_cid": string; - "rowFilter.aggregate.sha256hex": string; - "rowFilter.aggregate.export_size": string; - "rowFilter.aggregate.metadata": string; - "rowFilter.aggregate.entry_created": string; - /** aggregate_entry */ - "body.aggregate_entry": definitions["aggregate_entry"]; - "rowFilter.aggregate_entry.aggregate_cid": string; - "rowFilter.aggregate_entry.cid_v1": string; - "rowFilter.aggregate_entry.datamodel_selector": string; /** auth_key */ "body.auth_key": definitions["auth_key"]; "rowFilter.auth_key.id": string; @@ -1944,22 +1766,6 @@ export interface parameters { "rowFilter.content.dag_size": string; "rowFilter.content.inserted_at": string; "rowFilter.content.updated_at": string; - /** deal */ - "body.deal": definitions["deal"]; - "rowFilter.deal.deal_id": string; - "rowFilter.deal.aggregate_cid": string; - "rowFilter.deal.client": string; - "rowFilter.deal.provider": string; - "rowFilter.deal.status": string; - "rowFilter.deal.start_epoch": string; - "rowFilter.deal.end_epoch": string; - "rowFilter.deal.entry_created": string; - "rowFilter.deal.entry_last_updated": string; - "rowFilter.deal.status_meta": string; - "rowFilter.deal.start_time": string; - "rowFilter.deal.sector_start_epoch": string; - "rowFilter.deal.sector_start_time": string; - "rowFilter.deal.end_time": string; /** migration_tracker */ "body.migration_tracker": definitions["migration_tracker"]; "rowFilter.migration_tracker.id": string; @@ -1977,16 +1783,6 @@ export interface parameters { "rowFilter.name.validity": string; "rowFilter.name.inserted_at": string; "rowFilter.name.updated_at": string; - /** pa_pin_request */ - "body.pa_pin_request": definitions["pa_pin_request"]; - "rowFilter.pa_pin_request.id": string; - "rowFilter.pa_pin_request.content_cid": string; - "rowFilter.pa_pin_request.auth_key_id": string; - "rowFilter.pa_pin_request.requested_cid": string; - "rowFilter.pa_pin_request.name": string; - "rowFilter.pa_pin_request.deleted_at": string; - "rowFilter.pa_pin_request.inserted_at": string; - "rowFilter.pa_pin_request.updated_at": string; /** pin */ "body.pin": definitions["pin"]; "rowFilter.pin.id": string; @@ -2013,6 +1809,16 @@ export interface parameters { "rowFilter.pin_sync_request.id": string; "rowFilter.pin_sync_request.pin_id": string; "rowFilter.pin_sync_request.inserted_at": string; + /** psa_pin_request */ + "body.psa_pin_request": definitions["psa_pin_request"]; + "rowFilter.psa_pin_request.id": string; + 
"rowFilter.psa_pin_request.auth_key_id": string; + "rowFilter.psa_pin_request.content_cid": string; + "rowFilter.psa_pin_request.source_cid": string; + "rowFilter.psa_pin_request.name": string; + "rowFilter.psa_pin_request.deleted_at": string; + "rowFilter.psa_pin_request.inserted_at": string; + "rowFilter.psa_pin_request.updated_at": string; /** upload */ "body.upload": definitions["upload"]; "rowFilter.upload.id": string; diff --git a/packages/db/postgres/reset.sql b/packages/db/postgres/reset.sql index 6e50b6c1c7..143973d297 100644 --- a/packages/db/postgres/reset.sql +++ b/packages/db/postgres/reset.sql @@ -5,7 +5,7 @@ DROP TABLE IF EXISTS pin CASCADE; DROP TABLE IF EXISTS pin_location; DROP TABLE IF EXISTS pin_request; DROP TABLE IF EXISTS pin_sync_request; -DROP TABLE IF EXISTS pa_pin_request; +DROP TABLE IF EXISTS psa_pin_request; DROP TABLE IF EXISTS content; DROP TABLE IF EXISTS backup; DROP TABLE IF EXISTS auth_key; diff --git a/packages/db/postgres/tables.sql b/packages/db/postgres/tables.sql index 15619002d0..b53d661249 100644 --- a/packages/db/postgres/tables.sql +++ b/packages/db/postgres/tables.sql @@ -200,16 +200,15 @@ CREATE TABLE IF NOT EXISTS pin_sync_request CREATE INDEX IF NOT EXISTS pin_sync_request_pin_id_idx ON pin_sync_request (pin_id); -- Tracks pinning requests from Pinning Service API --- TODO(paolo) this table should be considered for track all content stored on web3.storage -CREATE TABLE IF NOT EXISTS pa_pin_request +CREATE TABLE IF NOT EXISTS psa_pin_request ( id BIGSERIAL PRIMARY KEY, - -- Points to the pinned content, it is updated once the content is actually being found. - content_cid TEXT REFERENCES content (cid), -- Points to auth key used to pin the content. auth_key_id BIGINT NOT NULL REFERENCES public.auth_key (id), + -- Points to the pinned content, it is updated once the content is actually being found. 
+ content_cid TEXT NOT NULL REFERENCES content (cid), -- The id of the content being requested, it could not exist on IPFS (typo, node offline etc) - requested_cid TEXT NOT NULL, + source_cid TEXT NOT NULL, name TEXT, deleted_at TIMESTAMP WITH TIME ZONE, inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL, diff --git a/packages/db/test/content.spec.js b/packages/db/test/content.spec.js deleted file mode 100644 index 42dff9bafe..0000000000 --- a/packages/db/test/content.spec.js +++ /dev/null @@ -1,211 +0,0 @@ -/* eslint-env mocha, browser */ -import assert from 'assert' -import { DBClient } from '../index' -import { createUser, createUserAuthKey, token } from './utils.js' - -const contentTable = 'content' - -describe('Content', () => { - /** @type {DBClient & {_client: import('@supabase/postgrest-js').PostgrestClient}} */ - const client = (new DBClient({ - endpoint: 'http://127.0.0.1:3000', - token - })) - let user - let authKey - - let createdCid - - const cidToBeCreated = 'bafybeiczsscdsbs7aaqz55asqdf3smv6klcw3gofszvwlyarci47bgf359' - const cidToBeCreatedWithPinRequest = 'bafybeiczsscdsbs7aaqz55asqdf3smv6klcw3gofszvwlyarci47bgf098' - const anotherCid = 'bafybeiczsscdsbs7aaqz55asqdf3smv6klcw3gofszvwlyarci47bgf310' - const yetAnotherCid = 'bafybeiczsscdsbs7aaqz55asqdf3smv6klcw3gofszvwlyarci47bgf102' - const dagSize = 10 - - const cids = [ - cidToBeCreated, - anotherCid, - cidToBeCreatedWithPinRequest, - yetAnotherCid - ] - - const contentToBeCreated = { - cid: cidToBeCreated, - dagSize, - pins: [{ - status: 'Pinning', - location: { - peerId: 'peer_id', - peerName: 'peer_name', - region: 'region' - } - }] - } - - // Create user and auth key` - before(async () => { - user = await createUser(client) - authKey = await createUserAuthKey(client, user._id) - }) - - // Guarantee no Pin requests exist and create the ones needed for our tests - before(async () => { - // Make sure we don't have pinRequest for the given cids - for (let i = 0; i < cids.length; i++) { - const { count: countR } = await client._client.from(contentTable) - .select('cid', { - count: 'exact' - }) - .eq('cid', cids[i]) - - assert.strictEqual(countR, 0) - } - - createdCid = await client.createContent(contentToBeCreated) - }) - - describe('Create Content', () => { - it('it creates a content row', async () => { - const { count } = await client._client - .from(contentTable) - .select('cid', { - count: 'exact' - }) - .eq('cid', cidToBeCreated) - assert.strictEqual(count, 1) - }) - - it('it returns the created conten cid', async () => { - assert.strictEqual(cidToBeCreated, createdCid) - }) - - it('it creates a pin request to duplicate data to Pinata', async () => { - const { count } = await client._client - .from('pin_request') - .select('content_cid', { - count: 'exact' - }) - .match({ - content_cid: cidToBeCreated - }) - assert.strictEqual(count, 1) - }) - - it('it creates relative pin row', async () => { - const { count } = await client._client - .from('pin') - .select('id', { - count: 'exact' - }) - .eq('content_cid', cidToBeCreated) - - assert.strictEqual(count, 1) - }) - - it('it creates a pin sync request if not pinned', async () => { - const { data } = await client._client - .from('pin') - .select('id') - .eq('content_cid', cidToBeCreated) - .single() - - const { count } = await client._client - .from('pin_sync_request') - .select('pin_id', { - count: 'exact' - }) - .match({ - pin_id: data.id - }) - assert.strictEqual(count, 1) - }) - - it('it does not add a duplicated content if already 
exists', async () => { - const otherContentToBeCreated = { - cid: anotherCid, - dagSize: 10, - pins: [{ - status: 'Pinning', - location: { - peerId: 'peer_id_2', - peerName: 'peer_name_2', - region: 'region' - } - }] - } - - const { error } = await client - ._client - .from(contentTable) - .insert({ - cid: anotherCid - }) - - if (error) { - throw new Error() - } - - await client.createContent(otherContentToBeCreated) - - const { count } = await client._client - .from(contentTable) - .select('cid', { - count: 'exact' - }) - .match({ - cid: anotherCid - }) - - assert.strictEqual(count, 1) - }) - - it('updates the pinRequest', async () => { - /** - * @type {import('../db-client-types').PAPinRequestUpsertInput} - */ - const aPinRequestInput = { - requestedCid: cidToBeCreatedWithPinRequest, - authKey - } - - /** - * @type {import('../db-client-types').PAPinRequestUpsertInput} - */ - const aSecondPinRequestInput = { - requestedCid: yetAnotherCid, - authKey - } - - /** - * @type {import('../db-client-types').PAPinRequestUpsertOutput} - */ - const aPinRequestOutput = await client.createPAPinRequest(aPinRequestInput) - - /** - * @type {import('../db-client-types').PAPinRequestUpsertOutput} - */ - const aSecondPinRequestOutput = await client.createPAPinRequest(aSecondPinRequestInput) - - const contentWithPinRequest = { - cid: cidToBeCreatedWithPinRequest, - dagSize: 10, - pins: [{ - status: 'Pinning', - location: { - peerId: 'peer_id_2', - peerName: 'peer_name_2', - region: 'region' - } - }] - } - - await client.createContent(contentWithPinRequest, { updatePinRequests: true }) - - const request1 = await client.getPAPinRequest(aPinRequestOutput._id) - const request2 = await client.getPAPinRequest(aSecondPinRequestOutput._id) - - assert.strictEqual(request1.contentCid, cidToBeCreatedWithPinRequest) - assert.ifError(request2.contentCid) - }) - }) -}) diff --git a/packages/db/test/pinning.spec.js b/packages/db/test/pinning.spec.js index 6908aa47b5..9392a1caee 100644 --- a/packages/db/test/pinning.spec.js +++ b/packages/db/test/pinning.spec.js @@ -1,14 +1,14 @@ /* eslint-env mocha, browser */ import assert from 'assert' +import { normalizeCid } from '../../api/src/utils/normalize-cid' import { DBClient } from '../index' import { createUpload, createUser, createUserAuthKey, token } from './utils.js' import { CID } from 'multiformats/cid' import { sha256 } from 'multiformats/hashes/sha2' import * as pb from '@ipld/dag-pb' -import { normalizeCid } from '../../api/src/utils/normalize-cid' /* global crypto */ -const pinRequestTable = 'pa_pin_request' +const pinRequestTable = 'psa_pin_request' /** * @param {number} code @@ -26,22 +26,17 @@ async function randomCid (code = pb.code) { * @param {object} opt * @param {boolean} [opt.withContent] */ -const assertCorrectPinRequestOutputTypes = (pinRequestOutput, { withContent = true } = {}) => { +const assertCorrectPinRequestOutputTypes = (pinRequestOutput) => { assert.ok(typeof pinRequestOutput._id === 'string', '_id should be a string') - assert.ok(typeof pinRequestOutput.requestedCid === 'string', 'requestedCid should be a string') + assert.ok(typeof pinRequestOutput.sourceCid === 'string', 'sourceCid should be a string') assert.ok(Array.isArray(pinRequestOutput.pins), 'pin should be an array') assert.ok(Date.parse(pinRequestOutput.created), 'created should be valid date string') assert.ok(Date.parse(pinRequestOutput.updated), 'updated should be valid date string') - - if (withContent) { - assert.ok(typeof pinRequestOutput.contentCid === 'string', 'contentCid 
should be a string') - } else { - assert.ifError(pinRequestOutput.contentCid) - } + assert.ok(typeof pinRequestOutput.contentCid === 'string', 'contentCid should be a string') } describe('Pin Request', () => { - /** @type {DBClient & {_client}} */ + /** @type {DBClient & {_client: import('@supabase/postgrest-js').PostgrestClient }} */ const client = (new DBClient({ endpoint: 'http://127.0.0.1:3000', token @@ -49,30 +44,22 @@ describe('Pin Request', () => { let user let authKey /** - * @type {import('../db-client-types').PAPinRequestUpsertInput} + * @type {import('../db-client-types').PsaPinRequestUpsertInput} */ let aPinRequestInput /** - * @type {import('../db-client-types').PAPinRequestUpsertOutput} + * @type {import('../db-client-types').PsaPinRequestUpsertOutput} */ let aPinRequestOutput - /** - * @type {import('../db-client-types').PAPinRequestUpsertInput} - */ - let aPinRequestInputForExistingContent - - /** - * @type {import('../db-client-types').PAPinRequestUpsertOutput} - */ - let aPinRequestOutputForExistingContent - const cids = [ - 'bafybeiczsscdsbs7aaqz55asqdf3smv6klcw3gofszvwlyarci47bgf356', - 'bafybeiczsscdsbs7aaqz55asqdf3smv6klcw3gofszvwlyarci47bgf358' + 'QmdA5WkDNALetBn4iFeSepHjdLGJdxPBwZyY47ir1bZGAK', + 'QmNvTjdqEPjZVWCvRWsFJA1vK7TTw1g9JP6we1WBJTRADM' ] + const normalizedCids = cids.map(cid => normalizeCid(cid)) + const pins = [ { status: 'Pinning', @@ -107,50 +94,55 @@ describe('Pin Request', () => { }) assert.strictEqual(countR, 0, 'There are still requests in the db') - await createUpload(client, user._id, authKey, cids[1], { pins: pins }) - aPinRequestInput = { - requestedCid: cids[0], - authKey - } - - aPinRequestInputForExistingContent = { - requestedCid: cids[1], - cid: cids[1], + sourceCid: cids[0], + contentCid: normalizedCids[0], + pins, authKey } - aPinRequestOutput = await client.createPAPinRequest(aPinRequestInput) - aPinRequestOutputForExistingContent = await client.createPAPinRequest(aPinRequestInputForExistingContent) + aPinRequestOutput = await client.createPsaPinRequest(aPinRequestInput) }) describe('Create Pin', () => { it('creates a Pin Request', async () => { - const savedPinRequest = await client.getPAPinRequest(parseInt(aPinRequestOutput._id, 10)) + const savedPinRequest = await client.getPsaPinRequest(parseInt(aPinRequestOutput._id, 10)) assert.ok(savedPinRequest) + assert.strictEqual(savedPinRequest._id, aPinRequestOutput._id) }) it('returns the right object', async () => { - assertCorrectPinRequestOutputTypes(aPinRequestOutput, { withContent: false }) - assert.strictEqual(aPinRequestOutput.requestedCid, cids[0], 'requestedCid is the one provided') + assertCorrectPinRequestOutputTypes(aPinRequestOutput) + assert.strictEqual(aPinRequestOutput.sourceCid, cids[0], 'sourceCid is not the one provided') + assert.strictEqual(aPinRequestOutput.authKey, authKey, 'auth key is not the one provided') + assert.strictEqual(aPinRequestOutput.contentCid, normalizedCids[0], 'contentCid is not the one provided') }) - it('returns no pins if they do not exists', async () => { - assert.strictEqual(aPinRequestOutput.pins.length, 0) - }) - - it('returns the right object when it has content associated', async () => { - assertCorrectPinRequestOutputTypes(aPinRequestOutputForExistingContent) - assert.strictEqual(aPinRequestOutputForExistingContent.requestedCid, cids[1], 'requestedCid is the one provided') - }) + it('creates content and pins', async () => { + const { count: countContent } = await client._client + .from('content') + .select('cid', { + count: 'exact' + }) + 
.match({ + cid: normalizedCids[0] + }) + assert.strictEqual(countContent, 1) - it('returns a content cid if exists contentCid', async () => { - assert.strictEqual(aPinRequestOutputForExistingContent.contentCid, cids[1]) + const { count: countPins } = await client._client + .from('pin') + .select('id', { + count: 'exact' + }) + .match({ + content_cid: normalizedCids[0] + }) + assert.strictEqual(countPins, pins.length) }) - it('returns pins if pins if content exists', async () => { + it('returns the right pins', async () => { // Only checking statuses for simplicity - const statuses = aPinRequestOutputForExistingContent.pins + const statuses = aPinRequestOutput.pins .map((p) => p.status) assert.deepStrictEqual(statuses, [pins[0].status, pins[1].status]) }) @@ -158,44 +150,31 @@ describe('Pin Request', () => { describe('Get Pin', () => { let savedPinRequest - let savedPinRequestForExistingContent before(async () => { - savedPinRequest = await client.getPAPinRequest(parseInt(aPinRequestOutput._id, 10)) - savedPinRequestForExistingContent = await client.getPAPinRequest(parseInt(aPinRequestOutputForExistingContent._id, 10)) + savedPinRequest = await client.getPsaPinRequest(parseInt(aPinRequestOutput._id, 10)) }) - it('creates a Pin Request', async () => { + it('gets a Pin Request, if it exists', async () => { assert.ok(savedPinRequest) }) it('returns the right object', async () => { - assertCorrectPinRequestOutputTypes(savedPinRequest, { withContent: false }) - assert.strictEqual(savedPinRequest.requestedCid, cids[0], 'requestedCid is the one provided') - }) - - it('returns no pins if they do not exists', async () => { - assert.strictEqual(savedPinRequest.pins.length, 0) - }) - - it('returns the right object when it has content associated', async () => { - assertCorrectPinRequestOutputTypes(savedPinRequestForExistingContent) - assert.strictEqual(savedPinRequestForExistingContent.requestedCid, cids[1], 'rrequestedCid is the one provided') - }) - - it('returns a content cid if exists contentCid', async () => { - assert.strictEqual(savedPinRequestForExistingContent.contentCid, cids[1]) + assertCorrectPinRequestOutputTypes(savedPinRequest) + assert.strictEqual(savedPinRequest.sourceCid, cids[0], 'sourceCid is not the one provided') + assert.strictEqual(savedPinRequest.authKey, authKey, 'auth key is not the one provided') + assert.strictEqual(savedPinRequest.contentCid, normalizedCids[0], 'contentCid is not the one provided') }) - it('returns pins if pins if content exists', async () => { + it('returns the right pins', async () => { // Only checking statuses for simplicity - const statuses = savedPinRequestForExistingContent.pins + const statuses = savedPinRequest.pins .map((p) => p.status) assert.deepStrictEqual(statuses, [pins[0].status, pins[1].status]) }) it('throws if does not exists', async () => { - assert.rejects(client.getPAPinRequest(1000)) + assert.rejects(client.getPsaPinRequest(1000)) }) }) @@ -239,7 +218,7 @@ describe('Pin Request', () => { { name: 'horse', date: [2020, 0, 1], - requestedCid: cidWithContent, + sourceCid: cidWithContent, cid: normalizeCidWithContent }, { name: 'capybara', @@ -273,38 +252,39 @@ describe('Pin Request', () => { } ] createdPinningRequests = await Promise.all(pinRequestsInputs.map(async (item) => { - const requestedCid = item.requestedCid || await randomCid() - const normalizedCid = item.cid || normalizeCid(requestedCid) + const sourceCid = item.sourceCid || await randomCid() + const normalizedCid = item.cid || normalizeCid(sourceCid) - return 
client.createPAPinRequest({ + return client.createPsaPinRequest({ ...(item.name) && { name: item.name }, authKey: authKeyPinList, - requestedCid: requestedCid, - cid: normalizedCid + sourceCid: sourceCid, + contentCid: normalizedCid, + pins }) })) }) it('limits the results to 10', async () => { - const { results: prs } = await client.listPAPinRequests(authKeyPinList) + const { results: prs } = await client.listPsaPinRequests(authKeyPinList) assert.strictEqual(prs.length, 10) }) it('limits the results to the provided limit', async () => { const limit = 8 - const { results: prs } = await client.listPAPinRequests(authKeyPinList, { + const { results: prs } = await client.listPsaPinRequests(authKeyPinList, { limit }) assert.strictEqual(prs.length, limit) }) it('returns only requests for the provided token', async () => { - const { results: prs } = await client.listPAPinRequests('10') + const { results: prs } = await client.listPsaPinRequests('10') assert.strictEqual(prs.length, 0) }) it('sorts by date', async () => { - const { results: prs } = await client.listPAPinRequests(authKeyPinList) + const { results: prs } = await client.listPsaPinRequests(authKeyPinList) const sorted = prs.reduce((n, item) => n !== null && item.created <= n.created && item) assert(sorted) @@ -312,7 +292,7 @@ describe('Pin Request', () => { it.skip('it filters items by provided status', async () => { // TODO(https://github.com/web3-storage/web3.storage/issues/797): status filtering is currently not working - const { results: prs } = await client.listPAPinRequests(authKeyPinList, { + const { results: prs } = await client.listPsaPinRequests(authKeyPinList, { status: ['Pinning'] }) @@ -321,19 +301,19 @@ describe('Pin Request', () => { }) it('filters items by provided cid', async () => { - const cids = [createdPinningRequests[0].requestedCid, createdPinningRequests[1].requestedCid] - const { results: prs } = await client.listPAPinRequests(authKeyPinList, { + const cids = [createdPinningRequests[0].sourceCid, createdPinningRequests[1].sourceCid] + const { results: prs } = await client.listPsaPinRequests(authKeyPinList, { cid: cids }) assert.strictEqual(prs.length, 2) - assert(prs.map(p => p.requestedCid).includes(cids[0])) - assert(prs.map(p => p.requestedCid).includes(cids[1])) + assert(prs.map(p => p.sourceCid).includes(cids[0])) + assert(prs.map(p => p.sourceCid).includes(cids[1])) }) it('filters items by exact match by default', async () => { const name = 'capybara' - const { results: prs } = await client.listPAPinRequests(authKeyPinList, { + const { results: prs } = await client.listPsaPinRequests(authKeyPinList, { name }) @@ -345,7 +325,7 @@ describe('Pin Request', () => { it('filters items by iexact match', async () => { const name = 'camel' - const { results: prs } = await client.listPAPinRequests(authKeyPinList, { + const { results: prs } = await client.listPsaPinRequests(authKeyPinList, { name, match: 'iexact' }) @@ -358,7 +338,7 @@ describe('Pin Request', () => { it('filters items by partial match', async () => { const name = 'giant' - const { results: prs } = await client.listPAPinRequests(authKeyPinList, { + const { results: prs } = await client.listPsaPinRequests(authKeyPinList, { name, match: 'partial' }) @@ -371,7 +351,7 @@ describe('Pin Request', () => { it('filters items by ipartial match', async () => { const name = 'giant' - const { results: prs } = await client.listPAPinRequests(authKeyPinList, { + const { results: prs } = await client.listPsaPinRequests(authKeyPinList, { name, match: 'ipartial' }) @@ 
-383,7 +363,7 @@ describe('Pin Request', () => { }) it('filters items created before a date', async () => { - const { results: pins } = await client.listPAPinRequests(authKeyPinList, { + const { results: pins } = await client.listPsaPinRequests(authKeyPinList, { before: '2021-01-01T00:00:00.000000Z' }) @@ -391,7 +371,7 @@ describe('Pin Request', () => { }) it('filters items created after a date', async () => { - const { results: pins } = await client.listPAPinRequests(authKeyPinList, { + const { results: pins } = await client.listPsaPinRequests(authKeyPinList, { after: '2021-01-01T00:00:00.000000Z', limit: 20 }) @@ -402,28 +382,28 @@ describe('Pin Request', () => { describe('Delete Pin', () => { it('throws if the request id does not exist', async () => { - assert.rejects(client.deletePAPinRequest(1000, authKey)) + assert.rejects(client.deletePsaPinRequest(1000, authKey)) }) it('throws if the auth key does not belong to the pin request', async () => { - assert.rejects(client.deletePAPinRequest(parseInt(aPinRequestOutput._id, 10), 'fakeAuth')) + assert.rejects(client.deletePsaPinRequest(parseInt(aPinRequestOutput._id, 10), 'fakeAuth')) }) it('returns the id of the deleted pin request', async () => { const aPinRequestOutputId = parseInt(aPinRequestOutput._id, 10) - const pinRequest = await client.getPAPinRequest(aPinRequestOutputId) + const pinRequest = await client.getPsaPinRequest(aPinRequestOutputId) assert.ok(!pinRequest.deleted, 'is null') - const deletedPinRequest = await client.deletePAPinRequest(aPinRequestOutputId, authKey) + const deletedPinRequest = await client.deletePsaPinRequest(aPinRequestOutputId, authKey) assert.ok(deletedPinRequest) - assert.equal(deletedPinRequest._id, 3) + assert.equal(deletedPinRequest._id, pinRequest._id) }) it('does not select pin request after deletion', async () => { - assert.rejects(client.getPAPinRequest(parseInt(aPinRequestOutput._id, 10))) + assert.rejects(client.getPsaPinRequest(parseInt(aPinRequestOutput._id, 10))) }) it('cannot delete a pin request which is already deleted', async () => { - assert.rejects(client.deletePAPinRequest(parseInt(aPinRequestOutput._id, 10), authKey)) + assert.rejects(client.deletePsaPinRequest(parseInt(aPinRequestOutput._id, 10), authKey)) }) }) }) diff --git a/packages/db/utils.js b/packages/db/utils.js index 9659cd257a..a6c87a6bdb 100644 --- a/packages/db/utils.js +++ b/packages/db/utils.js @@ -15,21 +15,19 @@ export function normalizeUpload (upload) { } } -// TODO: this looks really similar to normalizeUpload. -// should be merged together /** * Normalize pin request * - * @param {object} paPinRequest - * @return {import('./db-client-types').PAPinRequestUpsertOutput} + * @param {object} psaPinRequest + * @return {import('./db-client-types').PsaPinRequestUpsertOutput} */ -export function normalizePaPinRequest (paPinRequest) { - const nPaPinRequest = { ...paPinRequest } - delete nPaPinRequest.content +export function normalizePsaPinRequest (psaPinRequest) { + const nPsaPinRequest = { ...psaPinRequest } + delete nPsaPinRequest.content return { - ...nPaPinRequest, - pins: paPinRequest.content?.pins ? normalizePins(paPinRequest.content.pins) : [] + ...nPsaPinRequest, + pins: psaPinRequest.content?.pins ? normalizePins(psaPinRequest.content.pins) : [] } }
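The regenerated pg-rest-api-types above boil down to this: the renamed table is now served by PostgREST at /psa_pin_request with rowFilter.psa_pin_request.* query parameters, plus an /rpc/create_psa_pin_request function. As a rough sketch (not part of this diff; the endpoint and CID are illustrative and auth headers are omitted), the db specs hit that route through @supabase/postgrest-js along these lines:

import { PostgrestClient } from '@supabase/postgrest-js'

// Illustrative local PostgREST endpoint, matching the one the db tests point at.
const postgrest = new PostgrestClient('http://127.0.0.1:3000')

// Count pin requests for a given source CID; `source_cid` is one of the
// regenerated rowFilter.psa_pin_request.* parameters.
const { count, error } = await postgrest
  .from('psa_pin_request')
  .select('id', { count: 'exact' })
  .match({ source_cid: 'QmdA5WkDNALetBn4iFeSepHjdLGJdxPBwZyY47ir1bZGAK' })

if (error) throw error
console.log(`psa_pin_request rows for that source_cid: ${count}`)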
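The psa_pin_request schema keeps both the CID exactly as the user supplied it (source_cid, possibly a v0 CID) and the normalized v1 CID the content row is keyed on (content_cid). Pieced together from the updated pinning.spec.js above, the renamed DB client methods are used roughly like this; a sketch only, with illustrative CID and pin-location values:

import { DBClient } from '../index'
import { createUser, createUserAuthKey, token } from './utils.js'
import { normalizeCid } from '../../api/src/utils/normalize-cid'

const client = new DBClient({ endpoint: 'http://127.0.0.1:3000', token })
const user = await createUser(client)
const authKey = await createUserAuthKey(client, user._id)

const sourceCid = 'QmdA5WkDNALetBn4iFeSepHjdLGJdxPBwZyY47ir1bZGAK' // CID as requested by the user
const contentCid = normalizeCid(sourceCid) // v1 CID the content is tracked under

// Creates (or reuses) the content row, its pins and the psa_pin_request in one call.
const pinRequest = await client.createPsaPinRequest({
  sourceCid,
  contentCid,
  pins: [{ status: 'Pinning', location: { peerId: 'peer_id', peerName: 'peer_name', region: 'region' } }],
  authKey
})

// Fetching, listing and deleting use the renamed Psa* methods
// (deletion sets the table's deleted_at column rather than dropping the row).
const saved = await client.getPsaPinRequest(parseInt(pinRequest._id, 10))
const { results } = await client.listPsaPinRequests(authKey, { limit: 8 })
await client.deletePsaPinRequest(parseInt(saved._id, 10), authKey)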
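Finally, for orientation on the utils.js change: normalizePsaPinRequest takes a raw pin-request record whose pins live on a nested content relation, lifts them onto the request itself (run through normalizePins, defaulting to [] when the relation carries no pins) and drops the content key. A hedged sketch; the import path and field names are illustrative and depend on the select aliases the client actually uses:

import { normalizePsaPinRequest } from '../utils.js' // illustrative import path

// Shape of a row as it might come back from PostgREST, before normalization.
const raw = {
  _id: '42',
  sourceCid: 'QmdA5WkDNALetBn4iFeSepHjdLGJdxPBwZyY47ir1bZGAK',
  contentCid: 'bafybeiczsscdsbs7aaqz55asqdf3smv6klcw3gofszvwlyarci47bgf359',
  authKey: '7',
  content: {
    pins: [{ status: 'Pinning', location: { peerId: 'peer_id', peerName: 'peer_name', region: 'region' } }]
  }
}

const normalized = normalizePsaPinRequest(raw)
// `normalized` keeps every field except `content`, and gains
// `pins: normalizePins(raw.content.pins)` (or [] when content/pins are missing).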