diff --git a/client/src/components/_common/Form/DynamicForm/DynamicForm.jsx b/client/src/components/_common/Form/DynamicForm/DynamicForm.jsx
index f7f74f177..16f0766f7 100644
--- a/client/src/components/_common/Form/DynamicForm/DynamicForm.jsx
+++ b/client/src/components/_common/Form/DynamicForm/DynamicForm.jsx
@@ -300,6 +300,7 @@ const DynamicForm = ({ initialFormFields, onChange }) => {
name={field.name}
label={field.label}
type="file"
+ accept={field?.validation?.accept}
description={field?.description}
required={field?.validation?.required}
onChange={(event) => {
diff --git a/client/src/components/_custom/drp/DataFilesProjectEditDescriptionModalAddon/DataFilesProjectEditDescriptionModalAddon.jsx b/client/src/components/_custom/drp/DataFilesProjectEditDescriptionModalAddon/DataFilesProjectEditDescriptionModalAddon.jsx
index 0e9e89744..3865c141d 100644
--- a/client/src/components/_custom/drp/DataFilesProjectEditDescriptionModalAddon/DataFilesProjectEditDescriptionModalAddon.jsx
+++ b/client/src/components/_custom/drp/DataFilesProjectEditDescriptionModalAddon/DataFilesProjectEditDescriptionModalAddon.jsx
@@ -35,6 +35,9 @@ const DataFilesProjectEditDescriptionModalAddon = ({ setValidationSchema }) => {
});
});
} else {
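+ // File inputs can't be pre-populated programmatically, so skip restoring file-type fields from saved metadata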
+ if (field.type === 'file') {
+ return;
+ }
setFieldValue(field.name, metadata[field.name]);
}
}
diff --git a/client/src/components/_custom/drp/DataFilesProjectFileListingMetadataAddon/DataFilesProjectFileListingMetadataAddon.jsx b/client/src/components/_custom/drp/DataFilesProjectFileListingMetadataAddon/DataFilesProjectFileListingMetadataAddon.jsx
index e64b4318d..c9a665342 100644
--- a/client/src/components/_custom/drp/DataFilesProjectFileListingMetadataAddon/DataFilesProjectFileListingMetadataAddon.jsx
+++ b/client/src/components/_custom/drp/DataFilesProjectFileListingMetadataAddon/DataFilesProjectFileListingMetadataAddon.jsx
@@ -15,6 +15,8 @@ const excludeKeys = [
'sample',
'digital_dataset',
'file_objs',
+ 'cover_image',
+ 'cover_image_url',
];
const DataFilesProjectFileListingMetadataAddon = ({
@@ -33,6 +35,8 @@ const DataFilesProjectFileListingMetadataAddon = ({
license,
doi,
keywords,
+ cover_image,
+ cover_image_url,
}) => {
const dateOptions = { month: 'long', day: 'numeric', year: 'numeric' };
const dateLabel = publication_date ? 'Publication Date' : 'Created';
@@ -45,6 +49,8 @@ const DataFilesProjectFileListingMetadataAddon = ({
license: license ?? 'None',
...(doi && { doi }),
...(keywords && { keywords }),
+ ...(cover_image && { cover_image }),
+ ...(cover_image_url && { cover_image_url }),
};
};
@@ -105,8 +111,9 @@ const DataFilesProjectFileListingMetadataAddon = ({
>
))}
diff --git a/client/src/components/_custom/drp/DataFilesProjectPublish/DataFilesProjectPublishWizardSteps/ProjectDescription.jsx b/client/src/components/_custom/drp/DataFilesProjectPublish/DataFilesProjectPublishWizardSteps/ProjectDescription.jsx
index 99c7a593b..da3ce08d3 100644
--- a/client/src/components/_custom/drp/DataFilesProjectPublish/DataFilesProjectPublishWizardSteps/ProjectDescription.jsx
+++ b/client/src/components/_custom/drp/DataFilesProjectPublish/DataFilesProjectPublishWizardSteps/ProjectDescription.jsx
@@ -44,6 +44,14 @@ const ProjectDescription = ({ project }) => {
License: project.license ?? 'None',
};
+  if (project.cover_image) {
+    // Link the file name to the pre-signed cover image URL (cover_image_url assumed on the project record)
+    projectData['Cover Image'] = (
+      <a href={project.cover_image_url} target="_blank" rel="noreferrer">
+        {project.cover_image.split('/').pop()}
+      </a>
+    );
+  }
+
if (project.keywords) {
projectData['Keywords'] = project.keywords;
}
diff --git a/client/src/components/_custom/drp/utils/DataDisplay/DataDisplay.jsx b/client/src/components/_custom/drp/utils/DataDisplay/DataDisplay.jsx
index 805913226..2e6205fbf 100644
--- a/client/src/components/_custom/drp/utils/DataDisplay/DataDisplay.jsx
+++ b/client/src/components/_custom/drp/utils/DataDisplay/DataDisplay.jsx
@@ -3,7 +3,6 @@ import PropTypes from 'prop-types';
import { Section, SectionContent, LoadingSpinner, Button } from '_common';
import { useLocation, Link } from 'react-router-dom';
import styles from './DataDisplay.module.scss';
-import { useFileListing } from 'hooks/datafiles';
import { useDispatch } from 'react-redux';
// Function to format the dict key from snake_case to Label Case i.e. data_type -> Data Type
@@ -80,9 +79,17 @@ const processModalViewableData = (data) => {
}));
};
-const DataDisplay = ({ data, path, excludeKeys, modalData }) => {
- const location = useLocation();
-
+const processCoverImage = (data) => {
+  // Render the cover image as a labeled entry whose file name links to the pre-signed cover_image_url
+  return [
+    {
+      label: 'Cover Image',
+      value: (
+        <a href={data.cover_image_url} target="_blank" rel="noreferrer">
+          {data.cover_image.split('/').pop()}
+        </a>
+      ),
+    },
+  ];
+};
+
+const DataDisplay = ({ data, path, excludeKeys, modalData, coverImage }) => {
//filter out empty values and unwanted keys
let processedData = Object.entries(data)
.filter(([key, value]) => value !== '' && !excludeKeys.includes(key))
@@ -91,6 +98,10 @@ const DataDisplay = ({ data, path, excludeKeys, modalData }) => {
value: typeof value === 'string' ? formatLabel(value) : value,
}));
+ if (coverImage) {
+ processedData.unshift(...processCoverImage(data));
+ }
+
if (path) {
processedData.unshift(...processSampleAndOriginData(data, path));
}
diff --git a/client/src/redux/sagas/projects.sagas.js b/client/src/redux/sagas/projects.sagas.js
index ae4edc6da..2cb307dbe 100644
--- a/client/src/redux/sagas/projects.sagas.js
+++ b/client/src/redux/sagas/projects.sagas.js
@@ -1,6 +1,7 @@
import { put, takeLatest, call } from 'redux-saga/effects';
import queryStringParser from 'query-string';
import { fetchUtil } from 'utils/fetchUtil';
+import Cookies from 'js-cookie';
export async function fetchProjectsListing(queryString, rootSystem) {
const q = queryStringParser.stringify({ query_string: queryString });
@@ -63,14 +64,30 @@ export function* showSharedWorkspaces(action) {
}
export async function fetchCreateProject(project) {
+
+ const formData = new FormData();
+
+ const { file, ...projectMetadata } = project.metadata; // Exclude the file
+ formData.append('metadata', JSON.stringify(projectMetadata));
+
+ if (file) {
+ formData.append('cover_image', file);
+ }
+
+ Object.entries(project)
+ .filter(([key, value]) => value != null && key !== 'metadata')
+ .forEach(([key, value]) => {
+ formData.append(key, value);
+ });
+
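+ // Send FormData without an explicit Content-Type so the browser sets the multipart boundary; only the CSRF token is needed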
const result = await fetchUtil({
url: `/api/projects/`,
method: 'POST',
- headers: {
- 'Content-Type': 'application/json',
- },
- body: JSON.stringify(project),
+ headers: { 'X-CSRFToken': Cookies.get('csrftoken') },
+ credentials: 'same-origin',
+ body: formData,
});
+
return result.response;
}
@@ -166,14 +183,30 @@ export function* setMember(action) {
}
export async function setTitleDescriptionUtil(projectId, data) {
+
+ const formData = new FormData();
+
+ const { file, ...projectMetadata } = data.metadata; // Exclude the file
+ formData.append('metadata', JSON.stringify(projectMetadata));
+
+ if (file) {
+ formData.append('cover_image', file);
+ }
+
+ Object.entries(data)
+ .filter(([key, value]) => value != null && key !== 'metadata')
+ .forEach(([key, value]) => {
+ formData.append(key, value);
+ });
+
const result = await fetchUtil({
url: `/api/projects/${projectId}/`,
method: 'PATCH',
- headers: {
- 'Content-Type': 'application/json',
- },
- body: JSON.stringify(data),
+ headers: { 'X-CSRFToken': Cookies.get('csrftoken') },
+ credentials: 'same-origin',
+ body: formData,
});
+
return result.response;
}
diff --git a/server/portal/apps/_custom/drp/models.py b/server/portal/apps/_custom/drp/models.py
index 095fe853c..10a7efd60 100644
--- a/server/portal/apps/_custom/drp/models.py
+++ b/server/portal/apps/_custom/drp/models.py
@@ -135,6 +135,7 @@ class DrpProjectMetadata(DrpMetadataModel):
is_review_project: Optional[bool] = None
is_published_project: Optional[bool] = None
guest_users: list[DrpGuestUser] = []
+ cover_image: Optional[str] = None
class DrpDatasetMetadata(DrpMetadataModel):
"""Model for Base DRP Dataset Metadata"""
diff --git a/server/portal/apps/projects/views.py b/server/portal/apps/projects/views.py
index 18bd9a155..a17e843da 100644
--- a/server/portal/apps/projects/views.py
+++ b/server/portal/apps/projects/views.py
@@ -35,6 +35,9 @@
from portal.apps._custom.drp import constants
from portal.apps.projects.workspace_operations.graph_operations import add_node_to_project, initialize_project_graph, get_node_from_path
from portal.apps.projects.tasks import process_file, sync_files_without_metadata
+from portal.libs.files.file_processing import resize_cover_image
+from portal.libs.agave.utils import service_account
+from django.http.multipartparser import MultiPartParser
LOGGER = logging.getLogger(__name__)
@@ -133,22 +136,35 @@ def get(self, request, root_system=None):
@transaction.atomic
def post(self, request): # pylint: disable=no-self-use
"""POST handler."""
- data = json.loads(request.body)
- title = data['title']
- description = data['description']
- metadata = data['metadata']
+ title = request.POST.get('title')
+ description = request.POST.get('description')
+ metadata = request.POST.get('metadata')
+ cover_image = request.FILES.get('cover_image')
workspace_number = increment_workspace_count()
- workspace_id = f"{settings.PORTAL_PROJECTS_SYSTEM_PREFIX}.{settings.PORTAL_PROJECTS_ID_PREFIX}-{workspace_number}"
+ system_id = f"{settings.PORTAL_PROJECTS_SYSTEM_PREFIX}.{settings.PORTAL_PROJECTS_ID_PREFIX}-{workspace_number}"
if metadata is not None:
- metadata["projectId"] = workspace_id
+ metadata = json.loads(metadata)
+
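+ # Store only the relative path in metadata; the image itself is uploaded to the projects root system below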
+ if cover_image:
+ metadata['cover_image'] = f'media/{settings.PORTAL_PROJECTS_ID_PREFIX}-{workspace_number}/cover_image/{cover_image.name}'
+
+ metadata["projectId"] = system_id
project_meta = create_project_metadata(metadata)
initialize_project_graph(project_meta.project_id)
client = request.user.tapis_oauth.client
system_id = create_shared_workspace(client, title, request.user.username, description, workspace_number)
+ # Upload cover image to media folder
+ if cover_image:
+ service_client = service_account()
+ resized_file = resize_cover_image(cover_image)
+ service_client.files.insert(systemId=settings.PORTAL_PROJECTS_ROOT_SYSTEM_NAME,
+ path=f'media/{settings.PORTAL_PROJECTS_ID_PREFIX}-{workspace_number}/cover_image/{cover_image.name}',
+ file=resized_file)
+
return JsonResponse(
{
'status': 200,
@@ -191,6 +207,20 @@ def get(self, request, project_id=None, system_id=None):
prj.update(get_ordered_value(project.name, project.value))
prj["projectId"] = project_id
+ if prj.get("cover_image") is not None:
+ service_client = service_account()
+
+ if prj["is_published_project"]:
+ root_system = settings.PORTAL_PROJECTS_PUBLISHED_ROOT_SYSTEM_NAME
+ elif prj["is_review_project"]:
+ root_system = settings.PORTAL_PROJECTS_REVIEW_ROOT_SYSTEM_NAME
+ else:
+ root_system = settings.PORTAL_PROJECTS_ROOT_SYSTEM_NAME
+
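+ # Create a pre-signed URL so the client can display the cover image without Tapis credentials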
+ postit = service_client.files.createPostIt(systemId=root_system, path=prj['cover_image'], allowedUses=-1,
+ validSeconds=86400)
+ prj["cover_image_url"] = postit.redeemUrl
+
if not getattr(prj, 'is_review_project', False) and not getattr(prj, 'is_published_project', False):
sync_files_without_metadata.delay(client.access_token.access_token, f"{settings.PORTAL_PROJECTS_SYSTEM_PREFIX}.{project_id}")
except:
@@ -236,17 +266,43 @@ def patch(
:param request: Request object
:param str project_id: Project Id.
"""
- data = json.loads(request.body)
- metadata = data['metadata']
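+ # request.POST and request.FILES are only populated for POST requests, so parse the multipart PATCH body manually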
+ query_dict, multi_value_dict = MultiPartParser(request.META, request,
+ request.upload_handlers).parse()
+
+ title = query_dict.get('title')
+ description = query_dict.get('description')
+ metadata = query_dict.get('metadata')
+ cover_image = multi_value_dict.get('cover_image')
+
project_id_full = f"{settings.PORTAL_PROJECTS_SYSTEM_PREFIX}.{project_id}"
client = request.user.tapis_oauth.client
- workspace_def = update_project(client, project_id, data['title'], data['description'])
+ workspace_def = update_project(client, project_id, title, description)
if metadata is not None:
- entity = patch_project_entity(project_id_full, metadata)
+ metadata = json.loads(metadata)
+
+ if cover_image:
+ metadata['cover_image'] = f'media/{project_id}/cover_image/{cover_image.name}'
+
+ entity = patch_project_entity(project_id_full, metadata)
workspace_def.update(get_ordered_value(entity.name, entity.value))
workspace_def["projectId"] = project_id
+
+ # Upload cover image to media folder
+ if cover_image:
+ service_client = service_account()
+ resized_file = resize_cover_image(cover_image)
+ service_client.files.insert(systemId=settings.PORTAL_PROJECTS_ROOT_SYSTEM_NAME,
+ path=f'media/{project_id}/cover_image/{cover_image.name}',
+ file=resized_file)
+
+ # Get the postit for the cover image
+ postit = service_client.files.createPostIt(systemId=settings.PORTAL_PROJECTS_ROOT_SYSTEM_NAME,
+ path=f'media/{project_id}/cover_image/{cover_image.name}',
+ allowedUses=-1,
+ validSeconds=86400)
+ workspace_def["cover_image_url"] = postit.redeemUrl
return JsonResponse(
{
diff --git a/server/portal/apps/projects/workspace_operations/project_publish_operations.py b/server/portal/apps/projects/workspace_operations/project_publish_operations.py
index a28768504..284d1d251 100644
--- a/server/portal/apps/projects/workspace_operations/project_publish_operations.py
+++ b/server/portal/apps/projects/workspace_operations/project_publish_operations.py
@@ -39,6 +39,27 @@ def _transfer_files(client, source_system_id, dest_system_id):
transfer = service_client.files.createTransferTask(elements=transfer_elements)
return transfer
+def _transfer_cover_image(source_system_id, dest_system_id, cover_image_path):
+
+ if not cover_image_path:
+ logger.info('No cover image found for project, skipping transfer.')
+ return None
+
+ service_client = service_account()
+
+ # Transfer the cover image to the destination system
+ transfer_elements = [
+ {
+ 'sourceURI': f'tapis://{source_system_id}/{cover_image_path}',
+ 'destinationURI': f'tapis://{dest_system_id}/{cover_image_path}'
+ }
+ ]
+
+ transfer = service_client.files.createTransferTask(elements=transfer_elements)
+ logger.info(f"Transfer task created for cover image: {transfer.uuid}")
+ return transfer
+
+
def _check_transfer_status(service_client, transfer_task_id):
transfer_details = service_client.files.getTransferTask(transferTaskId=transfer_task_id)
return transfer_details.status
@@ -155,6 +176,9 @@ def publish_project(self, project_id: str, version: Optional[int] = 1):
# transfer files
client = service_account()
transfer = _transfer_files(client, review_system_id, published_system_id)
+ cover_image_transfer = _transfer_cover_image(settings.PORTAL_PROJECTS_ROOT_REVIEW_SYSTEM_NAME,
+ settings.PORTAL_PROJECTS_PUBLISHED_ROOT_SYSTEM_NAME,
+ project_meta.value.get("coverImage", None))
poll_tapis_file_transfer.apply_async(
args=(transfer.uuid, False),
@@ -180,6 +204,9 @@ def copy_graph_and_files_for_review_system(self, user_access_token, source_works
client = user_account(user_access_token)
transfer = _transfer_files(client, source_system_id, review_system_id)
+ cover_image_transfer = _transfer_cover_image(settings.PORTAL_PROJECTS_ROOT_SYSTEM_NAME,
+ settings.PORTAL_PROJECTS_ROOT_REVIEW_SYSTEM_NAME,
+ review_project.value.get("coverImage", None))
logger.info(f'Transfer task submmited with id {transfer.uuid}')
diff --git a/server/portal/libs/files/file_processing.py b/server/portal/libs/files/file_processing.py
index c86e128ec..2143b1a59 100644
--- a/server/portal/libs/files/file_processing.py
+++ b/server/portal/libs/files/file_processing.py
@@ -1,11 +1,13 @@
import numpy as np
import io
+import os
import logging
from matplotlib import pyplot as plt
import csv
import matplotlib.animation as anim
import tempfile
import tifffile as tiff
+from PIL import Image
logger = logging.getLogger(__name__)
@@ -180,4 +182,39 @@ def save_to_tempfile(self):
gif_binary_data = animated_gif.save_to_tempfile()
logger.debug('Animated Gif Created')
- return gif_binary_data
\ No newline at end of file
+ return gif_binary_data
+
+def resize_cover_image(img):
+    """Resize an uploaded cover image so its longest edge is at most 500px, preserving aspect ratio."""
+    max_size = 500
+    image = Image.open(img)
+    original_format = image.format  # e.g. 'JPEG', 'PNG'; resize() drops this attribute
+    (width, height) = image.size
+
+    _, ext = os.path.splitext(img.name)
+    ext = ext.lower()
+
+    if width > max_size or height > max_size:
+        # Scale the longer edge down to max_size, keeping the aspect ratio
+        modifier = max_size / width if width > height else max_size / height
+        size = (round(width * modifier), round(height * modifier))
+        image = image.resize(size, Image.Resampling.LANCZOS)
+
+    format_map = {
+        '.jpg': 'JPEG',
+        '.jpeg': 'JPEG',
+        '.png': 'PNG',
+        '.gif': 'GIF',
+    }
+
+    # Re-encode in the original format and return the raw bytes
+    buffer = io.BytesIO()
+    image.save(buffer, format=format_map.get(ext, original_format or 'PNG'))
+    buffer.seek(0)
+
+    # Clean up
+    image.close()
+
+    return buffer.getvalue()