Commit
S3 image sequence remove (#130)
* removing the logic for image-sequence S3 processing

* removing the logic for image-sequence S3 processing

* linting

* small graphing fix
BryonLewis authored Apr 16, 2024
1 parent 19ecbc5 commit 843ba2a
Showing 3 changed files with 59 additions and 29 deletions.
4 changes: 2 additions & 2 deletions client/src/components/controls/TimelineButtons.vue
@@ -89,8 +89,8 @@ export default defineComponent({
 
     const iconMap = ref({
       filter: 'mdi-filter',
-      graph: 'mdi-chart-timeline',
-      swimlane: 'mdi-chart-timeline-variant',
+      graph: 'mdi-chart-timeline-variant',
+      swimlane: 'mdi-chart-timeline',
       event: '',
       detections: '',
     });
27 changes: 13 additions & 14 deletions server/dive_server/event.py
@@ -16,11 +16,9 @@
     AssetstoreSourcePathMarker,
     DatasetMarker,
     FPSMarker,
-    ImageSequenceType,
     MarkForPostProcess,
     TypeMarker,
     VideoType,
-    imageRegex,
     videoRegex,
 )

@@ -50,18 +48,19 @@ def process_assetstore_import(event, meta: dict):
         return
 
     dataset_type = None
-    item = Item().findOne({"_id": info["id"]})
-    item['meta'].update(
-        {
-            **meta,
-            AssetstoreSourcePathMarker: importPath,
-        }
-    )
 
-    if imageRegex.search(importPath):
-        dataset_type = ImageSequenceType
-
-    elif videoRegex.search(importPath):
+    # DIVE-DSA is mainly used for video data; remove auto importing of image-sequences for S3
+    # if imageRegex.search(importPath):
+    #     dataset_type = ImageSequenceType
+
+    if videoRegex.search(importPath):
+        item = Item().findOne({"_id": info["id"]})
+        item['meta'].update(
+            {
+                **meta,
+                AssetstoreSourcePathMarker: importPath,
+            }
+        )
         # Look for existing video dataset directory
         parentFolder = Folder().findOne({"_id": item["folderId"]})
         userId = parentFolder['creatorId'] or parentFolder['baseParentId']
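The net effect in event.py: assetstore imports are now marked for DIVE processing only when the path looks like a video file, and image-sequence paths are left alone. A minimal sketch of that decision, using simplified stand-in regex patterns (the real patterns and the Girder item handling live in dive_utils and the handler above):

# Hypothetical, simplified sketch of the post-change import decision (not the real handler).
import re

videoRegex = re.compile(r"\.(mp4|avi|mov|mpg|mpeg)$", re.IGNORECASE)   # stand-in pattern
imageRegex = re.compile(r"\.(png|jpg|jpeg|tif|tiff)$", re.IGNORECASE)  # no longer consulted

def classify_import(import_path: str):
    """Return the dataset type to mark for an assetstore import, or None."""
    # Image-sequence paths are intentionally ignored for S3 imports now.
    if videoRegex.search(import_path):
        return "video"
    return None

assert classify_import("clips/session1.mp4") == "video"
assert classify_import("frames/frame_0001.png") is None  # previously became an ImageSequenceType dataset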
57 changes: 44 additions & 13 deletions server/dive_server/views_metadata.py
@@ -13,7 +13,14 @@
 import pymongo
 
 from dive_utils import TRUTHY_META_VALUES, FALSY_META_VALUES
-from dive_utils.constants import jsonRegex, ndjsonRegex, DIVEMetadataMarker, DIVEMetadataFilter, DIVEMetadataClonedFilter, DIVEMetadataClonedFilterBase
+from dive_utils.constants import (
+    jsonRegex,
+    ndjsonRegex,
+    DIVEMetadataMarker,
+    DIVEMetadataFilter,
+    DIVEMetadataClonedFilter,
+    DIVEMetadataClonedFilterBase,
+)
 from dive_utils.metadata.models import DIVE_Metadata, DIVE_MetadataKeys
 from . import crud_dataset
 
@@ -170,7 +177,9 @@ def __init__(self, resourceName):
             default=50,
         )
     )
-    def process_metadata(self, folder, sibling_path, fileType, matcher, path_key, displayConfig, categoricalLimit):
+    def process_metadata(
+        self, folder, sibling_path, fileType, matcher, path_key, displayConfig, categoricalLimit
+    ):
         # Process the current folder for the specified fileType using the matcher to generate DIVE_Metadata
         # make sure the folder is set to a DIVE Metadata folder using DIVE_METADATA = True
         user = self.getCurrentUser()
@@ -206,15 +215,22 @@ def process_metadata(self, folder, sibling_path, fileType, matcher, path_key, displayConfig, categoricalLimit):
             matched = False
             key_path = item.get(path_key, False)
             base_modified_key_path = remove_before_folder(key_path, root_name)
-            childFolders = list(
-                Folder().childFolders(folder, 'folder', user=user)
-            )
-            modified_key_paths = [{"root": root_name, "modified_path": base_modified_key_path}]
+            childFolders = list(Folder().childFolders(folder, 'folder', user=user))
+            modified_key_paths = [
+                {"root": root_name, "modified_path": base_modified_key_path}
+            ]
             print(f" Length of child folders: {len(childFolders)}")
             print(childFolders)
             for childFolder in childFolders:
                 print(f"Child Item: {childFolder['name']} path: {key_path}")
-                modified_key_paths.append({"root": childFolder["name"], "modified_path": remove_before_folder(key_path, childFolder['name'])})
+                modified_key_paths.append(
+                    {
+                        "root": childFolder["name"],
+                        "modified_path": remove_before_folder(
+                            key_path, childFolder['name']
+                        ),
+                    }
+                )
             resource_path = ""
             print(modified_key_paths)
             for datasetFolder in results:
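The loop above collects one candidate path per child-folder root so that a metadata row can be matched to a dataset folder no matter which subtree it was imported under. A rough sketch of the idea, assuming remove_before_folder simply drops the path segments before the named folder (the real helper in the server code may differ):

# Hypothetical stand-in for the path normalization used above.
def remove_before_folder(path, folder_name):
    # Drop everything in the path before the named folder; None if the folder is absent.
    parts = path.split('/')
    if folder_name in parts:
        return '/'.join(parts[parts.index(folder_name):])
    return None

print(remove_before_folder("bucket/Video/clips/session1.mp4", "Video"))
# -> Video/clips/session1.mp4, comparable to a dataset folder's resource path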
@@ -233,7 +249,9 @@ def process_metadata(self, folder, sibling_path, fileType, matcher, path_key, displayConfig, categoricalLimit):
                     if modified_path:
                         if modified_path == resource_path:
                             item['pathMatches'] = True
-                            DIVE_Metadata().createMetadata(datasetFolder, folder, user, item)
+                            DIVE_Metadata().createMetadata(
+                                datasetFolder, folder, user, item
+                            )
                             added += 1
                             matched = True
                             break
@@ -275,7 +293,8 @@ def process_metadata(self, folder, sibling_path, fileType, matcher, path_key, displayConfig, categoricalLimit):
             item = metadataKeys[key]
             metadataKeys[key]["unique"] = len(item["set"])
             if item["type"] in ['string', 'array'] and (
-                item["count"] < categoricalLimit or (item["count"] <= len(item["set"]) and len(item["set"]) < categoricalLimit)
+                item["count"] < categoricalLimit
+                or (item["count"] <= len(item["set"]) and len(item["set"]) < categoricalLimit)
             ):
                 metadataKeys[key]["category"] = "categorical"
                 metadataKeys[key]['set'] = list(metadataKeys[key]['set'])
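The reflowed condition above decides when a string or array key is treated as categorical: either the key appears on fewer rows than categoricalLimit, or its row count does not exceed its number of distinct values while that distinct count stays under the limit. A small self-contained check of the same expression, with invented values:

# Hypothetical key summaries illustrating the categorical test above (values invented).
categoricalLimit = 50

def is_categorical(item):
    return item["type"] in ['string', 'array'] and (
        item["count"] < categoricalLimit
        or (item["count"] <= len(item["set"]) and len(item["set"]) < categoricalLimit)
    )

print(is_categorical({"type": "string", "count": 20, "set": {"train", "test"}}))  # True: under the row limit
print(is_categorical({"type": "string", "count": 500, "set": {"a", "b", "c"}}))   # False: too many rows, values repeat
print(is_categorical({"type": "number", "count": 10, "set": {1, 2, 3}}))          # False: only string/array keys qualify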
@@ -293,11 +312,17 @@ def process_metadata(self, folder, sibling_path, fileType, matcher, path_key, displayConfig, categoricalLimit):
         folder['meta'][DIVEMetadataFilter] = displayConfig
         Folder().save(folder)
 
-        return {"results": f"added {added} folders", "errors": errorLog, "metadataKeys": metadataKeys}
+        return {
+            "results": f"added {added} folders",
+            "errors": errorLog,
+            "metadataKeys": metadataKeys,
+        }
 
     @access.user
     @autoDescribeRoute(
-        Description("Get a list of filter keys for a specific folder. This is more used for debugging values in the metadata").modelParam(
+        Description(
+            "Get a list of filter keys for a specific folder. This is more used for debugging values in the metadata"
+        ).modelParam(
             "id",
             description="Base folder ID",
             model=Folder,
@@ -349,7 +374,7 @@ def filter_folder(self, folder, filters, limit, offset, sort):
             'totalPages': pages,
             'pageResults': list(metadata_items),
             'count': total_items,
-            'filtered': metadata_items.count()
+            'filtered': metadata_items.count(),
         }
         return structured_results
 
@@ -393,7 +418,13 @@ def clone_filter(
         user = self.getCurrentUser()
         query = self.get_filter_query(baseFolder, user, filters)
         metadata_items = DIVE_Metadata().find(query, user=self.getCurrentUser())
-        Folder().setMetadata(destFolder, {DIVEMetadataClonedFilter: json.dumps(filters), DIVEMetadataClonedFilterBase: baseFolder["_id"]})
+        Folder().setMetadata(
+            destFolder,
+            {
+                DIVEMetadataClonedFilter: json.dumps(filters),
+                DIVEMetadataClonedFilterBase: baseFolder["_id"],
+            },
+        )
         if metadata_items is not None:
             for item in list(metadata_items):
                 print(item)
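For reference, the setMetadata call in clone_filter stamps the destination folder with the serialized filter and a pointer back to the base metadata folder. A hypothetical illustration of the stored shape, using the constant names as literal keys and an invented filter (the real constant values and filter schema come from dive_utils.constants):

# Hypothetical illustration of the metadata stamped onto a cloned filter folder.
import json

filters = {"confidence": {"range": [0.5, 1.0]}}  # invented example filter
base_folder_id = "<base folder ObjectId>"        # placeholder for baseFolder["_id"]

cloned_meta = {
    "DIVEMetadataClonedFilter": json.dumps(filters),    # filter stored serialized
    "DIVEMetadataClonedFilterBase": base_folder_id,      # points back to the source metadata folder
}
print(cloned_meta)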
