Media upload datasource! #419

Merged: 24 commits, merged Jun 25, 2024 (the diff below shows the changes from 1 commit)

Commits
d46feae
basic changes to allow files box
dale-wahl Mar 27, 2024
cffe373
basic imports, yay!
dale-wahl Mar 27, 2024
888b078
video_scene_timelines to work on video imports!
dale-wahl Mar 27, 2024
677fd7a
add is_compatible_with checks to processors that cannot run on new me…
dale-wahl Mar 27, 2024
6c84ecd
more is_compatible fixes
dale-wahl Mar 27, 2024
9f8b089
necessary function for checking media_types
dale-wahl Mar 27, 2024
53426fe
enable more processors on media datasets
dale-wahl Mar 28, 2024
73580e9
Merge branch 'master' into media_upload
dale-wahl May 2, 2024
b082fbc
Merge branch 'master' into media_upload
dale-wahl May 3, 2024
30c5975
consolidate user_input file type
dale-wahl May 3, 2024
e5d8ef1
detect mimetype from filename
dale-wahl May 6, 2024
43ba4ca
handle zip archives; allow log and metadata files
dale-wahl May 7, 2024
1c7ba16
do not count metadata or log files in num_files
dale-wahl May 7, 2024
8c1eca8
Merge branch 'master' into media_upload
dale-wahl May 28, 2024
91e8697
move machine learning processors so they can be imported elsewhere
dale-wahl May 29, 2024
14f1539
audio_to_text datasource
dale-wahl May 29, 2024
211c4d8
Merge branch 'master' into media_upload
dale-wahl Jun 19, 2024
fd11a7e
When validating zip file uploads, send list of file attributes instea…
stijn-uva Jun 20, 2024
86df7e6
Check type of files in zip when uploading media
stijn-uva Jun 21, 2024
7e144fc
Skip useless files when uploading media as zip
stijn-uva Jun 21, 2024
2ee050f
Merge branch 'master' into media_upload
stijn-uva Jun 21, 2024
f34b054
check multiple zip types in JS
dale-wahl Jun 25, 2024
8225871
js !=== python
dale-wahl Jun 25, 2024
4f7cfc1
fix media_type for loose file imports; fix extension for audio_to_tex…
dale-wahl Jun 25, 2024
32 changes: 26 additions & 6 deletions datasources/media_import/import_media.py
@@ -82,7 +82,8 @@ def validate_query(query, request, user):
         files = [f for f in files if not (
             f["filename"].split("/")[-1].startswith(".")
             or f["filename"].endswith(".log")
-            or f["filename"].split("/")[-1].startswith("__")
+            or f["filename"].split("/")[-1].startswith("__MACOSX")
+            or f["filename"].endswith(".DS_Store")
             or f["filename"].endswith("/") # sub-directory
         )]
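A quick self-contained sketch of what this filter keeps and drops, assuming (as the hunk suggests) that files is a list of dicts with a "filename" key describing the zip's contents; the sample names are invented for illustration:

    # Sketch only: sample entries run through the same skip rules as above.
    files = [{"filename": name} for name in (
        "photos/cat.jpg",    # kept
        "photos/.DS_Store",  # dropped: Finder metadata
        "capture.log",       # dropped: log file
        "__MACOSX",          # dropped: macOS metadata folder
        "photos/raw/",       # dropped: sub-directory entry
    )]

    files = [f for f in files if not (
        f["filename"].split("/")[-1].startswith(".")
        or f["filename"].endswith(".log")
        or f["filename"].split("/")[-1].startswith("__MACOSX")
        or f["filename"].endswith(".DS_Store")
        or f["filename"].endswith("/")  # sub-directory
    )]
    print([f["filename"] for f in files])  # ['photos/cat.jpg']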

@@ -170,7 +171,8 @@ def after_create(query, dataset, request):
         """
         mime_type = query.get("media_type")
         saved_files = 0
-        with zipfile.ZipFile(dataset.get_results_path(), "w", compression=zipfile.ZIP_STORED) as new_zip_archive:
+        skipped_files = []
+        with (zipfile.ZipFile(dataset.get_results_path(), "w", compression=zipfile.ZIP_STORED) as new_zip_archive):
@dale-wahl (Member, Author) commented on Jun 25, 2024:

    this does not load for me in parentheses
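The comment appears to refer to the parenthesized with-statement on the added line above: putting the context manager and its "as" clause inside parentheses only parses on newer Python versions (the syntax is documented from Python 3.10), so older interpreters raise a SyntaxError. A minimal sketch of the two forms, using a placeholder archive path rather than the real dataset.get_results_path():

    import zipfile

    archive_path = "results.zip"  # placeholder path, for illustration only

    # Parenthesized form: only parses on recent Python (documented from 3.10);
    # older interpreters raise SyntaxError before the module even loads.
    # with (zipfile.ZipFile(archive_path, "w", compression=zipfile.ZIP_STORED) as new_zip_archive):
    #     ...

    # Unparenthesized form: parses on any Python 3.
    with zipfile.ZipFile(archive_path, "w", compression=zipfile.ZIP_STORED) as new_zip_archive:
        new_zip_archive.writestr("placeholder.txt", "example content")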

             for file in request.files.getlist("option-data_upload"):
                 # Check if file is zip archive
                 file_mime_type = mimetypes.guess_type(file.filename)[0]
@@ -184,10 +186,22 @@ def after_create(query, dataset, request):
                             if inner_file.is_dir():
                                 continue
 
-                            if mime_type is None:
-                                # Only zip files were uploaded and media type still unknown; we set media_type here
-                                # TODO: any point in checking all inner files? they've already been uploaded.
-                                dataset.media_type = mimetypes.guess_type(inner_file.filename)[0].split('/')[0]
+                            guessed_file_mime_type = mimetypes.guess_type(inner_file.filename)
+                            if guessed_file_mime_type[0]:
+                                mime_type = guessed_file_mime_type[0].split('/')[0]
+
+                            # skip useless metadata files
+                            # also skip files not recognised as media files
+                            clean_file_name = inner_file.filename.split("/")[-1]
+                            if not guessed_file_mime_type[0] or (
+                                    mime_type not in SearchMedia.accepted_file_types
+                                    and not clean_file_name.endswith(".log")
+                                    and not clean_file_name == ".metadata.json"
+                            ) or clean_file_name.startswith("__MACOSX") \
+                                    or inner_file.filename.startswith("__MACOSX"):
+                                print(f"skipping {clean_file_name} ({guessed_file_mime_type})")
+                                skipped_files.append(inner_file.filename)
+                                continue
 
                             # save inner file from the uploaded zip archive to the new zip with all files
                             new_filename = SearchMedia.get_safe_filename(inner_file.filename, new_zip_archive)
@@ -211,6 +225,12 @@ def after_create(query, dataset, request):
 
         # update the number of files in the dataset
         dataset.num_files = saved_files
+        dataset.media_type = mime_type
+        if skipped_files:
+            # todo: this now doesn't actually get logged because the log is
+            # re-initialised after after_create runs?
+            dataset.log("The following files in the uploaded zip archive were skipped because they were not recognised "
+                        "as media files:" + "\n -".join(skipped_files))

     def process(self):
         """
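As background for the mime-type checks above: mimetypes.guess_type() returns a (type, encoding) tuple whose first element is None for unrecognised extensions, which is why the new code guards on guessed_file_mime_type[0] before taking the top-level type with split('/')[0]. A small standalone sketch of that pattern; the accepted_file_types set is a stand-in for SearchMedia.accepted_file_types and is assumed here to hold top-level media types:

    import mimetypes

    # Stand-in for SearchMedia.accepted_file_types (assumed to hold top-level types).
    accepted_file_types = {"audio", "video", "image"}

    def top_level_media_type(filename):
        """Return e.g. 'video' for clip.mp4, or None if the extension is unknown."""
        guessed, _encoding = mimetypes.guess_type(filename)
        return guessed.split("/")[0] if guessed else None

    for name in ("clip.mp4", "notes.log", ".metadata.json", "__MACOSX/._clip.mp4"):
        media_type = top_level_media_type(name)
        print(f"{name!r}: {media_type!r}, accepted={media_type in accepted_file_types}")
    # Note: resource-fork files under __MACOSX/ still guess as media by extension,
    # which is why the diff also checks the filename for a "__MACOSX" prefix.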