Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Media upload datasource! #419

Merged
merged 24 commits into from
Jun 25, 2024
Merged
Show file tree
Hide file tree
Changes from 9 commits
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
d46feae
basic changes to allow files box
dale-wahl Mar 27, 2024
cffe373
basic imports, yay!
dale-wahl Mar 27, 2024
888b078
video_scene_timelines to work on video imports!
dale-wahl Mar 27, 2024
677fd7a
add is_compatible_with checks to processors that cannot run on new me…
dale-wahl Mar 27, 2024
6c84ecd
more is_compatible fixes
dale-wahl Mar 27, 2024
9f8b089
necessary function for checking media_types
dale-wahl Mar 27, 2024
53426fe
enable more processors on media datasets
dale-wahl Mar 28, 2024
73580e9
Merge branch 'master' into media_upload
dale-wahl May 2, 2024
b082fbc
Merge branch 'master' into media_upload
dale-wahl May 3, 2024
30c5975
consolidate user_input file type
dale-wahl May 3, 2024
e5d8ef1
detect mimetype from filename
dale-wahl May 6, 2024
43ba4ca
handle zip archives; allow log and metadata files
dale-wahl May 7, 2024
1c7ba16
do not count metadata or log files in num_files
dale-wahl May 7, 2024
8c1eca8
Merge branch 'master' into media_upload
dale-wahl May 28, 2024
91e8697
move machine learning processors so they can be imported elsewhere
dale-wahl May 29, 2024
14f1539
audio_to_text datasource
dale-wahl May 29, 2024
211c4d8
Merge branch 'master' into media_upload
dale-wahl Jun 19, 2024
fd11a7e
When validating zip file uploads, send list of file attributes instea…
stijn-uva Jun 20, 2024
86df7e6
Check type of files in zip when uploading media
stijn-uva Jun 21, 2024
7e144fc
Skip useless files when uploading media as zip
stijn-uva Jun 21, 2024
2ee050f
Merge branch 'master' into media_upload
stijn-uva Jun 21, 2024
f34b054
check multiple zip types in JS
dale-wahl Jun 25, 2024
8225871
js !=== python
dale-wahl Jun 25, 2024
4f7cfc1
fix media_type for loose file imports; fix extension for audio_to_tex…
dale-wahl Jun 25, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 12 additions & 0 deletions common/lib/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -1524,6 +1524,18 @@ def get_extension(self):

return False

def get_media_type(self):
    """
    Return the media type of the dataset file.

    Prefers an explicit ``media_type`` attribute on the dataset; when that
    is absent, falls back to the ``media_type`` query parameter, defaulting
    to "text".

    :return str:  Media type, e.g. "text"
    """
    try:
        return self.media_type
    except AttributeError:
        # No explicit attribute set - fall back to the query parameters,
        # defaulting to text
        return self.parameters.get("media_type", "text")

def get_result_url(self):
"""
Gets the 4CAT frontend URL of a dataset file.
Expand Down
1 change: 1 addition & 0 deletions common/lib/user_input.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ class UserInput:
OPTION_DATERANGE = "daterange" # a beginning and end date
OPTION_DIVIDER = "divider" # meta-option, divides related sets of options
OPTION_FILE = "file" # file upload
OPTION_FILES = "files" # multiple files upload
dale-wahl marked this conversation as resolved.
Show resolved Hide resolved
OPTION_HUE = "hue" # colour hue
OPTION_DATASOURCES = "datasources" # data source toggling

Expand Down
6 changes: 6 additions & 0 deletions datasources/media_import/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
"""
Data source definition for the media upload datasource.

Registers the datasource identifier and display name used by 4CAT.
"""
# Use default data source init function
from common.lib.helpers import init_datasource

# Internal identifier for this data source
DATASOURCE = "media-import"
# Human-readable name displayed in the 4CAT interface
NAME = "Import/upload Media files"
108 changes: 108 additions & 0 deletions datasources/media_import/import_media.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,108 @@
import re
import time
import zipfile

from backend.lib.processor import BasicProcessor
from common.lib.exceptions import QueryParametersException
from common.lib.user_input import UserInput


class SearchMedia(BasicProcessor):
    """
    Pseudo-"search" processor that turns user-uploaded media files into a
    4CAT dataset: the uploaded files are stored together in a zip archive
    which becomes the dataset's result file.
    """
    type = "media-import-search"  # job ID
    category = "Search"  # category
    title = "Upload Media"  # title displayed in UI
    description = "Upload your own audio, video, or image files to be used as a dataset"  # description displayed in UI
    extension = "zip"  # extension of result file, used internally and in UI
    is_local = False  # Whether this datasource is locally scraped
    is_static = False  # Whether this datasource is still updated

    max_workers = 1

    # characters stripped from uploaded filenames before storing them in the
    # result archive
    disallowed_characters = re.compile(r"[^a-zA-Z0-9._+-]")

    @classmethod
    def get_options(cls, parent_dataset=None, user=None):
        """
        Get the options shown on the upload form.

        :param parent_dataset:  Not used for this datasource
        :param user:  User for whom the options are generated
        :return dict:  Option definitions for the upload form
        """
        return {
            "intro": {
                "type": UserInput.OPTION_INFO,
                "help": "You can upload files here that will be available for further analysis "
                        "and processing. "
                        "You can indicate what type of files are uploaded (image, audio, or video) and based on that, "
                        "the 4CAT will be able to run various processors on these files. "
            },
            "data_upload": {
                "type": UserInput.OPTION_FILES,
                "help": "Files"
            },
            "media_type": {
                "type": UserInput.OPTION_CHOICE,
                "help": "Media type",
                "options": {
                    "audio": "Audio",
                    "video": "Videos",
                    "image": "Images",
                },
                "default": "image"
            },
        }

    @staticmethod
    def validate_query(query, request, user):
        """
        Step 1: Validate query and files

        Confirms that the uploaded files exist and that the media type is valid.

        :param dict query:  Query parameters, from client-side.
        :param request:  Flask request
        :param User user:  User object of user who has submitted the query
        :return dict:  Safe query parameters
        """
        # do we have uploaded files?
        if "option-data_upload" not in request.files:
            raise QueryParametersException("No files were offered for upload.")
        files = request.files.getlist("option-data_upload")
        if len(files) < 1:
            raise QueryParametersException("No files were offered for upload.")

        # do we have a media type?
        if query.get("media_type") not in ["audio", "video", "image"]:
            raise QueryParametersException(f"Cannot import files of type {query.get('media_type')}.")

        # TODO: check file types against media type

        return {
            "time": time.time(),
            "media_type": query.get("media_type"),
            "num_files": len(files),
        }

    @staticmethod
    def after_create(query, dataset, request):
        """
        Step 2: Hook to execute after the dataset for this source has been created

        In this case, save the files in a zip archive.

        :param dict query:  Sanitised query parameters
        :param DataSet dataset:  Dataset created for this query
        :param request:  Flask request submitted for its creation
        """
        # copy uploads in 1 MiB chunks; 1 KiB chunks make large media uploads
        # needlessly slow
        chunk_size = 1024 * 1024
        with zipfile.ZipFile(dataset.get_results_path(), "w", compression=zipfile.ZIP_STORED) as zip_file:
            for index, file in enumerate(request.files.getlist("option-data_upload")):
                # strip characters that may cause trouble in filenames; fall
                # back to a generated name if nothing usable remains, so no
                # nameless zip entry is created
                new_filename = SearchMedia.disallowed_characters.sub("", file.filename)
                if not new_filename:
                    new_filename = f"file-{index}"
                with zip_file.open(new_filename, mode="w") as dest_file:
                    file.seek(0)
                    while True:
                        chunk = file.read(chunk_size)
                        if not chunk:
                            break
                        dest_file.write(chunk)

    def process(self):
        """
        Step 3: Ummmm, we kinda did everything

        The archive was already written in after_create; all that remains is
        marking the dataset as finished with the number of uploaded files.
        """
        self.dataset.finish(self.parameters.get("num_files"))
4 changes: 2 additions & 2 deletions processors/audio/audio_extractor.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,9 +36,9 @@ class AudioExtractor(BasicProcessor):
@classmethod
def is_compatible_with(cls, module=None, user=None):
"""
Allow on tiktok-search only for dev
Allow on videos only
"""
return module.type.startswith("video-downloader") and \
return (module.get_media_type() == "video" or module.type.startswith("video-downloader")) and \
config.get("video-downloader.ffmpeg_path", user=user) and \
shutil.which(config.get("video-downloader.ffmpeg_path"))

Expand Down
10 changes: 10 additions & 0 deletions processors/conversion/stringify.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,16 @@ class Stringify(BasicProcessor):
}
}

@staticmethod
def is_compatible_with(module=None, user=None):
    """
    Determine compatibility: only top-level datasets stored as CSV or
    NDJSON can be stringified.

    :param module:  Module to determine compatibility with
    :param user:  User requesting the processor (unused)
    :return bool:
    """
    tabular_formats = ("csv", "ndjson")
    return module.is_top_dataset() and module.get_extension() in tabular_formats

def process(self):
"""
This takes a 4CAT results file as input, and outputs a plain text file
Expand Down
2 changes: 1 addition & 1 deletion processors/filtering/column_filter.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,7 @@ def is_compatible_with(cls, module=None, user=None):

:param module: Module to determine compatibility with
"""
return module.is_top_dataset()
return module.is_top_dataset() and module.get_extension() in ("csv", "ndjson")

@classmethod
def get_options(cls, parent_dataset=None, user=None):
Expand Down
2 changes: 1 addition & 1 deletion processors/filtering/write_annotations.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ def is_compatible_with(cls, module=None, user=None):

:param module: Module to determine compatibility with
"""
return module.is_top_dataset()
return module.is_top_dataset() and module.get_extension() in ("csv", "ndjson")

def process(self):
"""
Expand Down
2 changes: 1 addition & 1 deletion processors/machine-learning/clip_categorize_images.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ def is_compatible_with(cls, module=None, user=None):
"""
return config.get("dmi-service-manager.cc_clip_enabled", False, user=user) and \
config.get("dmi-service-manager.ab_server_address", False, user=user) and \
module.type.startswith("image-downloader")
(module.get_media_type() == "image" or module.type.startswith("image-downloader"))

@classmethod
def get_options(cls, parent_dataset=None, user=None):
Expand Down
2 changes: 1 addition & 1 deletion processors/machine-learning/pix-plot.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ def is_compatible_with(cls, module=None, user=None):
"""
return config.get("dmi-service-manager.db_pixplot_enabled", False, user=user) and \
config.get("dmi-service-manager.ab_server_address", False, user=user) and \
module.type.startswith("image-downloader")
(module.get_media_type() == "image" or module.type.startswith("image-downloader"))

def process(self):
"""
Expand Down
2 changes: 1 addition & 1 deletion processors/machine-learning/text_from_image.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ def is_compatible_with(cls, module=None, user=None):
"""
return config.get('dmi-service-manager.eb_ocr_enabled', False, user=user) and \
config.get("dmi-service-manager.ab_server_address", False, user=user) and \
module.type.startswith("image-downloader")
(module.get_media_type() == "image" or module.type.startswith("image-downloader"))

def process(self):
"""
Expand Down
2 changes: 1 addition & 1 deletion processors/machine-learning/whisper_speech_to_text.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ def is_compatible_with(cls, module=None, user=None):
"""
return config.get("dmi-service-manager.bc_whisper_enabled", False, user=user) and \
config.get("dmi-service-manager.ab_server_address", False, user=user) and \
module.type.startswith("audio-extractor")
(module.get_media_type() == 'audio' or module.type.startswith("audio-extractor"))

@classmethod
def get_options(cls, parent_dataset=None, user=None):
Expand Down
2 changes: 1 addition & 1 deletion processors/metrics/clarifai_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ def is_compatible_with(cls, module=None, user=None):

:param module: Module to determine compatibility with
"""
return module.type.startswith("image-downloader") or module.type == "video-frames"
return module.get_media_type() == "image" or module.type.startswith("image-downloader") or module.type == "video-frames"

options = {
"amount": {
Expand Down
9 changes: 9 additions & 0 deletions processors/metrics/count_posts.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,15 @@ class CountPosts(BasicProcessor):
}
}

@staticmethod
def is_compatible_with(module=None, user=None):
    """
    Determine compatibility

    Post counts only make sense for top-level tabular (CSV/NDJSON) datasets.

    :param Dataset module:  Module to determine compatibility with
    :param user:  User requesting the processor (unused)
    :return bool:
    """
    supported = ("csv", "ndjson")
    return module.is_top_dataset() and module.get_extension() in supported

def process(self):
"""
Expand Down
2 changes: 1 addition & 1 deletion processors/metrics/google_vision_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ def is_compatible_with(cls, module=None, user=None):

:param module: Module to determine compatibility with
"""
return module.type.startswith("image-downloader") or module.type == "video-frames"
return module.get_media_type() == "image" or module.type.startswith("image-downloader") or module.type == "video-frames"

options = {
"amount": {
Expand Down
10 changes: 10 additions & 0 deletions processors/metrics/hatebase.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,16 @@ class HatebaseAnalyser(BasicProcessor):
}
}

@staticmethod
def is_compatible_with(module=None, user=None):
    """
    Determine compatibility

    Restricted to top-level datasets in CSV or NDJSON format, since those
    are the formats this processor can read posts from.

    :param Dataset module:  Module to determine compatibility with
    :param user:  User requesting the processor (unused)
    :return bool:
    """
    readable = ("csv", "ndjson")
    return module.is_top_dataset() and module.get_extension() in readable

def process(self):
"""
This takes a 4CAT results file as input, and outputs a new CSV file
Expand Down
10 changes: 10 additions & 0 deletions processors/metrics/thread_metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,16 @@ class ThreadMetadata(BasicProcessor):
"that this extracted only on the basis of the items present this dataset." # description displayed in UI
extension = "csv" # extension of result file, used internally and in UI

@staticmethod
def is_compatible_with(module=None, user=None):
    """
    Determine compatibility

    Thread metadata can only be derived from top-level CSV/NDJSON datasets.

    :param Dataset module:  Module to determine compatibility with
    :param user:  User requesting the processor (unused)
    :return bool:
    """
    usable_extensions = ("csv", "ndjson")
    return module.is_top_dataset() and module.get_extension() in usable_extensions

def process(self):
"""
This takes a 4CAT results file as input, and outputs a new CSV file
Expand Down
5 changes: 1 addition & 4 deletions processors/metrics/top_images.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,10 +37,7 @@ def is_compatible_with(cls, module=None, user=None):
:param module: Module to determine compatibility with
"""

if module.is_top_dataset() and module.type != "telegram-search":
return True
else:
return False
return module.is_top_dataset() and module.type != "telegram-search" and module.get_extension() in ("csv", "ndjson")

def process(self):
"""
Expand Down
10 changes: 10 additions & 0 deletions processors/metrics/url_titles.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,16 @@ class URLFetcher(BasicProcessor):
}
}

@staticmethod
def is_compatible_with(module=None, user=None):
    """
    Determine compatibility

    URL titles can only be fetched for top-level datasets stored as CSV or
    NDJSON.

    :param Dataset module:  Module to determine compatibility with
    :param user:  User requesting the processor (unused)
    :return bool:
    """
    accepted = ("csv", "ndjson")
    return module.is_top_dataset() and module.get_extension() in accepted

@classmethod
def get_options(cls, parent_dataset=None, user=None):
"""
Expand Down
10 changes: 10 additions & 0 deletions processors/metrics/vocabulary_overtime.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,16 @@ class OvertimeAnalysis(BasicProcessor):
}
}

@staticmethod
def is_compatible_with(module=None, user=None):
    """
    Determine compatibility

    Over-time vocabulary analysis requires a top-level dataset in CSV or
    NDJSON format.

    :param Dataset module:  Module to determine compatibility with
    :param user:  User requesting the processor (unused)
    :return bool:
    """
    data_formats = ("csv", "ndjson")
    return module.is_top_dataset() and module.get_extension() in data_formats

def process(self):
"""
Reads a CSV file, counts occurences of chosen values over all posts,
Expand Down
2 changes: 1 addition & 1 deletion processors/metrics/youtube_metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ def is_compatible_with(cls, module=None, user=None):
:param module: Module to determine compatibility with
"""
# Compatible with every top-level dataset.
return module.is_top_dataset()
return module.is_top_dataset() and module.get_extension() in ("csv", "ndjson")

def process(self):
"""
Expand Down
9 changes: 9 additions & 0 deletions processors/networks/colink_urls.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,15 @@ class URLCoLinker(BasicProcessor):
}
}

@classmethod
def is_compatible_with(cls, module=None, user=None):
    """
    Allow processor on top datasets.

    Only top-level CSV/NDJSON datasets contain the post data needed to
    extract co-linked URLs.

    :param module:  Module to determine compatibility with
    :return bool:
    """
    allowed_extensions = ("csv", "ndjson")
    return module.is_top_dataset() and module.get_extension() in allowed_extensions

def process(self):
"""
This takes a 4CAT results file as input, and outputs a new CSV file
Expand Down
9 changes: 9 additions & 0 deletions processors/networks/wikipedia_network.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,15 @@ class WikiURLCoLinker(BasicProcessor):
description = "Create a GEXF network file comprised network comprised of linked-to Wikipedia pages, linked to the categories they are part of. English Wikipedia only. Will only fetch the first 10,000 links." # description displayed in UI
extension = "gexf" # extension of result file, used internally and in UI

@classmethod
def is_compatible_with(cls, module=None, user=None):
    """
    Allow processor on top datasets.

    The Wikipedia link network is built from post bodies, which are only
    available in top-level CSV/NDJSON datasets.

    :param module:  Module to determine compatibility with
    :return bool:
    """
    valid_formats = ("csv", "ndjson")
    return module.is_top_dataset() and module.get_extension() in valid_formats

def process(self):
"""
This takes a 4CAT results file as input, and outputs a new CSV file
Expand Down
10 changes: 10 additions & 0 deletions processors/presets/annotate-images.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,16 @@ class AnnotateImages(ProcessorPreset):
}
}

@staticmethod
def is_compatible_with(module=None, user=None):
    """
    Determine compatibility

    This preset can only annotate images referenced from a top-level
    CSV/NDJSON dataset.

    :param Dataset module:  Module to determine compatibility with
    :param user:  User requesting the processor (unused)
    :return bool:
    """
    source_formats = ("csv", "ndjson")
    return module.is_top_dataset() and module.get_extension() in source_formats

def get_processor_pipeline(self):
"""
This queues a series of post-processors to annotate images
Expand Down
Loading
Loading