diff --git a/requirements.txt b/requirements.txt
index c76fa6b..5a55155 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,12 +1,13 @@
-numpy==1.16.*
-scipy==1.1.*
-scikit-learn==0.22.*
-scikit-image==0.16.*
-Pillow==7.1.*
cython
-matplotlib==3.1.*
-tensorflow==1.15.4
-keras==2.1.*
-opencv-python-headless>=3.4.5,<4.0
h5py==2.10.*
imgaug==0.3.*
+keras==2.1.*
+matplotlib==3.1.*
+numpy==1.16.*
+opencv-python-headless>=3.4.5,<4.0
+Pillow==7.1.*
+pyvips==2.1.*
+scikit-image==0.16.*
+scikit-learn==0.22.*
+scipy==1.1.*
+tensorflow==1.15.4
diff --git a/src/Http/Controllers/Api/KnowledgeTransferVolumeController.php b/src/Http/Controllers/Api/KnowledgeTransferVolumeController.php
new file mode 100644
index 0000000..f3daee8
--- /dev/null
+++ b/src/Http/Controllers/Api/KnowledgeTransferVolumeController.php
@@ -0,0 +1,57 @@
+user())
+ ->select('id', 'name')
+ ->has('images.annotations')
+ ->whereNotExists(function ($query) {
+ $query->select(DB::raw(1))
+ ->from('images')
+ ->whereRaw('images.volume_id = volumes.id')
+ ->whereNull('attrs->metadata->distance_to_ground');
+ })
+ ->with(['projects' => function ($query) {
+ $query->select('id', 'name');
+ }])
+ ->get()
+ ->each(function ($volume) {
+ $volume->setHidden(['doi', 'video_link', 'gis_link']);
+ });
+ }
+}
diff --git a/src/Http/Controllers/Api/MaiaJobController.php b/src/Http/Controllers/Api/MaiaJobController.php
index befbd68..4b90824 100644
--- a/src/Http/Controllers/Api/MaiaJobController.php
+++ b/src/Http/Controllers/Api/MaiaJobController.php
@@ -23,19 +23,24 @@ class MaiaJobController extends Controller
*
* @apiParam {Number} id The volume ID.
*
- * @apiParam (Required parameters) {number} nd_clusters Number of different kinds of images to expect. Images are of the same kind if they have similar lighting conditions or show similar patterns (e.g. sea floor, habitat types). Increase this number if you expect many different kinds of images. Lower the number to 1 if you have very few images and/or the content is largely uniform.
- * @apiParam (Required parameters) {number} nd_patch_size Size in pixels of the image patches used determine the training proposals. Increase the size if the images contain larger objects of interest, decrease the size if the objects are smaller. Larger patch sizes take longer to compute. Must be an odd number.
- * @apiParam (Required parameters) {number} nd_threshold Percentile of pixel saliency values used to determine the saliency threshold. Lower this value to get more training proposals. The default value should be fine for most cases.
- * @apiParam (Required parameters) {number} nd_latent_size Learning capability used to determine training proposals. Increase this number to ignore more complex objects and patterns.
- * @apiParam (Required parameters) {number} nd_trainset_size Number of training image patches used to determine training proposals. You can increase this number for a large volume but it will take longer to compute.
- * @apiParam (Required parameters) {number} nd_epochs Time spent on training when determining the training proposals.
- * @apiParam (Required parameters) {number} nd_stride A higher stride increases the speed of the novelty detection but reduces the sensitivity to small regions or objects.
- * @apiParam (Required parameters) {number} nd_ignore_radius Ignore training proposals or annotation candidates which have a radius smaller or equal than this value in pixels.
- * @apiParam (Required parameters) {number} is_epochs_head Time spent on training only the head layers of Mask R-CNN for instance segmentation.
- * @apiParam (Required parameters) {number} is_epochs_all Time spent on training all layers of Mask R-CNN for instance segmentation.
- * @apiParam (Optional parameters) {booolean} use_existing Set to `true` to use existing annotations as training proposals.
- * @apiParam (Optional parameters) {Array} restrict_labels Array of label IDs to restrict the existing annotations to, which should be used as training proposals. `use_existing` must be set if this parameter is present.
- * @apiParam (Optional parameters) {boolean} skip_nd Set to `true` to skip the novelty detection stage and take only existing annotations as training proposals. `use_existing` must be set if this parameter is present. Also, all `nd_*` parameters are ignored and no longer required if this parameter is set.
+ * @apiParam (Required parameters) {string} training_data_method One of `novelty_detection` (to perform novelty detection to generate training data), `own_annotations` (to use existing annotations of the same volume as training data) or `knowledge_transfer` (to use knowledge transfer to get training data from another volume).
+ * @apiParam (Required parameters) {array} is_train_scheme An array containing objects with the following properties. `layers`: Either `heads` or `all`, `epochs`: Number of epochs to train this step, `learning_rate`: Learning rate to use in this step.
+ *
+ * @apiParam (Required parameters for novelty detection) {number} nd_clusters Number of different kinds of images to expect. Images are of the same kind if they have similar lighting conditions or show similar patterns (e.g. sea floor, habitat types). Increase this number if you expect many different kinds of images. Lower the number to 1 if you have very few images and/or the content is largely uniform.
+ * @apiParam (Required parameters for novelty detection) {number} nd_patch_size Size in pixels of the image patches used to determine the training proposals. Increase the size if the images contain larger objects of interest, decrease the size if the objects are smaller. Larger patch sizes take longer to compute. Must be an odd number.
+ * @apiParam (Required parameters for novelty detection) {number} nd_threshold Percentile of pixel saliency values used to determine the saliency threshold. Lower this value to get more training proposals. The default value should be fine for most cases.
+ * @apiParam (Required parameters for novelty detection) {number} nd_latent_size Learning capability used to determine training proposals. Increase this number to ignore more complex objects and patterns.
+ * @apiParam (Required parameters for novelty detection) {number} nd_trainset_size Number of training image patches used to determine training proposals. You can increase this number for a large volume but it will take longer to compute.
+ * @apiParam (Required parameters for novelty detection) {number} nd_epochs Time spent on training when determining the training proposals.
+ * @apiParam (Required parameters for novelty detection) {number} nd_stride A higher stride increases the speed of the novelty detection but reduces the sensitivity to small regions or objects.
+ * @apiParam (Required parameters for novelty detection) {number} nd_ignore_radius Ignore training proposals or annotation candidates which have a radius smaller or equal than this value in pixels.
+ *
+ *
+ * @apiParam (Optional parameters for existing annotations) {Array} oa_restrict_labels Array of label IDs to restrict the existing annotations to, which should be used as training proposals.
+ *
+ * @apiParam (Required parameters for knowledge transfer) {number} kt_volume_id The ID of the volume from which to get the annotations for knowledge transfer.
+ *
+ * @apiParam (Optional parameters for knowledge transfer) {Array} kt_restrict_labels Array of label IDs to restrict the annotations of the other volume to, which should be used as training proposals.
*
* @param StoreMaiaJob $request
* @return \Illuminate\Http\Response
@@ -45,19 +50,26 @@ public function store(StoreMaiaJob $request)
$job = new MaiaJob;
$job->volume_id = $request->volume->id;
$job->user_id = $request->user()->id;
- $job->state_id = State::noveltyDetectionId();
$paramKeys = [
- 'use_existing',
- 'restrict_labels',
- 'skip_nd',
+ 'training_data_method',
// is_* are parameters for instance segmentation.
- 'is_epochs_head',
- 'is_epochs_all',
+ 'is_train_scheme',
];
- if (!$request->has('skip_nd')) {
+ if ($request->input('training_data_method') === MaiaJob::TRAIN_OWN_ANNOTATIONS) {
+ $job->state_id = State::instanceSegmentationId();
+ $paramKeys = array_merge($paramKeys, [
+ 'oa_restrict_labels',
+ ]);
+ } else if ($request->input('training_data_method') === MaiaJob::TRAIN_KNOWLEDGE_TRANSFER) {
+ $job->state_id = State::instanceSegmentationId();
+ $paramKeys = array_merge($paramKeys, [
+ 'kt_volume_id',
+ 'kt_restrict_labels',
+ ]);
+ } else {
+ $job->state_id = State::noveltyDetectionId();
$paramKeys = array_merge($paramKeys, [
- // nd_* are parameters for novelty detection.
'nd_clusters',
'nd_patch_size',
'nd_threshold',
diff --git a/src/Http/Controllers/Views/MaiaJobController.php b/src/Http/Controllers/Views/MaiaJobController.php
index 1384937..b7df2f9 100644
--- a/src/Http/Controllers/Views/MaiaJobController.php
+++ b/src/Http/Controllers/Views/MaiaJobController.php
@@ -3,6 +3,7 @@
namespace Biigle\Modules\Maia\Http\Controllers\Views;
use Biigle\Http\Controllers\Views\Controller;
+use Biigle\ImageAnnotation;
use Biigle\LabelTree;
use Biigle\Modules\Maia\MaiaJob;
use Biigle\Modules\Maia\MaiaJobState as State;
@@ -52,12 +53,32 @@ public function index($id)
$newestJobHasFailed = $jobs->isNotEmpty() ? $jobs[0]->hasFailed() : false;
+ $defaultTrainScheme = collect([
+ ['layers' => 'heads', 'epochs' => 10, 'learning_rate' => 0.001],
+ ['layers' => 'heads', 'epochs' => 10, 'learning_rate' => 0.0005],
+ ['layers' => 'heads', 'epochs' => 10, 'learning_rate' => 0.0001],
+ ['layers' => 'all', 'epochs' => 10, 'learning_rate' => 0.0001],
+ ['layers' => 'all', 'epochs' => 10, 'learning_rate' => 0.00005],
+ ['layers' => 'all', 'epochs' => 10, 'learning_rate' => 0.00001],
+ ]);
+
+ $canUseExistingAnnotations = ImageAnnotation::join('images', 'images.id', '=', 'image_annotations.image_id')
+ ->where('images.volume_id', $volume->id)
+ ->exists();
+
+ $canUseKnowledgeTransfer = !$volume->images()
+ ->whereNull('attrs->metadata->distance_to_ground')
+ ->exists();
+
return view('maia::index', compact(
'volume',
'jobs',
'hasJobsInProgress',
'hasJobsRunning',
- 'newestJobHasFailed'
+ 'newestJobHasFailed',
+ 'defaultTrainScheme',
+ 'canUseExistingAnnotations',
+ 'canUseKnowledgeTransfer'
));
}
diff --git a/src/Http/Requests/StoreMaiaJob.php b/src/Http/Requests/StoreMaiaJob.php
index f653d8a..2b4cff3 100644
--- a/src/Http/Requests/StoreMaiaJob.php
+++ b/src/Http/Requests/StoreMaiaJob.php
@@ -4,12 +4,16 @@
use Biigle\Modules\Maia\MaiaJob;
use Biigle\Modules\Maia\MaiaJobState as State;
+use Biigle\Modules\Maia\Rules\KnowledgeTransferVolume;
use Biigle\Modules\Maia\Rules\OddNumber;
use Biigle\Volume;
use Illuminate\Foundation\Http\FormRequest;
+use Biigle\Modules\Maia\Traits\QueriesExistingAnnotations;
class StoreMaiaJob extends FormRequest
{
+ use QueriesExistingAnnotations;
+
/**
* The volume to create the MAIA job for.
*
@@ -37,20 +41,28 @@ public function authorize()
public function rules()
{
return [
- 'use_existing' => 'required_with:restrict_labels,skip_nd|boolean',
- 'restrict_labels' => 'array',
- 'restrict_labels.*' => 'integer|exists:labels,id',
- 'skip_nd' => 'boolean',
- 'nd_clusters' => 'required_unless:skip_nd,true|integer|min:1|max:100',
- 'nd_patch_size' => ['required_unless:skip_nd,true', 'integer', 'min:3', 'max:99', new OddNumber],
- 'nd_threshold' => 'required_unless:skip_nd,true|integer|min:0|max:99',
- 'nd_latent_size' => 'required_unless:skip_nd,true|numeric|min:0.05|max:0.75',
- 'nd_trainset_size' => 'required_unless:skip_nd,true|integer|min:1000|max:100000',
- 'nd_epochs' => 'required_unless:skip_nd,true|integer|min:50|max:1000',
- 'nd_stride' => 'required_unless:skip_nd,true|integer|min:1|max:10',
- 'nd_ignore_radius' => 'required_unless:skip_nd,true|integer|min:0',
- 'is_epochs_head' => 'required|integer|min:1',
- 'is_epochs_all' => 'required|integer|min:1',
+ 'training_data_method' => 'required|in:novelty_detection,own_annotations,knowledge_transfer',
+
+ 'nd_clusters' => 'required_if:training_data_method,novelty_detection|integer|min:1|max:100',
+ 'nd_patch_size' => ['required_if:training_data_method,novelty_detection', 'integer', 'min:3', 'max:99', new OddNumber],
+ 'nd_threshold' => 'required_if:training_data_method,novelty_detection|integer|min:0|max:99',
+ 'nd_latent_size' => 'required_if:training_data_method,novelty_detection|numeric|min:0.05|max:0.75',
+ 'nd_trainset_size' => 'required_if:training_data_method,novelty_detection|integer|min:1000|max:100000',
+ 'nd_epochs' => 'required_if:training_data_method,novelty_detection|integer|min:50|max:1000',
+ 'nd_stride' => 'required_if:training_data_method,novelty_detection|integer|min:1|max:10',
+ 'nd_ignore_radius' => 'required_if:training_data_method,novelty_detection|integer|min:0',
+
+ 'oa_restrict_labels' => 'array',
+ 'oa_restrict_labels.*' => 'integer|exists:labels,id',
+
+ 'kt_volume_id' => ['required_if:training_data_method,knowledge_transfer', 'integer', 'exists:volumes,id', new KnowledgeTransferVolume],
+ 'kt_restrict_labels.*' => 'integer|exists:labels,id',
+
+ 'is_train_scheme' => 'required|array|min:1',
+ 'is_train_scheme.*' => 'array',
+ 'is_train_scheme.*.layers' => 'required|in:heads,all',
+ 'is_train_scheme.*.epochs' => 'required|integer|min:1',
+ 'is_train_scheme.*.learning_rate' => 'required|numeric|min:0|max:1',
];
}
@@ -83,9 +95,41 @@ public function withValidator($validator)
$validator->errors()->add('volume', 'New MAIA jobs cannot be created for volumes with very large images.');
}
- if (!$this->input('skip_nd') && $this->volume->images()->count() < $this->input('nd_clusters')) {
+ if ($this->input('training_data_method') === MaiaJob::TRAIN_NOVELTY_DETECTION && $this->volume->images()->count() < $this->input('nd_clusters')) {
$validator->errors()->add('nd_clusters', 'The number of image clusters must not be greater than the number of images in the volume.');
}
+
+ if ($this->input('training_data_method') === MaiaJob::TRAIN_OWN_ANNOTATIONS && $this->hasNoExistingAnnotations()) {
+ $validator->errors()->add('training_data_method', 'There are no existing annotations (with the chosen labels) in this volume.');
+ }
+
+ if ($this->input('training_data_method') === MaiaJob::TRAIN_KNOWLEDGE_TRANSFER && $this->hasNoKnowledgeTransferAnnotations()) {
+ $validator->errors()->add('training_data_method', 'There are no existing annotations (with the chosen labels) in the volume chosen for knowledge transfer.');
+ }
});
}
+
+ /**
+ * Determine if there are existing annotations that can be used as training data.
+ *
+ * @return boolean
+ */
+ protected function hasNoExistingAnnotations()
+ {
+ $restrictLabels = $this->input('oa_restrict_labels', []);
+
+ return !$this->getExistingAnnotationsQuery($this->volume->id, $restrictLabels)->exists();
+ }
+
+ /**
+ * Determine if there are existing annotations in the volume chosen for knowledge transfer.
+ *
+ * @return boolean
+ */
+ protected function hasNoKnowledgeTransferAnnotations()
+ {
+ $restrictLabels = $this->input('kt_restrict_labels', []);
+
+ return !$this->getExistingAnnotationsQuery($this->input('kt_volume_id'), $restrictLabels)->exists();
+ }
}
diff --git a/src/Http/routes.php b/src/Http/routes.php
index 5424c53..f72f454 100644
--- a/src/Http/routes.php
+++ b/src/Http/routes.php
@@ -40,4 +40,6 @@
$router->get('maia-jobs/{id}/images/{id2}/training-proposals', 'MaiaJobImagesController@indexTrainingProposals');
$router->get('maia-jobs/{id}/images/{id2}/annotation-candidates', 'MaiaJobImagesController@indexAnnotationCandidates');
+
+ $router->get('volumes/filter/knowledge-transfer', 'KnowledgeTransferVolumeController@index');
});
diff --git a/src/Jobs/InstanceSegmentationRequest.php b/src/Jobs/InstanceSegmentationRequest.php
index d503e47..722831a 100644
--- a/src/Jobs/InstanceSegmentationRequest.php
+++ b/src/Jobs/InstanceSegmentationRequest.php
@@ -2,7 +2,9 @@
namespace Biigle\Modules\Maia\Jobs;
+use Biigle\Modules\Maia\GenericImage;
use Biigle\Modules\Maia\MaiaJob;
+use Biigle\Volume;
use Exception;
use File;
use FileCache;
@@ -16,6 +18,20 @@ class InstanceSegmentationRequest extends JobRequest
*/
protected $trainingProposals;
+ /**
+ * URL of the volume for knowledge transfer (if any).
+ *
+ * @var string
+ */
+ protected $knowledgeTransferVolumeUrl;
+
+ /**
+ * Filenames of the images of the knowledge transfer volume, indexed by their IDs.
+ *
+ * @var array
+ */
+ protected $knowledgeTransferImages;
+
/**
* Create a new instance
*
@@ -28,6 +44,14 @@ public function __construct(MaiaJob $job)
// and the GPU server cannot instantiate MaiaAnnotation objects (as they depend
// on biigle/core).
$this->trainingProposals = $this->bundleTrainingProposals($job);
+
+ if ($this->shouldUseKnowledgeTransfer()) {
+ $volume = Volume::find($this->jobParams['kt_volume_id']);
+ $this->knowledgeTransferVolumeUrl = $volume->url;
+ $this->knowledgeTransferImages = $volume->images()
+ ->pluck('filename', 'id')
+ ->toArray();
+ }
}
/**
@@ -40,7 +64,13 @@ public function handle()
try {
$images = $this->getGenericImages();
- $datasetOutputPath = $this->generateDataset($images);
+ if ($this->shouldUseKnowledgeTransfer()) {
+ $datasetImages = $this->getKnowledgeTransferImages();
+ } else {
+ $datasetImages = $images;
+ }
+
+ $datasetOutputPath = $this->generateDataset($datasetImages);
$trainingOutputPath = $this->performTraining($datasetOutputPath);
$this->performInference($images, $datasetOutputPath, $trainingOutputPath);
@@ -51,6 +81,16 @@ public function handle()
}
}
+ /**
+ * Determine whether knowledge transfer should be performed in this job.
+ *
+ * @return bool
+ */
+ protected function shouldUseKnowledgeTransfer()
+ {
+ return array_key_exists('training_data_method', $this->jobParams) && $this->jobParams['training_data_method'] === 'knowledge_transfer';
+ }
+
/**
* Bundle the training proposals to be sent to the GPU server.
*
@@ -125,6 +165,10 @@ protected function createDatasetJson($imagesMap, $outputJsonPath)
'output_path' => $outputJsonPath,
];
+ if ($this->shouldUseKnowledgeTransfer()) {
+ $content['kt_scale_factor'] = $this->jobParams['kt_scale_factor'];
+ }
+
File::put($path, json_encode($content, JSON_UNESCAPED_SLASHES));
return $path;
@@ -176,8 +220,7 @@ protected function createTrainingJson($outputJsonPath)
{
$path = "{$this->tmpDir}/input-training.json";
$content = [
- 'is_epochs_head' => intval($this->jobParams['is_epochs_head']),
- 'is_epochs_all' => intval($this->jobParams['is_epochs_all']),
+ 'is_train_scheme' => $this->jobParams['is_train_scheme'],
'tmp_dir' => $this->tmpDir,
'available_bytes' => intval(config('maia.available_bytes')),
'max_workers' => intval(config('maia.max_workers')),
@@ -272,4 +315,19 @@ protected function getTmpDirPath()
{
return parent::getTmpDirPath()."-instance-segmentation";
}
+
+ /**
+ * Create GenericImage instances for the images of the knowledge transfer volume.
+ *
+ * @return array
+ */
+ protected function getKnowledgeTransferImages()
+ {
+ $images = [];
+ foreach ($this->knowledgeTransferImages as $id => $filename) {
+ $images[$id] = new GenericImage($id, "{$this->knowledgeTransferVolumeUrl}/{$filename}");
+ }
+
+ return $images;
+ }
}
diff --git a/src/Jobs/UseExistingAnnotations.php b/src/Jobs/PrepareAnnotationsJob.php
similarity index 53%
rename from src/Jobs/UseExistingAnnotations.php
rename to src/Jobs/PrepareAnnotationsJob.php
index e40c368..e8f01ab 100644
--- a/src/Jobs/UseExistingAnnotations.php
+++ b/src/Jobs/PrepareAnnotationsJob.php
@@ -5,19 +5,19 @@
use Arr;
use Biigle\ImageAnnotation;
use Biigle\Jobs\Job;
-use Biigle\Modules\Largo\Jobs\GenerateAnnotationPatch;
+use Biigle\Modules\Maia\Events\MaiaJobContinued;
use Biigle\Modules\Maia\MaiaJob;
+use Biigle\Modules\Maia\Traits\QueriesExistingAnnotations;
use Biigle\Modules\Maia\MaiaJobState as State;
-use Biigle\Modules\Maia\Notifications\NoveltyDetectionComplete;
-use Biigle\Modules\Maia\Notifications\NoveltyDetectionFailed;
+use Biigle\Modules\Maia\Notifications\InstanceSegmentationFailed;
use Biigle\Modules\Maia\TrainingProposal;
use Biigle\Shape;
use DB;
use Illuminate\Queue\SerializesModels;
-class UseExistingAnnotations extends Job
+abstract class PrepareAnnotationsJob extends Job
{
- use SerializesModels;
+ use SerializesModels, QueriesExistingAnnotations;
/**
* The job to use existing annotations for.
@@ -27,39 +27,20 @@ class UseExistingAnnotations extends Job
protected $job;
/**
- * Create a new isntance.
+ * Ignore this job if the MAIA job does not exist any more.
*
- * @param MaiaJob $job
+ * @var bool
*/
- public function __construct(MaiaJob $job)
- {
- $this->job = $job;
- }
+ protected $deleteWhenMissingModels = true;
/**
- * Execute the job.
+ * Create a new instance.
*
- * @return void
+ * @param MaiaJob $job
*/
- public function handle()
+ public function __construct(MaiaJob $job)
{
- if ($this->job->shouldSkipNoveltyDetection() && !$this->hasAnnotations()) {
- $this->job->error = ['message' => 'Novelty detection should be skipped but there are no existing annotations to take as training proposals.'];
- $this->job->state_id = State::failedNoveltyDetectionId();
- $this->job->save();
- $this->job->user->notify(new NoveltyDetectionFailed($this->job));
-
- return;
- }
-
- $this->convertAnnotations();
- $this->dispatchAnnotationPatchJobs();
-
- if ($this->job->shouldSkipNoveltyDetection()) {
- $this->job->state_id = State::trainingProposalsId();
- $this->job->save();
- $this->job->user->notify(new NoveltyDetectionComplete($this->job));
- }
+ $this->job = $job;
}
/**
@@ -67,17 +48,7 @@ public function handle()
*
* @return \Illuminate\Database\Query\Builder
*/
- protected function getAnnotationsQuery()
- {
- $restrictLabels = Arr::get($this->job->params, 'restrict_labels', []);
-
- return ImageAnnotation::join('images', 'image_annotations.image_id', '=', 'images.id')
- ->where('images.volume_id', $this->job->volume_id)
- ->when(!empty($restrictLabels), function ($query) use ($restrictLabels) {
- return $query->join('image_annotation_labels', 'image_annotation_labels.annotation_id', '=', 'image_annotations.id')
- ->whereIn('image_annotation_labels.label_id', $restrictLabels);
- });
- }
+ protected abstract function getAnnotationsQuery();
/**
* Determine if there are any annotations to convert.
@@ -94,12 +65,14 @@ protected function hasAnnotations()
*/
protected function convertAnnotations()
{
- $this->getAnnotationsQuery()
- // Use DISTINCT ON to get only one result per annotation, no matter how many
- // matching labels are attached to it. We can't simply use DISTINCT because
- // the rows include JSON.
- ->select(DB::raw('DISTINCT ON (annotations_id) image_annotations.id as annotations_id, image_annotations.points, image_annotations.image_id, image_annotations.shape_id'))
- ->chunkById(1000, [$this, 'convertAnnotationChunk'], 'image_annotations.id', 'annotations_id');
+ DB::transaction(function () {
+ $this->getAnnotationsQuery()
+ // Use DISTINCT ON to get only one result per annotation, no matter how
+ // many matching labels are attached to it. We can't simply use DISTINCT
+ // because the rows include JSON.
+ ->select(DB::raw('DISTINCT ON (annotations_id) image_annotations.id as annotations_id, image_annotations.points, image_annotations.image_id, image_annotations.shape_id'))
+ ->chunkById(1000, [$this, 'convertAnnotationChunk'], 'image_annotations.id', 'annotations_id');
+ });
}
/**
@@ -113,10 +86,12 @@ public function convertAnnotationChunk($chunk)
{
$trainingProposals = $chunk->map(function ($annotation) {
return [
- 'points' => $this->convertAnnotationPoints($annotation),
+ 'points' => $this->convertAnnotationPointsToCircle($annotation),
'image_id' => $annotation->image_id,
'shape_id' => Shape::circleId(),
'job_id' => $this->job->id,
+ // All these proposals should be taken for instance segmentation.
+ 'selected' => true,
// score should be null in this case.
];
});
@@ -131,7 +106,7 @@ public function convertAnnotationChunk($chunk)
*
* @return string JSON encoded points array.
*/
- protected function convertAnnotationPoints(ImageAnnotation $annotation)
+ protected function convertAnnotationPointsToCircle(ImageAnnotation $annotation)
{
if ($annotation->shape_id === Shape::pointId()) {
// Points are converted to circles with a default radius of 50 px.
@@ -181,18 +156,4 @@ protected function convertPolygonToCirlce($points)
return [$x, $y, $r];
}
-
- /**
- * Dispatch the jobs to generate image patches for the new training proposals.
- */
- protected function dispatchAnnotationPatchJobs()
- {
- $disk = config('maia.training_proposal_storage_disk');
- $this->job->trainingProposals()->chunk(1000, function ($chunk) use ($disk) {
- $chunk->each(function ($proposal) use ($disk) {
- GenerateAnnotationPatch::dispatch($proposal, $disk)
- ->onQueue(config('largo.generate_annotation_patch_queue'));
- });
- });
- }
}
diff --git a/src/Jobs/PrepareExistingAnnotations.php b/src/Jobs/PrepareExistingAnnotations.php
new file mode 100644
index 0000000..751c7ad
--- /dev/null
+++ b/src/Jobs/PrepareExistingAnnotations.php
@@ -0,0 +1,41 @@
+hasAnnotations()) {
+ $this->job->error = ['message' => 'Existing annotations should be used but there are no existing annotations to take as training proposals.'];
+ $this->job->state_id = State::failedInstanceSegmentationId();
+ $this->job->save();
+ $this->job->user->notify(new InstanceSegmentationFailed($this->job));
+
+ return;
+ }
+
+ $this->convertAnnotations();
+ event(new MaiaJobContinued($this->job));
+ }
+
+ /**
+ * {@inheritdoc}
+ */
+ protected function getAnnotationsQuery()
+ {
+ $restrictLabels = Arr::get($this->job->params, 'oa_restrict_labels', []);
+
+ return $this->getExistingAnnotationsQuery($this->job->volume_id, $restrictLabels);
+ }
+}
diff --git a/src/Jobs/PrepareKnowledgeTransfer.php b/src/Jobs/PrepareKnowledgeTransfer.php
new file mode 100644
index 0000000..f9e7e84
--- /dev/null
+++ b/src/Jobs/PrepareKnowledgeTransfer.php
@@ -0,0 +1,98 @@
+job->params['kt_volume_id']);
+
+ if (is_null($otherVolume)) {
+ $this->handleFailure('The volume that was selected for knowledge transfer does not exist.');
+ return;
+ }
+
+ $missingOtherMetadata = $otherVolume->images()
+ ->whereNull('attrs->metadata->distance_to_ground')
+ ->exists();
+
+ if ($missingOtherMetadata) {
+ $this->handleFailure('The volume that was selected for knowledge transfer has images where the distance to ground information is missing.');
+ return;
+ }
+
+ $missingOwnMetadata = $this->job->volume->images()
+ ->whereNull('attrs->metadata->distance_to_ground')
+ ->exists();
+
+ if ($missingOwnMetadata) {
+ $this->handleFailure('The volume of the MAIA job has images where the distance to ground information is missing.');
+ return;
+ }
+
+ $column = DB::raw("cast(attrs->'metadata'->>'distance_to_ground' as real)");
+ $ownDistance = floatval($this->job->volume->images()->avg($column));
+
+ if ($ownDistance == 0) {
+ $this->handleFailure('The average distance to ground of the volume of the MAIA job is zero.');
+ return;
+ }
+
+ $otherDistance = floatval($otherVolume->images()->avg($column));
+
+ if ($otherDistance == 0) {
+ $this->handleFailure('The average distance to ground of the volume that was selected for knowledge transfer is zero.');
+ return;
+ }
+
+ $params = $this->job->params;
+ $params['kt_scale_factor'] = $otherDistance / $ownDistance;
+ $this->job->params = $params;
+
+
+ if (!$this->hasAnnotations()) {
+ $this->handleFailure('The volume that was selected for knowledge transfer has no annotations.');
+ return;
+ }
+
+ $this->convertAnnotations();
+ $this->job->save();
+ event(new MaiaJobContinued($this->job));
+ }
+
+ /**
+ * {@inheritdoc}
+ */
+ protected function getAnnotationsQuery()
+ {
+ $restrictLabels = Arr::get($this->job->params, 'kt_restrict_labels', []);
+
+ return $this->getExistingAnnotationsQuery($this->job->params['kt_volume_id'], $restrictLabels);
+ }
+
+ /**
+ * Set the failed state and error message and notify the user.
+ *
+ * @param string $message
+ */
+ protected function handleFailure($message)
+ {
+ $this->job->error = ['message' => $message];
+ $this->job->state_id = State::failedInstanceSegmentationId();
+ $this->job->save();
+ $this->job->user->notify(new InstanceSegmentationFailed($this->job));
+ }
+}
diff --git a/src/Listeners/DispatchNoveltyDetectionRequest.php b/src/Listeners/DispatchMaiaJob.php
similarity index 58%
rename from src/Listeners/DispatchNoveltyDetectionRequest.php
rename to src/Listeners/DispatchMaiaJob.php
index 3f9f314..ac1458a 100644
--- a/src/Listeners/DispatchNoveltyDetectionRequest.php
+++ b/src/Listeners/DispatchMaiaJob.php
@@ -2,15 +2,17 @@
namespace Biigle\Modules\Maia\Listeners;
+use Biigle\Modules\Maia\Events\MaiaJobContinued;
use Biigle\Modules\Maia\Events\MaiaJobCreated;
use Biigle\Modules\Maia\Jobs\NoveltyDetectionFailure;
use Biigle\Modules\Maia\Jobs\NoveltyDetectionRequest;
-use Biigle\Modules\Maia\Jobs\UseExistingAnnotations;
+use Biigle\Modules\Maia\Jobs\PrepareExistingAnnotations;
+use Biigle\Modules\Maia\Jobs\PrepareKnowledgeTransfer;
use Exception;
use Illuminate\Contracts\Queue\ShouldQueue;
use Queue;
-class DispatchNoveltyDetectionRequest implements ShouldQueue
+class DispatchMaiaJob implements ShouldQueue
{
/**
* Handle the event.
@@ -20,15 +22,16 @@ class DispatchNoveltyDetectionRequest implements ShouldQueue
*/
public function handle(MaiaJobCreated $event)
{
- if ($event->job->shouldUseExistingAnnotations()) {
- UseExistingAnnotations::dispatch($event->job);
- }
-
-
- if (!$event->job->shouldSkipNoveltyDetection()) {
+ if ($event->job->shouldUseNoveltyDetection()) {
$request = new NoveltyDetectionRequest($event->job);
Queue::connection(config('maia.request_connection'))
->pushOn(config('maia.request_queue'), $request);
+ } else if ($event->job->shouldUseExistingAnnotations()) {
+ PrepareExistingAnnotations::dispatch($event->job);
+ } else if ($event->job->shouldUseKnowledgeTransfer()) {
+ PrepareKnowledgeTransfer::dispatch($event->job);
+ } else {
+ throw new Exception('Unknown training data method.');
}
}
@@ -41,7 +44,7 @@ public function handle(MaiaJobCreated $event)
*/
public function failed(MaiaJobCreated $event, $exception)
{
- $e = new Exception('The novelty detection request could not be submitted.');
+ $e = new Exception('The MAIA job could not be submitted.');
Queue::push(new NoveltyDetectionFailure($event->job->id, $e));
}
}
diff --git a/src/MaiaJob.php b/src/MaiaJob.php
index 133bf65..9be6a1e 100644
--- a/src/MaiaJob.php
+++ b/src/MaiaJob.php
@@ -13,6 +13,21 @@ class MaiaJob extends Model
{
use HasJsonAttributes;
+ /**
+ * @var string
+ */
+ const TRAIN_NOVELTY_DETECTION = 'novelty_detection';
+
+ /**
+ * @var string
+ */
+ const TRAIN_OWN_ANNOTATIONS = 'own_annotations';
+
+ /**
+ * @var string
+ */
+ const TRAIN_KNOWLEDGE_TRANSFER = 'knowledge_transfer';
+
/**
* The attributes that should be casted to native types.
*
@@ -155,23 +170,33 @@ public function setErrorAttribute(array $error)
}
/**
- * Determine if this job should use existing annotations.
+ * Determine if this job should use existing annotations to get training data.
*
* @return bool
*/
public function shouldUseExistingAnnotations()
{
- return (bool) $this->getJsonAttr('params.use_existing', false);
+ return $this->getJsonAttr('params.training_data_method') === self::TRAIN_OWN_ANNOTATIONS;
+ }
+
+ /**
+ * Determine if this job should use novelty detection to get training data.
+ *
+ * @return bool
+ */
+ public function shouldUseNoveltyDetection()
+ {
+ // Handle fallback where old jobs don't have a training_data_method yet.
+ return $this->getJsonAttr('params.training_data_method') === self::TRAIN_NOVELTY_DETECTION || $this->getJsonAttr('params.training_data_method') === null;
}
/**
- * Determine if this job should skip novelty detection.
+ * Determine if this job should use knowledge transfer to get training data.
*
* @return bool
*/
- public function shouldSkipNoveltyDetection()
+ public function shouldUseKnowledgeTransfer()
{
- return $this->shouldUseExistingAnnotations()
- && (bool) $this->getJsonAttr('params.skip_nd', false);
+ return $this->getJsonAttr('params.training_data_method') === self::TRAIN_KNOWLEDGE_TRANSFER;
}
}
diff --git a/src/MaiaServiceProvider.php b/src/MaiaServiceProvider.php
index 6e2219c..de7f9c3 100644
--- a/src/MaiaServiceProvider.php
+++ b/src/MaiaServiceProvider.php
@@ -7,7 +7,7 @@
use Biigle\Modules\Maia\Events\MaiaJobCreated;
use Biigle\Modules\Maia\Events\MaiaJobDeleting;
use Biigle\Modules\Maia\Listeners\DispatchInstanceSegmentationRequest;
-use Biigle\Modules\Maia\Listeners\DispatchNoveltyDetectionRequest;
+use Biigle\Modules\Maia\Listeners\DispatchMaiaJob;
use Biigle\Modules\Maia\Listeners\PrepareDeleteAnnotationPatches;
use Biigle\Modules\Maia\Listeners\PruneTrainingProposalPatches;
use Biigle\Services\Modules;
@@ -60,7 +60,7 @@ public function boot(Modules $modules, Router $router)
Gate::policy(MaiaJob::class, Policies\MaiaJobPolicy::class);
Gate::policy(TrainingProposal::class, Policies\TrainingProposalPolicy::class);
Gate::policy(AnnotationCandidate::class, Policies\AnnotationCandidatePolicy::class);
- Event::listen(MaiaJobCreated::class, DispatchNoveltyDetectionRequest::class);
+ Event::listen(MaiaJobCreated::class, DispatchMaiaJob::class);
Event::listen(MaiaJobContinued::class, DispatchInstanceSegmentationRequest::class);
Event::listen(MaiaJobContinued::class, PruneTrainingProposalPatches::class);
Event::listen(MaiaJobDeleting::class, PrepareDeleteAnnotationPatches::class);
diff --git a/src/Notifications/InstanceSegmentationFailed.php b/src/Notifications/InstanceSegmentationFailed.php
index 14aa00a..b7ea8b1 100644
--- a/src/Notifications/InstanceSegmentationFailed.php
+++ b/src/Notifications/InstanceSegmentationFailed.php
@@ -23,6 +23,6 @@ protected function getTitle($job)
*/
protected function getMessage($job)
{
- return "MAIA job {$job->id} failed during instance segmentation. Please notify the BIIGLE administrators.";
+ return "MAIA job {$job->id} failed during instance segmentation.";
}
}
diff --git a/src/Rules/KnowledgeTransferVolume.php b/src/Rules/KnowledgeTransferVolume.php
new file mode 100644
index 0000000..0f144e9
--- /dev/null
+++ b/src/Rules/KnowledgeTransferVolume.php
@@ -0,0 +1,41 @@
+user())
+ ->where('id', $value)
+ ->has('images.annotations')
+ ->whereNotExists(function ($query) {
+ $query->select(DB::raw(1))
+ ->from('images')
+ ->whereRaw('images.volume_id = volumes.id')
+ ->whereNull('attrs->metadata->distance_to_ground');
+ })
+ ->exists();
+ }
+
+ /**
+ * Get the validation error message.
+ *
+ * @return string
+ */
+ public function message()
+ {
+ return 'The :attribute is not suited for knowledge transfer. You must be authorized to access the volume, all images must have distance to ground information and there must be annotations.';
+ }
+}
diff --git a/src/Traits/QueriesExistingAnnotations.php b/src/Traits/QueriesExistingAnnotations.php
new file mode 100644
index 0000000..f326c98
--- /dev/null
+++ b/src/Traits/QueriesExistingAnnotations.php
@@ -0,0 +1,25 @@
+where('images.volume_id', $volumeId)
+ ->when(!empty($restrictLabels), function ($query) use ($restrictLabels) {
+ return $query->join('image_annotation_labels', 'image_annotation_labels.annotation_id', '=', 'image_annotations.id')
+ ->whereIn('image_annotation_labels.label_id', $restrictLabels);
+ });
+ }
+}
diff --git a/src/public/assets/scripts/main.js b/src/public/assets/scripts/main.js
index deb4179..4cbb7e6 100644
--- a/src/public/assets/scripts/main.js
+++ b/src/public/assets/scripts/main.js
@@ -1 +1 @@
-!function(t){var e={};function n(i){if(e[i])return e[i].exports;var o=e[i]={i:i,l:!1,exports:{}};return t[i].call(o.exports,o,o.exports,n),o.l=!0,o.exports}n.m=t,n.c=e,n.d=function(t,e,i){n.o(t,e)||Object.defineProperty(t,e,{enumerable:!0,get:i})},n.r=function(t){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})},n.t=function(t,e){if(1&e&&(t=n(t)),8&e)return t;if(4&e&&"object"==typeof t&&t&&t.__esModule)return t;var i=Object.create(null);if(n.r(i),Object.defineProperty(i,"default",{enumerable:!0,value:t}),2&e&&"string"!=typeof t)for(var o in t)n.d(i,o,function(e){return t[e]}.bind(null,o));return i},n.n=function(t){var e=t&&t.__esModule?function(){return t.default}:function(){return t};return n.d(e,"a",e),e},n.o=function(t,e){return Object.prototype.hasOwnProperty.call(t,e)},n.p="/",n(n.s=0)}({0:function(t,e,n){n("WfG0"),t.exports=n("zcrr")},"A1R+":function(t,e,n){"use strict";t.exports=o,t.exports.default=o;var i=n("YcpW");function o(t,e){if(!(this instanceof o))return new o(t,e);this._maxEntries=Math.max(4,t||9),this._minEntries=Math.max(2,Math.ceil(.4*this._maxEntries)),e&&this._initFormat(e),this.clear()}function s(t,e,n){if(!n)return e.indexOf(t);for(var i=0;i=0&&s[e].children.length>this._maxEntries;)this._split(s,e),e--;this._adjustParentBBoxes(o,s,e)},_split:function(t,e){var n=t[e],i=n.children.length,o=this._minEntries;this._chooseSplitAxis(n,o,i);var s=this._chooseSplitIndex(n,o,i),a=g(n.children.splice(s,n.children.length-s));a.height=n.height,a.leaf=n.leaf,r(n,this.toBBox),r(a,this.toBBox),e?t[e-1].children.push(a):this._splitRoot(n,a)},_splitRoot:function(t,e){this.data=g([t,e]),this.data.height=t.height+1,this.data.leaf=!1,r(this.data,this.toBBox)},_chooseSplitIndex:function(t,e,n){var i,o,s,r,u,h,c,d,p,f,g,_,m,v;for(h=c=1/0,i=e;i<=n-e;i++)o=a(t,0,i,this.toBBox),s=a(t,i,n,this.toBBox),p=o,f=s,g=void 0,_=void 0,m=void 0,v=void 
0,g=Math.max(p.minX,f.minX),_=Math.max(p.minY,f.minY),m=Math.min(p.maxX,f.maxX),v=Math.min(p.maxY,f.maxY),r=Math.max(0,m-g)*Math.max(0,v-_),u=l(o)+l(s),r0},e.prototype.removeEventListener=function(t,e){var n=this.listeners_[t];if(n){var i=n.indexOf(e);t in this.pendingRemovals_?(n[i]=N,++this.pendingRemovals_[t]):(n.splice(i,1),0===n.length&&delete this.listeners_[t])}},e}(U),Z="change",Q="clear";var tt=function(t){function e(){t.call(this),this.revision_=0}return t&&(e.__proto__=t),e.prototype=Object.create(t&&t.prototype),e.prototype.constructor=e,e.prototype.changed=function(){++this.revision_,this.dispatchEvent(Z)},e.prototype.getRevision=function(){return this.revision_},e.prototype.on=function(t,e){if(Array.isArray(t)){for(var n=t.length,i=new Array(n),o=0;o=0&&s[e].children.length>this._maxEntries;)this._split(s,e),e--;this._adjustParentBBoxes(o,s,e)},_split:function(t,e){var n=t[e],i=n.children.length,o=this._minEntries;this._chooseSplitAxis(n,o,i);var s=this._chooseSplitIndex(n,o,i),a=g(n.children.splice(s,n.children.length-s));a.height=n.height,a.leaf=n.leaf,r(n,this.toBBox),r(a,this.toBBox),e?t[e-1].children.push(a):this._splitRoot(n,a)},_splitRoot:function(t,e){this.data=g([t,e]),this.data.height=t.height+1,this.data.leaf=!1,r(this.data,this.toBBox)},_chooseSplitIndex:function(t,e,n){var i,o,s,r,h,u,l,d,p,f,g,m,_,v;for(u=l=1/0,i=e;i<=n-e;i++)o=a(t,0,i,this.toBBox),s=a(t,i,n,this.toBBox),p=o,f=s,g=void 0,m=void 0,_=void 0,v=void 0,g=Math.max(p.minX,f.minX),m=Math.max(p.minY,f.minY),_=Math.min(p.maxX,f.maxX),v=Math.min(p.maxY,f.maxY),r=Math.max(0,_-g)*Math.max(0,v-m),h=c(o)+c(s),r=e;o--)s=t.children[o],h(l,t.leaf?r(s):s),c+=d(l);return c},_adjustParentBBoxes:function(t,e,n){for(var i=n;i>=0;i--)h(e[i],t)},_condense:function(t){for(var e,n=t.length-1;n>=0;n--)0===t[n].children.length?n>0?(e=t[n-1].children).splice(e.indexOf(t[n]),1):this.clear():r(t[n],this.toBBox)},_initFormat:function(t){var e=["return a"," - b",";"];this.compareMinX=new 
Function("a","b",e.join(t[0])),this.compareMinY=new Function("a","b",e.join(t[1])),this.toBBox=new Function("a","return {minX: a"+t[0]+", minY: a"+t[1]+", maxX: a"+t[2]+", maxY: a"+t[3]+"};")}}},WfG0:function(t,e,n){"use strict";n.r(e);var i=Vue.resource("api/v1/volumes/filter/knowledge-transfer"),o=biigle.$require("annotations.components.annotationCanvas"),s=biigle.$require("largo.mixins.annotationPatch"),r=biigle.$require("annotations.ol.AttachLabelInteraction"),a=biigle.$require("messages").handleErrorResponse,h=biigle.$require("volumes.components.imageGrid"),u=biigle.$require("volumes.components.imageGridImage"),l=biigle.$require("annotations.stores.images"),c=biigle.$require("keyboard"),d=biigle.$require("labelTrees.components.labelTrees"),p=biigle.$require("labelTrees.components.labelTypeahead"),f=biigle.$require("core.mixins.loader"),g=biigle.$require("messages"),m=biigle.$require("core.components.sidebar"),_=biigle.$require("core.components.sidebarTab"),v=biigle.$require("annotations.stores.styles");function y(t,e,n,i,o,s,r,a){var h,u="function"==typeof t?t.options:t;if(e&&(u.render=e,u.staticRenderFns=n,u._compiled=!0),i&&(u.functional=!0),s&&(u._scopeId="data-v-"+s),r?(h=function(t){(t=t||this.$vnode&&this.$vnode.ssrContext||this.parent&&this.parent.$vnode&&this.parent.$vnode.ssrContext)||"undefined"==typeof __VUE_SSR_CONTEXT__||(t=__VUE_SSR_CONTEXT__),o&&o.call(this,t),t&&t._registeredComponents&&t._registeredComponents.add(r)},u._ssrRegister=h):o&&(h=a?function(){o.call(this,(u.functional?this.parent:this).$root.$options.shadowRoot)}:o),h)if(u.functional){u._injectStyles=h;var l=u.render;u.render=function(t,e){return h.call(e),l(t,e)}}else{var c=u.beforeCreate;u.beforeCreate=c?[].concat(c,h):[h]}return{exports:t,options:u}}var 
I=y({mixins:[f],components:{typeahead:p},data:function(){return{volumeId:null,showAdvanced:!1,shouldFetchLabels:!1,labels:[],selectedLabels:[],submitted:!1,trainScheme:[],trainingDataMethod:"",knowledgeTransferVolumes:[],knowledgeTransferVolume:null,shouldFetchKnowledgeTransferVolumes:!1,knowledgeTransferTypeaheadTemplate:"{{item.name}}
({{item.description}})",knowledgeTransferLabelCache:[],selectedKnowledgeTransferLabels:[]}},computed:{hasLabels:function(){return this.labels.length>0},hasSelectedLabels:function(){return this.selectedLabels.length>0},useExistingAnnotations:function(){return"own_annotations"===this.trainingDataMethod},useNoveltyDetection:function(){return"novelty_detection"===this.trainingDataMethod},useKnowledgeTransfer:function(){return"knowledge_transfer"===this.trainingDataMethod},canSubmit:function(){return this.submitted||this.useKnowledgeTransfer&&!this.knowledgeTransferVolume},hasNoKnowledgeTransferVolumes:function(){return this.shouldFetchKnowledgeTransferVolumes&&!this.loading&&0===this.knowledgeTransferVolumes.length},hasSelectedKnowledgeTransferLabels:function(){return this.selectedKnowledgeTransferLabels.length>0},knowledgeTransferLabels:function(){if(!this.knowledgeTransferVolume)return[];var t=this.knowledgeTransferVolume.id;return this.knowledgeTransferLabelCache.hasOwnProperty(t)||this.fetchKnowledgeTransferLabels(t),this.knowledgeTransferLabelCache[t]}},methods:{toggle:function(){this.showAdvanced=!this.showAdvanced},setLabels:function(t){this.labels=t.body},handleSelectedLabel:function(t){-1===this.selectedLabels.indexOf(t)&&this.selectedLabels.push(t)},handleUnselectLabel:function(t){var e=this.selectedLabels.indexOf(t);e>=0&&this.selectedLabels.splice(e,1)},submit:function(){this.submitted=!0},removeTrainStep:function(t){this.trainScheme.splice(t,1)},addTrainStep:function(){var t={layers:"heads",epochs:10,learning_rate:.001};if(this.trainScheme.length>0){var e=this.trainScheme[this.trainScheme.length-1];t.layers=e.layers,t.epochs=e.epochs,t.learning_rate=e.learning_rate}this.trainScheme.push(t)},handleSelectedKnowledgeTransferVolume:function(t){this.knowledgeTransferVolume=t,this.selectedKnowledgeTransferLabels=[]},setKnowledgeTransferVolumes:function(t){var e=this;this.knowledgeTransferVolumes=t.body.filter((function(t){return 
t.id!==e.volumeId})).map((function(t){return t.description=t.projects.map((function(t){return t.name})).join(", "),t}))},fetchLabels:function(t){this.startLoading();var e=this.$http.get("api/v1/volumes{/id}/annotation-labels",{params:{id:t}});return e.finally(this.finishLoading),e},fetchKnowledgeTransferLabels:function(t){var e=this;this.fetchLabels(t).then((function(n){e.knowledgeTransferLabelCache[t]=n.body}),a)},handleSelectedKnowledgeTransferLabel:function(t){-1===this.selectedKnowledgeTransferLabels.indexOf(t)&&this.selectedKnowledgeTransferLabels.push(t)},handleUnselectKnowledgeTransferLabel:function(t){var e=this.selectedKnowledgeTransferLabels.indexOf(t);e>=0&&this.selectedKnowledgeTransferLabels.splice(e,1)}},watch:{useExistingAnnotations:function(t){t&&(this.shouldFetchLabels=!0)},shouldFetchLabels:function(t){t&&this.fetchLabels(this.volumeId).then(this.setLabels,a)},useKnowledgeTransfer:function(t){t&&(this.shouldFetchKnowledgeTransferVolumes=!0)},shouldFetchKnowledgeTransferVolumes:function(t){t&&(this.startLoading(),i.get().then(this.setKnowledgeTransferVolumes,a).finally(this.finishLoading))}},created:function(){this.volumeId=biigle.$require("maia.volumeId"),this.trainScheme=biigle.$require("maia.trainScheme"),this.showAdvanced=biigle.$require("maia.hasErrors"),this.trainingDataMethod=biigle.$require("maia.trainingDataMethod"),this.useExistingAnnotations&&(this.shouldFetchLabels=!0)}},void 0,void 0,!1,null,null,null).exports,x=Vue.resource("api/v1/maia/annotation-candidates{/id}"),C=y({mixins:[u,s],computed:{label:function(){return this.selected?this.image.label:null},selected:function(){return this.$parent.isSelected(this.image)},converted:function(){return this.$parent.isConverted(this.image)},classObject:function(){return{"image-grid__image--selected":this.selected||this.converted,"image-grid__image--selectable":this.selectable,"image-grid__image--fade":this.selectedFade,"image-grid__image--small-icon":this.smallIcon}},iconClass:function(){return 
this.converted?"fa-lock":"fa-"+this.selectedIcon},showIcon:function(){return this.selectable||this.selected||this.converted},title:function(){return this.converted?"This annotation candidate has been converted":this.selected?"Detach label":"Attach selected label"},labelStyle:function(){return{"background-color":"#"+this.label.color}},id:function(){return this.image.id},uuid:function(){return this.image.uuid},urlTemplate:function(){return biigle.$require("maia.acUrlTemplate")}}},(function(){var t=this,e=t.$createElement,n=t._self._c||e;return n("figure",{staticClass:"image-grid__image image-grid__image--annotation-candidate",class:t.classObject,attrs:{title:t.title}},[t.showIcon?n("div",{staticClass:"image-icon"},[n("i",{staticClass:"fas",class:t.iconClass})]):t._e(),t._v(" "),n("img",{attrs:{src:t.srcUrl},on:{click:t.toggleSelect,error:t.showEmptyImage}}),t._v(" "),t.selected?n("div",{staticClass:"attached-label"},[n("span",{staticClass:"attached-label__color",style:t.labelStyle}),t._v(" "),n("span",{staticClass:"attached-label__name",domProps:{textContent:t._s(t.label.name)}})]):t._e()])}),[],!1,null,null,null),b=y({mixins:[h],components:{imageGridImage:C.exports},props:{selectedCandidateIds:{type:Object,required:!0},convertedCandidateIds:{type:Object,required:!0}},methods:{isSelected:function(t){return this.selectedCandidateIds.hasOwnProperty(t.id)},isConverted:function(t){return this.convertedCandidateIds.hasOwnProperty(t.id)}}},(function(){var t=this,e=t.$createElement,n=t._self._c||e;return n("div",{staticClass:"image-grid",on:{wheel:function(e){return e.preventDefault(),t.scroll(e)}}},[n("div",{ref:"images",staticClass:"image-grid__images"},t._l(t.displayedImages,(function(e){return n("image-grid-image",{key:e.id,attrs:{image:e,"empty-url":t.emptyUrl,selectable:!t.isConverted(e),"selected-icon":t.selectedIcon},on:{select:t.emitSelect}})})),1),t._v(" 
"),t.canScroll?n("image-grid-progress",{attrs:{progress:t.progress},on:{top:t.jumpToStart,"prev-page":t.reversePage,"prev-row":t.reverseRow,jump:t.jumpToPercent,"next-row":t.advanceRow,"next-page":t.advancePage,bottom:t.jumpToEnd}}):t._e()],1)}),[],!1,null,null,null).exports,P=Vue.resource("api/v1/maia-jobs{/id}",{},{save:{method:"POST",url:"api/v1/volumes{/id}/maia-jobs"},getTrainingProposals:{method:"GET",url:"api/v1/maia-jobs{/id}/training-proposals"},getTrainingProposalPoints:{method:"GET",url:"api/v1/maia-jobs{/jobId}/images{/imageId}/training-proposals"},getAnnotationCandidates:{method:"GET",url:"api/v1/maia-jobs{/id}/annotation-candidates"},getAnnotationCandidatePoints:{method:"GET",url:"api/v1/maia-jobs{/jobId}/images{/imageId}/annotation-candidates"},convertAnnotationCandidates:{method:"POST",url:"api/v1/maia-jobs{/id}/annotation-candidates"}}),w=Vue.resource("api/v1/maia/training-proposals{/id}"),S=y({mixins:[u,s],computed:{selected:function(){return this.$parent.selectedProposalIds.hasOwnProperty(this.image.id)},title:function(){return this.selectable?this.selected?"Unselect as interesting":"Select as interesting":""},id:function(){return this.image.id},uuid:function(){return this.image.uuid},urlTemplate:function(){return biigle.$require("maia.tpUrlTemplate")}}},(function(){var t=this,e=t.$createElement,n=t._self._c||e;return n("figure",{staticClass:"image-grid__image",class:t.classObject,attrs:{title:t.title}},[t.showIcon?n("div",{staticClass:"image-icon"},[n("i",{staticClass:"fas",class:t.iconClass})]):t._e(),t._v(" "),n("img",{attrs:{src:t.srcUrl},on:{click:t.toggleSelect,error:t.showEmptyImage}})])}),[],!1,null,null,null),A=y({mixins:[h],components:{imageGridImage:S.exports},props:{selectedProposalIds:{type:Object,required:!0}}},(function(){var t=this,e=t.$createElement,n=t._self._c||e;return n("div",{staticClass:"image-grid",on:{wheel:function(e){return 
e.preventDefault(),t.scroll(e)}}},[n("div",{ref:"images",staticClass:"image-grid__images"},t._l(t.displayedImages,(function(e){return n("image-grid-image",{key:e.id,attrs:{image:e,"empty-url":t.emptyUrl,selectable:t.selectable,"selected-fade":t.selectable,"small-icon":!t.selectable,"selected-icon":t.selectedIcon},on:{select:t.emitSelect}})})),1),t._v(" "),t.canScroll?n("image-grid-progress",{attrs:{progress:t.progress},on:{top:t.jumpToStart,"prev-page":t.reversePage,"prev-row":t.reverseRow,jump:t.jumpToPercent,"next-row":t.advanceRow,"next-page":t.advancePage,bottom:t.jumpToEnd}}):t._e()],1)}),[],!1,null,null,null).exports;function T(){return function(){throw new Error("Unimplemented abstract method.")}()}var E=0;function F(t){return t.ol_uid||(t.ol_uid=String(++E))}var L=function(t){function e(e){var n="Assertion failed. See https://openlayers.org/en/"+("v"+"5.3.3".split("-")[0])+"/doc/errors/#"+e+" for details.";t.call(this,n),this.code=e,this.name="AssertionError",this.message=n}return t&&(e.__proto__=t),e.prototype=Object.create(t&&t.prototype),e.prototype.constructor=e,e}(Error),O="add",M="remove",k="propertychange",R="function"==typeof Object.assign?Object.assign:function(t,e){var n=arguments;if(null==t)throw new TypeError("Cannot convert undefined or null to object");for(var i=Object(t),o=1,s=arguments.length;o0},e.prototype.removeEventListener=function(t,e){var n=this.listeners_[t];if(n){var i=n.indexOf(e);t in this.pendingRemovals_?(n[i]=N,++this.pendingRemovals_[t]):(n.splice(i,1),0===n.length&&delete this.listeners_[t])}},e}(z),Q="change",tt="clear";var et=function(t){function e(){t.call(this),this.revision_=0}return t&&(e.__proto__=t),e.prototype=Object.create(t&&t.prototype),e.prototype.constructor=e,e.prototype.changed=function(){++this.revision_,this.dispatchEvent(Q)},e.prototype.getRevision=function(){return this.revision_},e.prototype.on=function(t,e){if(Array.isArray(t)){for(var n=t.length,i=new Array(n),o=0;o
- In the third step (instance segmentation), the manually filtered set of training proposals is used to train a machine learning model for the automatic detection of the selected interesting objects. The model is highly specialized for this task and can usually detect most (if not all) instances of the interesting objects in the images. In the tests reported by the MAIA paper, 84% of the interesting objects were detected on average [1]. The detections are passed on as "annotation candidates" to the fourth step. + In addition to the novelty detection of the original MAIA method, BIIGLE offers alternative ways to obtain training proposals. Read more in the articles to use existing annotations or knowledge transfer.
- As with the training proposals, the annotation candidates can contain detections that are not actually interesting objects. In addition, the machine learning model only detects the objects and does not attempt to automatically assign labels to them. In the fourth step, the annotation candidates are again manually filtered to select only the actually interesting objects. Furthermore, labels are manually attached to the selected candidates which are subsequently transformed to actual annotations. + In the third step (instance segmentation), the manually filtered or automatically obtained set of training proposals is used to train a machine learning model for the automatic detection of the selected interesting objects. The model is highly specialized for this task and can usually detect most (if not all) instances of the interesting objects in the images. In the tests reported by the MAIA paper, 84% of the interesting objects were detected on average [1]. The detections are passed on as "annotation candidates" to the fourth step. +
+ ++ As with the training proposals of the novelty detection, the annotation candidates can contain detections that are not actually interesting objects. In addition, the machine learning model only detects the objects and does not attempt to automatically assign labels to them. In the fourth step, the annotation candidates are again manually filtered to select only the actually interesting objects. Furthermore, labels are manually attached to the selected candidates which are subsequently transformed to actual annotations.
@@ -39,28 +43,30 @@ To create new annotations with MAIA in BIIGLE, project editors, experts or admins can start a new MAIA "job" for a volume of a project. To start a new MAIA job, click on the button in the sidebar of the volume overview. This will open up the MAIA overview for the volume, which lists any running or finished jobs, as well as a form to create a new MAIA job for the volume. New jobs can only be created when no other job is currently running for the volume.
- The form to create a new MAIA job initially shows only the parameters that are most likely to be modified for each job. To show all available parameters, click on the button below the form. There are quite a lot parameters that can be configured for a MAIA job. Although sensible defaults are set, a careful configuration may be crucial for a good quality of the resulting annotations. You can read more on the configuration parameters of the novelty detection stage and those of the instance segmentation stage in the respective articles. + The form to create a new MAIA job presents you with a choice between several methods to obtain training data (training proposals). Choose the one that best fits your use case. The form initially shows only the parameters that are most likely to be modified for each job. To show all available parameters, click on the button below the form. There can be quite a lot of parameters that can be configured for a MAIA job. Although sensible defaults are set, a careful configuration may be crucial for a good quality of the resulting annotations. You can read more on the configuration parameters for novelty detection, existing annotations, knowledge transfer and instance segmentation in the respective articles.
- A MAIA job runs through the four consecutive stages outlined above. The first and the third stages perform automatic processing of the images. The second and the fourth stages require manual interaction by you. Once you have created a new job, it will immediately start the automatic processing of the first stage. BIIGLE will notify you when this stage is finished and the job is waiting for your manual interaction in the second stage. In the same way you are notified when the automatic processing of the third stage is finished. You can change the way you receive new MAIA notifications in the notification settings of your user account. + If novelty detection is chosen as the method to obtain training data, a MAIA job runs through the four consecutive stages outlined above. The first and the third stages perform automatic processing of the images. The second and the fourth stages require manual interaction by you. Once you have created a new job, it will immediately start the automatic processing of the first stage. BIIGLE will notify you when this stage is finished and the job is waiting for your manual interaction in the second stage. In the same way you are notified when the automatic processing of the third stage is finished. If existing annotations or knowledge transfer were chosen as the method to obtain training data, the job will directly proceed with the third stage, skipping the first two. You can change the way you receive new MAIA notifications in the notification settings of your user account.
- The overview page of a MAIA job shows a main content area and a sidebar with five different tabs. The first tab shows general information about the job, including all the parameters that were used. The second and third tabs belong to the training proposals stage and are enabled once the job progresses to this stage. The fourth and fifth tabs belong to the annotation candidates stage and are enabled once the job progresses to this stage. + The overview page of a MAIA job shows a main content area and a sidebar with multiple tabs. The first tab shows general information about the job, including all the parameters that were used. The second and third tabs belong to the training proposals stage and are enabled once the job progresses to this stage. These tabs are visible only if novelty detection was chosen as the method to obtain training data. The fourth and fifth tabs belong to the annotation candidates stage and are enabled once the job progresses to this stage.
- Continue reading about MAIA in the articles about the individual stages. You can start with the first stage: novelty detection. + Continue reading about MAIA in the articles about the methods to obtain training data. You can start with the first method: novelty detection.
- A description of the last MAIA stage. + Reviewing the annotation candidates from instance segmentation.
- A MAIA job is finished when it proceeded from the instance segmentation to the annotation candidates stage. In this stage you can review the annotation candidates that were generated in the previous stage and convert them to real annotations. Similar to the training proposals stage, this is done in two steps the selection of annotation candidates and the refinement of annotation candidates. + A MAIA job is finished when it proceeded from the instance segmentation to the annotation candidates stage. In this stage you can review the annotation candidates that were generated in the previous stage and convert them to real annotations. Similar to the training proposals stage of novelty detection, this is done in two steps: the selection of annotation candidates and the refinement of annotation candidates.
- The selection of annotation candidates is very similar to the selection of training proposals. When you open the select annotation candidates tab in the sidebar, the annotation candidates are loaded and their image thumbnails are displayed in a regular grid. Please refer to the respective manual article to learn how to interact with the thumbnail grid. + The selection of annotation candidates is very similar to the selection of training proposals of the novelty detection method. When you open the select annotation candidates tab in the sidebar, the annotation candidates are loaded and their image thumbnails are displayed in a regular grid. Please refer to the respective manual article to learn how to interact with the thumbnail grid.
@@ -39,7 +39,7 @@
- The refinement step for annotation candidates is also very similar to the refinement step for training proposals. You cycle through all selected annotation candidates and modify the circle of each annotation candidate to fit to the object or region that it should mark. Please refer to the respective manual article to learn how to interact with the refinement tool. + The refinement step for annotation candidates is also very similar to the refinement step for training proposals of the novelty detection method. You cycle through all selected annotation candidates and modify the circle of each annotation candidate to fit to the object or region that it should mark. Please refer to the respective manual article to learn how to interact with the refinement tool.
@@ -67,9 +67,11 @@
+ Using existing annotations to obtain training data. +
++ This method allows you to choose existing annotations in the same volume as training data for the instance segmentation stage. All annotations will be converted to circles and the new MAIA job will immediately proceed to the instance segmentation stage. +
+ ++ To show the configurable parameters, click on the button below the form. +
+ ++ By default, all annotations are used. +
+ ++ Use the input field to select one or more labels. If present, only annotations with the chosen labels are used as training data for the MAIA job. If no labels are chosen, all annotations are used. +
+ +- A description of the third MAIA stage and the configurable parameters. + The automatic instance segmentation.
- The third stage of a MAIA job processes all images of a volume with a supervised instance segmentation method (Mask R-CNN). This method uses the training proposals that you have selected and refined in the previous stage of the MAIA job to learn a model for what you determined to be interesting objects or regions in the images. The instance segmentation method produces a set of "annotation candidates", which are image regions that the method found to be interesting based on your provided training proposals. When the instance segmentation is finished, the MAIA job will continue to the next stage in which you can manually review the annotation candidates. + The third stage of a MAIA job processes all images of a volume with a supervised instance segmentation method (Mask R-CNN). This method uses the training proposals that were obtained with one of the three methods (novelty detection, existing annotations or knowledge transfer) to learn a model for what you determined to be interesting objects or regions in the images. The instance segmentation method produces a set of "annotation candidates", which are image regions that the method found to be interesting based on your provided training proposals. When the instance segmentation is finished, the MAIA job will continue to the next stage in which you can manually review the annotation candidates.
@@ -21,37 +21,31 @@ The configurable parameters for this stage are not shown by default in the form to submit a new MAIA job. Click on the button below the form to show the parameters for the instance segmentation stage.
-
- Integer greater than or equal to 1
. Default 20
+ By default, the training scheme of UnKnoT [2] is used.
- Time spent on training only the head layers of Mask R-CNN for instance segmentation. This is faster and should be a higher number than epochs (all).
+ A series of training steps consisting of layers to train, the number of epochs and the learning rate to use. Training should begin with the heads
layers. The learning rate should decrease with subsequent steps.
- Integer greater than or equal to 1
. Default 10
-
- Time spent on training all layers of Mask R-CNN for instance segmentation. This is slower and should be a lower number than epochs (head). -
+ Using knowledge transfer to obtain training data. +
+ ++ This method allows you to choose existing annotations of another volume as training data for the instance segmentation stage. This is done using the "knowledge transfer" method UnKnoT [1]. All annotations will be converted to circles and the new MAIA job will immediately proceed to the instance segmentation stage. This method can only be used if distance to ground information is available for all images of the volume of the MAIA job. +
+ ++ To show all configurable parameters, click on the button below the form. +
+ The volume of which to use annotations as training data for the instance segmentation stage. Only volumes with distance to ground information for all images can be selected. The annotations should show the same or very similar object classes as those that should be found with the MAIA job. +
+ ++ By default, all annotations are used. +
+ ++ Use the input field to select one or more labels. If present, only annotations with the chosen labels are used as training data for the MAIA job. If no labels are chosen, all annotations are used. +
+ +- A description of the first MAIA stage and the configurable parameters. + Using novelty detection to obtain training data.
- The first stage of a MAIA job processes all images of a volume with an unsupervised novelty detection method. The novelty detection method attempts to find "interesting" objects or regions in the images, which are called "training proposals". The novelty detection acts without any prior knowledge of what is actually defined as interesting by you or anyone who wants to explore the images. Hence, the quality or meaningfulness of the training proposals may vary dramatically, depending on the images themselves and on what you are looking for. + This method to obtain training data processes all images of a volume with an unsupervised novelty detection method. The novelty detection method attempts to find "interesting" objects or regions in the images, which are called "training proposals". The novelty detection acts without any prior knowledge of what is actually defined as interesting by you or anyone who wants to explore the images. Hence, the quality or meaningfulness of the training proposals may vary dramatically, depending on the images themselves and on what you are looking for.
- To make the novelty detection as flexible as possible, there are many parameters that can be configured before a new MAIA job is submitted. You might have to try out a few parameter combinations before the novelty detection produces meaningful training proposals. In cases where the novelty detection produces too few meaningful training proposals or does not work at all, you can augment the training proposals with your own annotations or skip the novelty detection altogether. + To make the novelty detection as flexible as possible, there are many parameters that can be configured before a new MAIA job is submitted. You might have to try out a few parameter combinations before the novelty detection produces meaningful training proposals. In cases where the novelty detection produces too few meaningful training proposals or does not work at all, you can try one of the other methods to obtain training data: existing annotations or knowledge transfer.
@@ -21,7 +21,7 @@
- By default only the two parameters for the novelty detection are shown, that are the most likely to be modified for each new job. To show all configurable parameters, click on the button below the form. + By default, only the one parameter for the novelty detection that is most likely to be modified for each new job is shown. To show all configurable parameters, click on the button below the form.
- If you already have existing annotations for the volume or the novelty detection does not produce (enough) meaningful training proposals, you can select the checkbox "Use existing annotations" before you submit a new MAIA job. When this checkbox is checked, the existing annotations are also presented as training proposals in the second MAIA stage. -
- -- If the checkbox is selected, a new input appears that allows you to limit the used existing annotations to one or more labels. This way you can select only those annotations that make sense as training proposals. If you do not choose any label, all existing annotations are used as training proposals. -
- -- If you chose to use existing annotations as training proposals, a new checkbox to "Skip novelty detection" appears. Select this checkbox if you do not want to run the novelty detection at all and only want to use the existing annotations as training proposals. -
- -- Please note that the new MAIA job is shown as "running novelty detection" in the job overview page even if you chose to skip novelty detection. Just refresh the job overview page after a few seconds and the job should have proceeded to the second MAIA stage. -
-- A description of the second MAIA stage. + Reviewing the training proposals from novelty detection.
When one of your MAIA jobs proceeds from novelty detection to the training proposals stage, you will get a notification from BIIGLE. In this stage, the MAIA job requires manual interaction from you before it can proceed to the next stage. This is done in two steps, the selection of training proposals and the refinement of training proposals. @@ -127,12 +127,14 @@
When the training proposals have been submitted, the MAIA job automatically proceeds to the instance segmentation stage.
-10.1371/journal.pone.0207498
+
+ UnKnoT
+ M. Zurowietz and T. W. Nattkemper, "Unsupervised Knowledge Transfer for Object Detection in Marine Environmental Monitoring and Exploration,"
in IEEE Access, vol. 8, pp. 143558-143568, 2020, doi: 10.1109/ACCESS.2020.3014441
.
+
- A description of the first MAIA stage and the configurable parameters. + Using novelty detection to obtain training data. +
+ ++ Using existing annotations to obtain training data. +
+ ++ Using knowledge transfer to obtain training data.
- A description of the second MAIA stage. + Reviewing the training proposals from novelty detection.
- A description of the third MAIA stage and the configurable parameters. + The automatic instance segmentation.
- A description of the last MAIA stage. + Reviewing the annotation candidates from instance segmentation.
diff --git a/src/resources/views/show.blade.php b/src/resources/views/show.blade.php index 3a13ea6..dcdaeb7 100644 --- a/src/resources/views/show.blade.php +++ b/src/resources/views/show.blade.php @@ -48,16 +48,18 @@created {{$job->created_at->diffForHumans()}} by {{$job->user->firstname}} {{$job->user->lastname}}
-- Novelty Detection - @if ($job->shouldSkipNoveltyDetection()) - (skipped) - @endif - | -
---|
+ Novelty Detection + | +|||
---|---|---|---|
Clusters | {{Arr::get($job->params, 'nd_clusters')}} |
@@ -51,33 +48,92 @@
Ignore radius | {{Arr::get($job->params, 'nd_ignore_radius')}} |
- used existing annotations - | -
Instance Segmentation | -|
---|---|
Training epochs (head) | -{{Arr::get($job->params, 'is_epochs_head')}} |
-
Training epochs (all) | -{{Arr::get($job->params, 'is_epochs_all')}} |
-
+ Existing annotations + | +|
---|---|
Restricted to label IDs: {{implode(', ', Arr::get($job->params, 'oa_restrict_labels', []))}} | + @else +Using all annotations of this volume. | + @endif +
+ Knowledge transfer + | +|
---|---|
Restricted to label IDs: {{implode(', ', Arr::get($job->params, 'kt_restrict_labels', []))}} of volume {{$v ? $v->name : $volumeId}}. | + @else +Using all annotations of volume {{$v ? $v->name : $volumeId}}. | + @endif +
Instance Segmentation Training scheme |
+ ||
---|---|---|
Layers | +Epochs | +Learning rate | +
{{$step['layers']}} |
+ {{$step['epochs']}} |
+ {{$step['learning_rate']}} |
+
Instance Segmentation | +|
---|---|
Training epochs (head) | +{{Arr::get($job->params, 'is_epochs_head')}} |
+
Training epochs (all) | +{{Arr::get($job->params, 'is_epochs_all')}} |
+