diff --git a/api/dataimport/internal/converters/pixlem/import.go b/api/dataimport/internal/converters/pixlem/import.go
index debc5f64..688d500c 100644
--- a/api/dataimport/internal/converters/pixlem/import.go
+++ b/api/dataimport/internal/converters/pixlem/import.go
@@ -152,6 +152,9 @@ func (p PIXLEM) Import(importPath string, pseudoIntensityRangesPath string, data
 
 	log.Infof("Imported scan with RTT: %v", rtt)
 	data.DatasetID += "_em" // To ensure we don't overwrite real datasets
+
+	// NOTE: PIXL EM import - we clear everything before importing so we don't end up with e.g. images left over from a bad previous import
+	data.ClearBeforeSave = true
 
 	return data, filepath.Join(importPath, zipName, zipName), nil
 }
@@ -243,8 +246,8 @@ func importEMData(creatorId string, rtt string, beamLocPath string, hkPath strin
 
 	product := "???"
 
 	// Use current date encoded as a test sol
-	// A=2017, 'A' is 65 ascii
-	sol := fmt.Sprintf("%v%v", string(65+time.Now().Year()-2017), time.Now().YearDay())
+	// A=2016, 'A' is 65 ascii
+	sol := fmt.Sprintf("%v%v", string(65+time.Now().Year()-2016), time.Now().YearDay())
 	ftype := "??" // PE
 	producer := "J"
diff --git a/api/dataimport/internal/converters/pixlfm/import.go b/api/dataimport/internal/converters/pixlfm/import.go
index 49a85cbf..049ca4d5 100644
--- a/api/dataimport/internal/converters/pixlfm/import.go
+++ b/api/dataimport/internal/converters/pixlfm/import.go
@@ -393,6 +393,9 @@ func (p PIXLFM) Import(importPath string, pseudoIntensityRangesPath string, data
 		log,
 	)
 
+	// Explicitly set to NOT clear before import - this way we keep existing images around
+	data.ClearBeforeSave = false
+
 	if err != nil {
 		return nil, "", err
 	}
diff --git a/api/dataimport/internal/dataConvertModels/models.go b/api/dataimport/internal/dataConvertModels/models.go
index 767cc23f..721d5257 100644
--- a/api/dataimport/internal/dataConvertModels/models.go
+++ b/api/dataimport/internal/dataConvertModels/models.go
@@ -245,6 +245,10 @@ type OutputData struct {
 
 	// Beam generator version number
 	BeamVersion uint32
+
+	// ClearBeforeSave - set by the importer to indicate that existing data for this dataset must be cleared
+	// when saving. Mainly for images - we don't want stale ones from a previous import to stick around.
+	ClearBeforeSave bool
 }
 
 // EnsurePMC - allocates an item to store data for the given PMC if doesn't already exist
diff --git a/api/dataimport/internal/output/output.go b/api/dataimport/internal/output/output.go
index 7fd08b69..ee0261d2 100644
--- a/api/dataimport/internal/output/output.go
+++ b/api/dataimport/internal/output/output.go
@@ -318,6 +318,67 @@ func (s *PIXLISEDataSaver) Save(
 		}
 	}
 
+	// Delete images and other DB entries if need be
+	if data.ClearBeforeSave {
+		coll := db.Collection(dbCollections.ImagesName)
+
+		opts := options.Find().SetProjection(bson.D{
+			{Key: "id", Value: true},
+		})
+
+		cursor, err := coll.Find(context.TODO(), bson.D{{Key: "originscanid", Value: data.DatasetID}}, opts)
+		if err != nil {
+			return fmt.Errorf("Failed to find images pre scan import for: %v. Error: %v", data.DatasetID, err)
+		}
+
+		images := []*protos.ScanImage{}
+		err = cursor.All(context.TODO(), &images)
+		if err != nil {
+			return err
+		}
+
+		// Gather all image paths (these are the IDs used by the image beam locations collection)
+		imageIds := []string{}
+		for _, img := range images {
+			imageIds = append(imageIds, img.ImagePath)
+		}
+
+		// Delete the images themselves
+		res, err := coll.DeleteMany(context.TODO(), bson.D{{Key: "originscanid", Value: data.DatasetID}})
+		if err != nil {
+			return fmt.Errorf("Failed to delete images pre scan import for: %v. Error: %v", data.DatasetID, err)
+		}
+
+		coll = db.Collection(dbCollections.ImageBeamLocationsName)
+		resBeam, err := coll.DeleteMany(context.TODO(), bson.M{"_id": bson.M{"$in": imageIds}})
+		if err != nil {
+			return fmt.Errorf("Failed to delete image beam locations pre scan import for: %v. Error: %v", data.DatasetID, err)
+		}
+
+		jobLog.Infof("Deleted %d images, %d image beam locations pre importing scan: %v...", res.DeletedCount, resBeam.DeletedCount, data.DatasetID)
+
+		// Delete from ownership, scan default images and the scan itself
+		coll = db.Collection(dbCollections.ScanDefaultImagesName)
+		resDefImg, err := coll.DeleteOne(context.TODO(), bson.M{"_id": data.DatasetID})
+		if err != nil {
+			return fmt.Errorf("Failed to delete scan default image for: %v. Error: %v", data.DatasetID, err)
+		}
+
+		coll = db.Collection(dbCollections.ScansName)
+		resScan, err := coll.DeleteOne(context.TODO(), bson.M{"_id": data.DatasetID})
+		if err != nil {
+			return fmt.Errorf("Failed to delete scan for: %v. Error: %v", data.DatasetID, err)
+		}
+
+		coll = db.Collection(dbCollections.OwnershipName)
+		resOwnership, err := coll.DeleteOne(context.TODO(), bson.M{"_id": data.DatasetID})
+		if err != nil {
+			return fmt.Errorf("Failed to delete ownership entry for: %v. Error: %v", data.DatasetID, err)
+		}
+
+		jobLog.Infof("Deleted %d scan default images, %d scans and %d ownership entries for scan: %v...", resDefImg.DeletedCount, resScan.DeletedCount, resOwnership.DeletedCount, data.DatasetID)
+	}
+
 	// We work out the default file name when copying output images now... because if there isn't one, we may pick one during that process.
 	defaultContextImage, err := copyImagesToOutput(contextImageSrcPath, []string{data.DatasetID}, data.DatasetID, outputImagesPath, data, db, jobLog)
 	if err != nil {