From 3e01ed885d59d5c719f8f05cf3ca94dc6cb6ff37 Mon Sep 17 00:00:00 2001 From: Peter Nemere Date: Fri, 1 Dec 2023 14:49:15 +1000 Subject: [PATCH] Added auto-sharing config for dataset importer --- api/dataimport/for-trigger_test.go | 67 ++++++++++++++----- .../converters/jplbreadboard/import.go | 10 +++ .../internal/dataConvertModels/autoShare.go | 9 +++ .../internal/dataConvertModels/models.go | 3 + .../internal/importerutils/fileReadHelpers.go | 2 + api/dataimport/internal/output/output.go | 49 +++++++++++--- api/dataimport/internal/output/summary.go | 5 +- .../models/breadboardImportParams.go | 1 + .../Archive_OK/dataset-bucket/.gitignore | 3 +- .../ManualEM_OK/dataset-bucket/.gitignore | 3 +- .../Manual_OK/dataset-bucket/.gitignore | 3 +- .../Manual_OK2/dataset-bucket/.gitignore | 3 +- api/dbCollections/collections.go | 1 + api/notificationSender/notifications.go | 2 +- api/quantification/importCSV.go | 2 +- api/specialUserIds/userIds.go | 5 ++ api/ws/handlers/element-set.go | 2 +- api/ws/handlers/expression-group.go | 2 +- api/ws/handlers/expression.go | 2 +- api/ws/handlers/module.go | 2 +- api/ws/handlers/roi.go | 2 +- api/ws/handlers/scan.go | 2 + api/ws/handlers/screen-configuration.go | 2 +- api/ws/wsHelpers/ownership.go | 30 +++------ core/gdsfilename/fmFileNameMeta.go | 9 +-- internal/cmd-line-tools/v3-importer/images.go | 2 +- 26 files changed, 156 insertions(+), 67 deletions(-) create mode 100644 api/dataimport/internal/dataConvertModels/autoShare.go create mode 100644 api/specialUserIds/userIds.go diff --git a/api/dataimport/for-trigger_test.go b/api/dataimport/for-trigger_test.go index bb6053f4..7129473f 100644 --- a/api/dataimport/for-trigger_test.go +++ b/api/dataimport/for-trigger_test.go @@ -22,17 +22,21 @@ import ( "fmt" "os" "strings" + "testing" + "github.com/pixlise/core/v3/api/dataimport/internal/dataConvertModels" "github.com/pixlise/core/v3/api/dataimport/internal/importerutils" "github.com/pixlise/core/v3/api/dbCollections" + "github.com/pixlise/core/v3/api/specialUserIds" "github.com/pixlise/core/v3/core/fileaccess" "github.com/pixlise/core/v3/core/logger" "github.com/pixlise/core/v3/core/wstestlib" + protos "github.com/pixlise/core/v3/generated-protos" "go.mongodb.org/mongo-driver/mongo" "google.golang.org/protobuf/encoding/protojson" ) -func initTest(testDir string) (fileaccess.FileAccess, *logger.StdOutLoggerForTest, string, string, string, string, *mongo.Database) { +func initTest(testDir string, autoShareCreatorId string, autoShareCreatorGroupEditor string) (fileaccess.FileAccess, *logger.StdOutLoggerForTest, string, string, string, string, *mongo.Database) { remoteFS := &fileaccess.FSAccess{} log := &logger.StdOutLoggerForTest{} envName := "unit-test" @@ -47,13 +51,26 @@ func initTest(testDir string) (fileaccess.FileAccess, *logger.StdOutLoggerForTes db.Collection(dbCollections.ImagesName).Drop(ctx) db.Collection(dbCollections.ScansName).Drop(ctx) db.Collection(dbCollections.ScanDefaultImagesName).Drop(ctx) + db.Collection(dbCollections.ScanAutoShareName).Drop(ctx) + + // Insert an item if configured to + if len(autoShareCreatorId) > 0 { + item := dataConvertModels.AutoShareConfigItem{ + Sharer: autoShareCreatorId, + Editors: &protos.UserGroupList{ + GroupIds: []string{autoShareCreatorGroupEditor}, + }, + } + + db.Collection(dbCollections.ScanAutoShareName).InsertOne(ctx, &item) + } return remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db } // Import unknown dataset (simulate trigger by OCS pipeline), file goes to archive, then all files 
downloaded from archive, dataset create fails due to unknown data type func Example_ImportForTrigger_OCS_Archive_BadData() { - remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Archive_BadData") + remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Archive_BadData", specialUserIds.PIXLISESystemUserId, "PIXLFMGroupId") // In case it ran before, delete the file from dataset bucket, otherwise we will end for the wrong reason os.Remove(datasetBucket + "/Archive/70000_069-02-09-2021-06-25-13.zip") @@ -127,7 +144,7 @@ func Example_ImportForTrigger_OCS_Archive_BadData() { // Import FM-style (simulate trigger by OCS pipeline), file already in archive, so should do nothing func Example_ImportForTrigger_OCS_Archive_Exists() { - remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Archive_Exists") + remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Archive_Exists", specialUserIds.PIXLISESystemUserId, "PIXLFMGroupId") trigger := `{ "Records": [ { @@ -217,7 +234,7 @@ func printArchiveOKLogOutput(log *logger.StdOutLoggerForTest, db *mongo.Database // Import FM-style (simulate trigger by OCS pipeline), file goes to archive, then all files downloaded from archive and dataset created func Example_ImportForTrigger_OCS_Archive_OK() { - remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Archive_OK") + remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Archive_OK", specialUserIds.PIXLISESystemUserId, "PIXLFMGroupId") // In case it ran before, delete the file from dataset bucket, otherwise we will end for the wrong reason os.Remove(datasetBucket + "/Archive/048300551-27-06-2021-09-52-25.zip") @@ -278,12 +295,12 @@ func Example_ImportForTrigger_OCS_Archive_OK() { // Logged "Diffraction db saved successfully": true // Logged "Applying custom title: Naltsos": true // Logged "Matched aligned image: PCCR0577_0718181212_000MSA_N029000020073728500030LUD01.tif, offset(0, 0), scale(1, 1). 
Match for aligned index: 0": true - // |{"id":"048300551","title":"Naltsos","dataTypes":[{"dataType":"SD_IMAGE","count":5},{"dataType":"SD_RGBU","count":1},{"dataType":"SD_XRF","count":242}],"instrument":"PIXL_FM","instrumentConfig":"PIXL","meta":{"DriveID":"1712","RTT":"048300551","SCLK":"678031418","SOL":"0125","Site":"","SiteID":"4","Target":"","TargetID":"?"},"contentCounts":{"BulkSpectra":2,"DwellSpectra":0,"MaxSpectra":2,"NormalSpectra":242,"PseudoIntensities":121}} + // |{"id":"048300551","title":"Naltsos","dataTypes":[{"dataType":"SD_IMAGE","count":5},{"dataType":"SD_RGBU","count":1},{"dataType":"SD_XRF","count":242}],"instrument":"PIXL_FM","instrumentConfig":"PIXL","meta":{"DriveID":"1712","RTT":"048300551","SCLK":"678031418","SOL":"0125","Site":"","SiteID":"4","Target":"","TargetID":"?"},"contentCounts":{"BulkSpectra":2,"DwellSpectra":0,"MaxSpectra":2,"NormalSpectra":242,"PseudoIntensities":121},"creatorUserId":"PIXLISEImport"} } // Import FM-style (simulate trigger by dataset edit screen), should create dataset with custom name+image func Example_ImportForTrigger_OCS_DatasetEdit() { - remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Archive_OK") + remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Archive_OK", specialUserIds.PIXLISESystemUserId, "PIXLFMGroupId") // To save from checking in 2 sets of the same zip files for this and Example_ImportForTrigger_OCS_Archive_OK, here we copy // the archive files from the Archive_OK test to here. @@ -331,7 +348,7 @@ func Example_ImportForTrigger_OCS_DatasetEdit() { // Logged "Diffraction db saved successfully": true // Logged "Applying custom title: Naltsos": true // Logged "Matched aligned image: PCCR0577_0718181212_000MSA_N029000020073728500030LUD01.tif, offset(0, 0), scale(1, 1). 
Match for aligned index: 0": true - // |{"id":"048300551","title":"Naltsos","dataTypes":[{"dataType":"SD_IMAGE","count":5},{"dataType":"SD_RGBU","count":1},{"dataType":"SD_XRF","count":242}],"instrument":"PIXL_FM","instrumentConfig":"PIXL","meta":{"DriveID":"1712","RTT":"048300551","SCLK":"678031418","SOL":"0125","Site":"","SiteID":"4","Target":"","TargetID":"?"},"contentCounts":{"BulkSpectra":2,"DwellSpectra":0,"MaxSpectra":2,"NormalSpectra":242,"PseudoIntensities":121}} + // |{"id":"048300551","title":"Naltsos","dataTypes":[{"dataType":"SD_IMAGE","count":5},{"dataType":"SD_RGBU","count":1},{"dataType":"SD_XRF","count":242}],"instrument":"PIXL_FM","instrumentConfig":"PIXL","meta":{"DriveID":"1712","RTT":"048300551","SCLK":"678031418","SOL":"0125","Site":"","SiteID":"4","Target":"","TargetID":"?"},"contentCounts":{"BulkSpectra":2,"DwellSpectra":0,"MaxSpectra":2,"NormalSpectra":242,"PseudoIntensities":121},"creatorUserId":"PIXLISEImport"} } func printManualOKLogOutput(log *logger.StdOutLoggerForTest, db *mongo.Database, datasetId string, fileCount uint32) { @@ -359,9 +376,10 @@ func printManualOKLogOutput(log *logger.StdOutLoggerForTest, db *mongo.Database, summary, err := importerutils.ReadScanItem(datasetId, db) if err != nil { fmt.Println("Failed to read dataset summary file") + } else { + // Clear the time stamp so it doesn't change next time we run test + summary.TimestampUnixSec = 0 } - // Clear the time stamp so it doesn't change next time we run test - summary.TimestampUnixSec = 0 b, err := protojson.Marshal(summary) s := strings.ReplaceAll(string(b), " ", "") @@ -369,8 +387,8 @@ func printManualOKLogOutput(log *logger.StdOutLoggerForTest, db *mongo.Database, } // Import a breadboard dataset from manual uploaded zip file -func Example_ImportForTrigger_Manual() { - remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Manual_OK") +func Example_ImportForTrigger_Manual_JPL() { + remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Manual_OK", specialUserIds.JPLImport, "JPLTestUserGroupId") trigger := `{ "datasetID": "test1234", @@ -397,12 +415,12 @@ func Example_ImportForTrigger_Manual() { // Logged "WARNING: No main context image determined": true // Logged "Diffraction db saved successfully": true // Logged "Warning: No import.json found, defaults will be used": true - // |{"id":"test1234","title":"test1234","dataTypes":[{"dataType":"SD_XRF","count":2520}],"instrument":"JPL_BREADBOARD","instrumentConfig":"Breadboard","meta":{"DriveID":"0","RTT":"","SCLK":"0","SOL":"","Site":"","SiteID":"0","Target":"","TargetID":"0"},"contentCounts":{"BulkSpectra":2,"DwellSpectra":0,"MaxSpectra":2,"NormalSpectra":2520,"PseudoIntensities":0}} + // |{"id":"test1234","title":"test1234","dataTypes":[{"dataType":"SD_XRF","count":2520}],"instrument":"JPL_BREADBOARD","instrumentConfig":"Breadboard","meta":{"DriveID":"0","RTT":"","SCLK":"0","SOL":"","Site":"","SiteID":"0","Target":"","TargetID":"0"},"contentCounts":{"BulkSpectra":2,"DwellSpectra":0,"MaxSpectra":2,"NormalSpectra":2520,"PseudoIntensities":0},"creatorUserId":"JPLImport"} } // Import a breadboard dataset from manual uploaded zip file func Example_ImportForTrigger_Manual_SBU() { - remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Manual_OK2") + remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Manual_OK2", specialUserIds.SBUImport, "SBUTestUserGroupId") trigger := `{ "datasetID": "test1234sbu", @@ -429,12 +447,29 @@ func 
Example_ImportForTrigger_Manual_SBU() {
 	// Logged "WARNING: No main context image determined": true
 	// Logged "Diffraction db saved successfully": true
 	// Logged "Warning: No import.json found, defaults will be used": false
-	// |{"id":"test1234sbu","title":"test1234sbu","dataTypes":[{"dataType":"SD_XRF","count":2520}],"instrument":"SBU_BREADBOARD","instrumentConfig":"StonyBrookBreadboard","meta":{"DriveID":"0","RTT":"","SCLK":"0","SOL":"","Site":"","SiteID":"0","Target":"","TargetID":"0"},"contentCounts":{"BulkSpectra":2,"DwellSpectra":0,"MaxSpectra":2,"NormalSpectra":2520,"PseudoIntensities":0}}
+	// |{"id":"test1234sbu","title":"test1234sbu","dataTypes":[{"dataType":"SD_XRF","count":2520}],"instrument":"SBU_BREADBOARD","instrumentConfig":"StonyBrookBreadboard","meta":{"DriveID":"0","RTT":"","SCLK":"0","SOL":"","Site":"","SiteID":"0","Target":"","TargetID":"0"},"contentCounts":{"BulkSpectra":2,"DwellSpectra":0,"MaxSpectra":2,"NormalSpectra":2520,"PseudoIntensities":0},"creatorUserId":"SBUImport"}
+}
+
+// Import a breadboard dataset from a manually uploaded zip file with no auto-share entry configured for its creator, expecting the import to fail
+func Test_ImportForTrigger_Manual_SBU_NoAutoShare(t *testing.T) {
+	remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Manual_OK2", specialUserIds.JPLImport, "JPLTestUserGroupId")
+
+	trigger := `{
+    "datasetID": "test1234sbu",
+    "logID": "dataimport-unittest123sbu"
+}`
+
+	_, err := ImportForTrigger([]byte(trigger), envName, configBucket, datasetBucket, manualBucket, db, log, remoteFS)
+
+	// Make sure we got the expected error
+	if !strings.HasSuffix(err.Error(), "Cannot work out groups to auto-share imported dataset with") {
+		t.Errorf("ImportForTrigger didn't return expected error")
+	}
 }
 
 // Import a breadboard dataset from manual uploaded zip file
 func Example_ImportForTrigger_Manual_EM() {
-	remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("ManualEM_OK")
+	remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("ManualEM_OK", specialUserIds.PIXLISESystemUserId, "PIXLFMGroupId")
 
 	trigger := `{
     "datasetID": "048300551",
@@ -461,7 +496,7 @@ func Example_ImportForTrigger_Manual_EM() {
 	// Logged "WARNING: No main context image determined": false
 	// Logged "Diffraction db saved successfully": true
 	// Logged "Warning: No import.json found, defaults will be used": false
-	// |{"id":"048300551","title":"048300551","dataTypes":[{"dataType":"SD_IMAGE","count":4},{"dataType":"SD_XRF","count":242}],"instrument":"PIXL_EM","instrumentConfig":"PIXL-EM-E2E","meta":{"DriveID":"1712","RTT":"048300551","SCLK":"678031418","SOL":"0125","Site":"","SiteID":"4","Target":"","TargetID":"?"},"contentCounts":{"BulkSpectra":2,"DwellSpectra":0,"MaxSpectra":2,"NormalSpectra":242,"PseudoIntensities":121}}
+	// |{"id":"048300551","title":"048300551","dataTypes":[{"dataType":"SD_IMAGE","count":4},{"dataType":"SD_XRF","count":242}],"instrument":"PIXL_EM","instrumentConfig":"PIXL-EM-E2E","meta":{"DriveID":"1712","RTT":"048300551","SCLK":"678031418","SOL":"0125","Site":"","SiteID":"4","Target":"","TargetID":"?"},"contentCounts":{"BulkSpectra":2,"DwellSpectra":0,"MaxSpectra":2,"NormalSpectra":242,"PseudoIntensities":121},"creatorUserId":"PIXLISEImport"}
 }
 
 /* NOT TESTED YET, because it's not done yet!
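
The tests above rely on initTest seeding a scanAutoShare document keyed by creator ID; the import then either picks up that config or fails. A minimal standalone sketch of the lookup that PIXLISEDataSaver.Save performs (see the output.go changes below) - the package and function names here are hypothetical, while the collection name, _id key, AutoShareConfigItem type and error strings are taken from this patch:

package sketch

import (
	"context"
	"fmt"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"

	"github.com/pixlise/core/v3/api/dataimport/internal/dataConvertModels"
	"github.com/pixlise/core/v3/api/dbCollections"
)

// lookupAutoShare fetches the viewer/editor groups configured for a creator user ID.
// A missing document is a hard failure - the same condition
// Test_ImportForTrigger_Manual_SBU_NoAutoShare asserts on.
func lookupAutoShare(db *mongo.Database, creatorUserId string) (*dataConvertModels.AutoShareConfigItem, error) {
	autoShare := &dataConvertModels.AutoShareConfigItem{}

	result := db.Collection(dbCollections.ScanAutoShareName).FindOne(context.TODO(), bson.D{{"_id", creatorUserId}})
	if result.Err() != nil {
		if result.Err() == mongo.ErrNoDocuments {
			return nil, fmt.Errorf("Cannot work out groups to auto-share imported dataset with")
		}
		return nil, result.Err()
	}

	if err := result.Decode(autoShare); err != nil {
		return nil, fmt.Errorf("Failed to decode auto share configuration: %v", err)
	}
	return autoShare, nil
}

Note that the NoAutoShare test seeds an entry for specialUserIds.JPLImport but imports SBU data, whose creator resolves to specialUserIds.SBUImport (see import.go below), so the lookup takes the ErrNoDocuments branch.
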
diff --git a/api/dataimport/internal/converters/jplbreadboard/import.go b/api/dataimport/internal/converters/jplbreadboard/import.go index cc38310f..8b28e9b8 100644 --- a/api/dataimport/internal/converters/jplbreadboard/import.go +++ b/api/dataimport/internal/converters/jplbreadboard/import.go @@ -27,6 +27,7 @@ import ( "github.com/pixlise/core/v3/api/dataimport/internal/dataConvertModels" "github.com/pixlise/core/v3/api/dataimport/internal/importerutils" dataimportModel "github.com/pixlise/core/v3/api/dataimport/models" + "github.com/pixlise/core/v3/api/specialUserIds" "github.com/pixlise/core/v3/core/fileaccess" "github.com/pixlise/core/v3/core/logger" protos "github.com/pixlise/core/v3/generated-protos" @@ -211,6 +212,14 @@ func (m MSATestData) Import(importPath string, pseudoIntensityRangesPath string, instr = protos.ScanInstrument_SBU_BREADBOARD // OK hack for now... } + creator := params.CreatorUserId + if len(creator) <= 0 { + creator = specialUserIds.JPLImport + if instr == protos.ScanInstrument_SBU_BREADBOARD { + creator = specialUserIds.SBUImport + } + } + data := &dataConvertModels.OutputData{ DatasetID: params.DatasetID, Instrument: instr, @@ -220,6 +229,7 @@ func (m MSATestData) Import(importPath string, pseudoIntensityRangesPath string, PseudoRanges: pseudoIntensityRanges, PerPMCData: map[int32]*dataConvertModels.PMCData{}, MatchedAlignedImages: matchedAlignedImages, + CreatorUserId: creator, } data.SetPMCData(beamLookup, hkData, spectraLookup, contextImgsPerPMC, pseudoIntensityData, map[int32]string{}) diff --git a/api/dataimport/internal/dataConvertModels/autoShare.go b/api/dataimport/internal/dataConvertModels/autoShare.go new file mode 100644 index 00000000..5689f8d6 --- /dev/null +++ b/api/dataimport/internal/dataConvertModels/autoShare.go @@ -0,0 +1,9 @@ +package dataConvertModels + +import protos "github.com/pixlise/core/v3/generated-protos" + +type AutoShareConfigItem struct { + Sharer string `bson:"_id"` // Either a user ID or some special string that the importer sets + Viewers *protos.UserGroupList + Editors *protos.UserGroupList +} diff --git a/api/dataimport/internal/dataConvertModels/models.go b/api/dataimport/internal/dataConvertModels/models.go index e4651d43..7b201b25 100644 --- a/api/dataimport/internal/dataConvertModels/models.go +++ b/api/dataimport/internal/dataConvertModels/models.go @@ -239,6 +239,9 @@ type OutputData struct { // Images that reference and match aligned images MatchedAlignedImages []MatchedAlignedImageMeta + + // Who created this dataset + CreatorUserId string } // EnsurePMC - allocates an item to store data for the given PMC if doesn't already exist diff --git a/api/dataimport/internal/importerutils/fileReadHelpers.go b/api/dataimport/internal/importerutils/fileReadHelpers.go index 1a12ef18..d07be2c0 100644 --- a/api/dataimport/internal/importerutils/fileReadHelpers.go +++ b/api/dataimport/internal/importerutils/fileReadHelpers.go @@ -26,6 +26,7 @@ import ( "os" "github.com/pixlise/core/v3/api/dataimport/internal/dataConvertModels" + "github.com/pixlise/core/v3/api/specialUserIds" "github.com/pixlise/core/v3/core/gdsfilename" "github.com/pixlise/core/v3/core/logger" protos "github.com/pixlise/core/v3/generated-protos" @@ -172,6 +173,7 @@ func MakeFMDatasetOutput( RGBUImages: rgbuImages, DISCOImages: discoImages, MatchedAlignedImages: matchedAlignedImages, + CreatorUserId: specialUserIds.PIXLISESystemUserId, // Auto-importing FM datasets, we don't show a creator... TODO: what about EM though?? 
 	}
 
 	data.SetPMCData(beamLookup, hkData, locSpectraLookup, contextImgsPerPMC, pseudoIntensityData, map[int32]string{})
diff --git a/api/dataimport/internal/output/output.go b/api/dataimport/internal/output/output.go
index b3ad5d52..ebc41a90 100644
--- a/api/dataimport/internal/output/output.go
+++ b/api/dataimport/internal/output/output.go
@@ -33,6 +33,7 @@ import (
 	"github.com/pixlise/core/v3/api/dataimport/internal/dataConvertModels"
 	"github.com/pixlise/core/v3/api/dbCollections"
 	"github.com/pixlise/core/v3/api/filepaths"
+	"github.com/pixlise/core/v3/api/ws/wsHelpers"
 	"github.com/pixlise/core/v3/core/fileaccess"
 	"github.com/pixlise/core/v3/core/gdsfilename"
 	"github.com/pixlise/core/v3/core/logger"
@@ -268,6 +269,28 @@ func (s *PIXLISEDataSaver) Save(
 		}
 	}
 
+	// Look up who to auto-share with based on creator ID
+	coll := db.Collection(dbCollections.ScanAutoShareName)
+	optFind := options.FindOne()
+
+	autoShare := &dataConvertModels.AutoShareConfigItem{}
+	sharer := data.CreatorUserId
+
+	autoShareResult := coll.FindOne(context.TODO(), bson.D{{"_id", sharer}}, optFind)
+	if autoShareResult.Err() != nil {
+		// No auto-share config exists for this creator, so there is nobody to share the imported dataset with - fail the import here
+		if autoShareResult.Err() == mongo.ErrNoDocuments {
+			return fmt.Errorf("Cannot work out groups to auto-share imported dataset with")
+		}
+		return autoShareResult.Err()
+	} else {
+		err := autoShareResult.Decode(autoShare)
+
+		if err != nil {
+			return fmt.Errorf("Failed to decode auto share configuration: %v", err)
+		}
+	}
+
 	// We work out the default file name when copying output images now... because if there isn't one, we may pick one during that process.
 	defaultContextImage, err := copyImagesToOutput(contextImageSrcPath, []string{data.DatasetID}, data.DatasetID, outPath, data, db, jobLog)
 	exp.MainContextImage = defaultContextImage
@@ -295,19 +318,14 @@ func (s *PIXLISEDataSaver) Save(
 		return fmt.Errorf("Failed to get dataset file size for: %v", outFilePath)
 	}
 
-	summaryData := makeSummaryFileContent(&exp, data.DatasetID, data.Instrument, data.Meta, int(fi.Size()), creationUnixTimeSec)
+	summaryData := makeSummaryFileContent(&exp, data.DatasetID, data.Instrument, data.Meta, int(fi.Size()), creationUnixTimeSec, data.CreatorUserId)
 
 	jobLog.Infof("Writing summary to DB...")
 
-	coll := db.Collection(dbCollections.ScansName)
-	filter := bson.D{{"_id", summaryData.Id}}
+	coll = db.Collection(dbCollections.ScansName)
 	opt := options.Update().SetUpsert(true)
 
-	dbItem := bson.D{
-		{"$set", summaryData},
-	}
-
-	result, err := coll.UpdateOne(context.TODO(), filter, dbItem, opt)
+	result, err := coll.UpdateOne(context.TODO(), bson.D{{"_id", summaryData.Id}}, bson.D{{"$set", summaryData}}, opt)
 
 	//result, err := db.Collection(dbCollections.ScansName).InsertOne(context.TODO(), summaryData)
 	if err != nil {
@@ -317,6 +335,21 @@ func (s *PIXLISEDataSaver) Save(
 		jobLog.Errorf("Expected summary write to create 1 upsert, got: %v", result.UpsertedCount)
 	}
 
+	// Set ownership
+	ownerItem, err := wsHelpers.MakeOwnerForWrite(summaryData.Id, protos.ObjectType_OT_SCAN, summaryData.CreatorUserId, creationUnixTimeSec)
+
+	ownerItem.Viewers = autoShare.Viewers
+	ownerItem.Editors = autoShare.Editors
+
+	coll = db.Collection(dbCollections.OwnershipName)
+	opt = options.Update().SetUpsert(true)
+
+	result, err = coll.UpdateOne(context.TODO(), bson.D{{"_id", ownerItem.Id}}, bson.D{{"$set", ownerItem}}, opt)
+	if err != nil {
+		jobLog.Errorf("Failed to write ownership item to DB: %v", err)
+		return err
+	}
+
bulkSpectraCount := summaryData.ContentCounts["BulkSpectra"] maxSpectraCount := summaryData.ContentCounts["MaxSpectra"] diff --git a/api/dataimport/internal/output/summary.go b/api/dataimport/internal/output/summary.go index 6d036df2..aaa33886 100644 --- a/api/dataimport/internal/output/summary.go +++ b/api/dataimport/internal/output/summary.go @@ -31,7 +31,8 @@ func makeSummaryFileContent( sourceInstrument protos.ScanInstrument, meta dataConvertModels.FileMetaData, fileSize int, - creationUnixTimeSec int64) *protos.ScanItem { + creationUnixTimeSec int64, + creatorUserId string) *protos.ScanItem { contextImgCount := len(exp.AlignedContextImages) + len(exp.UnalignedContextImages) + len(exp.MatchedAlignedContextImages) tiffContextImgCount := 0 @@ -101,7 +102,7 @@ func makeSummaryFileContent( TimestampUnixSec: uint32(creationUnixTimeSec), Meta: saveMeta, ContentCounts: contentCounts, - CreatorUserId: "", + CreatorUserId: creatorUserId, } return s } diff --git a/api/dataimport/models/breadboardImportParams.go b/api/dataimport/models/breadboardImportParams.go index 1b06328e..0c6c9f89 100644 --- a/api/dataimport/models/breadboardImportParams.go +++ b/api/dataimport/models/breadboardImportParams.go @@ -46,6 +46,7 @@ type BreadboardImportParams struct { OffsetB float32 `json:"ev_offset_b"` // eV calibration eV start offset (detector B) ExcludeNormalDwellSpectra bool `json:"exclude_normal_dwell"` // Hack for tactical datasets - load all MSAs to gen bulk sum, but dont save them in output SOL string `json:"sol"` // Might as well be able to specify SOL. Needed for first spectrum dataset on SOL13 + CreatorUserId string `json:"creatorUserId"` // Who created it } type DetectorChoice struct { diff --git a/api/dataimport/test-data/Archive_OK/dataset-bucket/.gitignore b/api/dataimport/test-data/Archive_OK/dataset-bucket/.gitignore index fc236cf8..03b870ab 100644 --- a/api/dataimport/test-data/Archive_OK/dataset-bucket/.gitignore +++ b/api/dataimport/test-data/Archive_OK/dataset-bucket/.gitignore @@ -1,2 +1 @@ -Datasets/ -DatasetSummaries/ +Scans/ diff --git a/api/dataimport/test-data/ManualEM_OK/dataset-bucket/.gitignore b/api/dataimport/test-data/ManualEM_OK/dataset-bucket/.gitignore index fc236cf8..03b870ab 100644 --- a/api/dataimport/test-data/ManualEM_OK/dataset-bucket/.gitignore +++ b/api/dataimport/test-data/ManualEM_OK/dataset-bucket/.gitignore @@ -1,2 +1 @@ -Datasets/ -DatasetSummaries/ +Scans/ diff --git a/api/dataimport/test-data/Manual_OK/dataset-bucket/.gitignore b/api/dataimport/test-data/Manual_OK/dataset-bucket/.gitignore index fc236cf8..03b870ab 100644 --- a/api/dataimport/test-data/Manual_OK/dataset-bucket/.gitignore +++ b/api/dataimport/test-data/Manual_OK/dataset-bucket/.gitignore @@ -1,2 +1 @@ -Datasets/ -DatasetSummaries/ +Scans/ diff --git a/api/dataimport/test-data/Manual_OK2/dataset-bucket/.gitignore b/api/dataimport/test-data/Manual_OK2/dataset-bucket/.gitignore index fc236cf8..03b870ab 100644 --- a/api/dataimport/test-data/Manual_OK2/dataset-bucket/.gitignore +++ b/api/dataimport/test-data/Manual_OK2/dataset-bucket/.gitignore @@ -1,2 +1 @@ -Datasets/ -DatasetSummaries/ +Scans/ diff --git a/api/dbCollections/collections.go b/api/dbCollections/collections.go index b842a3aa..723e2b08 100644 --- a/api/dbCollections/collections.go +++ b/api/dbCollections/collections.go @@ -20,6 +20,7 @@ const QuantificationZStacksName = "quantificationZStacks" const RegionsOfInterestName = "regionsOfInterest" const UserROIDisplaySettings = "userROIDisplaySettings" const ScansName = "scans" +const 
ScanAutoShareName = "scanAutoShare" const ScanDefaultImagesName = "scanDefaultImages" const SelectionName = "selection" const TagsName = "tags" diff --git a/api/notificationSender/notifications.go b/api/notificationSender/notifications.go index 8f242bd8..f62fb66a 100644 --- a/api/notificationSender/notifications.go +++ b/api/notificationSender/notifications.go @@ -186,7 +186,7 @@ func (n *NotificationSender) sendEmail(notif *protos.UserNotification, userId st

%v

-`) +`, notif.Subject, user.Info.Name, notif.Contents, unsub) awsutil.SESSendEmail(user.Info.Email, "UTF-8", text, html, notif.Subject, "info@mail.pixlise.org", []string{}, []string{}) } diff --git a/api/quantification/importCSV.go b/api/quantification/importCSV.go index 19a4894d..e8943e66 100644 --- a/api/quantification/importCSV.go +++ b/api/quantification/importCSV.go @@ -72,7 +72,7 @@ func ImportQuantCSV( } // Finally, write the summary data to DB along with ownership entry - ownerItem, err := wsHelpers.MakeOwnerForWrite(quantId, protos.ObjectType_OT_QUANTIFICATION, hctx) + ownerItem, err := wsHelpers.MakeOwnerForWrite(quantId, protos.ObjectType_OT_QUANTIFICATION, hctx.SessUser.User.Id, hctx.Svcs.TimeStamper.GetTimeNowSec()) if err != nil { return quantId, err } diff --git a/api/specialUserIds/userIds.go b/api/specialUserIds/userIds.go new file mode 100644 index 00000000..0965d490 --- /dev/null +++ b/api/specialUserIds/userIds.go @@ -0,0 +1,5 @@ +package specialUserIds + +var PIXLISESystemUserId = "PIXLISEImport" +var JPLImport = "JPLImport" +var SBUImport = "SBUImport" diff --git a/api/ws/handlers/element-set.go b/api/ws/handlers/element-set.go index 886fa1ab..e871dd2c 100644 --- a/api/ws/handlers/element-set.go +++ b/api/ws/handlers/element-set.go @@ -101,7 +101,7 @@ func createElementSet(elementSet *protos.ElementSet, hctx wsHelpers.HandlerConte elementSet.Id = id // We need to create an ownership item along with it - ownerItem, err := wsHelpers.MakeOwnerForWrite(id, protos.ObjectType_OT_ELEMENT_SET, hctx) + ownerItem, err := wsHelpers.MakeOwnerForWrite(id, protos.ObjectType_OT_ELEMENT_SET, hctx.SessUser.User.Id, hctx.Svcs.TimeStamper.GetTimeNowSec()) if err != nil { return nil, err } diff --git a/api/ws/handlers/expression-group.go b/api/ws/handlers/expression-group.go index b07ade7e..dd6b78f8 100644 --- a/api/ws/handlers/expression-group.go +++ b/api/ws/handlers/expression-group.go @@ -106,7 +106,7 @@ func createExpressionGroup(egroup *protos.ExpressionGroup, hctx wsHelpers.Handle egroup.Id = id // We need to create an ownership item along with it - ownerItem, err := wsHelpers.MakeOwnerForWrite(id, protos.ObjectType_OT_EXPRESSION_GROUP, hctx) + ownerItem, err := wsHelpers.MakeOwnerForWrite(id, protos.ObjectType_OT_EXPRESSION_GROUP, hctx.SessUser.User.Id, hctx.Svcs.TimeStamper.GetTimeNowSec()) if err != nil { return nil, err } diff --git a/api/ws/handlers/expression.go b/api/ws/handlers/expression.go index 7abd5254..169c40cc 100644 --- a/api/ws/handlers/expression.go +++ b/api/ws/handlers/expression.go @@ -110,7 +110,7 @@ func createExpression(expr *protos.DataExpression, hctx wsHelpers.HandlerContext expr.Id = id // We need to create an ownership item along with it - ownerItem, err := wsHelpers.MakeOwnerForWrite(id, protos.ObjectType_OT_EXPRESSION, hctx) + ownerItem, err := wsHelpers.MakeOwnerForWrite(id, protos.ObjectType_OT_EXPRESSION, hctx.SessUser.User.Id, hctx.Svcs.TimeStamper.GetTimeNowSec()) if err != nil { return nil, err } diff --git a/api/ws/handlers/module.go b/api/ws/handlers/module.go index 94977aed..e19b315b 100644 --- a/api/ws/handlers/module.go +++ b/api/ws/handlers/module.go @@ -262,7 +262,7 @@ func createModule(name string, comments string, intialSourceCode string, tags [] } // We need to create an ownership item along with it - ownerItem, err := wsHelpers.MakeOwnerForWrite(modId, protos.ObjectType_OT_DATA_MODULE, hctx) + ownerItem, err := wsHelpers.MakeOwnerForWrite(modId, protos.ObjectType_OT_DATA_MODULE, hctx.SessUser.User.Id, 
hctx.Svcs.TimeStamper.GetTimeNowSec()) if err != nil { return nil, err } diff --git a/api/ws/handlers/roi.go b/api/ws/handlers/roi.go index 63c6edef..1d21643a 100644 --- a/api/ws/handlers/roi.go +++ b/api/ws/handlers/roi.go @@ -180,7 +180,7 @@ func createROI(roi *protos.ROIItem, hctx wsHelpers.HandlerContext, needMistEntry roi.Id = id // We need to create an ownership item along with it - ownerItem, err := wsHelpers.MakeOwnerForWrite(id, protos.ObjectType_OT_ROI, hctx) + ownerItem, err := wsHelpers.MakeOwnerForWrite(id, protos.ObjectType_OT_ROI, hctx.SessUser.User.Id, hctx.Svcs.TimeStamper.GetTimeNowSec()) if err != nil { return nil, err } diff --git a/api/ws/handlers/scan.go b/api/ws/handlers/scan.go index c1f27761..a9db4f71 100644 --- a/api/ws/handlers/scan.go +++ b/api/ws/handlers/scan.go @@ -342,6 +342,8 @@ func HandleScanUploadReq(req *protos.ScanUploadReq, hctx wsHelpers.HandlerContex TargetID: "0", SiteID: 0, + CreatorUserId: hctx.SessUser.User.Id, + // The rest we set to the dataset ID DatasetID: datasetID, //Site: datasetID, diff --git a/api/ws/handlers/screen-configuration.go b/api/ws/handlers/screen-configuration.go index 4d83fac5..74691926 100644 --- a/api/ws/handlers/screen-configuration.go +++ b/api/ws/handlers/screen-configuration.go @@ -156,7 +156,7 @@ func writeScreenConfiguration(screenConfig *protos.ScreenConfiguration, hctx wsH } // We need to create an ownership item along with it - owner, err = wsHelpers.MakeOwnerForWrite(screenConfig.Id, protos.ObjectType_OT_SCREEN_CONFIG, hctx) + owner, err = wsHelpers.MakeOwnerForWrite(screenConfig.Id, protos.ObjectType_OT_SCREEN_CONFIG, hctx.SessUser.User.Id, hctx.Svcs.TimeStamper.GetTimeNowSec()) if err != nil { return nil, err } diff --git a/api/ws/wsHelpers/ownership.go b/api/ws/wsHelpers/ownership.go index 388ccbdd..439074f9 100644 --- a/api/ws/wsHelpers/ownership.go +++ b/api/ws/wsHelpers/ownership.go @@ -14,29 +14,19 @@ import ( "go.mongodb.org/mongo-driver/mongo/options" ) -// TODO: maybe we can pass in some generic thing that has an owner field? -// NO: we cannot. 
https://github.com/golang/go/issues/51259 -/* - -type HasOwnerField interface { - Owner *protos.Ownership -} -func MakeOwnerForWrite(writable HasOwnerField, s *melody.Session, svcs *services.APIServices) (*protos.Ownership, error) { - if writable.Owner != nil { -*/ - -func MakeOwnerForWrite(objectId string, objectType protos.ObjectType, hctx HandlerContext) (*protos.OwnershipItem, error) { - ts := uint32(hctx.Svcs.TimeStamper.GetTimeNowSec()) - +func MakeOwnerForWrite(objectId string, objectType protos.ObjectType, creatorUserId string, createTimeUnixSec int64) (*protos.OwnershipItem, error) { ownerItem := &protos.OwnershipItem{ Id: objectId, ObjectType: objectType, - CreatorUserId: hctx.SessUser.User.Id, - CreatedUnixSec: ts, - //Viewers: , - Editors: &protos.UserGroupList{ - UserIds: []string{hctx.SessUser.User.Id}, - }, + CreatedUnixSec: uint32(createTimeUnixSec), + } + + if len(creatorUserId) > 0 { + ownerItem.CreatorUserId = creatorUserId + //ownerItem.Viewers + ownerItem.Editors = &protos.UserGroupList{ + UserIds: []string{creatorUserId}, + } } return ownerItem, nil diff --git a/core/gdsfilename/fmFileNameMeta.go b/core/gdsfilename/fmFileNameMeta.go index 49279a0a..0274c46e 100644 --- a/core/gdsfilename/fmFileNameMeta.go +++ b/core/gdsfilename/fmFileNameMeta.go @@ -48,7 +48,7 @@ type FileNameMeta struct { camSpecific string // PIXL MCC format PPPP = PMC downsample string // 0=1x1, 1=2x2, 2=4x4, 3=8x8 compression string // 00=thumbnail, 01-99,A0=JPG quality, I1-I9=ICER, LI,LL,LM,LU=lossless - producer string // J=JPL, P=Principal investigator + Producer string // J=JPL, P=Principal investigator versionStr string // 01-99=1-9, A0-A9=100-109, AA-AZ=110-135, B0-B9=136-145, __=out of range // . always before... // EXT - file extension, which we get through conventional Go filepath.Ext() @@ -138,7 +138,7 @@ func (m FileNameMeta) ToString() string { s.WriteString(m.camSpecific) s.WriteString(m.downsample) s.WriteString(m.compression) - s.WriteString(m.producer) + s.WriteString(m.Producer) s.WriteString(m.versionStr) return s.String() @@ -157,6 +157,7 @@ func (m FileNameMeta) Timestamp() (int32, error) { return int32(i), err } */ + func ParseFileName(fileName string) (FileNameMeta, error) { // We often get passed paths so here we ensure we're just dealing with the file name at the end fileName = filepath.Base(fileName) @@ -186,7 +187,7 @@ func ParseFileName(fileName string) (FileNameMeta, error) { result.camSpecific = fileName[44:48] result.downsample = fileName[48:49] result.compression = fileName[49:51] - result.producer = fileName[51:52] + result.Producer = fileName[51:52] result.versionStr = fileName[52:54] // "." 
= fileName[53:54] // EXT = fileName[54:57] @@ -213,7 +214,7 @@ func GetLatestFileVersions(fileNames []string, jobLog logger.ILogger) map[string // Store the key as all the fields we're NOT interested in comparing: // this way if we have 2 TIF files with different PMCs, we won't think we need to ignore some due to versioning - nonVerFields := ext + meta.instrument + meta.ColourFilter + meta.ProdType + meta.siteStr + meta.driveStr + meta.seqRTT + meta.camSpecific + meta.downsample + meta.compression + meta.producer + nonVerFields := ext + meta.instrument + meta.ColourFilter + meta.ProdType + meta.siteStr + meta.driveStr + meta.seqRTT + meta.camSpecific + meta.downsample + meta.compression + meta.Producer if _, ok := byNonVerFields[nonVerFields]; !ok { // Add an empty map for this diff --git a/internal/cmd-line-tools/v3-importer/images.go b/internal/cmd-line-tools/v3-importer/images.go index 5cb741f2..3d064fba 100644 --- a/internal/cmd-line-tools/v3-importer/images.go +++ b/internal/cmd-line-tools/v3-importer/images.go @@ -264,7 +264,7 @@ func getImageSaveName(scanId string, imageName string) string { // If the image name can't be parsed as a gds filename, we prepend the dataset ID to make it more unique. This is not done // on GDS filenames because they would already contain the RTT making them unique, and we also want to keep those // searchable/equivalent to names in Mars Viewer - if _, err := gdsfilename.ParseFileName(imageName); err != nil { + if fields, err := gdsfilename.ParseFileName(imageName); err != nil || fields.Producer == "D" || fields.ProdType == "MSA" || fields.ProdType == "VIS" { imageName = scanId + "-" + imageName } return imageName
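
The MakeOwnerForWrite rework is what lets the importer, which has no user session, create ownership entries: creator ID and timestamp are now explicit parameters rather than being read from a HandlerContext. A sketch of the two calling shapes under this patch - the identifiers come from the diff, and the surrounding variables (id, hctx, summaryData, creationUnixTimeSec) are assumed to be in scope as they are in the touched files:

// Interactive handlers pass the session user and the current time:
ownerItem, err := wsHelpers.MakeOwnerForWrite(id, protos.ObjectType_OT_ROI,
	hctx.SessUser.User.Id, hctx.Svcs.TimeStamper.GetTimeNowSec())

// The importer passes the scan's creator (a real user ID or a specialUserIds
// value) and the dataset creation time:
ownerItem, err = wsHelpers.MakeOwnerForWrite(summaryData.Id, protos.ObjectType_OT_SCAN,
	summaryData.CreatorUserId, creationUnixTimeSec)

// An empty creator ID leaves CreatorUserId and Editors unset on the returned
// item rather than inventing an owner, per the new length check in ownership.go.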