Added auto-sharing config for dataset importer
Peter Nemere committed Dec 1, 2023
1 parent f246b8b commit 3e01ed8
Showing 26 changed files with 156 additions and 67 deletions.
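
In short: imported datasets now record a creator user ID, and the importer auto-shares each new scan by looking up a per-creator config in the new scanAutoShare Mongo collection, copying its viewer/editor groups onto the scan's ownership record, and failing the import if no config exists. A condensed sketch of the new flow, assembled from the output.go changes below (not a verbatim excerpt; error handling trimmed):

// Condensed from PIXLISEDataSaver.Save() in api/dataimport/internal/output/output.go below
autoShare := &dataConvertModels.AutoShareConfigItem{}
result := db.Collection(dbCollections.ScanAutoShareName).FindOne(context.TODO(), bson.D{{"_id", data.CreatorUserId}})
if result.Err() == mongo.ErrNoDocuments {
	return fmt.Errorf("Cannot work out groups to auto-share imported dataset with")
}
if err := result.Decode(autoShare); err != nil {
	return err
}

// ...the scan summary is upserted into the Scans collection, then ownership is set:
ownerItem, err := wsHelpers.MakeOwnerForWrite(summaryData.Id, protos.ObjectType_OT_SCAN, summaryData.CreatorUserId, creationUnixTimeSec)
if err != nil {
	return err
}
ownerItem.Viewers = autoShare.Viewers
ownerItem.Editors = autoShare.Editors
db.Collection(dbCollections.OwnershipName).UpdateOne(context.TODO(), bson.D{{"_id", ownerItem.Id}}, bson.D{{"$set", ownerItem}}, options.Update().SetUpsert(true))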
67 changes: 51 additions & 16 deletions api/dataimport/for-trigger_test.go
@@ -22,17 +22,21 @@ import (
"fmt"
"os"
"strings"
"testing"

"github.com/pixlise/core/v3/api/dataimport/internal/dataConvertModels"
"github.com/pixlise/core/v3/api/dataimport/internal/importerutils"
"github.com/pixlise/core/v3/api/dbCollections"
"github.com/pixlise/core/v3/api/specialUserIds"
"github.com/pixlise/core/v3/core/fileaccess"
"github.com/pixlise/core/v3/core/logger"
"github.com/pixlise/core/v3/core/wstestlib"
protos "github.com/pixlise/core/v3/generated-protos"
"go.mongodb.org/mongo-driver/mongo"
"google.golang.org/protobuf/encoding/protojson"
)

func initTest(testDir string) (fileaccess.FileAccess, *logger.StdOutLoggerForTest, string, string, string, string, *mongo.Database) {
func initTest(testDir string, autoShareCreatorId string, autoShareCreatorGroupEditor string) (fileaccess.FileAccess, *logger.StdOutLoggerForTest, string, string, string, string, *mongo.Database) {
remoteFS := &fileaccess.FSAccess{}
log := &logger.StdOutLoggerForTest{}
envName := "unit-test"
@@ -47,13 +51,26 @@ func initTest(testDir string) (fileaccess.FileAccess, *logger.StdOutLoggerForTest
db.Collection(dbCollections.ImagesName).Drop(ctx)
db.Collection(dbCollections.ScansName).Drop(ctx)
db.Collection(dbCollections.ScanDefaultImagesName).Drop(ctx)
db.Collection(dbCollections.ScanAutoShareName).Drop(ctx)

// Insert an item if configured to
if len(autoShareCreatorId) > 0 {
item := dataConvertModels.AutoShareConfigItem{
Sharer: autoShareCreatorId,
Editors: &protos.UserGroupList{
GroupIds: []string{autoShareCreatorGroupEditor},
},
}

db.Collection(dbCollections.ScanAutoShareName).InsertOne(ctx, &item)
}

return remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db
}

// Import unknown dataset (simulate trigger by OCS pipeline), file goes to archive, then all files downloaded from archive, dataset creation fails due to unknown data type
func Example_ImportForTrigger_OCS_Archive_BadData() {
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Archive_BadData")
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Archive_BadData", specialUserIds.PIXLISESystemUserId, "PIXLFMGroupId")

// In case it ran before, delete the file from dataset bucket, otherwise we will fail for the wrong reason
os.Remove(datasetBucket + "/Archive/70000_069-02-09-2021-06-25-13.zip")
@@ -127,7 +144,7 @@ func Example_ImportForTrigger_OCS_Archive_BadData() {

// Import FM-style (simulate trigger by OCS pipeline), file already in archive, so should do nothing
func Example_ImportForTrigger_OCS_Archive_Exists() {
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Archive_Exists")
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Archive_Exists", specialUserIds.PIXLISESystemUserId, "PIXLFMGroupId")
trigger := `{
"Records": [
{
@@ -217,7 +234,7 @@ func printArchiveOKLogOutput(log *logger.StdOutLoggerForTest, db *mongo.Database

// Import FM-style (simulate trigger by OCS pipeline), file goes to archive, then all files downloaded from archive and dataset created
func Example_ImportForTrigger_OCS_Archive_OK() {
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Archive_OK")
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Archive_OK", specialUserIds.PIXLISESystemUserId, "PIXLFMGroupId")
// In case it ran before, delete the file from dataset bucket, otherwise we will fail for the wrong reason
os.Remove(datasetBucket + "/Archive/048300551-27-06-2021-09-52-25.zip")

@@ -278,12 +295,12 @@ func Example_ImportForTrigger_OCS_Archive_OK() {
// Logged "Diffraction db saved successfully": true
// Logged "Applying custom title: Naltsos": true
// Logged "Matched aligned image: PCCR0577_0718181212_000MSA_N029000020073728500030LUD01.tif, offset(0, 0), scale(1, 1). Match for aligned index: 0": true
// <nil>|{"id":"048300551","title":"Naltsos","dataTypes":[{"dataType":"SD_IMAGE","count":5},{"dataType":"SD_RGBU","count":1},{"dataType":"SD_XRF","count":242}],"instrument":"PIXL_FM","instrumentConfig":"PIXL","meta":{"DriveID":"1712","RTT":"048300551","SCLK":"678031418","SOL":"0125","Site":"","SiteID":"4","Target":"","TargetID":"?"},"contentCounts":{"BulkSpectra":2,"DwellSpectra":0,"MaxSpectra":2,"NormalSpectra":242,"PseudoIntensities":121}}
// <nil>|{"id":"048300551","title":"Naltsos","dataTypes":[{"dataType":"SD_IMAGE","count":5},{"dataType":"SD_RGBU","count":1},{"dataType":"SD_XRF","count":242}],"instrument":"PIXL_FM","instrumentConfig":"PIXL","meta":{"DriveID":"1712","RTT":"048300551","SCLK":"678031418","SOL":"0125","Site":"","SiteID":"4","Target":"","TargetID":"?"},"contentCounts":{"BulkSpectra":2,"DwellSpectra":0,"MaxSpectra":2,"NormalSpectra":242,"PseudoIntensities":121},"creatorUserId":"PIXLISEImport"}
}

// Import FM-style (simulate trigger by dataset edit screen), should create dataset with custom name+image
func Example_ImportForTrigger_OCS_DatasetEdit() {
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Archive_OK")
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Archive_OK", specialUserIds.PIXLISESystemUserId, "PIXLFMGroupId")

// To avoid checking in 2 sets of the same zip files for this and Example_ImportForTrigger_OCS_Archive_OK, here we copy
// the archive files from the Archive_OK test.
@@ -331,7 +348,7 @@ func Example_ImportForTrigger_OCS_DatasetEdit() {
// Logged "Diffraction db saved successfully": true
// Logged "Applying custom title: Naltsos": true
// Logged "Matched aligned image: PCCR0577_0718181212_000MSA_N029000020073728500030LUD01.tif, offset(0, 0), scale(1, 1). Match for aligned index: 0": true
// <nil>|{"id":"048300551","title":"Naltsos","dataTypes":[{"dataType":"SD_IMAGE","count":5},{"dataType":"SD_RGBU","count":1},{"dataType":"SD_XRF","count":242}],"instrument":"PIXL_FM","instrumentConfig":"PIXL","meta":{"DriveID":"1712","RTT":"048300551","SCLK":"678031418","SOL":"0125","Site":"","SiteID":"4","Target":"","TargetID":"?"},"contentCounts":{"BulkSpectra":2,"DwellSpectra":0,"MaxSpectra":2,"NormalSpectra":242,"PseudoIntensities":121}}
// <nil>|{"id":"048300551","title":"Naltsos","dataTypes":[{"dataType":"SD_IMAGE","count":5},{"dataType":"SD_RGBU","count":1},{"dataType":"SD_XRF","count":242}],"instrument":"PIXL_FM","instrumentConfig":"PIXL","meta":{"DriveID":"1712","RTT":"048300551","SCLK":"678031418","SOL":"0125","Site":"","SiteID":"4","Target":"","TargetID":"?"},"contentCounts":{"BulkSpectra":2,"DwellSpectra":0,"MaxSpectra":2,"NormalSpectra":242,"PseudoIntensities":121},"creatorUserId":"PIXLISEImport"}
}

func printManualOKLogOutput(log *logger.StdOutLoggerForTest, db *mongo.Database, datasetId string, fileCount uint32) {
@@ -359,18 +376,19 @@ func printManualOKLogOutput(log *logger.StdOutLoggerForTest, db *mongo.Database,
summary, err := importerutils.ReadScanItem(datasetId, db)
if err != nil {
fmt.Println("Failed to read dataset summary file")
} else {
// Clear the time stamp so it doesn't change next time we run test
summary.TimestampUnixSec = 0
}
// Clear the time stamp so it doesn't change next time we run test
summary.TimestampUnixSec = 0

b, err := protojson.Marshal(summary)
s := strings.ReplaceAll(string(b), " ", "")
fmt.Printf("%v|%v\n", err, s)
}

// Import a breadboard dataset from a manually uploaded zip file
func Example_ImportForTrigger_Manual() {
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Manual_OK")
func Example_ImportForTrigger_Manual_JPL() {
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Manual_OK", specialUserIds.JPLImport, "JPLTestUserGroupId")

trigger := `{
"datasetID": "test1234",
@@ -397,12 +415,12 @@ func Example_ImportForTrigger_Manual() {
// Logged "WARNING: No main context image determined": true
// Logged "Diffraction db saved successfully": true
// Logged "Warning: No import.json found, defaults will be used": true
// <nil>|{"id":"test1234","title":"test1234","dataTypes":[{"dataType":"SD_XRF","count":2520}],"instrument":"JPL_BREADBOARD","instrumentConfig":"Breadboard","meta":{"DriveID":"0","RTT":"","SCLK":"0","SOL":"","Site":"","SiteID":"0","Target":"","TargetID":"0"},"contentCounts":{"BulkSpectra":2,"DwellSpectra":0,"MaxSpectra":2,"NormalSpectra":2520,"PseudoIntensities":0}}
// <nil>|{"id":"test1234","title":"test1234","dataTypes":[{"dataType":"SD_XRF","count":2520}],"instrument":"JPL_BREADBOARD","instrumentConfig":"Breadboard","meta":{"DriveID":"0","RTT":"","SCLK":"0","SOL":"","Site":"","SiteID":"0","Target":"","TargetID":"0"},"contentCounts":{"BulkSpectra":2,"DwellSpectra":0,"MaxSpectra":2,"NormalSpectra":2520,"PseudoIntensities":0},"creatorUserId":"JPLImport"}
}

// Import a breadboard dataset from a manually uploaded zip file
func Example_ImportForTrigger_Manual_SBU() {
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Manual_OK2")
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Manual_OK2", specialUserIds.SBUImport, "SBUTestUserGroupId")

trigger := `{
"datasetID": "test1234sbu",
@@ -429,12 +447,29 @@ func Example_ImportForTrigger_Manual_SBU() {
// Logged "WARNING: No main context image determined": true
// Logged "Diffraction db saved successfully": true
// Logged "Warning: No import.json found, defaults will be used": false
// <nil>|{"id":"test1234sbu","title":"test1234sbu","dataTypes":[{"dataType":"SD_XRF","count":2520}],"instrument":"SBU_BREADBOARD","instrumentConfig":"StonyBrookBreadboard","meta":{"DriveID":"0","RTT":"","SCLK":"0","SOL":"","Site":"","SiteID":"0","Target":"","TargetID":"0"},"contentCounts":{"BulkSpectra":2,"DwellSpectra":0,"MaxSpectra":2,"NormalSpectra":2520,"PseudoIntensities":0}}
// <nil>|{"id":"test1234sbu","title":"test1234sbu","dataTypes":[{"dataType":"SD_XRF","count":2520}],"instrument":"SBU_BREADBOARD","instrumentConfig":"StonyBrookBreadboard","meta":{"DriveID":"0","RTT":"","SCLK":"0","SOL":"","Site":"","SiteID":"0","Target":"","TargetID":"0"},"contentCounts":{"BulkSpectra":2,"DwellSpectra":0,"MaxSpectra":2,"NormalSpectra":2520,"PseudoIntensities":0},"creatorUserId":"SBUImport"}
}

// Import a breadboard dataset from a manually uploaded zip file, where no auto-share config matches the creator - should fail
func Test_ImportForTrigger_Manual_SBU_NoAutoShare(t *testing.T) {
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Manual_OK2", specialUserIds.JPLImport, "JPLTestUserGroupId")

trigger := `{
"datasetID": "test1234sbu",
"logID": "dataimport-unittest123sbu"
}`

_, err := ImportForTrigger([]byte(trigger), envName, configBucket, datasetBucket, manualBucket, db, log, remoteFS)

// Make sure we got the error
if !strings.HasSuffix(err.Error(), "Cannot work out groups to auto-share imported dataset with") {
t.Errorf("ImportForTrigger didn't return expected error")
}
}

// Import an EM dataset from a manually uploaded zip file
func Example_ImportForTrigger_Manual_EM() {
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("ManualEM_OK")
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("ManualEM_OK", specialUserIds.PIXLISESystemUserId, "PIXLFMGroupId")

trigger := `{
"datasetID": "048300551",
@@ -461,7 +496,7 @@ func Example_ImportForTrigger_Manual_EM() {
// Logged "WARNING: No main context image determined": false
// Logged "Diffraction db saved successfully": true
// Logged "Warning: No import.json found, defaults will be used": false
// <nil>|{"id":"048300551","title":"048300551","dataTypes":[{"dataType":"SD_IMAGE","count":4},{"dataType":"SD_XRF","count":242}],"instrument":"PIXL_EM","instrumentConfig":"PIXL-EM-E2E","meta":{"DriveID":"1712","RTT":"048300551","SCLK":"678031418","SOL":"0125","Site":"","SiteID":"4","Target":"","TargetID":"?"},"contentCounts":{"BulkSpectra":2,"DwellSpectra":0,"MaxSpectra":2,"NormalSpectra":242,"PseudoIntensities":121}}
// <nil>|{"id":"048300551","title":"048300551","dataTypes":[{"dataType":"SD_IMAGE","count":4},{"dataType":"SD_XRF","count":242}],"instrument":"PIXL_EM","instrumentConfig":"PIXL-EM-E2E","meta":{"DriveID":"1712","RTT":"048300551","SCLK":"678031418","SOL":"0125","Site":"","SiteID":"4","Target":"","TargetID":"?"},"contentCounts":{"BulkSpectra":2,"DwellSpectra":0,"MaxSpectra":2,"NormalSpectra":242,"PseudoIntensities":121},"creatorUserId":"PIXLISEImport"}
}

/* NOT TESTED YET, because it's not done yet!
10 changes: 10 additions & 0 deletions api/dataimport/internal/converters/jplbreadboard/import.go
Expand Up @@ -27,6 +27,7 @@ import (
"github.com/pixlise/core/v3/api/dataimport/internal/dataConvertModels"
"github.com/pixlise/core/v3/api/dataimport/internal/importerutils"
dataimportModel "github.com/pixlise/core/v3/api/dataimport/models"
"github.com/pixlise/core/v3/api/specialUserIds"
"github.com/pixlise/core/v3/core/fileaccess"
"github.com/pixlise/core/v3/core/logger"
protos "github.com/pixlise/core/v3/generated-protos"
@@ -211,6 +212,14 @@ func (m MSATestData) Import(importPath string, pseudoIntensityRangesPath string,
instr = protos.ScanInstrument_SBU_BREADBOARD // OK hack for now...
}

creator := params.CreatorUserId
if len(creator) <= 0 {
creator = specialUserIds.JPLImport
if instr == protos.ScanInstrument_SBU_BREADBOARD {
creator = specialUserIds.SBUImport
}
}

data := &dataConvertModels.OutputData{
DatasetID: params.DatasetID,
Instrument: instr,
@@ -220,6 +229,7 @@ func (m MSATestData) Import(importPath string, pseudoIntensityRangesPath string,
PseudoRanges: pseudoIntensityRanges,
PerPMCData: map[int32]*dataConvertModels.PMCData{},
MatchedAlignedImages: matchedAlignedImages,
CreatorUserId: creator,
}

data.SetPMCData(beamLookup, hkData, spectraLookup, contextImgsPerPMC, pseudoIntensityData, map[int32]string{})
9 changes: 9 additions & 0 deletions api/dataimport/internal/dataConvertModels/autoShare.go
@@ -0,0 +1,9 @@
package dataConvertModels

import protos "github.com/pixlise/core/v3/generated-protos"

type AutoShareConfigItem struct {
Sharer string `bson:"_id"` // Either a user ID or some special string that the importer sets
Viewers *protos.UserGroupList
Editors *protos.UserGroupList
}
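
For reference, a minimal sketch of seeding one of these config documents, mirroring the unit-test setup in for-trigger_test.go above. All identifiers come from this commit; the group ID is a placeholder, and ctx/db are assumed to be a context.Context and a connected *mongo.Database:

// Sketch: give datasets imported by the JPL pipeline a default editor group.
// Sharer is stored as the Mongo _id and matched against OutputData.CreatorUserId at import time.
item := dataConvertModels.AutoShareConfigItem{
	Sharer: specialUserIds.JPLImport,
	Editors: &protos.UserGroupList{
		GroupIds: []string{"JPLTestUserGroupId"}, // placeholder group ID
	},
}
_, err := db.Collection(dbCollections.ScanAutoShareName).InsertOne(ctx, &item)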
3 changes: 3 additions & 0 deletions api/dataimport/internal/dataConvertModels/models.go
@@ -239,6 +239,9 @@ type OutputData struct {

// Images that reference and match aligned images
MatchedAlignedImages []MatchedAlignedImageMeta

// Who created this dataset
CreatorUserId string
}

// EnsurePMC - allocates an item to store data for the given PMC if doesn't already exist
2 changes: 2 additions & 0 deletions api/dataimport/internal/importerutils/fileReadHelpers.go
@@ -26,6 +26,7 @@ import (
"os"

"github.com/pixlise/core/v3/api/dataimport/internal/dataConvertModels"
"github.com/pixlise/core/v3/api/specialUserIds"
"github.com/pixlise/core/v3/core/gdsfilename"
"github.com/pixlise/core/v3/core/logger"
protos "github.com/pixlise/core/v3/generated-protos"
@@ -172,6 +173,7 @@ func MakeFMDatasetOutput(
RGBUImages: rgbuImages,
DISCOImages: discoImages,
MatchedAlignedImages: matchedAlignedImages,
CreatorUserId: specialUserIds.PIXLISESystemUserId, // Auto-importing FM datasets, we don't show a creator... TODO: what about EM though??
}

data.SetPMCData(beamLookup, hkData, locSpectraLookup, contextImgsPerPMC, pseudoIntensityData, map[int32]string{})
49 changes: 41 additions & 8 deletions api/dataimport/internal/output/output.go
@@ -33,6 +33,7 @@ import (
"github.com/pixlise/core/v3/api/dataimport/internal/dataConvertModels"
"github.com/pixlise/core/v3/api/dbCollections"
"github.com/pixlise/core/v3/api/filepaths"
"github.com/pixlise/core/v3/api/ws/wsHelpers"
"github.com/pixlise/core/v3/core/fileaccess"
"github.com/pixlise/core/v3/core/gdsfilename"
"github.com/pixlise/core/v3/core/logger"
@@ -268,6 +269,28 @@ func (s *PIXLISEDataSaver) Save(
}
}

// Look up who to auto-share with based on creator ID
coll := db.Collection(dbCollections.ScanAutoShareName)
optFind := options.FindOne()

autoShare := &dataConvertModels.AutoShareConfigItem{}
sharer := data.CreatorUserId

autoShareResult := coll.FindOne(context.TODO(), bson.D{{"_id", sharer}}, optFind)
if autoShareResult.Err() != nil {
// We couldn't find anyone to auto-share with, so fail here
if autoShareResult.Err() == mongo.ErrNoDocuments {
return fmt.Errorf("Cannot work out groups to auto-share imported dataset with")
}
return autoShareResult.Err()
} else {
err := autoShareResult.Decode(autoShare)

if err != nil {
return fmt.Errorf("Failed to decode auto share configuration: %v", err)
}
}

// We work out the default file name when copying output images now... because if there isn't one, we may pick one during that process.
defaultContextImage, err := copyImagesToOutput(contextImageSrcPath, []string{data.DatasetID}, data.DatasetID, outPath, data, db, jobLog)
exp.MainContextImage = defaultContextImage
@@ -295,19 +318,14 @@ func (s *PIXLISEDataSaver) Save(
return fmt.Errorf("Failed to get dataset file size for: %v", outFilePath)
}

summaryData := makeSummaryFileContent(&exp, data.DatasetID, data.Instrument, data.Meta, int(fi.Size()), creationUnixTimeSec)
summaryData := makeSummaryFileContent(&exp, data.DatasetID, data.Instrument, data.Meta, int(fi.Size()), creationUnixTimeSec, data.CreatorUserId)

jobLog.Infof("Writing summary to DB...")

coll := db.Collection(dbCollections.ScansName)
filter := bson.D{{"_id", summaryData.Id}}
coll = db.Collection(dbCollections.ScansName)
opt := options.Update().SetUpsert(true)

dbItem := bson.D{
{"$set", summaryData},
}

result, err := coll.UpdateOne(context.TODO(), filter, dbItem, opt)
result, err := coll.UpdateOne(context.TODO(), bson.D{{"_id", summaryData.Id}}, bson.D{{"$set", summaryData}}, opt)

//result, err := db.Collection(dbCollections.ScansName).InsertOne(context.TODO(), summaryData)
if err != nil {
@@ -317,6 +335,21 @@
jobLog.Errorf("Expected summary write to create 1 upsert, got: %v", result.UpsertedCount)
}

// Set ownership
ownerItem, err := wsHelpers.MakeOwnerForWrite(summaryData.Id, protos.ObjectType_OT_SCAN, summaryData.CreatorUserId, creationUnixTimeSec)
if err != nil {
return err
}

ownerItem.Viewers = autoShare.Viewers
ownerItem.Editors = autoShare.Editors

coll = db.Collection(dbCollections.OwnershipName)
opt = options.Update().SetUpsert(true)

result, err = coll.UpdateOne(context.TODO(), bson.D{{"_id", ownerItem.Id}}, bson.D{{"$set", ownerItem}}, opt)
if err != nil {
jobLog.Errorf("Failed to write ownership item to DB: %v", err)
return err
}

bulkSpectraCount := summaryData.ContentCounts["BulkSpectra"]
maxSpectraCount := summaryData.ContentCounts["MaxSpectra"]

5 changes: 3 additions & 2 deletions api/dataimport/internal/output/summary.go
@@ -31,7 +31,8 @@ func makeSummaryFileContent(
sourceInstrument protos.ScanInstrument,
meta dataConvertModels.FileMetaData,
fileSize int,
creationUnixTimeSec int64) *protos.ScanItem {
creationUnixTimeSec int64,
creatorUserId string) *protos.ScanItem {
contextImgCount := len(exp.AlignedContextImages) + len(exp.UnalignedContextImages) + len(exp.MatchedAlignedContextImages)
tiffContextImgCount := 0

@@ -101,7 +102,7 @@ func makeSummaryFileContent(
TimestampUnixSec: uint32(creationUnixTimeSec),
Meta: saveMeta,
ContentCounts: contentCounts,
CreatorUserId: "",
CreatorUserId: creatorUserId,
}
return s
}
1 change: 1 addition & 0 deletions api/dataimport/models/breadboardImportParams.go
@@ -46,6 +46,7 @@ type BreadboardImportParams struct {
OffsetB float32 `json:"ev_offset_b"` // eV calibration eV start offset (detector B)
ExcludeNormalDwellSpectra bool `json:"exclude_normal_dwell"` // Hack for tactical datasets - load all MSAs to gen bulk sum, but dont save them in output
SOL string `json:"sol"` // Might as well be able to specify SOL. Needed for first spectrum dataset on SOL13
CreatorUserId string `json:"creatorUserId"` // Who created it
}

type DetectorChoice struct {
@@ -1,2 +1 @@
Datasets/
DatasetSummaries/
Scans/
@@ -1,2 +1 @@
Datasets/
DatasetSummaries/
Scans/
3 changes: 1 addition & 2 deletions api/dataimport/test-data/Manual_OK/dataset-bucket/.gitignore
@@ -1,2 +1 @@
Datasets/
DatasetSummaries/
Scans/
@@ -1,2 +1 @@
Datasets/
DatasetSummaries/
Scans/