diff --git a/Makefile b/Makefile index 2fe9438e..fe4486d2 100644 --- a/Makefile +++ b/Makefile @@ -149,7 +149,6 @@ test-integration: DBS_API_PARAMETERS_FILE=../static/parameters.json \ DBS_READER_LEXICON_FILE=../static/lexicon_reader.json \ DBS_WRITER_LEXICON_FILE=../static/lexicon_writer.json \ - DBS_DB_FILE=./dbfile \ DBS_DB_FILE=/tmp/dbs-test.db \ INTEGRATION_DATA_FILE=./data/integration/integration_data.json \ BULKBLOCKS_DATA_FILE=./data/integration/bulkblocks_data.json \ diff --git a/dbs/datasets.go b/dbs/datasets.go index 78cad262..84da865d 100644 --- a/dbs/datasets.go +++ b/dbs/datasets.go @@ -13,6 +13,7 @@ import ( ) // Datasets API +// //gocyclo:ignore func (a *API) Datasets() error { if utils.VERBOSE > 1 { @@ -355,6 +356,7 @@ func (r *Datasets) Insert(tx *sql.Tx) error { } // Validate implementation of Datasets +// //gocyclo:ignore func (r *Datasets) Validate() error { if err := CheckPattern("dataset", r.DATASET); err != nil { @@ -474,6 +476,7 @@ type DatasetRecord struct { // - processing era info // - output module config info // - insert dataset info +// //gocyclo:ignore func (a *API) InsertDatasets() error { // read given input @@ -501,7 +504,7 @@ func (a *API) InsertDatasets() error { // start transaction tx, err := DB.Begin() if err != nil { - msg := fmt.Sprintf("unable to get DB transaction") + msg := "unable to get DB transaction" return Error(err, TransactionErrorCode, msg, "dbs.datasets.InsertDatasets") } defer tx.Rollback() @@ -661,60 +664,91 @@ func (a *API) InsertDatasets() error { return nil } +// Validate POST/PUT Parameters +func ValidateParameter(params Record, key string) (string, error) { + var value string + value, err := getSingleValue(params, key) + if err != nil { + return "", Error(err, ParseErrorCode, "", "dbs.datasets.UpdateDatasets") + } + if value == "" { + msg := fmt.Sprintf("invalid %s parameter", key) + return "", Error(InvalidParamErr, ParametersErrorCode, msg, "dbs.datasets.UpdateDatasets") + } + if err := 
CheckPattern(key, value); err != nil { + msg := fmt.Sprintf("%s parameter pattern invalid", key) + return "", Error(err, PatternErrorCode, msg, "dbs.datasets.UpdateDatasets") + } + return value, nil +} + // UpdateDatasets DBS API +// //gocyclo:ignore func (a *API) UpdateDatasets() error { - // get accessTypeID from Access dataset types table + var args []interface{} + + tmpl := make(Record) + tmpl["Owner"] = DBOWNER + tmpl["PhysicsGroup"] = false + tmpl["DatasetAccessType"] = false + + // validate parameters var createBy string - if v, ok := a.Params["create_by"]; ok { - switch t := v.(type) { - case string: - createBy = t - case []string: - createBy = t[0] + if _, ok := a.Params["create_by"]; ok { + v, err := ValidateParameter(a.Params, "create_by") + if err != nil { + return Error(err, ValidateErrorCode, "", "dbs.datasets.UpdateDatasets") } + createBy = v } + + date := time.Now().Unix() + var isValidDataset int64 var dataset string var datasetAccessType string - if v, ok := a.Params["dataset"]; ok { - switch t := v.(type) { - case string: - dataset = t - case []string: - dataset = t[0] + var physicsGroupName string + // validate dataset_access_type parameter + if _, ok := a.Params["dataset_access_type"]; ok { + tmpl["DatasetAccessType"] = true + v, err := ValidateParameter(a.Params, "dataset_access_type") + if err != nil { + return Error(err, ValidateErrorCode, "", "dbs.datasets.UpdateDatasets") } + datasetAccessType = v } - if v, ok := a.Params["dataset_access_type"]; ok { - switch t := v.(type) { - case string: - datasetAccessType = t - case []string: - datasetAccessType = t[0] + + // validate physics_group_name parameter + // ^[a-zA-Z0-9/][a-zA-Z0-9\\-_']*$ + if _, ok := a.Params["physics_group_name"]; ok { + tmpl["PhysicsGroup"] = true + v, err := ValidateParameter(a.Params, "physics_group_name") + if err != nil { + return Error(err, ValidateErrorCode, "", "dbs.datasets.UpdateDatasets") } + physicsGroupName = v } - date := time.Now().Unix() - // validate 
input parameters - if dataset == "" { - msg := "invalid dataset parameter" - return Error(InvalidParamErr, ParametersErrorCode, msg, "dbs.datasets.UpdateDatasets") - } - if createBy == "" { - msg := "invalid create_by parameter" - return Error(InvalidParamErr, ParametersErrorCode, msg, "dbs.datasets.UpdateDatasets") - } - if datasetAccessType == "" { - msg := "invalid datasetAccessType parameter" - return Error(InvalidParamErr, ParametersErrorCode, msg, "dbs.datasets.UpdateDatasets") - } - if datasetAccessType == "VALID" { - isValidDataset = 1 + // validate dataset parameter + if _, ok := a.Params["dataset"]; ok { + v, err := ValidateParameter(a.Params, "dataset") + if err != nil { + return Error(err, ValidateErrorCode, "", "dbs.datasets.UpdateDatasets") + } + dataset = v + if datasetAccessType == "VALID" { + isValidDataset = 1 + } } // get SQL statement from static area - stm := getSQL("update_datasets") + // stm := getSQL("update_datasets") + stm, err := LoadTemplateSQL("update_datasets", tmpl) + if err != nil { + return Error(err, LoadErrorCode, "", "dbs.datasets.UpdateDatasets") + } if utils.VERBOSE > 0 { params := []string{dataset, datasetAccessType} log.Printf("update Datasets\n%s\n%+v", stm, params) @@ -727,19 +761,50 @@ func (a *API) UpdateDatasets() error { return Error(err, TransactionErrorCode, "", "dbs.datasets.UpdateDatasets") } defer tx.Rollback() - accessTypeID, err := GetID( - tx, - "DATASET_ACCESS_TYPES", - "dataset_access_type_id", - "dataset_access_type", - datasetAccessType) - if err != nil { - if utils.VERBOSE > 0 { - log.Println("unable to find dataset_access_type_id for", datasetAccessType) + + args = append(args, createBy) + args = append(args, date) + + var physicsGroupID int64 + if tmpl["PhysicsGroup"].(bool) { + physicsGroupID, err = GetID( + tx, + "PHYSICS_GROUPS", + "physics_group_id", + "physics_group_name", + physicsGroupName) + if err != nil { + if utils.VERBOSE > 0 { + log.Println("unable to find physics_group_id for", 
physicsGroupName) + } + return Error(err, GetIDErrorCode, "", "dbs.datasets.UpdateDatasets") + } + args = append(args, physicsGroupID) + } + + // get accessTypeID from Access dataset types table + if tmpl["DatasetAccessType"].(bool) { + accessTypeID, err := GetID( + tx, + "DATASET_ACCESS_TYPES", + "dataset_access_type_id", + "dataset_access_type", + datasetAccessType) + if err != nil { + if utils.VERBOSE > 0 { + log.Println("unable to find dataset_access_type_id for", datasetAccessType) + } + return Error(err, GetIDErrorCode, "", "dbs.datasets.UpdateDatasets") } - return Error(err, GetIDErrorCode, "", "dbs.datasets.UpdateDatasets") + args = append(args, accessTypeID) + args = append(args, isValidDataset) } - _, err = tx.Exec(stm, createBy, date, accessTypeID, isValidDataset, dataset) + + args = append(args, dataset) + + // perform update + // _, err = tx.Exec(stm, createBy, date, accessTypeID, isValidDataset, physicsGroupID, dataset) + _, err = tx.Exec(stm, args...) if err != nil { if utils.VERBOSE > 0 { log.Printf("unable to update %v", err) diff --git a/dbs/dbs.go b/dbs/dbs.go index c49812b2..0d8e5615 100644 --- a/dbs/dbs.go +++ b/dbs/dbs.go @@ -263,13 +263,13 @@ func getValues(params Record, key string) []string { return v case string: return []string{v} - case interface{}: - return []string{fmt.Sprintf("%v", v)} case []interface{}: for _, val := range v { out = append(out, fmt.Sprintf("%v", val)) } return out + case interface{}: + return []string{fmt.Sprintf("%v", v)} } } return out @@ -281,7 +281,7 @@ func getSingleValue(params Record, key string) (string, error) { if len(values) > 0 { return values[0], nil } - msg := fmt.Sprintf("no list is allowed for provided key: %s", key) + msg := fmt.Sprintf("list is not allowed for provided key: %s", key) return "", Error(InvalidParamErr, ParseErrorCode, msg, "dbs.getSingleValue") } @@ -348,6 +348,7 @@ func CleanStatement(stm string) string { // here we use http response writer in order to make encoder // then we 
literally stream data with our encoder (i.e. write records // to writer) +// //gocyclo:ignore func executeAll(w io.Writer, sep, stm string, args ...interface{}) error { stm = CleanStatement(stm) @@ -458,6 +459,7 @@ func executeAll(w io.Writer, sep, stm string, args ...interface{}) error { } // similar to executeAll function but it takes explicit set of columns and values +// //gocyclo:ignore func execute( w io.Writer, diff --git a/static/lexicon_reader.json b/static/lexicon_reader.json index d1ab16c3..4e28f8ef 100644 --- a/static/lexicon_reader.json +++ b/static/lexicon_reader.json @@ -95,7 +95,7 @@ { "name": "physics_group", "patterns": [ - "^([a-zA-Z0-9\\-_]+)$" + "^[a-zA-Z0-9][a-zA-Z0-9\\-_]*$" ], "length": -1 }, diff --git a/static/lexicon_writer.json b/static/lexicon_writer.json index 9e17bcdf..497dffe3 100644 --- a/static/lexicon_writer.json +++ b/static/lexicon_writer.json @@ -84,7 +84,7 @@ { "name": "physics_group", "patterns": [ - "^([a-zA-Z0-9\\-_]+)$" + "^[a-zA-Z0-9][a-zA-Z0-9\\-_]*$" ], "length": -1 }, diff --git a/static/sql/update_datasets.sql b/static/sql/update_datasets.sql index a720accb..7c9a6410 100644 --- a/static/sql/update_datasets.sql +++ b/static/sql/update_datasets.sql @@ -1,6 +1,11 @@ UPDATE {{.Owner}}.DATASETS SET LAST_MODIFIED_BY=:myuser, - LAST_MODIFICATION_DATE=:mydate, - DATASET_ACCESS_TYPE_ID = :dataset_access_type_id, - IS_DATASET_VALID = :is_dataset_valid + LAST_MODIFICATION_DATE=:mydate +{{ if .PhysicsGroup }} + ,PHYSICS_GROUP_ID = :physics_group_id +{{ end }} +{{ if .DatasetAccessType }} + ,DATASET_ACCESS_TYPE_ID = :dataset_access_type_id + ,IS_DATASET_VALID = :is_dataset_valid +{{ end }} WHERE DATASET = :dataset diff --git a/test/int_datasets.go b/test/int_datasets.go index 04806e7f..07111c61 100644 --- a/test/int_datasets.go +++ b/test/int_datasets.go @@ -74,9 +74,9 @@ type datasetsDetailVersionResponse struct { } // creates a dataset request -func createDSRequest(dataset string, procdataset string, dsType string, outputConfs 
[]dbs.OutputConfigRecord) dbs.DatasetRecord { +func createDSRequest(dataset string, procdataset string, dsType string, physGroup string, outputConfs []dbs.OutputConfigRecord) dbs.DatasetRecord { return dbs.DatasetRecord{ - PHYSICS_GROUP_NAME: TestData.PhysicsGroupName, + PHYSICS_GROUP_NAME: physGroup, DATASET: dataset, DATASET_ACCESS_TYPE: dsType, PROCESSED_DS_NAME: procdataset, @@ -102,10 +102,10 @@ func createDSResponse(dataset string) datasetsResponse { } // creates a detailed datasets response -func createDetailDSResponse(datasetID int64, dataset string, procdataset string, dsType string) datasetsDetailResponse { +func createDetailDSResponse(datasetID int64, dataset string, procdataset string, dsType string, physicsGroupName string) datasetsDetailResponse { return datasetsDetailResponse{ DATASET_ID: datasetID, - PHYSICS_GROUP_NAME: TestData.PhysicsGroupName, + PHYSICS_GROUP_NAME: physicsGroupName, DATASET: dataset, DATASET_ACCESS_TYPE: dsType, PROCESSED_DS_NAME: procdataset, @@ -151,7 +151,7 @@ func createDetailVersionDSResponse(datasetID int64, dataset string, procdataset } // datasets endpoint tests -//* Note: depends on above tests for their *_id +// * Note: depends on above tests for their *_id // TODO: include prep_id in POST tests // TODO: DBSClientWriter_t.test11 func getDatasetsTestTable(t *testing.T) EndpointTestCase { @@ -164,14 +164,14 @@ func getDatasetsTestTable(t *testing.T) EndpointTestCase { GLOBAL_TAG: TestData.GlobalTag, }, } - dsReq := createDSRequest(TestData.Dataset, TestData.ProcDataset, TestData.DatasetAccessType, outputConfs) - dsParentReq := createDSRequest(TestData.ParentDataset, TestData.ParentProcDataset, TestData.DatasetAccessType, outputConfs) + dsReq := createDSRequest(TestData.Dataset, TestData.ProcDataset, TestData.DatasetAccessType, TestData.PhysicsGroupName, outputConfs) + dsParentReq := createDSRequest(TestData.ParentDataset, TestData.ParentProcDataset, TestData.DatasetAccessType, TestData.PhysicsGroupName, outputConfs) // 
record without output_configs - noOMCReq := createDSRequest(TestData.Dataset, TestData.ProcDataset, TestData.DatasetAccessType, []dbs.OutputConfigRecord{}) + noOMCReq := createDSRequest(TestData.Dataset, TestData.ProcDataset, TestData.DatasetAccessType, TestData.PhysicsGroupName, []dbs.OutputConfigRecord{}) // alternative access type request - dsAccessTypeReq := createDSRequest(TestData.Dataset2, TestData.ProcDataset, "PRODUCTION", outputConfs) + dsAccessTypeReq := createDSRequest(TestData.Dataset2, TestData.ProcDataset, "PRODUCTION", TestData.PhysicsGroupName, outputConfs) // basic responses dsResp := createDSResponse(TestData.Dataset) @@ -179,7 +179,7 @@ func getDatasetsTestTable(t *testing.T) EndpointTestCase { dsAccessTypeResp := createDSResponse(TestData.Dataset2) // detail responses - dsDetailResp := createDetailDSResponse(1, TestData.Dataset, TestData.ProcDataset, TestData.DatasetAccessType) + dsDetailResp := createDetailDSResponse(1, TestData.Dataset, TestData.ProcDataset, TestData.DatasetAccessType, TestData.PhysicsGroupName) // detail responses for output_config parameters dsDetailVersResp := createDetailVersionDSResponse(1, TestData.Dataset, TestData.ProcDataset, TestData.DatasetAccessType) @@ -575,8 +575,8 @@ func getDatasetsTestTable2(t *testing.T) EndpointTestCase { } dsResp := createDSResponse(TestData.Dataset) dsParentResp := createDSResponse(TestData.ParentDataset) - dsDetailResp := createDetailDSResponse(1, TestData.Dataset, TestData.ProcDataset, TestData.DatasetAccessType) - dsDetailParentResp := createDetailDSResponse(2, TestData.ParentDataset, TestData.ParentProcDataset, TestData.DatasetAccessType) + dsDetailResp := createDetailDSResponse(1, TestData.Dataset, TestData.ProcDataset, TestData.DatasetAccessType, TestData.PhysicsGroupName) + dsDetailParentResp := createDetailDSResponse(2, TestData.ParentDataset, TestData.ParentProcDataset, TestData.DatasetAccessType, TestData.PhysicsGroupName) runs := strings.ReplaceAll(fmt.Sprint(TestData.Runs), " 
", ",") datasetsParamErr := dbs.CreateInvalidParamError("fnal", "datasets") @@ -794,6 +794,12 @@ type datasetsUpdateRequest struct { DATASET_ACCESS_TYPE string `json:"dataset_access_type"` } +// struct for datasets update request physics_group +type datasetsPhysicsGroupUpdateRequest struct { + DATASET string `json:"dataset"` + PHYSICS_GROUP_NAME string `json:"physics_group_name"` +} + // third datasets endpoint tests for update datasets func getDatasetsTestTable3(t *testing.T) EndpointTestCase { // basic responses @@ -917,3 +923,139 @@ func getDatasetParentsTestTable(t *testing.T) EndpointTestCase { }, } } + +// test updating physics group name +func getDatasetPhysicsGroupUpdateTestTable(t *testing.T) EndpointTestCase { + physicsGroup1 := dbs.PhysicsGroups{ + PHYSICS_GROUP_NAME: "PleaseChangeMe", + } + physicsGroup2 := dbs.PhysicsGroups{ + PHYSICS_GROUP_NAME: "PickMe", + } + physicsGroup1Resp := physicsGroupsResponse{ + PHYSICS_GROUP_NAME: "PleaseChangeMe", + } + physicsGroup2Resp := physicsGroupsResponse{ + PHYSICS_GROUP_NAME: "PickMe", + } + datasetName := "/unittest_web_primary_ds_name_8268/acq_era_8268-v8268/GEN-SIM-UPDATE-RAW" + dsReq := createDSRequest(datasetName, TestData.ProcDataset, TestData.DatasetAccessType, physicsGroup1.PHYSICS_GROUP_NAME, []dbs.OutputConfigRecord{}) + dsUpdateReq := datasetsPhysicsGroupUpdateRequest{ + DATASET: datasetName, + PHYSICS_GROUP_NAME: "BadN@me", + } + dsUpdateReq2 := datasetsPhysicsGroupUpdateRequest{ + DATASET: datasetName, + PHYSICS_GROUP_NAME: physicsGroup2.PHYSICS_GROUP_NAME, + } + dsResp1 := createDetailDSResponse(9, datasetName, TestData.ProcDataset, TestData.DatasetAccessType, physicsGroup1.PHYSICS_GROUP_NAME) + dsResp2 := createDetailDSResponse(9, datasetName, TestData.ProcDataset, TestData.DatasetAccessType, physicsGroup2.PHYSICS_GROUP_NAME) + dsResp2.LAST_MODIFIED_BY = "DBS-workflow" + return EndpointTestCase{ + description: "Test dataset with physics group renaming update", + defaultHandler: web.DatasetsHandler, 
+ defaultEndpoint: "/dbs/datasets", + testCases: []testCase{ + { + description: "Add physics group", + method: "POST", + serverType: "DBSWriter", + endpoint: "/dbs/physicsgroups", + handler: web.PhysicsGroupsHandler, + input: physicsGroup1, + respCode: http.StatusOK, + }, + { + description: "Check if physics group was added", + method: "GET", + serverType: "DBSReader", + endpoint: "/dbs/physicsgroups", + handler: web.PhysicsGroupsHandler, + params: url.Values{ + "physics_group_name": []string{physicsGroup1.PHYSICS_GROUP_NAME}, + }, + output: []Response{ + physicsGroup1Resp, + }, + respCode: http.StatusOK, + }, + { + description: "Add dataset", + method: "POST", + serverType: "DBSWriter", + input: dsReq, + respCode: http.StatusOK, + }, + { + description: "Verify dataset", + method: "GET", + serverType: "DBSReader", + params: url.Values{ + "dataset": []string{datasetName}, + "detail": []string{"true"}, + }, + output: []Response{ + dsResp1, + }, + respCode: http.StatusOK, + }, + { + description: "Update dataset without adding physics_group", + method: "PUT", + serverType: "DBSWriter", + input: dsUpdateReq, + respCode: http.StatusBadRequest, + }, + { + description: "Add second physics group", + method: "POST", + serverType: "DBSWriter", + endpoint: "/dbs/physicsgroups", + handler: web.PhysicsGroupsHandler, + input: physicsGroup2, + respCode: http.StatusOK, + }, + { + description: "Check if physics group was added", + method: "GET", + serverType: "DBSReader", + endpoint: "/dbs/physicsgroups", + handler: web.PhysicsGroupsHandler, + params: url.Values{ + "physics_group_name": []string{physicsGroup2.PHYSICS_GROUP_NAME}, + }, + output: []Response{ + physicsGroup2Resp, + }, + respCode: http.StatusOK, + }, + { + description: "Update dataset with an invalid physics_group", + method: "PUT", + serverType: "DBSWriter", + input: dsUpdateReq, + respCode: http.StatusBadRequest, + }, + { + description: "Update dataset with new physics_group", + method: "PUT", + serverType: 
"DBSWriter", + input: dsUpdateReq2, + respCode: http.StatusOK, + }, + { + description: "Verify updated dataset", + method: "GET", + serverType: "DBSReader", + params: url.Values{ + "dataset": []string{datasetName}, + "detail": []string{"true"}, + }, + output: []Response{ + dsResp2, + }, + respCode: http.StatusOK, + }, + }, + } +} diff --git a/test/integration_cases.go b/test/integration_cases.go index e61ea151..6c87ea30 100644 --- a/test/integration_cases.go +++ b/test/integration_cases.go @@ -604,6 +604,7 @@ func LoadTestCases(t *testing.T, filepath string, bulkblockspath string, largeBu ) // endpointTestCases = append(endpointTestCases, largeFileLumiInsertTestTable) // endpointTestCases = append(endpointTestCases, filesReaderAfterChunkTestTable) + endpointTestCases = append(endpointTestCases, getDatasetPhysicsGroupUpdateTestTable(t)) return endpointTestCases } diff --git a/web/handlers.go b/web/handlers.go index a315a00f..2a759a34 100644 --- a/web/handlers.go +++ b/web/handlers.go @@ -379,6 +379,7 @@ func parsePayload(r *http.Request) (dbs.Record, error) { } // DBSPutHandler is a generic Post Handler to call DBS Post APIs +// //gocyclo:ignore func DBSPutHandler(w http.ResponseWriter, r *http.Request, a string) { atomic.AddUint64(&TotalPutRequests, 1) @@ -442,12 +443,13 @@ func DBSPutHandler(w http.ResponseWriter, r *http.Request, a string) { err = api.UpdateFiles() } if err != nil { - responseMsg(w, r, err, http.StatusInternalServerError) + responseMsg(w, r, err, http.StatusBadRequest) return } } // DBSPostHandler is a generic Post Handler to call DBS Post APIs +// //gocyclo:ignore func DBSPostHandler(w http.ResponseWriter, r *http.Request, a string) { atomic.AddUint64(&TotalPostRequests, 1) @@ -573,6 +575,7 @@ func DBSPostHandler(w http.ResponseWriter, r *http.Request, a string) { } // DBSGetHandler is a generic Get handler to call DBS Get APIs. 
+// //gocyclo:ignore func DBSGetHandler(w http.ResponseWriter, r *http.Request, a string) { atomic.AddUint64(&TotalGetRequests, 1) @@ -813,7 +816,7 @@ func RunSummariesHandler(w http.ResponseWriter, r *http.Request) { DBSGetHandler(w, r, "runsummaries") } -//ProcessingErasHandler provices access to ProcessingEras DBS API. +// ProcessingErasHandler provides access to ProcessingEras DBS API. // Takes the following arguments: processing_version func ProcessingErasHandler(w http.ResponseWriter, r *http.Request) { if r.Method == "POST" {