From 219d110711485af80a26e216e188a6f93813a730 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Mon, 1 Dec 2025 14:55:25 -0500 Subject: [PATCH 01/22] feat: add apis for user-defined goes platform config uploads --- api/internal/db/batch.go | 52 +++++ api/internal/db/goes.sql_gen.go | 152 ++++++++++++++ api/internal/db/models.go | 28 +++ api/internal/db/overrides.go | 5 + api/internal/db/querier.go | 8 + api/internal/dto/goes.go | 8 + api/internal/handler/goes.go | 191 ++++++++++++++++++ api/internal/service/goes.go | 103 ++++++++++ .../repeat/0190__views_telemetry.sql | 12 ++ api/migrations/schema/V1.56.00__goes.sql | 26 +++ api/queries/goes.sql | 38 ++++ sqlc.generate.yaml | 10 + 12 files changed, 633 insertions(+) create mode 100644 api/internal/db/goes.sql_gen.go create mode 100644 api/internal/dto/goes.go create mode 100644 api/internal/handler/goes.go create mode 100644 api/internal/service/goes.go create mode 100644 api/migrations/repeat/0190__views_telemetry.sql create mode 100644 api/migrations/schema/V1.56.00__goes.sql create mode 100644 api/queries/goes.sql diff --git a/api/internal/db/batch.go b/api/internal/db/batch.go index d0e290df..3fb203b3 100644 --- a/api/internal/db/batch.go +++ b/api/internal/db/batch.go @@ -658,6 +658,58 @@ func (b *EvaluationInstrumentCreateBatchBatchResults) Close() error { return b.br.Close() } +const goesTelemetryConfigMappingsCreateBatch = `-- name: GoesTelemetryConfigMappingsCreateBatch :batchexec +insert into goes_telemetry_config_mappings (goes_platform_config_file_id, platform_sensor_key, timeseries_id) +values ($1, $2, $3) +` + +type GoesTelemetryConfigMappingsCreateBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type GoesTelemetryConfigMappingsCreateBatchParams struct { + GoesPlatformConfigFileID uuid.UUID `json:"goes_platform_config_file_id"` + PlatformSensorKey string `json:"platform_sensor_key"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` +} + +func (q *Queries) 
GoesTelemetryConfigMappingsCreateBatch(ctx context.Context, arg []GoesTelemetryConfigMappingsCreateBatchParams) *GoesTelemetryConfigMappingsCreateBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.GoesPlatformConfigFileID, + a.PlatformSensorKey, + a.TimeseriesID, + } + batch.Queue(goesTelemetryConfigMappingsCreateBatch, vals...) + } + br := q.db.SendBatch(ctx, batch) + return &GoesTelemetryConfigMappingsCreateBatchBatchResults{br, len(arg), false} +} + +func (b *GoesTelemetryConfigMappingsCreateBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *GoesTelemetryConfigMappingsCreateBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + const inclOptsCreateBatch = `-- name: InclOptsCreateBatch :batchexec insert into incl_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) values ($1, $2, $3, $4) diff --git a/api/internal/db/goes.sql_gen.go b/api/internal/db/goes.sql_gen.go new file mode 100644 index 00000000..d383220c --- /dev/null +++ b/api/internal/db/goes.sql_gen.go @@ -0,0 +1,152 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.29.0 +// source: goes.sql + +package db + +import ( + "context" + "time" + + "github.com/google/uuid" +) + +const goesPlatformConfigFileCreate = `-- name: GoesPlatformConfigFileCreate :one +insert into goes_platform_config_file (goes_telemetry_source_id, name, content, created_by) +values ($1, $2, $3::xml, $4) +returning id +` + +type GoesPlatformConfigFileCreateParams struct { + GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"` + Name string `json:"name"` + Content string `json:"content"` + CreatedBy uuid.UUID `json:"created_by"` +} + +func (q *Queries) GoesPlatformConfigFileCreate(ctx context.Context, arg GoesPlatformConfigFileCreateParams) (uuid.UUID, error) { + row := q.db.QueryRow(ctx, goesPlatformConfigFileCreate, + arg.GoesTelemetrySourceID, + arg.Name, + arg.Content, + arg.CreatedBy, + ) + var id uuid.UUID + err := row.Scan(&id) + return id, err +} + +const goesPlatformConfigFileDelete = `-- name: GoesPlatformConfigFileDelete :exec +delete from goes_platform_config_file where id=$1 +` + +func (q *Queries) GoesPlatformConfigFileDelete(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, goesPlatformConfigFileDelete, id) + return err +} + +const goesPlatformConfigFileGet = `-- name: GoesPlatformConfigFileGet :one +select id, goes_telemetry_source_id, name, content, created_at, created_by, updated_at, updated_by from goes_platform_config_file where id=$1 +` + +func (q *Queries) GoesPlatformConfigFileGet(ctx context.Context, id uuid.UUID) (GoesPlatformConfigFile, error) { + row := q.db.QueryRow(ctx, goesPlatformConfigFileGet, id) + var i GoesPlatformConfigFile + err := row.Scan( + &i.ID, + &i.GoesTelemetrySourceID, + &i.Name, + &i.Content, + &i.CreatedAt, + &i.CreatedBy, + &i.UpdatedAt, + &i.UpdatedBy, + ) + return i, err +} + +const goesPlatformConfigFileUpdate = `-- name: GoesPlatformConfigFileUpdate :exec +update goes_platform_config_file set + name=$1, + content=$2::xml, + updated_at=$3, + 
updated_by=$4 +where id=$5 +` + +type GoesPlatformConfigFileUpdateParams struct { + Name string `json:"name"` + Content string `json:"content"` + UpdatedAt *time.Time `json:"updated_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + ID uuid.UUID `json:"id"` +} + +func (q *Queries) GoesPlatformConfigFileUpdate(ctx context.Context, arg GoesPlatformConfigFileUpdateParams) error { + _, err := q.db.Exec(ctx, goesPlatformConfigFileUpdate, + arg.Name, + arg.Content, + arg.UpdatedAt, + arg.UpdatedBy, + arg.ID, + ) + return err +} + +const goesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile = `-- name: GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile :exec +delete from goes_telemetry_config_mappings where goes_platform_config_file_id=$1 +` + +func (q *Queries) GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile(ctx context.Context, goesPlatformConfigFileID uuid.UUID) error { + _, err := q.db.Exec(ctx, goesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile, goesPlatformConfigFileID) + return err +} + +const goesTelemetryConfigMappingsList = `-- name: GoesTelemetryConfigMappingsList :many +select goes_platform_config_file_id, platform_sensor_key, timeseries_id from goes_telemetry_config_mappings where goes_platform_config_file_id=$1 +` + +func (q *Queries) GoesTelemetryConfigMappingsList(ctx context.Context, goesPlatformConfigFileID uuid.UUID) ([]GoesTelemetryConfigMappings, error) { + rows, err := q.db.Query(ctx, goesTelemetryConfigMappingsList, goesPlatformConfigFileID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GoesTelemetryConfigMappings{} + for rows.Next() { + var i GoesTelemetryConfigMappings + if err := rows.Scan(&i.GoesPlatformConfigFileID, &i.PlatformSensorKey, &i.TimeseriesID); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const goesTelemetrySourceList = `-- name: GoesTelemetrySourceList :many +select id, 
name, files from v_goes_telemetry_source +` + +func (q *Queries) GoesTelemetrySourceList(ctx context.Context) ([]VGoesTelemetrySource, error) { + rows, err := q.db.Query(ctx, goesTelemetrySourceList) + if err != nil { + return nil, err + } + defer rows.Close() + items := []VGoesTelemetrySource{} + for rows.Next() { + var i VGoesTelemetrySource + if err := rows.Scan(&i.ID, &i.Name, &i.Files); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/api/internal/db/models.go b/api/internal/db/models.go index 77a0ac85..cf3e64a7 100644 --- a/api/internal/db/models.go +++ b/api/internal/db/models.go @@ -643,6 +643,28 @@ type EvaluationInstrument struct { InstrumentID *uuid.UUID `json:"instrument_id"` } +type GoesPlatformConfigFile struct { + ID uuid.UUID `json:"id"` + GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"` + Name string `json:"name"` + Content string `json:"content"` + CreatedAt time.Time `json:"created_at"` + CreatedBy uuid.UUID `json:"created_by"` + UpdatedAt *time.Time `json:"updated_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` +} + +type GoesTelemetryConfigMappings struct { + GoesPlatformConfigFileID uuid.UUID `json:"goes_platform_config_file_id"` + PlatformSensorKey string `json:"platform_sensor_key"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` +} + +type GoesTelemetrySource struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` +} + type Heartbeat struct { Time time.Time `json:"time"` } @@ -1409,6 +1431,12 @@ type VEvaluation struct { Instruments []InstrumentIDName `json:"instruments"` } +type VGoesTelemetrySource struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Files []IDName `json:"files"` +} + type VInclMeasurement struct { InstrumentID uuid.UUID `json:"instrument_id"` Time time.Time `json:"time"` diff --git a/api/internal/db/overrides.go b/api/internal/db/overrides.go index fb90d2ab..151ba2e4 
100644 --- a/api/internal/db/overrides.go +++ b/api/internal/db/overrides.go @@ -90,6 +90,11 @@ type IDSlugName struct { Name string `json:"name"` } +type IDName struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` +} + type InstrumentIDName struct { InstrumentID uuid.UUID `json:"instrument_id"` InstrumentName string `json:"instrument_name"` diff --git a/api/internal/db/querier.go b/api/internal/db/querier.go index f41736ea..52c07f65 100644 --- a/api/internal/db/querier.go +++ b/api/internal/db/querier.go @@ -116,6 +116,14 @@ type Querier interface { EvaluationListForProject(ctx context.Context, projectID uuid.UUID) ([]VEvaluation, error) EvaluationListForProjectAlertConfig(ctx context.Context, arg EvaluationListForProjectAlertConfigParams) ([]VEvaluation, error) EvaluationUpdate(ctx context.Context, arg EvaluationUpdateParams) error + GoesPlatformConfigFileCreate(ctx context.Context, arg GoesPlatformConfigFileCreateParams) (uuid.UUID, error) + GoesPlatformConfigFileDelete(ctx context.Context, id uuid.UUID) error + GoesPlatformConfigFileGet(ctx context.Context, id uuid.UUID) (GoesPlatformConfigFile, error) + GoesPlatformConfigFileUpdate(ctx context.Context, arg GoesPlatformConfigFileUpdateParams) error + GoesTelemetryConfigMappingsCreateBatch(ctx context.Context, arg []GoesTelemetryConfigMappingsCreateBatchParams) *GoesTelemetryConfigMappingsCreateBatchBatchResults + GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile(ctx context.Context, goesPlatformConfigFileID uuid.UUID) error + GoesTelemetryConfigMappingsList(ctx context.Context, goesPlatformConfigFileID uuid.UUID) ([]GoesTelemetryConfigMappings, error) + GoesTelemetrySourceList(ctx context.Context) ([]VGoesTelemetrySource, error) HeartbeatCreate(ctx context.Context, argTime time.Time) (time.Time, error) HeartbeatGetLatest(ctx context.Context) (time.Time, error) HeartbeatList(ctx context.Context, resultLimit int32) ([]time.Time, error) diff --git a/api/internal/dto/goes.go 
b/api/internal/dto/goes.go new file mode 100644 index 00000000..2b4063ad --- /dev/null +++ b/api/internal/dto/goes.go @@ -0,0 +1,8 @@ +package dto + +import "github.com/google/uuid" + +type GoesTelemetryConfigMappingDTO struct { + PlatformSensorKey string `json:"platform_sensor_key"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` +} diff --git a/api/internal/handler/goes.go b/api/internal/handler/goes.go new file mode 100644 index 00000000..6acd532e --- /dev/null +++ b/api/internal/handler/goes.go @@ -0,0 +1,191 @@ +package handler + +import ( + "context" + "errors" + "io" + "net/http" + "time" + + "github.com/USACE/instrumentation-api/api/v4/internal/ctxkey" + "github.com/USACE/instrumentation-api/api/v4/internal/db" + "github.com/USACE/instrumentation-api/api/v4/internal/dto" + "github.com/USACE/instrumentation-api/api/v4/internal/httperr" + "github.com/danielgtaylor/huma/v2" +) + +var goesTags = []string{"GOES Telemetry"} + +type TelemetrySourceIDParam struct { + TelemetrySourceID UUID `path:"telemetry_source_id"` +} + +type TelemetryConfigIDParam struct { + TelemetryConfigID UUID `path:"telemetry_config_id"` +} + +func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { + huma.Register(api, huma.Operation{ + Middlewares: h.Public, + OperationID: "goes-telemetry-client-list", + Method: http.MethodGet, + Path: "/domains/goes", + Description: "list of goes client instances (opendcs)", + Tags: goesTags, + }, func(ctx context.Context, input *struct { + }) (*Response[[]db.VGoesTelemetrySource], error) { + aa, err := h.DBService.GoesTelemetrySourceList(ctx) + if err != nil { + return nil, httperr.InternalServerError(err) + } + return NewResponse(aa), nil + }) + + huma.Register(api, huma.Operation{ + Middlewares: h.Public, + OperationID: "goes-telemetry-config-get", + Method: http.MethodGet, + Path: "/projects/{project_id}/goes/{telemetry_source_id}/configs/{telemetry_config_id}", + Description: "gets a platform configuraiton xml file", + Tags: goesTags, + }, 
func(ctx context.Context, input *struct { + ProjectIDParam + TelemetrySourceIDParam + TelemetryConfigIDParam + }) (*Response[db.GoesPlatformConfigFile], error) { + a, err := h.DBService.GoesPlatformConfigFileGet(ctx, input.TelemetryConfigID.UUID) + if err != nil { + return nil, httperr.InternalServerError(err) + } + return NewResponse(a), nil + }) + + type XmlPlatformConfig struct { + PlatformConfig huma.FormFile `form:"file" contentType:"text/xml" required:"true"` + DryRun bool `form:"dry_run"` + } + + huma.Register(api, huma.Operation{ + Middlewares: h.ProjectAdmin, + OperationID: "goes-telemetry-config-create", + Method: http.MethodPost, + Path: "/projects/{project_id}/goes/{telemetry_source_id}", + Description: "create a goes telemetry configuration", + Tags: goesTags, + }, func(ctx context.Context, input *struct { + ProjectIDParam + TelemetrySourceIDParam + RawBody huma.MultipartFormFiles[XmlPlatformConfig] + }) (*Response[ID], error) { + p := ctx.Value(ctxkey.Profile).(db.VProfile) + formData := input.RawBody.Data() + xmlDoc, err := io.ReadAll(formData.PlatformConfig) + if err != nil { + return nil, httperr.BadRequest(err) + } + if xmlDoc == nil { + return nil, httperr.BadRequest(errors.New("uploaded file is empty")) + } + newID, err := h.DBService.GoesPlatformConfigFileCreate(ctx, db.GoesPlatformConfigFileCreateParams{ + GoesTelemetrySourceID: input.TelemetrySourceID.UUID, + Name: formData.PlatformConfig.Filename, + Content: string(xmlDoc), + CreatedBy: p.ID, + }, formData.DryRun) + if err != nil { + return nil, httperr.InternalServerError(err) + } + return NewResponseID(newID), nil + }) + + huma.Register(api, huma.Operation{ + Middlewares: h.Public, + OperationID: "goes-telemetry-config-update", + Method: http.MethodPut, + Path: "/projects/{project_id}/goes/{telemetry_source_id}/configs/{telemetry_config_id}", + Description: "lists goes telemetry configurations", + Tags: goesTags, + }, func(ctx context.Context, input *struct { + ProjectIDParam + 
TelemetrySourceIDParam + TelemetryConfigIDParam + RawBody huma.MultipartFormFiles[XmlPlatformConfig] + }) (*Response[struct{}], error) { + p := ctx.Value(ctxkey.Profile).(db.VProfile) + formData := input.RawBody.Data() + xmlDoc, err := io.ReadAll(formData.PlatformConfig) + if err != nil { + return nil, httperr.BadRequest(err) + } + if xmlDoc == nil { + return nil, httperr.BadRequest(errors.New("uploaded file is empty")) + } + now := time.Now().UTC() + if err := h.DBService.GoesPlatformConfigFileUpdate(ctx, db.GoesPlatformConfigFileUpdateParams{ + ID: input.TelemetryConfigID.UUID, + Name: formData.PlatformConfig.Filename, + Content: string(xmlDoc), + UpdatedBy: &p.ID, + UpdatedAt: &now, + }); err != nil { + return nil, httperr.InternalServerError(err) + } + return nil, nil + }) + + huma.Register(api, huma.Operation{ + Middlewares: h.ProjectAdmin, + OperationID: "goes-telemetry-config-delete", + Method: http.MethodDelete, + Path: "/projects/{project_id}/goes/{telemetry_source_id}/configs/{telemetry_config_id}", + Description: "delete a goes telemetry configuration", + Tags: goesTags, + }, func(ctx context.Context, input *struct { + ProjectIDParam + TelemetrySourceIDParam + TelemetryConfigIDParam + }) (*Response[struct{}], error) { + if err := h.DBService.GoesPlatformConfigFileDelete(ctx, input.TelemetryConfigID.UUID); err != nil { + return nil, httperr.InternalServerError(err) + } + return nil, nil + }) + + huma.Register(api, huma.Operation{ + Middlewares: h.Public, + OperationID: "goes-telemetry-config-mapping-list", + Method: http.MethodGet, + Path: "/projects/{project_id}/goes/{telemetry_source_id}/configs/{telemetry_config_id}/mappings", + Description: "lists goes telemetry timeseries mappings", + Tags: goesTags, + }, func(ctx context.Context, input *struct { + ProjectIDParam + TelemetrySourceIDParam + TelemetryConfigIDParam + }) (*Response[[]db.GoesTelemetryConfigMappings], error) { + aa, err := h.DBService.GoesTelemetryConfigMappingsList(ctx, 
input.TelemetryConfigID.UUID) + if err != nil { + return nil, httperr.InternalServerError(err) + } + return NewResponse(aa), nil + }) + + huma.Register(api, huma.Operation{ + Middlewares: h.ProjectMember, + OperationID: "goes-update-mappings", + Method: http.MethodPut, + Path: "/projects/{project_id}/goes/{telemetry_source_id}/configs/{telemetry_config_id}/mappings", + Description: "updates goes telemetry timeseries mappings", + Tags: goesTags, + }, func(ctx context.Context, input *struct { + ProjectIDParam + TelemetrySourceIDParam + TelemetryConfigIDParam + Body []dto.GoesTelemetryConfigMappingDTO + }) (*Response[struct{}], error) { + if err := h.DBService.GoesTelemetryConfigMappingsUpdate(ctx, input.TelemetryConfigID.UUID, input.Body); err != nil { + return nil, httperr.InternalServerError(err) + } + return nil, nil + }) +} diff --git a/api/internal/service/goes.go b/api/internal/service/goes.go new file mode 100644 index 00000000..ff570cfc --- /dev/null +++ b/api/internal/service/goes.go @@ -0,0 +1,103 @@ +package service + +import ( + "context" + "encoding/xml" + "fmt" + "strings" + + "github.com/USACE/instrumentation-api/api/v4/internal/db" + "github.com/USACE/instrumentation-api/api/v4/internal/dto" + "github.com/google/uuid" +) + +func (s *DBService) GoesTelemetryConfigMappingsUpdate(ctx context.Context, cfgID uuid.UUID, mappings []dto.GoesTelemetryConfigMappingDTO) error { + mm := make([]db.GoesTelemetryConfigMappingsCreateBatchParams, len(mappings)) + for i, m := range mappings { + mm[i] = db.GoesTelemetryConfigMappingsCreateBatchParams{ + GoesPlatformConfigFileID: cfgID, + PlatformSensorKey: m.PlatformSensorKey, + TimeseriesID: m.TimeseriesID, + } + } + tx, err := s.db.Begin(ctx) + if err != nil { + return err + } + defer s.TxDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + if err := qtx.GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile(ctx, cfgID); err != nil { + return fmt.Errorf("GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile %w", 
err) + } + + qtx.GoesTelemetryConfigMappingsCreateBatch(ctx, mm).Exec(batchExecErr(&err)) + if err != nil { + return fmt.Errorf("GoesTelemetryConfigMappingsCreateBatch %w", err) + } + + return tx.Commit(ctx) +} + +type Platforms struct { + XMLName xml.Name `xml:"Platforms"` + Platforms []Platform `xml:"Platform"` +} + +type Platform struct { + PlatformConfig PlatformConfig `xml:"PlatformConfig"` +} + +type PlatformConfig struct { + ConfigSensors []ConfigSensor `xml:"ConfigSensor"` +} + +type ConfigSensor struct { + SensorName string `xml:"SensorName"` + SensorNumber string `xml:"SensorNumber"` +} + +func extractSensorNames(xmlStr string) ([]string, error) { + var platforms Platforms + if err := xml.NewDecoder(strings.NewReader(xmlStr)).Decode(&platforms); err != nil { + return nil, fmt.Errorf("failed to decode xml document: %w", err) + } + var result []string + for _, platform := range platforms.Platforms { + for _, sensor := range platform.PlatformConfig.ConfigSensors { + result = append(result, sensor.SensorName+"."+sensor.SensorNumber) + } + } + return result, nil +} + +func (s *DBService) GoesPlatformConfigFileCreate(ctx context.Context, arg db.GoesPlatformConfigFileCreateParams, dryRun bool) (uuid.UUID, error) { + names, err := extractSensorNames(arg.Content) + if err != nil { + return uuid.Nil, err + } + if dryRun { + return uuid.Nil, nil + } + + mm := make([]db.GoesTelemetryConfigMappingsCreateBatchParams, len(names)) + + tx, err := s.db.Begin(ctx) + if err != nil { + return uuid.Nil, err + } + defer s.TxDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + newID, err := qtx.GoesPlatformConfigFileCreate(ctx, arg) + if err != nil { + return uuid.Nil, fmt.Errorf("GoesPlatformConfigFileCreate %w", err) + } + + qtx.GoesTelemetryConfigMappingsCreateBatch(ctx, mm).Exec(batchExecErr(&err)) + if err != nil { + return uuid.Nil, fmt.Errorf("GoesTelemetryConfigMappingsCreateBatch %w", err) + } + + return newID, tx.Commit(ctx) +} diff --git 
a/api/migrations/repeat/0190__views_telemetry.sql b/api/migrations/repeat/0190__views_telemetry.sql new file mode 100644 index 00000000..fd095684 --- /dev/null +++ b/api/migrations/repeat/0190__views_telemetry.sql @@ -0,0 +1,12 @@ +create or replace view v_goes_telemetry_source as +select + s.*, + f.files +from goes_telemetry_source s +left join ( + select coalesce(jsonb_agg(jsonb_build_object( + 'id', cf.id, + 'name', cf.name + )), '[]'::jsonb) as files + from goes_platform_config_file cf +) f on true; diff --git a/api/migrations/schema/V1.56.00__goes.sql b/api/migrations/schema/V1.56.00__goes.sql new file mode 100644 index 00000000..0881a8a4 --- /dev/null +++ b/api/migrations/schema/V1.56.00__goes.sql @@ -0,0 +1,26 @@ +create table goes_telemetry_source ( + id uuid primary key default uuid_generate_v4(), + name text unique not null +); + + +create table goes_platform_config_file ( + id uuid primary key default uuid_generate_v4(), + goes_telemetry_source_id uuid not null references goes_telemetry_source(id), + name text not null, + content xml not null, + created_at timestamptz not null default now(), + created_by uuid not null references profile(id), + updated_at timestamptz, + updated_by uuid references profile(id) +); + + +create table goes_telemetry_config_mappings ( + goes_platform_config_file_id uuid not null references goes_platform_config_file(id) on delete cascade, + platform_sensor_key text not null, + timeseries_id uuid references timeseries(id) +); + + +insert into goes_telemetry_source (id, name) values ('666e60ec-2c0a-4446-9eda-6f45cbcd0a60', 'OpenDCS #1'); diff --git a/api/queries/goes.sql b/api/queries/goes.sql new file mode 100644 index 00000000..cbd36def --- /dev/null +++ b/api/queries/goes.sql @@ -0,0 +1,38 @@ +-- name: GoesTelemetrySourceList :many +select * from v_goes_telemetry_source; + + +-- name: GoesPlatformConfigFileCreate :one +insert into goes_platform_config_file (goes_telemetry_source_id, name, content, created_by) +values 
(sqlc.arg(goes_telemetry_source_id), sqlc.arg(name), sqlc.arg(content)::xml, sqlc.arg(created_by)) +returning id; + + +-- name: GoesPlatformConfigFileGet :one +select * from goes_platform_config_file where id=$1; + + +-- name: GoesPlatformConfigFileUpdate :exec +update goes_platform_config_file set + name=sqlc.arg(name), + content=sqlc.arg(content)::xml, + updated_at=sqlc.arg(updated_at), + updated_by=sqlc.arg(updated_by) +where id=sqlc.arg(id); + + +-- name: GoesPlatformConfigFileDelete :exec +delete from goes_platform_config_file where id=$1; + + +-- name: GoesTelemetryConfigMappingsCreateBatch :batchexec +insert into goes_telemetry_config_mappings (goes_platform_config_file_id, platform_sensor_key, timeseries_id) +values ($1, $2, $3); + + +-- name: GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile :exec +delete from goes_telemetry_config_mappings where goes_platform_config_file_id=$1; + + +-- name: GoesTelemetryConfigMappingsList :many +select * from goes_telemetry_config_mappings where goes_platform_config_file_id=$1; diff --git a/sqlc.generate.yaml b/sqlc.generate.yaml index e41574a3..df436cac 100644 --- a/sqlc.generate.yaml +++ b/sqlc.generate.yaml @@ -41,6 +41,10 @@ sql: type: uuid.UUID pointer: true + # xml + - db_type: xml + go_type: string + # timestamptz - db_type: timestamptz go_type: time.Time @@ -149,6 +153,12 @@ sql: type: InstrumentIDName slice: true + # v_goes_telemetry + - column: v_goes_telemetry_source.files + go_type: + type: IDName + slice: true + # v_incl_measurement - column: v_incl_measurement.measurements go_type: From 6a39e98ce08ffa44650c4759ef760cf2c77de034 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Wed, 3 Dec 2025 16:24:03 -0500 Subject: [PATCH 02/22] fix: incorrect platform xml parsing chore: allow parent element parsing --- api/internal/service/goes.go | 47 ++++++++++++++++++++++++++++-------- 1 file changed, 37 insertions(+), 10 deletions(-) diff --git a/api/internal/service/goes.go b/api/internal/service/goes.go 
index ff570cfc..c7c63989 100644 --- a/api/internal/service/goes.go +++ b/api/internal/service/goes.go @@ -39,12 +39,8 @@ func (s *DBService) GoesTelemetryConfigMappingsUpdate(ctx context.Context, cfgID return tx.Commit(ctx) } -type Platforms struct { - XMLName xml.Name `xml:"Platforms"` - Platforms []Platform `xml:"Platform"` -} - type Platform struct { + XMLName xml.Name `xml:"Platform"` PlatformConfig PlatformConfig `xml:"PlatformConfig"` } @@ -58,17 +54,48 @@ type ConfigSensor struct { } func extractSensorNames(xmlStr string) ([]string, error) { - var platforms Platforms - if err := xml.NewDecoder(strings.NewReader(xmlStr)).Decode(&platforms); err != nil { - return nil, fmt.Errorf("failed to decode xml document: %w", err) + dec := xml.NewDecoder(strings.NewReader(xmlStr)) + for { + tok, err := dec.Token() + if err != nil { + return nil, fmt.Errorf("failed to read xml token: %w", err) + } + + start, ok := tok.(xml.StartElement) + if !ok { + continue + } + + switch start.Name.Local { + case "Platform": + var p Platform + if err := dec.DecodeElement(&p, &start); err != nil { + return nil, fmt.Errorf("failed to decode Platform: %w", err) + } + return extractFromPlatforms([]Platform{p}), nil + case "Database": + var wrapper struct { + Platforms []Platform `xml:"Platform"` + } + if err := dec.DecodeElement(&wrapper, &start); err != nil { + return nil, fmt.Errorf("failed to decode Database: %w", err) + } + return extractFromPlatforms(wrapper.Platforms), nil + + default: + return nil, fmt.Errorf("unexpected root element <%s>", start.Name.Local) + } } +} + +func extractFromPlatforms(platforms []Platform) []string { var result []string - for _, platform := range platforms.Platforms { + for _, platform := range platforms { for _, sensor := range platform.PlatformConfig.ConfigSensors { result = append(result, sensor.SensorName+"."+sensor.SensorNumber) } } - return result, nil + return result } func (s *DBService) GoesPlatformConfigFileCreate(ctx context.Context, arg 
db.GoesPlatformConfigFileCreateParams, dryRun bool) (uuid.UUID, error) { From 69071940289c2dd3fe31a5acf463fe64aa08de25 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Wed, 3 Dec 2025 16:35:28 -0500 Subject: [PATCH 03/22] fix: missing parsing of new timeseries mappings when uploading a new platform config file --- api/internal/service/goes.go | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/api/internal/service/goes.go b/api/internal/service/goes.go index c7c63989..f8e4e9e8 100644 --- a/api/internal/service/goes.go +++ b/api/internal/service/goes.go @@ -3,6 +3,7 @@ package service import ( "context" "encoding/xml" + "errors" "fmt" "strings" @@ -104,11 +105,9 @@ func (s *DBService) GoesPlatformConfigFileCreate(ctx context.Context, arg db.Goe return uuid.Nil, err } if dryRun { - return uuid.Nil, nil + return uuid.Nil, errors.New("TODO") } - mm := make([]db.GoesTelemetryConfigMappingsCreateBatchParams, len(names)) - tx, err := s.db.Begin(ctx) if err != nil { return uuid.Nil, err @@ -121,6 +120,15 @@ func (s *DBService) GoesPlatformConfigFileCreate(ctx context.Context, arg db.Goe return uuid.Nil, fmt.Errorf("GoesPlatformConfigFileCreate %w", err) } + mm := make([]db.GoesTelemetryConfigMappingsCreateBatchParams, 0, len(names)) + for _, n := range names { + mm = append(mm, db.GoesTelemetryConfigMappingsCreateBatchParams{ + GoesPlatformConfigFileID: newID, + PlatformSensorKey: n, + TimeseriesID: nil, + }) + } + qtx.GoesTelemetryConfigMappingsCreateBatch(ctx, mm).Exec(batchExecErr(&err)) if err != nil { return uuid.Nil, fmt.Errorf("GoesTelemetryConfigMappingsCreateBatch %w", err) From dff40185f7d161278fecae5701b9d24175aac097 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Wed, 3 Dec 2025 17:17:15 -0500 Subject: [PATCH 04/22] chore: add alias and bytes_size to goes telemetry domain payload --- api/internal/db/goes.sql_gen.go | 26 ++++++++++++++----- api/internal/db/models.go | 8 +++--- api/internal/db/overrides.go | 6 +++++ 
api/internal/handler/goes.go | 13 ++++++++++ .../repeat/0190__views_telemetry.sql | 4 ++- api/migrations/schema/V1.56.00__goes.sql | 2 ++ api/queries/goes.sql | 6 +++-- sqlc.generate.yaml | 2 +- 8 files changed, 53 insertions(+), 14 deletions(-) diff --git a/api/internal/db/goes.sql_gen.go b/api/internal/db/goes.sql_gen.go index d383220c..b25241e3 100644 --- a/api/internal/db/goes.sql_gen.go +++ b/api/internal/db/goes.sql_gen.go @@ -13,14 +13,16 @@ import ( ) const goesPlatformConfigFileCreate = `-- name: GoesPlatformConfigFileCreate :one -insert into goes_platform_config_file (goes_telemetry_source_id, name, content, created_by) -values ($1, $2, $3::xml, $4) +insert into goes_platform_config_file (goes_telemetry_source_id, name, alias, size_bytes, content, created_by) +values ($1, $2, $3, $4, $5::xml, $6) returning id ` type GoesPlatformConfigFileCreateParams struct { GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"` Name string `json:"name"` + Alias string `json:"alias"` + SizeBytes int64 `json:"size_bytes"` Content string `json:"content"` CreatedBy uuid.UUID `json:"created_by"` } @@ -29,6 +31,8 @@ func (q *Queries) GoesPlatformConfigFileCreate(ctx context.Context, arg GoesPlat row := q.db.QueryRow(ctx, goesPlatformConfigFileCreate, arg.GoesTelemetrySourceID, arg.Name, + arg.Alias, + arg.SizeBytes, arg.Content, arg.CreatedBy, ) @@ -47,7 +51,7 @@ func (q *Queries) GoesPlatformConfigFileDelete(ctx context.Context, id uuid.UUID } const goesPlatformConfigFileGet = `-- name: GoesPlatformConfigFileGet :one -select id, goes_telemetry_source_id, name, content, created_at, created_by, updated_at, updated_by from goes_platform_config_file where id=$1 +select id, goes_telemetry_source_id, name, alias, size_bytes, content, created_at, created_by, updated_at, updated_by from goes_platform_config_file where id=$1 ` func (q *Queries) GoesPlatformConfigFileGet(ctx context.Context, id uuid.UUID) (GoesPlatformConfigFile, error) { @@ -57,6 +61,8 @@ func (q *Queries) 
GoesPlatformConfigFileGet(ctx context.Context, id uuid.UUID) ( &i.ID, &i.GoesTelemetrySourceID, &i.Name, + &i.Alias, + &i.SizeBytes, &i.Content, &i.CreatedAt, &i.CreatedBy, @@ -69,14 +75,18 @@ func (q *Queries) GoesPlatformConfigFileGet(ctx context.Context, id uuid.UUID) ( const goesPlatformConfigFileUpdate = `-- name: GoesPlatformConfigFileUpdate :exec update goes_platform_config_file set name=$1, - content=$2::xml, - updated_at=$3, - updated_by=$4 -where id=$5 + alias=$2, + size_bytes=$3, + content=$4::xml, + updated_at=$5, + updated_by=$6 +where id=$7 ` type GoesPlatformConfigFileUpdateParams struct { Name string `json:"name"` + Alias string `json:"alias"` + SizeBytes int64 `json:"size_bytes"` Content string `json:"content"` UpdatedAt *time.Time `json:"updated_at"` UpdatedBy *uuid.UUID `json:"updated_by"` @@ -86,6 +96,8 @@ type GoesPlatformConfigFileUpdateParams struct { func (q *Queries) GoesPlatformConfigFileUpdate(ctx context.Context, arg GoesPlatformConfigFileUpdateParams) error { _, err := q.db.Exec(ctx, goesPlatformConfigFileUpdate, arg.Name, + arg.Alias, + arg.SizeBytes, arg.Content, arg.UpdatedAt, arg.UpdatedBy, diff --git a/api/internal/db/models.go b/api/internal/db/models.go index cf3e64a7..4141b473 100644 --- a/api/internal/db/models.go +++ b/api/internal/db/models.go @@ -647,6 +647,8 @@ type GoesPlatformConfigFile struct { ID uuid.UUID `json:"id"` GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"` Name string `json:"name"` + Alias string `json:"alias"` + SizeBytes int64 `json:"size_bytes"` Content string `json:"content"` CreatedAt time.Time `json:"created_at"` CreatedBy uuid.UUID `json:"created_by"` @@ -1432,9 +1434,9 @@ type VEvaluation struct { } type VGoesTelemetrySource struct { - ID uuid.UUID `json:"id"` - Name string `json:"name"` - Files []IDName `json:"files"` + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Files []VGoesTelemetrySourceFiles `json:"files"` } type VInclMeasurement struct { diff --git 
a/api/internal/db/overrides.go b/api/internal/db/overrides.go index 151ba2e4..c4da034d 100644 --- a/api/internal/db/overrides.go +++ b/api/internal/db/overrides.go @@ -95,6 +95,12 @@ type IDName struct { Name string `json:"name"` } +type VGoesTelemetrySourceFiles struct { + IDName + Alias string `json:"alias"` + SizeBytes int64 `json:"size_bytes"` +} + type InstrumentIDName struct { InstrumentID uuid.UUID `json:"instrument_id"` InstrumentName string `json:"instrument_name"` diff --git a/api/internal/handler/goes.go b/api/internal/handler/goes.go index 6acd532e..c3ba4dc9 100644 --- a/api/internal/handler/goes.go +++ b/api/internal/handler/goes.go @@ -62,6 +62,7 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { type XmlPlatformConfig struct { PlatformConfig huma.FormFile `form:"file" contentType:"text/xml" required:"true"` + Alias string `form:"alias"` DryRun bool `form:"dry_run"` } @@ -86,9 +87,15 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { if xmlDoc == nil { return nil, httperr.BadRequest(errors.New("uploaded file is empty")) } + alias := formData.Alias + if alias == "" { + alias = formData.PlatformConfig.Filename + } newID, err := h.DBService.GoesPlatformConfigFileCreate(ctx, db.GoesPlatformConfigFileCreateParams{ GoesTelemetrySourceID: input.TelemetrySourceID.UUID, Name: formData.PlatformConfig.Filename, + SizeBytes: formData.PlatformConfig.Size, + Alias: alias, Content: string(xmlDoc), CreatedBy: p.ID, }, formData.DryRun) @@ -121,9 +128,15 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { return nil, httperr.BadRequest(errors.New("uploaded file is empty")) } now := time.Now().UTC() + alias := formData.Alias + if alias == "" { + alias = formData.PlatformConfig.Filename + } if err := h.DBService.GoesPlatformConfigFileUpdate(ctx, db.GoesPlatformConfigFileUpdateParams{ ID: input.TelemetryConfigID.UUID, Name: formData.PlatformConfig.Filename, + Alias: alias, + SizeBytes: formData.PlatformConfig.Size, Content: string(xmlDoc), 
UpdatedBy: &p.ID, UpdatedAt: &now, diff --git a/api/migrations/repeat/0190__views_telemetry.sql b/api/migrations/repeat/0190__views_telemetry.sql index fd095684..d9ce5c21 100644 --- a/api/migrations/repeat/0190__views_telemetry.sql +++ b/api/migrations/repeat/0190__views_telemetry.sql @@ -6,7 +6,9 @@ from goes_telemetry_source s left join ( select coalesce(jsonb_agg(jsonb_build_object( 'id', cf.id, - 'name', cf.name + 'name', cf.name, + 'alias', cf.alias, + 'size_bytes', cf.size_bytes )), '[]'::jsonb) as files from goes_platform_config_file cf ) f on true; diff --git a/api/migrations/schema/V1.56.00__goes.sql b/api/migrations/schema/V1.56.00__goes.sql index 0881a8a4..e48428c3 100644 --- a/api/migrations/schema/V1.56.00__goes.sql +++ b/api/migrations/schema/V1.56.00__goes.sql @@ -8,6 +8,8 @@ create table goes_platform_config_file ( id uuid primary key default uuid_generate_v4(), goes_telemetry_source_id uuid not null references goes_telemetry_source(id), name text not null, + alias text not null, + size_bytes bigint not null, content xml not null, created_at timestamptz not null default now(), created_by uuid not null references profile(id), diff --git a/api/queries/goes.sql b/api/queries/goes.sql index cbd36def..3857e684 100644 --- a/api/queries/goes.sql +++ b/api/queries/goes.sql @@ -3,8 +3,8 @@ select * from v_goes_telemetry_source; -- name: GoesPlatformConfigFileCreate :one -insert into goes_platform_config_file (goes_telemetry_source_id, name, content, created_by) -values (sqlc.arg(goes_telemetry_source_id), sqlc.arg(name), sqlc.arg(content)::xml, sqlc.arg(created_by)) +insert into goes_platform_config_file (goes_telemetry_source_id, name, alias, size_bytes, content, created_by) +values (sqlc.arg(goes_telemetry_source_id), sqlc.arg(name), sqlc.arg(alias), sqlc.arg(size_bytes), sqlc.arg(content)::xml, sqlc.arg(created_by)) returning id; @@ -15,6 +15,8 @@ select * from goes_platform_config_file where id=$1; -- name: GoesPlatformConfigFileUpdate :exec update 
goes_platform_config_file set name=sqlc.arg(name), + alias=sqlc.arg(alias), + size_bytes=sqlc.arg(size_bytes), content=sqlc.arg(content)::xml, updated_at=sqlc.arg(updated_at), updated_by=sqlc.arg(updated_by) diff --git a/sqlc.generate.yaml b/sqlc.generate.yaml index df436cac..0c7ec2ce 100644 --- a/sqlc.generate.yaml +++ b/sqlc.generate.yaml @@ -156,7 +156,7 @@ sql: # v_goes_telemetry - column: v_goes_telemetry_source.files go_type: - type: IDName + type: VGoesTelemetrySourceFiles slice: true # v_incl_measurement From 174a79a118b243d0126253d25f106a180aed60b0 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Wed, 10 Dec 2025 11:28:14 -0500 Subject: [PATCH 05/22] chore: add project_id to platform config file response --- api/internal/db/goes.sql_gen.go | 9 ++++++--- api/internal/db/models.go | 1 + api/internal/db/overrides.go | 5 +++-- api/internal/handler/goes.go | 1 + api/migrations/repeat/0190__views_telemetry.sql | 1 + api/migrations/schema/V1.56.00__goes.sql | 1 + api/queries/goes.sql | 4 ++-- 7 files changed, 15 insertions(+), 7 deletions(-) diff --git a/api/internal/db/goes.sql_gen.go b/api/internal/db/goes.sql_gen.go index b25241e3..0487e727 100644 --- a/api/internal/db/goes.sql_gen.go +++ b/api/internal/db/goes.sql_gen.go @@ -13,13 +13,14 @@ import ( ) const goesPlatformConfigFileCreate = `-- name: GoesPlatformConfigFileCreate :one -insert into goes_platform_config_file (goes_telemetry_source_id, name, alias, size_bytes, content, created_by) -values ($1, $2, $3, $4, $5::xml, $6) +insert into goes_platform_config_file (goes_telemetry_source_id, project_id, name, alias, size_bytes, content, created_by) +values ($1, $2, $3, $4, $5, $6::xml, $7) returning id ` type GoesPlatformConfigFileCreateParams struct { GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"` + ProjectID uuid.UUID `json:"project_id"` Name string `json:"name"` Alias string `json:"alias"` SizeBytes int64 `json:"size_bytes"` @@ -30,6 +31,7 @@ type GoesPlatformConfigFileCreateParams 
struct { func (q *Queries) GoesPlatformConfigFileCreate(ctx context.Context, arg GoesPlatformConfigFileCreateParams) (uuid.UUID, error) { row := q.db.QueryRow(ctx, goesPlatformConfigFileCreate, arg.GoesTelemetrySourceID, + arg.ProjectID, arg.Name, arg.Alias, arg.SizeBytes, @@ -51,7 +53,7 @@ func (q *Queries) GoesPlatformConfigFileDelete(ctx context.Context, id uuid.UUID } const goesPlatformConfigFileGet = `-- name: GoesPlatformConfigFileGet :one -select id, goes_telemetry_source_id, name, alias, size_bytes, content, created_at, created_by, updated_at, updated_by from goes_platform_config_file where id=$1 +select id, goes_telemetry_source_id, project_id, name, alias, size_bytes, content, created_at, created_by, updated_at, updated_by from goes_platform_config_file where id=$1 ` func (q *Queries) GoesPlatformConfigFileGet(ctx context.Context, id uuid.UUID) (GoesPlatformConfigFile, error) { @@ -60,6 +62,7 @@ func (q *Queries) GoesPlatformConfigFileGet(ctx context.Context, id uuid.UUID) ( err := row.Scan( &i.ID, &i.GoesTelemetrySourceID, + &i.ProjectID, &i.Name, &i.Alias, &i.SizeBytes, diff --git a/api/internal/db/models.go b/api/internal/db/models.go index 4141b473..93059148 100644 --- a/api/internal/db/models.go +++ b/api/internal/db/models.go @@ -646,6 +646,7 @@ type EvaluationInstrument struct { type GoesPlatformConfigFile struct { ID uuid.UUID `json:"id"` GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"` + ProjectID uuid.UUID `json:"project_id"` Name string `json:"name"` Alias string `json:"alias"` SizeBytes int64 `json:"size_bytes"` diff --git a/api/internal/db/overrides.go b/api/internal/db/overrides.go index c4da034d..f178ac31 100644 --- a/api/internal/db/overrides.go +++ b/api/internal/db/overrides.go @@ -97,8 +97,9 @@ type IDName struct { type VGoesTelemetrySourceFiles struct { IDName - Alias string `json:"alias"` - SizeBytes int64 `json:"size_bytes"` + ProjectID uuid.UUID `json:"project_id"` + Alias string `json:"alias"` + SizeBytes int64 
`json:"size_bytes"` } type InstrumentIDName struct { diff --git a/api/internal/handler/goes.go b/api/internal/handler/goes.go index c3ba4dc9..1b28c7ce 100644 --- a/api/internal/handler/goes.go +++ b/api/internal/handler/goes.go @@ -93,6 +93,7 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { } newID, err := h.DBService.GoesPlatformConfigFileCreate(ctx, db.GoesPlatformConfigFileCreateParams{ GoesTelemetrySourceID: input.TelemetrySourceID.UUID, + ProjectID: input.ProjectID.UUID, Name: formData.PlatformConfig.Filename, SizeBytes: formData.PlatformConfig.Size, Alias: alias, diff --git a/api/migrations/repeat/0190__views_telemetry.sql b/api/migrations/repeat/0190__views_telemetry.sql index d9ce5c21..b6767b7e 100644 --- a/api/migrations/repeat/0190__views_telemetry.sql +++ b/api/migrations/repeat/0190__views_telemetry.sql @@ -7,6 +7,7 @@ left join ( select coalesce(jsonb_agg(jsonb_build_object( 'id', cf.id, 'name', cf.name, + 'project_id', cf.project_id, 'alias', cf.alias, 'size_bytes', cf.size_bytes )), '[]'::jsonb) as files diff --git a/api/migrations/schema/V1.56.00__goes.sql b/api/migrations/schema/V1.56.00__goes.sql index e48428c3..fe964aa3 100644 --- a/api/migrations/schema/V1.56.00__goes.sql +++ b/api/migrations/schema/V1.56.00__goes.sql @@ -7,6 +7,7 @@ create table goes_telemetry_source ( create table goes_platform_config_file ( id uuid primary key default uuid_generate_v4(), goes_telemetry_source_id uuid not null references goes_telemetry_source(id), + project_id uuid not null references project(id), name text not null, alias text not null, size_bytes bigint not null, diff --git a/api/queries/goes.sql b/api/queries/goes.sql index 3857e684..1c8248ab 100644 --- a/api/queries/goes.sql +++ b/api/queries/goes.sql @@ -3,8 +3,8 @@ select * from v_goes_telemetry_source; -- name: GoesPlatformConfigFileCreate :one -insert into goes_platform_config_file (goes_telemetry_source_id, name, alias, size_bytes, content, created_by) -values 
(sqlc.arg(goes_telemetry_source_id), sqlc.arg(name), sqlc.arg(alias), sqlc.arg(size_bytes), sqlc.arg(content)::xml, sqlc.arg(created_by)) +insert into goes_platform_config_file (goes_telemetry_source_id, project_id, name, alias, size_bytes, content, created_by) +values (sqlc.arg(goes_telemetry_source_id), sqlc.arg(project_id), sqlc.arg(name), sqlc.arg(alias), sqlc.arg(size_bytes), sqlc.arg(content)::xml, sqlc.arg(created_by)) returning id; From cc3bcf660acdfc821347d84bff5e6498d59276ea Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Thu, 11 Dec 2025 16:19:58 -0500 Subject: [PATCH 06/22] chore!: wip implementation of user-defined goes opendcs --- compose.sh | 6 + go.work | 4 +- opendcs/.gitignore | 19 + opendcs/Dockerfile | 87 +++ opendcs/decodes.properties | 47 ++ opendcs/go.mod | 49 ++ opendcs/go.sum | 162 +++++ opendcs/main.go | 542 ++++++++++++++++ opendcs/midas_config/datasource/hotbackup.xml | 25 + .../reference/DataTypeEquivalenceList.xml | 590 ++++++++++++++++++ opendcs/midas_config/reference/EnumList.xml | 402 ++++++++++++ .../midas_config/reference/MIDAS-English.xml | 268 ++++++++ .../midas_config/reference/MIDAS-Metric.xml | 274 ++++++++ opendcs/midas_config/routing/goes.xml | 55 ++ opendcs/midas_config/routing/monitor.xml | 51 ++ opendcs/rsgis/.mvn/jvm.config | 0 opendcs/rsgis/.mvn/maven.config | 0 .../org.eclipse.core.resources.prefs | 4 + .../.settings/org.eclipse.jdt.apt.core.prefs | 2 + .../.settings/org.eclipse.jdt.core.prefs | 9 + .../.settings/org.eclipse.m2e.core.prefs | 4 + opendcs/rsgis/pom.xml | 71 +++ .../rsgis/consumer/MidasOutputFormatter.java | 122 ++++ 23 files changed, 2792 insertions(+), 1 deletion(-) create mode 100644 opendcs/.gitignore create mode 100644 opendcs/Dockerfile create mode 100644 opendcs/decodes.properties create mode 100644 opendcs/go.mod create mode 100644 opendcs/go.sum create mode 100644 opendcs/main.go create mode 100644 opendcs/midas_config/datasource/hotbackup.xml create mode 100644 
opendcs/midas_config/reference/DataTypeEquivalenceList.xml create mode 100644 opendcs/midas_config/reference/EnumList.xml create mode 100644 opendcs/midas_config/reference/MIDAS-English.xml create mode 100644 opendcs/midas_config/reference/MIDAS-Metric.xml create mode 100644 opendcs/midas_config/routing/goes.xml create mode 100644 opendcs/midas_config/routing/monitor.xml create mode 100644 opendcs/rsgis/.mvn/jvm.config create mode 100644 opendcs/rsgis/.mvn/maven.config create mode 100644 opendcs/rsgis/.settings/org.eclipse.core.resources.prefs create mode 100644 opendcs/rsgis/.settings/org.eclipse.jdt.apt.core.prefs create mode 100644 opendcs/rsgis/.settings/org.eclipse.jdt.core.prefs create mode 100644 opendcs/rsgis/.settings/org.eclipse.m2e.core.prefs create mode 100644 opendcs/rsgis/pom.xml create mode 100644 opendcs/rsgis/src/main/java/rsgis/consumer/MidasOutputFormatter.java diff --git a/compose.sh b/compose.sh index 5a80c41c..2d86e078 100755 --- a/compose.sh +++ b/compose.sh @@ -180,6 +180,12 @@ elif [ "$1" = "test" ]; then elif [ "$1" = "mkdocs" ]; then mkdocs +elif [ "$1" = "opendcs-dep" ]; then + cid=$(docker create ghcr.io/opendcs/routingscheduler:7.0-nightly) && + mkdir -p "${parent_path}/opendcs/rsgis/src/main/resources" && + docker cp "$cid:/opt/opendcs/bin/opendcs.jar" "${parent_path}/opendcs/rsgis/src/main/resources/opendcs.jar" && + docker rm "$cid" + else echo -e "usage:\n\t./compose.sh watch\n\t./compose.sh up\n\t./compose.sh down\n\t./compose.sh clean\n\t./compose.sh test\n\t./compose.sh mkdocs" fi diff --git a/go.work b/go.work index 9671ce28..8fbc0b78 100644 --- a/go.work +++ b/go.work @@ -1,3 +1,5 @@ -go 1.25 +go 1.25.5 use ./api + +use ./opendcs diff --git a/opendcs/.gitignore b/opendcs/.gitignore new file mode 100644 index 00000000..641c73d1 --- /dev/null +++ b/opendcs/.gitignore @@ -0,0 +1,19 @@ +target/ +pom.xml.tag +pom.xml.releaseBackup +pom.xml.versionsBackup +pom.xml.next +release.properties +dependency-reduced-pom.xml 
+buildNumber.properties +.mvn/timing.properties +# https://maven.apache.org/wrapper/#usage-without-binary-jar +.mvn/wrapper/maven-wrapper.jar + +# Eclipse m2e generated files +# Eclipse Core +.project +# JDT-specific (Eclipse Java Development Tools) +.classpath + +rsgis/src/main/resources diff --git a/opendcs/Dockerfile b/opendcs/Dockerfile new file mode 100644 index 00000000..33be4732 --- /dev/null +++ b/opendcs/Dockerfile @@ -0,0 +1,87 @@ +ARG OPENDCS_BASE_IMAGE=ghcr.io/opendcs/routingscheduler:7.0-nightly +ARG MAVEN_BUILD_IMAGE=maven:3-eclipse-temurin-8-noble +ARG GO_BUILD_IMAGE=golang:1.23-alpine + +FROM ${OPENDCS_BASE_IMAGE} AS opendcs_patched + +USER root + +RUN rm -f /opt/opendcs/dep/commons-net-*.jar \ + /opt/opendcs/dep/jackson-core*.jar \ + /opt/opendcs/dep/jackson-dataformat-toml*.jar \ + /opt/opendcs/dep/commons-vfs2-*.jar \ + /opt/opendcs/dep/javax.el-*.jar \ + /opt/opendcs/dep/jdom-*.jar \ + /opt/opendcs/dep/poi-*.jar \ + /opt/opendcs/dep/postgresql-*.jar \ + /opt/opendcs/dep/jetty-*.jar || true + +RUN wget -qO /opt/opendcs/dep/commons-net-3.11.1.jar \ + "https://repo1.maven.org/maven2/commons-net/commons-net/3.11.1/commons-net-3.11.1.jar" \ + && wget -qO /opt/opendcs/dep/jackson-dataformat-toml-2.18.2.jar \ + "https://repo1.maven.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-toml/2.18.2/jackson-dataformat-toml-2.18.2.jar" \ + && wget -qO /opt/opendcs/dep/jackson-core-2.19.2.jar \ + "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-core/2.19.2/jackson-core-2.19.2.jar" \ + && wget -qO /opt/opendcs/dep/commons-vfs2-2.10.0.jar \ + "https://repo1.maven.org/maven2/org/apache/commons/commons-vfs2/2.10.0/commons-vfs2-2.10.0.jar" \ + && wget -qO /opt/opendcs/dep/jdom2-2.0.6.1.jar \ + "https://repo1.maven.org/maven2/org/jdom/jdom2/2.0.6.1/jdom2-2.0.6.1.jar" \ + && wget -qO /opt/opendcs/dep/poi-5.4.1.jar \ + "https://repo1.maven.org/maven2/org/apache/poi/poi/5.4.1/poi-5.4.1.jar" \ + && wget -qO 
/opt/opendcs/dep/postgresql-42.7.7.jar \ + "https://repo1.maven.org/maven2/org/postgresql/postgresql/42.7.7/postgresql-42.7.7.jar" \ + && chown opendcs:opendcs /opt/opendcs/dep/*.jar + +FROM opendcs_patched AS opendcs_base + +FROM ${MAVEN_BUILD_IMAGE} AS maven_builder + +COPY ./rsgis /opt/rsgis +RUN mkdir -p /opt/rsgis/src/main/resources +COPY --from=opendcs_base /opt/opendcs/bin/opendcs.jar /opt/rsgis/src/main/resources/opendcs.jar + +RUN --mount=type=cache,target=/root/.m2 \ + mvn -f /opt/rsgis/pom.xml clean package + +FROM ${GO_BUILD_IMAGE} AS go_builder + +WORKDIR /src + +COPY go.mod go.sum ./ +RUN --mount=type=cache,target=/go/pkg/mod \ + go mod download + +COPY . . + +RUN --mount=type=cache,target=/root/.cache/go-build \ + go build -o /opendcs-wrapper . + +FROM opendcs_patched + +USER root + +RUN rm -rf /opt/java/openjdk/release + +RUN apk add --no-cache coreutils ca-certificates + +ENV INSTRUMENTATION_DCS_CONFIG=${HOME}/midas_config +ENV DCSTOOL_USERDIR=/opt/opendcs +ENV DATABASE_URL=/opt/opendcs/edit-db +ENV OPENDCS_IMPORT_DIR=/opt/opendcs/import +ENV OPENDCS_HTTP_ADDR=:8080 +ENV OPENDCS_LOG_DIR=/opendcs_output + +RUN mkdir -p -m 775 ${DCSTOOL_HOME} /opendcs_output ${OPENDCS_IMPORT_DIR} && \ + chown -R opendcs:opendcs ${DCSTOOL_HOME} /opendcs_output ${OPENDCS_IMPORT_DIR} + +RUN apk del py3-cryptography || true + +COPY --chown=opendcs:opendcs --from=maven_builder /opt/rsgis/target/rsgis.jar ${DCSTOOL_HOME}/dep + +COPY --chown=opendcs:opendcs ./decodes.properties ${DCSTOOL_HOME}/decodes.properties +COPY --chown=opendcs:opendcs ./midas_config ${INSTRUMENTATION_DCS_CONFIG} +COPY --chown=opendcs:opendcs --from=go_builder /opendcs-wrapper /usr/local/bin/opendcs-wrapper + +USER opendcs + +CMD ["/usr/local/bin/opendcs-wrapper"] diff --git a/opendcs/decodes.properties b/opendcs/decodes.properties new file mode 100644 index 00000000..e422df70 --- /dev/null +++ b/opendcs/decodes.properties @@ -0,0 +1,47 @@ +# +# The 'EditDatabase' is the provisional working database. 
+# The default installation is set up for a local XML database. +# +EditDatabaseType=XML +EditDatabaseLocation=/opt/opendcs/edit-db + +# +# For SQL Editable Database, change EditDatabaseType to sql +# Then... +# Format for EditDatabaseLocation is a JDBC Database URL: +# +# jdbc:protocol:[//host[:port]]/databasename +# +# where +# protocol is usually the DB product name like 'postgresql' +# host and port are optional. If not supplied, a local database is assumed. +# databasename is the database name - required. +# +# example: +# EditDatabaseLocation=jdbc:postgresql://mylrgs/decodesedit +# + +# Settings for the dbedit GUI: +EditPresentationGroup=CWMS-English + +# Various agency-specific preferences: +SiteNameTypePreference=CWMS +EditTimeZone=UTC +#EditOutputFormat=Human-Readable + +jdbcDriverClass=org.postgresql.Driver + +SqlKeyGenerator=decodes.sql.SequenceKeyGenerator +#sqlDateFormat= +#sqlTimeZone= + +transportMediumTypePreference=goes + +#defaultDataSource= +#routingStatusDir= +dataTypeStdPreference=CWMS +#decwizTimeZone= +#decwizOutputFormat= +#decwizDebugLevel= +#decwizDecodedDataDir= +#decwizSummaryLog= diff --git a/opendcs/go.mod b/opendcs/go.mod new file mode 100644 index 00000000..1e7e5c72 --- /dev/null +++ b/opendcs/go.mod @@ -0,0 +1,49 @@ +module github.com/USACE/instrumentation-api/opendcs + +go 1.25.5 + +require ( + github.com/danielgtaylor/huma/v2 v2.34.1 + gocloud.dev v0.44.0 +) + +require ( + github.com/aws/aws-sdk-go-v2 v1.39.6 // indirect + github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.3 // indirect + github.com/aws/aws-sdk-go-v2/config v1.31.17 // indirect + github.com/aws/aws-sdk-go-v2/credentials v1.18.21 // indirect + github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.13 // indirect + github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.20.3 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.13 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.13 // indirect + 
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 // indirect + github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.13 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.4 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.13 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.13 // indirect + github.com/aws/aws-sdk-go-v2/service/s3 v1.89.2 // indirect + github.com/aws/aws-sdk-go-v2/service/sso v1.30.1 // indirect + github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.5 // indirect + github.com/aws/aws-sdk-go-v2/service/sts v1.39.1 // indirect + github.com/aws/smithy-go v1.23.2 // indirect + github.com/go-logr/logr v1.4.3 // indirect + github.com/go-logr/stdr v1.2.2 // indirect + github.com/google/uuid v1.6.0 // indirect + github.com/google/wire v0.7.0 // indirect + github.com/googleapis/gax-go/v2 v2.15.0 // indirect + go.opentelemetry.io/auto/sdk v1.1.0 // indirect + go.opentelemetry.io/otel v1.37.0 // indirect + go.opentelemetry.io/otel/metric v1.37.0 // indirect + go.opentelemetry.io/otel/sdk v1.37.0 // indirect + go.opentelemetry.io/otel/sdk/metric v1.37.0 // indirect + go.opentelemetry.io/otel/trace v1.37.0 // indirect + golang.org/x/net v0.43.0 // indirect + golang.org/x/sys v0.35.0 // indirect + golang.org/x/text v0.28.0 // indirect + golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect + google.golang.org/api v0.247.0 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20250811230008-5f3141c8851a // indirect + google.golang.org/grpc v1.74.2 // indirect + google.golang.org/protobuf v1.36.7 // indirect +) diff --git a/opendcs/go.sum b/opendcs/go.sum new file mode 100644 index 00000000..bfa4b0ca --- /dev/null +++ b/opendcs/go.sum @@ -0,0 +1,162 @@ +cel.dev/expr v0.24.0 h1:56OvJKSH3hDGL0ml5uSxZmz3/3Pq4tJ+fb1unVLAFcY= +cel.dev/expr v0.24.0/go.mod 
h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw= +cloud.google.com/go v0.121.6 h1:waZiuajrI28iAf40cWgycWNgaXPO06dupuS+sgibK6c= +cloud.google.com/go v0.121.6/go.mod h1:coChdst4Ea5vUpiALcYKXEpR1S9ZgXbhEzzMcMR66vI= +cloud.google.com/go/auth v0.16.4 h1:fXOAIQmkApVvcIn7Pc2+5J8QTMVbUGLscnSVNl11su8= +cloud.google.com/go/auth v0.16.4/go.mod h1:j10ncYwjX/g3cdX7GpEzsdM+d+ZNsXAbb6qXA7p1Y5M= +cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= +cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= +cloud.google.com/go/compute/metadata v0.8.0 h1:HxMRIbao8w17ZX6wBnjhcDkW6lTFpgcaobyVfZWqRLA= +cloud.google.com/go/compute/metadata v0.8.0/go.mod h1:sYOGTp851OV9bOFJ9CH7elVvyzopvWQFNNghtDQ/Biw= +cloud.google.com/go/iam v1.5.2 h1:qgFRAGEmd8z6dJ/qyEchAuL9jpswyODjA2lS+w234g8= +cloud.google.com/go/iam v1.5.2/go.mod h1:SE1vg0N81zQqLzQEwxL2WI6yhetBdbNQuTvIKCSkUHE= +cloud.google.com/go/monitoring v1.24.2 h1:5OTsoJ1dXYIiMiuL+sYscLc9BumrL3CarVLL7dd7lHM= +cloud.google.com/go/monitoring v1.24.2/go.mod h1:x7yzPWcgDRnPEv3sI+jJGBkwl5qINf+6qY4eq0I9B4U= +cloud.google.com/go/storage v1.56.0 h1:iixmq2Fse2tqxMbWhLWC9HfBj1qdxqAmiK8/eqtsLxI= +cloud.google.com/go/storage v1.56.0/go.mod h1:Tpuj6t4NweCLzlNbw9Z9iwxEkrSem20AetIeH/shgVU= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.29.0 h1:UQUsRi8WTzhZntp5313l+CHIAT95ojUI2lpP/ExlZa4= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.29.0/go.mod h1:Cz6ft6Dkn3Et6l2v2a9/RpN7epQ1GtDlO6lj8bEcOvw= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.53.0 h1:owcC2UnmsZycprQ5RfRgjydWhuoxg71LUfyiQdijZuM= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.53.0/go.mod h1:ZPpqegjbE99EPKsu3iUWV22A04wzGPcAY/ziSIQEEgs= +github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0 h1:Ron4zCA/yk6U7WOBXhTJcDpsUBG9npumK6xw2auFltQ= 
+github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.53.0/go.mod h1:cSgYe11MCNYunTnRXrKiR/tHc0eoKjICUuWpNZoVCOo= +github.com/aws/aws-sdk-go-v2 v1.39.6 h1:2JrPCVgWJm7bm83BDwY5z8ietmeJUbh3O2ACnn+Xsqk= +github.com/aws/aws-sdk-go-v2 v1.39.6/go.mod h1:c9pm7VwuW0UPxAEYGyTmyurVcNrbF6Rt/wixFqDhcjE= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.3 h1:DHctwEM8P8iTXFxC/QK0MRjwEpWQeM9yzidCRjldUz0= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.3/go.mod h1:xdCzcZEtnSTKVDOmUZs4l/j3pSV6rpo1WXl5ugNsL8Y= +github.com/aws/aws-sdk-go-v2/config v1.31.17 h1:QFl8lL6RgakNK86vusim14P2k8BFSxjvUkcWLDjgz9Y= +github.com/aws/aws-sdk-go-v2/config v1.31.17/go.mod h1:V8P7ILjp/Uef/aX8TjGk6OHZN6IKPM5YW6S78QnRD5c= +github.com/aws/aws-sdk-go-v2/credentials v1.18.21 h1:56HGpsgnmD+2/KpG0ikvvR8+3v3COCwaF4r+oWwOeNA= +github.com/aws/aws-sdk-go-v2/credentials v1.18.21/go.mod h1:3YELwedmQbw7cXNaII2Wywd+YY58AmLPwX4LzARgmmA= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.13 h1:T1brd5dR3/fzNFAQch/iBKeX07/ffu/cLu+q+RuzEWk= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.13/go.mod h1:Peg/GBAQ6JDt+RoBf4meB1wylmAipb7Kg2ZFakZTlwk= +github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.20.3 h1:4GNV1lhyELGjMz5ILMRxDvxvOaeo3Ux9Z69S1EgVMMQ= +github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.20.3/go.mod h1:br7KA6edAAqDGUYJ+zVVPAyMrPhnN+zdt17yTUT6FPw= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.13 h1:a+8/MLcWlIxo1lF9xaGt3J/u3yOZx+CdSveSNwjhD40= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.13/go.mod h1:oGnKwIYZ4XttyU2JWxFrwvhF6YKiK/9/wmE3v3Iu9K8= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.13 h1:HBSI2kDkMdWz4ZM7FjwE7e/pWDEZ+nR95x8Ztet1ooY= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.13/go.mod h1:YE94ZoDArI7awZqJzBAZ3PDD2zSfuP7w6P2knOzIn8M= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 h1:WKuaxf++XKWlHWu9ECbMlha8WOEGm0OUEZqm4K/Gcfk= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4/go.mod 
h1:ZWy7j6v1vWGmPReu0iSGvRiise4YI5SkR3OHKTZ6Wuc= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.13 h1:eg/WYAa12vqTphzIdWMzqYRVKKnCboVPRlvaybNCqPA= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.13/go.mod h1:/FDdxWhz1486obGrKKC1HONd7krpk38LBt+dutLcN9k= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3 h1:x2Ibm/Af8Fi+BH+Hsn9TXGdT+hKbDd5XOTZxTMxDk7o= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3/go.mod h1:IW1jwyrQgMdhisceG8fQLmQIydcT/jWY21rFhzgaKwo= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.4 h1:NvMjwvv8hpGUILarKw7Z4Q0w1H9anXKsesMxtw++MA4= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.4/go.mod h1:455WPHSwaGj2waRSpQp7TsnpOnBfw8iDfPfbwl7KPJE= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.13 h1:kDqdFvMY4AtKoACfzIGD8A0+hbT41KTKF//gq7jITfM= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.13/go.mod h1:lmKuogqSU3HzQCwZ9ZtcqOc5XGMqtDK7OIc2+DxiUEg= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.13 h1:zhBJXdhWIFZ1acfDYIhu4+LCzdUS2Vbcum7D01dXlHQ= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.13/go.mod h1:JaaOeCE368qn2Hzi3sEzY6FgAZVCIYcC2nwbro2QCh8= +github.com/aws/aws-sdk-go-v2/service/s3 v1.89.2 h1:xgBWsgaeUESl8A8k80p6yBdexMWDVeiDmJ/pkjohJ7c= +github.com/aws/aws-sdk-go-v2/service/s3 v1.89.2/go.mod h1:+wArOOrcHUevqdto9k1tKOF5++YTe9JEcPSc9Tx2ZSw= +github.com/aws/aws-sdk-go-v2/service/sso v1.30.1 h1:0JPwLz1J+5lEOfy/g0SURC9cxhbQ1lIMHMa+AHZSzz0= +github.com/aws/aws-sdk-go-v2/service/sso v1.30.1/go.mod h1:fKvyjJcz63iL/ftA6RaM8sRCtN4r4zl4tjL3qw5ec7k= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.5 h1:OWs0/j2UYR5LOGi88sD5/lhN6TDLG6SfA7CqsQO9zF0= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.5/go.mod h1:klO+ejMvYsB4QATfEOIXk8WAEwN4N0aBfJpvC+5SZBo= +github.com/aws/aws-sdk-go-v2/service/sts v1.39.1 h1:mLlUgHn02ue8whiR4BmxxGJLR2gwU6s6ZzJ5wDamBUs= +github.com/aws/aws-sdk-go-v2/service/sts v1.39.1/go.mod 
h1:E19xDjpzPZC7LS2knI9E6BaRFDK43Eul7vd6rSq2HWk= +github.com/aws/smithy-go v1.23.2 h1:Crv0eatJUQhaManss33hS5r40CG3ZFH+21XSkqMrIUM= +github.com/aws/smithy-go v1.23.2/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 h1:aQ3y1lwWyqYPiWZThqv1aFbZMiM9vblcSArJRf2Irls= +github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= +github.com/danielgtaylor/huma/v2 v2.34.1 h1:EmOJAbzEGfy0wAq/QMQ1YKfEMBEfE94xdBRLPBP0gwQ= +github.com/danielgtaylor/huma/v2 v2.34.1/go.mod h1:ynwJgLk8iGVgoaipi5tgwIQ5yoFNmiu+QdhU7CEEmhk= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/envoyproxy/go-control-plane v0.13.4 h1:zEqyPVyku6IvWCFwux4x9RxkLOMUL+1vC9xUFv5l2/M= +github.com/envoyproxy/go-control-plane/envoy v1.32.4 h1:jb83lalDRZSpPWW2Z7Mck/8kXZ5CQAFYVjQcdVIr83A= +github.com/envoyproxy/go-control-plane/envoy v1.32.4/go.mod h1:Gzjc5k8JcJswLjAx1Zm+wSYE20UrLtt7JZMWiWQXQEw= +github.com/envoyproxy/protoc-gen-validate v1.2.1 h1:DEo3O99U8j4hBFwbJfrz9VtgcDfUKS7KJ7spH3d86P8= +github.com/envoyproxy/protoc-gen-validate v1.2.1/go.mod h1:d/C80l/jxXLdfEIhX1W2TmLfsJ31lvEjwamM4DxlWXU= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= +github.com/go-jose/go-jose/v4 v4.1.1 h1:JYhSgy4mXXzAdF3nUx3ygx347LRXJRrpgyU3adRmkAI= +github.com/go-jose/go-jose/v4 v4.1.1/go.mod h1:BdsZGqgdO3b6tTc6LSE56wcDbMMLuPsw5d4ZD5f94kA= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.3 
h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI= +github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= +github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= +github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/go-replayers/grpcreplay v1.3.0 h1:1Keyy0m1sIpqstQmgz307zhiJ1pV4uIlFds5weTmxbo= +github.com/google/go-replayers/grpcreplay v1.3.0/go.mod h1:v6NgKtkijC0d3e3RW8il6Sy5sqRVUwoQa4mHOGEy8DI= +github.com/google/go-replayers/httpreplay v1.2.0 h1:VM1wEyyjaoU53BwrOnaf9VhAyQQEEioJvFYxYcLRKzk= +github.com/google/go-replayers/httpreplay v1.2.0/go.mod h1:WahEFFZZ7a1P4VM1qEeHy+tME4bwyqPcwWbNlUI1Mcg= +github.com/google/martian/v3 v3.3.3 h1:DIhPTQrbPkgs2yJYdXU/eNACCG5DVQjySNRNlflZ9Fc= +github.com/google/martian/v3 v3.3.3/go.mod h1:iEPrYcgCF7jA9OtScMFQyAlZZ4YXTKEtJ1E6RWzmBA0= +github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0= +github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/wire v0.7.0 h1:JxUKI6+CVBgCO2WToKy/nQk0sS+amI9z9EjVmdaocj4= +github.com/google/wire v0.7.0/go.mod h1:n6YbUQD9cPKTnHXEBN2DXlOp/mVADhVErcMFb0v3J18= +github.com/googleapis/enterprise-certificate-proxy v0.3.6 h1:GW/XbdyBFQ8Qe+YAmFU9uHLo7OnF5tL52HFAgMmyrf4= +github.com/googleapis/enterprise-certificate-proxy v0.3.6/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA= +github.com/googleapis/gax-go/v2 v2.15.0 
h1:SyjDc1mGgZU5LncH8gimWo9lW1DtIfPibOG81vgd/bo= +github.com/googleapis/gax-go/v2 v2.15.0/go.mod h1:zVVkkxAQHa1RQpg9z2AUCMnKhi0Qld9rcmyfL1OZhoc= +github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo= +github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/spiffe/go-spiffe/v2 v2.5.0 h1:N2I01KCUkv1FAjZXJMwh95KK1ZIQLYbPfhaxw8WS0hE= +github.com/spiffe/go-spiffe/v2 v2.5.0/go.mod h1:P+NxobPc6wXhVtINNtFjNWGBTreew1GBUCwT2wPmb7g= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/zeebo/errs v1.4.0 h1:XNdoD/RRMKP7HD0UhJnIzUy74ISdGGxURlYG8HSWSfM= +github.com/zeebo/errs v1.4.0/go.mod h1:sgbWHsvVuTPHcqJJGQ1WhI5KbWlHYz+2+2C/LSEtCw4= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= +go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/contrib/detectors/gcp v1.37.0 h1:B+WbN9RPsvobe6q4vP6KgM8/9plR/HNjgGBrfcOlweA= +go.opentelemetry.io/contrib/detectors/gcp v1.37.0/go.mod h1:K5zQ3TT7p2ru9Qkzk0bKtCql0RGkPj9pRjpXgZJZ+rU= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.62.0 h1:rbRJ8BBoVMsQShESYZ0FkvcITu8X8QNwJogcLUmDNNw= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.62.0/go.mod h1:ru6KHrNtNHxM4nD/vd6QrLVWgKhxPYgblq4VAtNawTQ= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0 h1:Hf9xI/XLML9ElpiHVDNwvqI0hIFlzV8dgIr35kV1kRU= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0/go.mod h1:NfchwuyNoMcZ5MLHwPrODwUF1HWCXWrL31s8gSAdIKY= 
+go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ= +go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I= +go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/WgbsdpcPoZE= +go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E= +go.opentelemetry.io/otel/sdk v1.37.0 h1:ItB0QUqnjesGRvNcmAcU0LyvkVyGJ2xftD29bWdDvKI= +go.opentelemetry.io/otel/sdk v1.37.0/go.mod h1:VredYzxUvuo2q3WRcDnKDjbdvmO0sCzOvVAiY+yUkAg= +go.opentelemetry.io/otel/sdk/metric v1.37.0 h1:90lI228XrB9jCMuSdA0673aubgRobVZFhbjxHHspCPc= +go.opentelemetry.io/otel/sdk/metric v1.37.0/go.mod h1:cNen4ZWfiD37l5NhS+Keb5RXVWZWpRE+9WyVCpbo5ps= +go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4= +go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0= +gocloud.dev v0.44.0 h1:iVyMAqFl2r6xUy7M4mfqwlN+21UpJoEtgHEcfiLMUXs= +gocloud.dev v0.44.0/go.mod h1:ZmjROXGdC/eKZLF1N+RujDlFRx3D+4Av2thREKDMVxY= +golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4= +golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc= +golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE= +golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg= +golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= +golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU= +golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw= +golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI= +golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng= +golang.org/x/text v0.28.0/go.mod 
h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU= +golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= +golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= +golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da h1:noIWHXmPHxILtqtCOPIhSt0ABwskkZKjD3bXGnZGpNY= +golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= +google.golang.org/api v0.247.0 h1:tSd/e0QrUlLsrwMKmkbQhYVa109qIintOls2Wh6bngc= +google.golang.org/api v0.247.0/go.mod h1:r1qZOPmxXffXg6xS5uhx16Fa/UFY8QU/K4bfKrnvovM= +google.golang.org/genproto v0.0.0-20250715232539-7130f93afb79 h1:Nt6z9UHqSlIdIGJdz6KhTIs2VRx/iOsA5iE8bmQNcxs= +google.golang.org/genproto v0.0.0-20250715232539-7130f93afb79/go.mod h1:kTmlBHMPqR5uCZPBvwa2B18mvubkjyY3CRLI0c6fj0s= +google.golang.org/genproto/googleapis/api v0.0.0-20250818200422-3122310a409c h1:AtEkQdl5b6zsybXcbz00j1LwNodDuH6hVifIaNqk7NQ= +google.golang.org/genproto/googleapis/api v0.0.0-20250818200422-3122310a409c/go.mod h1:ea2MjsO70ssTfCjiwHgI0ZFqcw45Ksuk2ckf9G468GA= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250811230008-5f3141c8851a h1:tPE/Kp+x9dMSwUm/uM0JKK0IfdiJkwAbSMSeZBXXJXc= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250811230008-5f3141c8851a/go.mod h1:gw1tLEfykwDz2ET4a12jcXt4couGAm7IwsVaTy0Sflo= +google.golang.org/grpc v1.74.2 h1:WoosgB65DlWVC9FqI82dGsZhWFNBSLjQ84bjROOpMu4= +google.golang.org/grpc v1.74.2/go.mod h1:CtQ+BGjaAIXHs/5YS3i473GqwBBa1zGQNevxdeBEXrM= +google.golang.org/protobuf v1.36.7 h1:IgrO7UwFQGJdRNXH/sQux4R1Dj1WAKcLElzeeRaXV2A= +google.golang.org/protobuf v1.36.7/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/opendcs/main.go b/opendcs/main.go new file mode 100644 index 00000000..e3570c83 --- /dev/null +++ b/opendcs/main.go @@ -0,0 +1,542 @@ +package main + 
+import ( + "bytes" + "context" + "crypto/subtle" + "errors" + "fmt" + "io" + "log/slog" + "net/http" + "net/url" + "os" + "os/exec" + "os/signal" + "path" + "path/filepath" + "strings" + "sync" + "syscall" + "time" + + "github.com/danielgtaylor/huma/v2" + "github.com/danielgtaylor/huma/v2/adapters/humago" + + "gocloud.dev/blob" + _ "gocloud.dev/blob/s3blob" +) + +type Config struct { + InstrConfigDir string + ImportDir string + RoutingSpec string + PlatformImportPrefix string + AuthToken string + ListenAddr string + LogDir string +} + +type Response[T any] struct { + Body T +} + +func NewResponse[T any](body T) *Response[T] { + return &Response[T]{ + Body: body, + } +} + +type KeyQueryParam struct { + Key string `query:"key" required:"true" doc:"API key for authentication"` +} + +type ImportRequest struct { + Files []string `json:"files"` + ValidateOnly bool `json:"validate_only"` +} + +type ImportResponse struct { + Status string `json:"status"` + ValidateLog string `json:"validate_log,omitempty"` + ImportLog string `json:"import_log,omitempty"` + CommandOutput string `json:"command_output,omitempty"` + Error string `json:"error,omitempty"` +} + +type RuntimeLogsOutput struct { + Body struct { + Log string `json:"log" doc:"Contents of routing scheduler runtime log"` + } +} + +const VERSION = "1.0.0" + +var ( + cfg Config + importMu sync.Mutex + rsCmd *exec.Cmd + rsMu sync.Mutex + + dataloadS3Root string + awsEndpointURL string +) + +func init() { + var level slog.Level + levelText := getenvDefault("LOGLEVEL", "INFO") + if err := level.UnmarshalText([]byte(levelText)); err != nil { + panic(err) + } + logger := slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{Level: level})) + slog.SetDefault(logger) + + cfg = loadConfig() + + dataloadS3Root = os.Getenv("DATALOAD_S3_ROOT") + awsEndpointURL = os.Getenv("AWS_ENDPOINT_URL") +} + +func main() { + if len(os.Args) > 1 && os.Args[1] == "upload" { + if len(os.Args) < 3 { + slog.Error("usage: upload requires 
file path argument", "argv", os.Args) + os.Exit(1) + } + if err := runUploadCLI(os.Args[2]); err != nil { + slog.Error("upload failed", "err", err) + os.Exit(1) + } + return + } + + slog.Info("starting opendcs wrapper (server mode)", "version", VERSION) + slog.Debug("configuration", "cfg", cfg) + + go func() { + if err := startHTTPServer(); err != nil { + slog.Error("http server failed", "err", err) + os.Exit(1) + } + }() + + go handleSignals() + + if err := initialImport(); err != nil { + slog.Error("initial import failed", "err", err) + os.Exit(1) + } + + if err := startRoutingScheduler(); err != nil { + slog.Error("failed to start routing scheduler", "err", err) + os.Exit(1) + } + + if err := rsCmd.Wait(); err != nil { + slog.Error("routing scheduler exited with error", "err", err) + } else { + slog.Info("routing scheduler exited cleanly") + } + + time.Sleep(1 * time.Second) // allow logs to flush +} + +func loadConfig() Config { + c := Config{ + InstrConfigDir: os.Getenv("INSTRUMENTATION_DCS_CONFIG"), + ImportDir: getenvDefault("OPENDCS_IMPORT_DIR", "/opt/opendcs/import"), + RoutingSpec: getenvDefault("ROUTING_SPEC", "goes"), + AuthToken: os.Getenv("OPENDCS_IMPORT_TOKEN"), + ListenAddr: getenvDefault("OPENDCS_HTTP_ADDR", ":8080"), + LogDir: getenvDefault("OPENDCS_LOG_DIR", "/opendcs_output"), + } + + if c.InstrConfigDir == "" { + slog.Error("INSTRUMENTATION_DCS_CONFIG must be set") + os.Exit(1) + } + if c.AuthToken == "" { + slog.Error("OPENDCS_IMPORT_TOKEN must be set for secure access") + os.Exit(1) + } + if err := os.MkdirAll(c.ImportDir, 0o775); err != nil { + slog.Error("failed to ensure import dir exists", "dir", c.ImportDir, "err", err) + os.Exit(1) + } + if err := os.MkdirAll(c.LogDir, 0o775); err != nil { + slog.Error("failed to ensure log dir exists", "dir", c.LogDir, "err", err) + os.Exit(1) + } + return c +} + +func getenvDefault(key, def string) string { + if v := os.Getenv(key); v != "" { + return v + } + return def +} + +func initialImport() 
error { + slog.Info("performing initial import", "dir", cfg.InstrConfigDir) + + files, err := findInitialXMLFiles() + if err != nil { + return fmt.Errorf("find initial xml files: %w", err) + } + if len(files) == 0 { + slog.Info("no initial XML files found") + return nil + } + + for _, f := range files { + slog.Info("initial dbimport", "file", f) + if _, err := runCommand(context.Background(), "dbimport", []string{"-l", "/proc/self/fd/1", f}, 0); err != nil { + return fmt.Errorf("dbimport failed for %s: %w", f, err) + } + } + return nil +} + +func findInitialXMLFiles() ([]string, error) { + var results []string + + root := cfg.InstrConfigDir + + err := filepath.WalkDir(root, func(pathStr string, d os.DirEntry, err error) error { + if err != nil { + return err + } + if d.IsDir() { + return nil + } + + if strings.ToLower(filepath.Ext(pathStr)) == ".xml" { + results = append(results, pathStr) + } + return nil + }) + if err != nil { + return nil, err + } + return results, nil +} + +func startRoutingScheduler() error { + rsMu.Lock() + defer rsMu.Unlock() + + if rsCmd != nil { + return errors.New("routing scheduler already running") + } + + runtimeLogPath := filepath.Join(cfg.LogDir, "runtime.log") + slog.Info("starting routing scheduler 'rs'", + "spec", cfg.RoutingSpec, + "runtimeLogPath", runtimeLogPath) + + cmd := exec.Command("rs", "-l", runtimeLogPath, cfg.RoutingSpec) + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + + if err := cmd.Start(); err != nil { + return fmt.Errorf("failed to start rs: %w", err) + } + rsCmd = cmd + return nil +} + +func handleSignals() { + sigCh := make(chan os.Signal, 1) + signal.Notify(sigCh, syscall.SIGINT, syscall.SIGTERM) + + sig := <-sigCh + slog.Info("received signal, forwarding to rs and shutting down", "signal", sig) + + rsMu.Lock() + if rsCmd != nil && rsCmd.Process != nil { + _ = rsCmd.Process.Signal(sig) + } + rsMu.Unlock() +} + +func startHTTPServer() error { + router := http.NewServeMux() + + api := humago.New(router, 
huma.DefaultConfig("OpenDCS Wrapper", VERSION)) + + huma.Post(api, "/import", func(ctx context.Context, input *struct { + KeyQueryParam + Body ImportRequest `contentType:"application/json"` + }) (*Response[ImportResponse], error) { + if res := subtle.ConstantTimeCompare([]byte(input.Key), []byte(cfg.AuthToken)); res != 1 { + return nil, huma.NewError(http.StatusUnauthorized, "invalid key") + } + resp := processImport(ctx, input.Body) + return NewResponse(resp), nil + }) + + type RuntimeLogs struct { + Log string `json:"log" doc:"Contents of routing scheduler runtime log"` + } + + huma.Get(api, "/logs/runtime", func(ctx context.Context, input *struct { + KeyQueryParam + }) (*Response[RuntimeLogs], error) { + if res := subtle.ConstantTimeCompare([]byte(input.Key), []byte(cfg.AuthToken)); res != 1 { + return nil, huma.NewError(http.StatusUnauthorized, "invalid key") + } + + runtimeLogPath := filepath.Join(cfg.LogDir, "runtime.log") + data, err := os.ReadFile(runtimeLogPath) + if err != nil { + return nil, huma.NewError(http.StatusInternalServerError, fmt.Sprintf("failed to read runtime log: %v", err)) + } + + return NewResponse(RuntimeLogs{ + Log: string(data), + }), nil + }) + + router.HandleFunc("/healthz", handleHealth) + + server := &http.Server{ + Addr: cfg.ListenAddr, + Handler: router, + ReadHeaderTimeout: 5 * time.Second, + } + + slog.Info("http api listening", "addr", cfg.ListenAddr) + return server.ListenAndServe() +} + +func handleHealth(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + _, _ = io.WriteString(w, "ok\n") +} + +func processImport(ctx context.Context, req ImportRequest) ImportResponse { + resp := ImportResponse{} + + files, err := resolveImportFiles(req.Files) + if err != nil { + resp.Status = "error" + resp.Error = err.Error() + return resp + } + if len(files) == 0 { + resp.Status = "error" + resp.Error = "no xml files found to import" + return resp + } + + importMu.Lock() + defer importMu.Unlock() + + ctx, cancel := 
context.WithTimeout(ctx, 10*time.Minute) + defer cancel() + + timestamp := time.Now().UTC().Format("20060102-150405") + validateLogPath := filepath.Join(cfg.LogDir, fmt.Sprintf("dbimport-validate-%s.log", timestamp)) + importLogPath := filepath.Join(cfg.LogDir, fmt.Sprintf("dbimport-import-%s.log", timestamp)) + + valArgs := append([]string{"-v", "-l", validateLogPath}, files...) + valOut, valErr := runCommand(ctx, "dbimport", valArgs, 0) + resp.ValidateLog = readFileOrEmpty(validateLogPath) + resp.CommandOutput = string(valOut) + + if valErr != nil { + resp.Status = "validation_failed" + resp.Error = valErr.Error() + return resp + } + + if req.ValidateOnly { + resp.Status = "validation_ok" + return resp + } + + impArgs := append([]string{"-l", importLogPath}, files...) + impOut, impErr := runCommand(ctx, "dbimport", impArgs, 0) + resp.ImportLog = readFileOrEmpty(importLogPath) + resp.CommandOutput = string(impOut) + + if impErr != nil { + resp.Status = "import_failed" + resp.Error = impErr.Error() + return resp + } + + resp.Status = "success" + return resp +} + +func resolveImportFiles(files []string) ([]string, error) { + var resolved []string + if len(files) == 0 { + err := filepath.WalkDir(cfg.ImportDir, func(pathStr string, d os.DirEntry, err error) error { + if err != nil { + return err + } + if d.IsDir() { + return nil + } + if strings.ToLower(filepath.Ext(pathStr)) == ".xml" { + resolved = append(resolved, pathStr) + } + return nil + }) + if err != nil { + return nil, fmt.Errorf("walk import dir: %w", err) + } + return resolved, nil + } + + for _, f := range files { + if !filepath.IsAbs(f) { + f = filepath.Join(cfg.ImportDir, f) + } + clean := filepath.Clean(f) + baseImport := filepath.Clean(cfg.ImportDir) + if !strings.HasPrefix(clean, baseImport+string(os.PathSeparator)) && clean != baseImport { + return nil, fmt.Errorf("file %q is outside allowed import dir %q", clean, cfg.ImportDir) + } + resolved = append(resolved, clean) + } + return resolved, nil +} 
+ +func runCommand(ctx context.Context, name string, args []string, timeout time.Duration) ([]byte, error) { + if timeout > 0 { + var cancel context.CancelFunc + ctx, cancel = context.WithTimeout(ctx, timeout) + defer cancel() + } + + slog.Info("exec command", "name", name, "args", strings.Join(args, " ")) + + cmd := exec.CommandContext(ctx, name, args...) + var buf bytes.Buffer + cmd.Stdout = &buf + cmd.Stderr = &buf + + err := cmd.Run() + out := buf.Bytes() + if ctx.Err() == context.DeadlineExceeded { + return out, fmt.Errorf("command timed out: %w", ctx.Err()) + } + if err != nil { + return out, err + } + return out, nil +} + +func readFileOrEmpty(pathStr string) string { + data, err := os.ReadFile(pathStr) + if err != nil { + return "" + } + return string(data) +} + +func runUploadCLI(filePath string) error { + ctx := context.Background() + + root := dataloadS3Root + if root == "" { + return fmt.Errorf("DATALOAD_S3_ROOT is not set; cannot upload") + } + + const app = "goes" + + stat, err := os.Stat(filePath) + if err != nil { + return fmt.Errorf("input file %q does not exist: %w", filePath, err) + } + + if stat.Size() == 0 { + if err := os.Remove(filePath); err != nil && !os.IsNotExist(err) { + slog.Warn("failed to remove empty file", "file", filePath, "err", err) + } + return nil + } + + bucketURL, prefix, err := buildBucketURLFromRoot(root, awsEndpointURL) + if err != nil { + return err + } + + bucket, err := blob.OpenBucket(ctx, bucketURL) + if err != nil { + return fmt.Errorf("failed to open bucket %q: %w", bucketURL, err) + } + defer bucket.Close() + + base := filepath.Base(filePath) + platform := derivePlatformFromFilename(base) + + key := path.Join(prefix, app, platform, base) + slog.Info("uploading file to bucket", "file", filePath, "bucketURL", bucketURL, "key", key) + + f, err := os.Open(filePath) + if err != nil { + return fmt.Errorf("failed to open file %q: %w", filePath, err) + } + defer f.Close() + + w, err := bucket.NewWriter(ctx, key, nil) + if 
err != nil { + return fmt.Errorf("failed to create blob writer: %w", err) + } + + if _, err := io.Copy(w, f); err != nil { + _ = w.Close() + return fmt.Errorf("failed to stream file to bucket: %w", err) + } + + if err := w.Close(); err != nil { + return fmt.Errorf("failed to finalize blob write: %w", err) + } + + if err := os.Remove(filePath); err != nil && !os.IsNotExist(err) { + slog.Warn("failed to remove local file after upload", "file", filePath, "err", err) + } + + return nil +} + +func buildBucketURLFromRoot(root, awsEndpoint string) (bucketURL string, prefix string, err error) { + u, err := url.Parse(root) + if err != nil { + return "", "", fmt.Errorf("invalid DATALOAD_S3_ROOT %q: %w", root, err) + } + if u.Scheme != "s3" { + return "", "", fmt.Errorf("DATALOAD_S3_ROOT %q must use s3:// scheme", root) + } + if u.Host == "" { + return "", "", fmt.Errorf("DATALOAD_S3_ROOT %q missing bucket name", root) + } + + prefix = strings.TrimPrefix(u.Path, "/") + + v := u.Query() + if awsEndpoint != "" { + v.Set("endpoint", awsEndpoint) + } + u.Path = "" + u.RawQuery = v.Encode() + bucketURL = u.Scheme + "://" + u.Host + if u.RawQuery != "" { + bucketURL += "?" 
+ u.RawQuery + } + + return bucketURL, prefix, nil +} + +func derivePlatformFromFilename(filename string) string { + name := strings.TrimSuffix(filename, filepath.Ext(filename)) + if idx := strings.LastIndex(name, "-"); idx > 0 { + return name[:idx] + } + return name +} diff --git a/opendcs/midas_config/datasource/hotbackup.xml b/opendcs/midas_config/datasource/hotbackup.xml new file mode 100644 index 00000000..2753d808 --- /dev/null +++ b/opendcs/midas_config/datasource/hotbackup.xml @@ -0,0 +1,25 @@ + + + + + + + hostname=cdadata.wcda.noaa.gov, port=16003, password=${env.CDADATA_PASSWORD}, username=${env.CDADATA_USERNAME} + + + + + + + hostname=cdabackup.wcda.noaa.gov, port=16003, password=${env.CDABACKUP_PASSWORD}, username=${env.CDABACKUP_USERNAME} + + + + + + + hostname=lrgseddn1.cr.usgs.gov, port=16003, password=${env.EDDN1_PASSWORD}, username=${env.EDDN1_USERNAME} + + + + diff --git a/opendcs/midas_config/reference/DataTypeEquivalenceList.xml b/opendcs/midas_config/reference/DataTypeEquivalenceList.xml new file mode 100644 index 00000000..371ba059 --- /dev/null +++ b/opendcs/midas_config/reference/DataTypeEquivalenceList.xml @@ -0,0 +1,590 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/opendcs/midas_config/reference/EnumList.xml b/opendcs/midas_config/reference/EnumList.xml new file mode 100644 index 00000000..ec9cb693 --- /dev/null +++ b/opendcs/midas_config/reference/EnumList.xml @@ -0,0 +1,402 @@ + + + + + Read messages from LRGS data server + decodes.datasource.LrgsDataSource + 1 + + + Read messages from one source in a group + decodes.datasource.HotBackupGroup + 2 + + + Read message(s) from specified file + decodes.datasource.FileDataSource + 3 + + + + Read messages from files in a specified directory + + decodes.datasource.DirectoryDataSource + 4 + + + Read messages a socket stream + decodes.datasource.SocketStreamDataSource + 5 + + + Read messages from all sources in a group + decodes.datasource.RoundRobinGroup + 6 + + + + + + National Data Buoy Center Context-Sensitive Parser + + NDBCMessageParser + 1 + + + Hard-coded NOS data parser + NOSMessageParser + 2 + + + DECODES Format Statements and Unit Conversions + DecodesScript + 3 + + + + + National Weather Service Handbook 5 Name + 1 + + + Local Site Naming Convention + 2 + + + USGS Numeric Station ID + 3 + + + Columbia Basin TeleType + 4 + + + CWMS Name + + + 5 + + + UUID4 identifier + + + 6 + + + + + Pipe data to standard output. + decodes.consumer.PipeConsumer + 1 + + + Save data in specified file + decodes.consumer.FileConsumer + 2 + + + Append data to file in a specified directory. + decodes.consumer.FileAppendConsumer + 3 + + + + Save message data in files in a directory, then optionally run a trigger + script. 
+ + decodes.consumer.DirectoryConsumer + 4 + + + + + degrees or radians + 1 + + + Area + 2 + + + Volume / Time + 3 + + + Length or distance + 4 + + + Ratio + 5 + + + Temperature + 6 + + + Time + 7 + + + Velocity + 8 + + + voltage + 9 + + + Volume + 10 + + + + + Y = Ax + B + LinearConverter + 1 + + + No Conversion (output = input) + NoConversion + 2 + + + Y = Ax5 + Bx4 + Cx3 + Dx2 + Ex + F + Poly5Converter + 3 + + + Y = A * (B + x)^C + D + UsgsStdConverter + 4 + + + + + Descending + 1 + + + Ascending + 2 + + + + + + Input must match table value to produce an output + + ExactMatchLookup + + + Exponential interpolation between table values + ExponentialInterpLookup + + + Linear interpolation between table values + LinearInterpLookup + + + Logarithmic interpolation between table values + LogarithmicInterpLookup + + + Inputs are rounded to nearest table value + RoundingLookup + + + Inputs are truncated to lower table value + TruncatingLookup + + + + + Apply to all platforms + + + + Apply to platforms sharing a given configuration + + + + Apply to specific platform(s) + + + Apply to platforms in a network list + + + Apply to platform at a given site + + + + + English Measurements + 1 + + + International Metric System + 2 + + + + + Display Format + decodes.consumer.HumanReadableFormatter + 1 + + + Standard Hydrometerologic Exchange Format + decodes.consumer.ShefFormatter + 2 + + + USACE HEC Intermediate SHEF Format + decodes.consumer.ShefitFormatter + 3 + + + USGS Standard Message Format + decodes.consumer.StdmsgFormatter + 4 + + + Compatible with EMIT ASCII format + decodes.consumer.EmitAsciiFormatter + 5 + + + Compatible with EMIT Oracle format + decodes.consumer.EmitOracleFormatter + 6 + + + Dump Format for testing and trouble-shooting + decodes.consumer.DumpFormatter + 7 + + + Transmission Monitor + decodes.consumer.TransmitMonitorFormatter + 8 + + + Delimited row-column format + decodes.consumer.TableFormatter + 9 + + + Hydstra Format. 
+ decodes.consumer.HydstraFormatter + 10 + + + HTML Report Format + decodes.consumer.HtmlFormatter + 11 + + + CWMS Oracle with TSID format + rsgis.consumer.CwmsOracleFormatter + + 12 + + + CWMS Oracle Output Formatter + rsgis.consumer.CwmsOutputFormatter + + 13 + + + CWMS Oracle with TSID format + rsgis.consumer.MidasOutputFormatter + + 14 + + + + shef-pe + + + Standard Hydrometeorologic Exchange Format Physical Element Code + + 1 + + + Environmental Protection Agency Parameter Code + 2 + + + U.S. Bureau of Reclamations Hydrologic Database + 3 + + + Hydstra Data Code + 4 + + + CWMS parameters + + + 5 + + + UUID4 + + + 6 + + + + + Electronic Data Logger File + 1 + + + GOES DCP + 2 + + + GOES DCP Random Message + 3 + + + GOES DCP Self-Timed Message + 4 + + + LRGS Archive File + 5 + + + Data collected via telephone telementry + 6 + + + + + + + Data Collection Platform + 1 + + + Transmitter, data logger, modem, etc. + 2 + + + Environmental Sensor + 3 + + + + + Fixed Regular Interval + 1 + + + Variable, Triggered or Random + 2 + + + diff --git a/opendcs/midas_config/reference/MIDAS-English.xml b/opendcs/midas_config/reference/MIDAS-English.xml new file mode 100644 index 00000000..270fb68d --- /dev/null +++ b/opendcs/midas_config/reference/MIDAS-English.xml @@ -0,0 +1,268 @@ + + + true + + + + in + 2 + + + + + in + 2 + + + + + ft + 2 + + + + + ft + 2 + + + + + ft + 2 + + + + + cfs + 2 + + + + + cfs + 2 + + + + + cfs + 2 + + + + + ft + 2 + + + + + W/m2 + 3 + + + + + ft + 2 + + + + + ft + 2 + + + + + ft + 2 + + + + + ft + 2 + + + + + ft + 2 + + + + + ft + 2 + + + + + kW + 3 + + + + + in + 2 + + + + + mb + 2 + + + + + J/m2 + 3 + + + + + % + 3 + + + + + rev + 3 + + + + + mph + 3 + + + + + mph + 2 + + + + + mph + 2 + + + + + rpm + 3 + + + + + ft + 2 + + + + + ft + 2 + + + + + ft + 2 + + + + + ac-ft + 3 + + + + + F + 3 + + + + + F + 2 + + + + + F + 2 + + + + + in + 2 + + + + + in + 2 + + + + + hr + 3 + + + + + ft + 2 + + + + + JTU + 3 + + + + + FNU + 3 + + + + + JTU + 3 + + + 
+ + NTU + 3 + + + + + Volts + 3 + + + + + ac-ft + 3 + + + + + su + 2 + + diff --git a/opendcs/midas_config/reference/MIDAS-Metric.xml b/opendcs/midas_config/reference/MIDAS-Metric.xml new file mode 100644 index 00000000..5f548c3a --- /dev/null +++ b/opendcs/midas_config/reference/MIDAS-Metric.xml @@ -0,0 +1,274 @@ + + + false + + + + m + 3 + + + + + m + 3 + + + + + W/m2 + 3 + + + + + m + 3 + + + + + su + 3 + + + + + kW + 3 + + + + + mm + 3 + + + + + mb + 3 + + + + + J/m2 + 3 + + + + + % + 3 + + + + + rev + 3 + + + + + kph + 3 + + + + + rpm + 3 + + + + + m3 + 3 + + + + + C + 3 + + + + + cm + 3 + + + + + hr + 3 + + + + + m + 3 + + + + + JTU + 3 + + + + + FNU + 3 + + + + + JTU + 3 + + + + + NTU + 3 + + + + + v + 3 + + + + + m3 + 3 + + + + + mm + 3 + + + + + mm + 3 + + + + + m + 3 + + + + + m + 3 + + + + + m + 3 + + + + + cms + 3 + + + + + cms + 3 + + + + + cms + 3 + + + + + m + 3 + + + + + m + 3 + + + + + m + 3 + + + + + m + 3 + + + + + m + 3 + + + + + kph + 3 + + + + + kph + 3 + + + + + m + 3 + + + + + m + 3 + + + + + m + 3 + + + + + C + 3 + + + + + C + 3 + + + + + cm + 3 + + diff --git a/opendcs/midas_config/routing/goes.xml b/opendcs/midas_config/routing/goes.xml new file mode 100644 index 00000000..c2b4bddb --- /dev/null +++ b/opendcs/midas_config/routing/goes.xml @@ -0,0 +1,55 @@ + + + true + + + + + + hostname=cdadata.wcda.noaa.gov, port=16003, password=${env.CDADATA_PASSWORD}, username=${env.CDADATA_USERNAME} + + + + + + + hostname=cdabackup.wcda.noaa.gov, port=16003, password=${env.CDABACKUP_PASSWORD}, username=${env.CDABACKUP_USERNAME} + + + + + + + hostname=lrgseddn1.cr.usgs.gov, port=16003, password=${env.EDDN1_PASSWORD}, username=${env.EDDN1_USERNAME} + + + + + false + false + midas-formatter + UTC + MIDAS-English + directory + /opendcs_output + now - 2 hours + + + yyyy-MM-dd'T'HH:mm:ss'Z' + + + ${java.TRANSPORTID}-$DATE(yyyyMMddHHmmss) + + + False + + + , + + + l + + + /opendcs-wrapper upload ${java.FILENAME} + + diff --git 
a/opendcs/midas_config/routing/monitor.xml b/opendcs/midas_config/routing/monitor.xml new file mode 100644 index 00000000..efa0e196 --- /dev/null +++ b/opendcs/midas_config/routing/monitor.xml @@ -0,0 +1,51 @@ + + + true + + + + + + hostname=cdadata.wcda.noaa.gov, port=16003, password=${env.CDADATA_PASSWORD}, username=${env.CDADATA_USERNAME} + + + + + + + hostname=cdabackup.wcda.noaa.gov, port=16003, password=${env.CDABACKUP_PASSWORD}, username=${env.CDABACKUP_USERNAME} + + + + + + + hostname=lrgseddn1.cr.usgs.gov, port=16003, password=${env.EDDN1_PASSWORD}, username=${env.EDDN1_USERNAME} + + + + + false + false + transmit-monitor + UTC + directory + /opendcs_output + now - 2 hours + + + ${java.TRANSPORTID}-$DATE(yyyyMMddHHmmss) + + + , + + + False + + + l + + + /opendcs-wrapper upload ${java.FILENAME} + + diff --git a/opendcs/rsgis/.mvn/jvm.config b/opendcs/rsgis/.mvn/jvm.config new file mode 100644 index 00000000..e69de29b diff --git a/opendcs/rsgis/.mvn/maven.config b/opendcs/rsgis/.mvn/maven.config new file mode 100644 index 00000000..e69de29b diff --git a/opendcs/rsgis/.settings/org.eclipse.core.resources.prefs b/opendcs/rsgis/.settings/org.eclipse.core.resources.prefs new file mode 100644 index 00000000..abdea9ac --- /dev/null +++ b/opendcs/rsgis/.settings/org.eclipse.core.resources.prefs @@ -0,0 +1,4 @@ +eclipse.preferences.version=1 +encoding//src/main/java=UTF-8 +encoding//src/main/resources=UTF-8 +encoding/=UTF-8 diff --git a/opendcs/rsgis/.settings/org.eclipse.jdt.apt.core.prefs b/opendcs/rsgis/.settings/org.eclipse.jdt.apt.core.prefs new file mode 100644 index 00000000..d4313d4b --- /dev/null +++ b/opendcs/rsgis/.settings/org.eclipse.jdt.apt.core.prefs @@ -0,0 +1,2 @@ +eclipse.preferences.version=1 +org.eclipse.jdt.apt.aptEnabled=false diff --git a/opendcs/rsgis/.settings/org.eclipse.jdt.core.prefs b/opendcs/rsgis/.settings/org.eclipse.jdt.core.prefs new file mode 100644 index 00000000..faca1b3f --- /dev/null +++ 
b/opendcs/rsgis/.settings/org.eclipse.jdt.core.prefs @@ -0,0 +1,9 @@ +eclipse.preferences.version=1 +org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.8 +org.eclipse.jdt.core.compiler.compliance=1.8 +org.eclipse.jdt.core.compiler.problem.enablePreviewFeatures=disabled +org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning +org.eclipse.jdt.core.compiler.problem.reportPreviewFeatures=ignore +org.eclipse.jdt.core.compiler.processAnnotations=disabled +org.eclipse.jdt.core.compiler.release=enabled +org.eclipse.jdt.core.compiler.source=1.8 diff --git a/opendcs/rsgis/.settings/org.eclipse.m2e.core.prefs b/opendcs/rsgis/.settings/org.eclipse.m2e.core.prefs new file mode 100644 index 00000000..f897a7f1 --- /dev/null +++ b/opendcs/rsgis/.settings/org.eclipse.m2e.core.prefs @@ -0,0 +1,4 @@ +activeProfiles= +eclipse.preferences.version=1 +resolveWorkspaceProjects=true +version=1 diff --git a/opendcs/rsgis/pom.xml b/opendcs/rsgis/pom.xml new file mode 100644 index 00000000..f85f88ef --- /dev/null +++ b/opendcs/rsgis/pom.xml @@ -0,0 +1,71 @@ + + + 4.0.0 + + rsgis.consumer + rsgis + 1.0-SNAPSHOT + + rsgis + + + UTF-8 + 8 + + + + + org.opendcs + opendcs + 7.0.12 + system + ${project.basedir}/src/main/resources/opendcs.jar + + + + jar + + rsgis + + + + maven-clean-plugin + 3.4.0 + + + maven-resources-plugin + 3.3.1 + + + maven-compiler-plugin + 3.13.0 + + + maven-surefire-plugin + 3.3.0 + + + maven-jar-plugin + 3.4.2 + + + maven-install-plugin + 3.1.2 + + + maven-deploy-plugin + 3.1.2 + + + maven-site-plugin + 3.12.1 + + + maven-project-info-reports-plugin + 3.6.1 + + + + + diff --git a/opendcs/rsgis/src/main/java/rsgis/consumer/MidasOutputFormatter.java b/opendcs/rsgis/src/main/java/rsgis/consumer/MidasOutputFormatter.java new file mode 100644 index 00000000..152bee96 --- /dev/null +++ b/opendcs/rsgis/src/main/java/rsgis/consumer/MidasOutputFormatter.java @@ -0,0 +1,122 @@ +package rsgis.consumer; + +import decodes.consumer.DataConsumer; +import 
decodes.consumer.DataConsumerException; +import decodes.consumer.OutputFormatter; +import decodes.consumer.OutputFormatterException; +import decodes.datasource.RawMessage; +import decodes.datasource.UnknownPlatformException; +import decodes.db.Platform; +import decodes.db.PresentationGroup; +import decodes.decoder.DecodedMessage; +import decodes.decoder.Sensor; +import decodes.decoder.TimeSeries; +import decodes.util.PropertySpec; +import ilex.util.Logger; +import ilex.util.PropertiesUtil; +import ilex.var.TimedVariable; +import java.text.SimpleDateFormat; +import java.util.Iterator; +import java.util.Properties; +import java.util.TimeZone; + +public class MidasOutputFormatter + extends OutputFormatter { + private final String module = "MidasOutputFormatter"; + private String delimiter = " "; + private String dateFormat = "yyyy-MM-dd'T'HH:mmZ"; + private PropertySpec[] propSpecs = new PropertySpec[] { + new PropertySpec("cwmsOfficeID", "s", "Three letter code for the CWMS office (District/Division)"), + new PropertySpec("delimiter", "s", "Used between columns (default=space)"), + new PropertySpec("justify", "b", "(default=true) Pad with blanks to line up columns."), + new PropertySpec("dateFormat", "s", "(default=yyyy-MM-dd'T'HH:mmZ) Java SimpleDateFormat spec.") }; + private SimpleDateFormat sdf = null; + + protected void initFormatter(String type, TimeZone timeZone, PresentationGroup presentationGroup, + Properties properties) throws OutputFormatterException { + String s = PropertiesUtil.getIgnoreCase((Properties) properties, (String) "cwmsOfficeID"); + if ((s = PropertiesUtil.getIgnoreCase((Properties) properties, (String) "delimiter")) != null) { + this.delimiter = s; + } + if ((s = PropertiesUtil.getIgnoreCase((Properties) properties, (String) "dateFormat")) != null) { + this.dateFormat = s; + } + this.sdf = new SimpleDateFormat(this.dateFormat); + this.sdf.setTimeZone(timeZone); + } + + public void shutdown() { + } + + public void formatMessage(DecodedMessage 
decodedMessage, DataConsumer dataConsumer) + throws DataConsumerException, OutputFormatterException { + Platform platform; + dataConsumer.startMessage(decodedMessage); + RawMessage rawMessage = decodedMessage.getRawMessage(); + + try { + platform = rawMessage.getPlatform(); + } catch (UnknownPlatformException var23) { + throw new OutputFormatterException(var23.toString()); + } + + String platformName = platform.getDisplayName(); + String platformFileId = platform.getProperty("fileId"); + Iterator timeSeriesIterator = decodedMessage.getAllTimeSeries(); + + while (timeSeriesIterator.hasNext()) { + TimeSeries timeSeries = (TimeSeries) timeSeriesIterator.next(); + Sensor sensor = timeSeries.getSensor(); + + if (sensor == null) { + Logger.instance().warning(String.format( + "%s: sensor is null, skipping...; platformName: %s; timeSeries: %s;", + this.module, platformName, timeSeries.getDisplayName())); + continue; + } + + if (timeSeries.size() == 0) { + Logger.instance().warning(String.format( + "%s: no timeseries to record, skipping...; platformName: %s; timeSeries: %s;", + this.module, platformName, timeSeries.getDisplayName())); + continue; + } + + String sensorNameNumber = String.format("%s.%d", sensor.getName(), sensor.getNumber()); + this.processDataOutput(dataConsumer, timeSeries, platformFileId, sensorNameNumber); + + Logger.instance().info(String.format( + "%s: measurements recorded; timeSeries: %s; size: %d;", + this.module, timeSeries.getDisplayName(), timeSeries.size())); + } + + dataConsumer.endMessage(); + } + + public void processDataOutput( + DataConsumer dataConsumer, + TimeSeries timeSeries, + String platformFileId, + String sensorNameNumber) { + StringBuffer sb = new StringBuffer(); + int tsSize = timeSeries.size(); + for (int i = 0; i < tsSize; ++i) { + TimedVariable tv = timeSeries.sampleAt(i); + if ((tv.getFlags() & 0x60000000) != 0) + continue; + sb.setLength(0); + sb.append(platformFileId); + sb.append(this.delimiter); + 
sb.append(sensorNameNumber); + sb.append(this.delimiter); + sb.append(this.sdf.format(tv.getTime())); + sb.append(this.delimiter); + sb.append(timeSeries.formattedSampleAt(i)); + dataConsumer.println(sb.toString()); + } + } + + public PropertySpec[] getSupportedProps() { + return this.propSpecs; + } +} From cb74e937b748cda360e890a437c90fae970eb285 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Fri, 12 Dec 2025 11:08:11 -0500 Subject: [PATCH 07/22] feat: hook up opendcs as docker service --- .env.example | 17 +++++++++++++++-- docker-compose.yaml | 10 ++++++++++ env_files/opendcs.env | 9 +++++++++ opendcs/Dockerfile | 2 +- opendcs/main.go | 15 ++++++++++----- 5 files changed, 45 insertions(+), 8 deletions(-) create mode 100644 env_files/opendcs.env diff --git a/.env.example b/.env.example index 1329b7af..7334c6fd 100644 --- a/.env.example +++ b/.env.example @@ -1,6 +1,7 @@ # .env used for port and service confiuration # for service specific environment variables, see ./env_files/*.env +# ports API_PORT=8080 TELEMETRY_PORT=9090 LOCALSTACK_GATEWAY_PORT=9000 @@ -8,12 +9,24 @@ LOCALSTACK_UI_PORT=9001 KEYCLOAK_PORT=8090 RIVER_QUEUE_UI_PORT=9326 -INSTRUMENTATION_AUTH_JWT_MOCKED= -INSTRUMENTATION_SURVEY123_IP_WHITELIST= +# api +INSTRUMENTATION_AUTH_JWT_MOCKED=false + +# sl-client SLCLIENT_SEEDLINK_SERVER_URI= + +# task TASK_THINGLOGIX_COGNITO_POOL= TASK_THINGLOGIX_PROVIDER_NAME= TASK_THINGLOGIX_API_GATEWAY_ENDPOINT= TASK_THINGLOGIX_USER= TASK_THINGLOGIX_PASSWORD= TASK_THINGLOGIX_ACCOUNT_ID= + +# opendcs +CDADATA_USERNAME= +CDADATA_PASSWORD= +CDABACKUP_USERNAME= +CDABACKUP_PASSWORD= +EDDN1_USERNAME= +EDDN1_PASSWORD= diff --git a/docker-compose.yaml b/docker-compose.yaml index 8e7a679a..ab55ced9 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -180,6 +180,16 @@ services: localstack-init: condition: service_completed_successfully + opendcs: + build: + context: ./opendcs + image: opendcs + env_file: + - path: ./env_files/opendcs.env + required: true + - path: 
.env + required: true + localstack: image: localstack/localstack:4 ports: diff --git a/env_files/opendcs.env b/env_files/opendcs.env new file mode 100644 index 00000000..a4b77198 --- /dev/null +++ b/env_files/opendcs.env @@ -0,0 +1,9 @@ +AWS_ENDPOINT_URL="http://localstack:4566" +DATALOAD_S3_ROOT="s3://corpsmap-data-incoming/instrumentation" +OPENDCS_IMPORT_TOKEN="appkey" +CDADATA_USERNAME= +CDADATA_PASSWORD= +CDABACKUP_USERNAME= +CDABACKUP_PASSWORD= +EDDN1_USERNAME= +EDDN1_PASSWORD= diff --git a/opendcs/Dockerfile b/opendcs/Dockerfile index 33be4732..eb275d11 100644 --- a/opendcs/Dockerfile +++ b/opendcs/Dockerfile @@ -1,6 +1,6 @@ ARG OPENDCS_BASE_IMAGE=ghcr.io/opendcs/routingscheduler:7.0-nightly ARG MAVEN_BUILD_IMAGE=maven:3-eclipse-temurin-8-noble -ARG GO_BUILD_IMAGE=golang:1.23-alpine +ARG GO_BUILD_IMAGE=golang:1.25-alpine FROM ${OPENDCS_BASE_IMAGE} AS opendcs_patched diff --git a/opendcs/main.go b/opendcs/main.go index e3570c83..e62fffad 100644 --- a/opendcs/main.go +++ b/opendcs/main.go @@ -189,11 +189,16 @@ func initialImport() error { return nil } - for _, f := range files { - slog.Info("initial dbimport", "file", f) - if _, err := runCommand(context.Background(), "dbimport", []string{"-l", "/proc/self/fd/1", f}, 0); err != nil { - return fmt.Errorf("dbimport failed for %s: %w", f, err) - } + args := make([]string, len(files)+2) + args[0] = "-l" + args[1] = "/proc/self/fd/1" + + for i, f := range files { + args[i+2] = f + } + slog.Info("initial dbimport", "command", "dbimport", "args", args) + if _, err := runCommand(context.Background(), "dbimport", args, 0); err != nil { + return fmt.Errorf("dbimport command failed: %w", err) } return nil } From c811e82e81accae8f8f7f5df07f0f92deeb5adc0 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Mon, 15 Dec 2025 11:38:24 -0500 Subject: [PATCH 08/22] chore: clean up opendcs/main.go chore: gitignore eclipse **/.settings chore: set up validation types for dbimport api handler chore: add constrains for platform file 
keys --- .gitignore | 2 + api/internal/handler/goes.go | 22 +- api/internal/service/goes.go | 169 +++- api/migrations/schema/V1.56.00__goes.sql | 3 +- api/queries/goes.sql | 3 +- opendcs/main.go | 937 ++++++++++++------ .../org.eclipse.core.resources.prefs | 4 - .../.settings/org.eclipse.jdt.apt.core.prefs | 2 - .../.settings/org.eclipse.jdt.core.prefs | 9 - .../.settings/org.eclipse.m2e.core.prefs | 4 - 10 files changed, 796 insertions(+), 359 deletions(-) delete mode 100644 opendcs/rsgis/.settings/org.eclipse.core.resources.prefs delete mode 100644 opendcs/rsgis/.settings/org.eclipse.jdt.apt.core.prefs delete mode 100644 opendcs/rsgis/.settings/org.eclipse.jdt.core.prefs delete mode 100644 opendcs/rsgis/.settings/org.eclipse.m2e.core.prefs diff --git a/.gitignore b/.gitignore index dde8a666..8184aa10 100644 --- a/.gitignore +++ b/.gitignore @@ -36,3 +36,5 @@ test.log **/dist go.work.sum + +**/.settings diff --git a/api/internal/handler/goes.go b/api/internal/handler/goes.go index 1b28c7ce..daf4cc47 100644 --- a/api/internal/handler/goes.go +++ b/api/internal/handler/goes.go @@ -11,6 +11,7 @@ import ( "github.com/USACE/instrumentation-api/api/v4/internal/db" "github.com/USACE/instrumentation-api/api/v4/internal/dto" "github.com/USACE/instrumentation-api/api/v4/internal/httperr" + "github.com/USACE/instrumentation-api/api/v4/internal/service" "github.com/danielgtaylor/huma/v2" ) @@ -61,9 +62,10 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { }) type XmlPlatformConfig struct { - PlatformConfig huma.FormFile `form:"file" contentType:"text/xml" required:"true"` - Alias string `form:"alias"` - DryRun bool `form:"dry_run"` + PlatformConfig huma.FormFile `form:"file" contentType:"text/xml" required:"true"` + Alias string `form:"alias"` + DryRun bool `form:"dry_run"` + DeleteOldMappings bool `form:"delete_old_mappings"` } huma.Register(api, huma.Operation{ @@ -77,7 +79,7 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { ProjectIDParam 
TelemetrySourceIDParam RawBody huma.MultipartFormFiles[XmlPlatformConfig] - }) (*Response[ID], error) { + }) (*Response[service.DbImportResponse], error) { p := ctx.Value(ctxkey.Profile).(db.VProfile) formData := input.RawBody.Data() xmlDoc, err := io.ReadAll(formData.PlatformConfig) @@ -91,7 +93,7 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { if alias == "" { alias = formData.PlatformConfig.Filename } - newID, err := h.DBService.GoesPlatformConfigFileCreate(ctx, db.GoesPlatformConfigFileCreateParams{ + a, err := h.DBService.GoesPlatformConfigFileCreate(ctx, db.GoesPlatformConfigFileCreateParams{ GoesTelemetrySourceID: input.TelemetrySourceID.UUID, ProjectID: input.ProjectID.UUID, Name: formData.PlatformConfig.Filename, @@ -103,7 +105,7 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { if err != nil { return nil, httperr.InternalServerError(err) } - return NewResponseID(newID), nil + return NewResponse(a), nil }) huma.Register(api, huma.Operation{ @@ -118,7 +120,7 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { TelemetrySourceIDParam TelemetryConfigIDParam RawBody huma.MultipartFormFiles[XmlPlatformConfig] - }) (*Response[struct{}], error) { + }) (*Response[service.DbImportResponse], error) { p := ctx.Value(ctxkey.Profile).(db.VProfile) formData := input.RawBody.Data() xmlDoc, err := io.ReadAll(formData.PlatformConfig) @@ -133,7 +135,8 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { if alias == "" { alias = formData.PlatformConfig.Filename } - if err := h.DBService.GoesPlatformConfigFileUpdate(ctx, db.GoesPlatformConfigFileUpdateParams{ + // TODO: return dbimport response + _, err = h.DBService.GoesPlatformConfigFileUpdate(ctx, db.GoesPlatformConfigFileUpdateParams{ ID: input.TelemetryConfigID.UUID, Name: formData.PlatformConfig.Filename, Alias: alias, @@ -141,7 +144,8 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { Content: string(xmlDoc), UpdatedBy: &p.ID, UpdatedAt: &now, - }); err != nil { + 
}, formData.DryRun, formData.DeleteOldMappings) + if err != nil { return nil, httperr.InternalServerError(err) } return nil, nil diff --git a/api/internal/service/goes.go b/api/internal/service/goes.go index f8e4e9e8..c63045eb 100644 --- a/api/internal/service/goes.go +++ b/api/internal/service/goes.go @@ -2,6 +2,7 @@ package service import ( "context" + "encoding/json" "encoding/xml" "errors" "fmt" @@ -12,6 +13,122 @@ import ( "github.com/google/uuid" ) +type Platform struct { + XMLName xml.Name `xml:"Platform"` + PlatformConfig PlatformConfig `xml:"PlatformConfig"` +} + +type PlatformConfig struct { + ConfigSensors []ConfigSensor `xml:"ConfigSensor"` +} + +type ConfigSensor struct { + SensorName string `xml:"SensorName"` + SensorNumber string `xml:"SensorNumber"` +} + +type DbImportCommandType string + +type DbImportResponse struct { + PlatformFileID uuid.UUID `json:"platform_file_id"` + Response json.RawMessage `json:"response"` +} + +// GoesPlatformConfigFileCreate validates and creates a platform configuration file for a given MIDAS project +// +// TODO: This endpoint should return the results of the proxied dbimport validation +func (s *DBService) GoesPlatformConfigFileCreate(ctx context.Context, arg db.GoesPlatformConfigFileCreateParams, dryRun bool) (DbImportResponse, error) { + names, err := extractSensorNames(arg.Content) + if err != nil { + return DbImportResponse{}, err + } + + // TODO: proxy request to opendcs service to validate dbimport + var a DbImportResponse + // http.Get... 
+ + if dryRun { + return a, nil + } + + tx, err := s.db.Begin(ctx) + if err != nil { + return a, err + } + defer s.TxDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + newID, err := qtx.GoesPlatformConfigFileCreate(ctx, arg) + if err != nil { + return a, fmt.Errorf("GoesPlatformConfigFileCreate %w", err) + } + a.PlatformFileID = newID + + mm := make([]db.GoesTelemetryConfigMappingsCreateBatchParams, 0, len(names)) + for _, n := range names { + mm = append(mm, db.GoesTelemetryConfigMappingsCreateBatchParams{ + GoesPlatformConfigFileID: newID, + PlatformSensorKey: n, + TimeseriesID: nil, + }) + } + + qtx.GoesTelemetryConfigMappingsCreateBatch(ctx, mm).Exec(batchExecErr(&err)) + if err != nil { + return a, fmt.Errorf("GoesTelemetryConfigMappingsCreateBatch %w", err) + } + + return a, tx.Commit(ctx) +} + +// TODO: return validation results +func (s *DBService) GoesPlatformConfigFileUpdate(ctx context.Context, arg db.GoesPlatformConfigFileUpdateParams, dryRun, deleteOldMappings bool) (uuid.UUID, error) { + names, err := extractSensorNames(arg.Content) + if err != nil { + return uuid.Nil, err + } + + // TODO: proxy request to opendcs service to validate dbimport + + if dryRun { + // TODO: respond with validation result / error + return uuid.Nil, errors.New("TODO") + } + + tx, err := s.db.Begin(ctx) + if err != nil { + return uuid.Nil, err + } + defer s.TxDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + if err := qtx.GoesPlatformConfigFileUpdate(ctx, arg); err != nil { + return uuid.Nil, fmt.Errorf("GoesPlatformConfigFileUpdate %w", err) + } + + if deleteOldMappings { + if err := qtx.GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile(ctx, arg.ID); err != nil { + return uuid.Nil, fmt.Errorf("GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile %w", err) + } + } + + mm := make([]db.GoesTelemetryConfigMappingsCreateBatchParams, 0, len(names)) + for _, n := range names { + mm = append(mm, db.GoesTelemetryConfigMappingsCreateBatchParams{ + GoesPlatformConfigFileID: 
arg.ID, + PlatformSensorKey: n, + TimeseriesID: nil, + }) + } + + qtx.GoesTelemetryConfigMappingsCreateBatch(ctx, mm).Exec(batchExecErr(&err)) + if err != nil { + return uuid.Nil, fmt.Errorf("GoesTelemetryConfigMappingsCreateBatch %w", err) + } + + return uuid.Nil, tx.Commit(ctx) +} + func (s *DBService) GoesTelemetryConfigMappingsUpdate(ctx context.Context, cfgID uuid.UUID, mappings []dto.GoesTelemetryConfigMappingDTO) error { mm := make([]db.GoesTelemetryConfigMappingsCreateBatchParams, len(mappings)) for i, m := range mappings { @@ -40,20 +157,6 @@ func (s *DBService) GoesTelemetryConfigMappingsUpdate(ctx context.Context, cfgID return tx.Commit(ctx) } -type Platform struct { - XMLName xml.Name `xml:"Platform"` - PlatformConfig PlatformConfig `xml:"PlatformConfig"` -} - -type PlatformConfig struct { - ConfigSensors []ConfigSensor `xml:"ConfigSensor"` -} - -type ConfigSensor struct { - SensorName string `xml:"SensorName"` - SensorNumber string `xml:"SensorNumber"` -} - func extractSensorNames(xmlStr string) ([]string, error) { dec := xml.NewDecoder(strings.NewReader(xmlStr)) for { @@ -98,41 +201,3 @@ func extractFromPlatforms(platforms []Platform) []string { } return result } - -func (s *DBService) GoesPlatformConfigFileCreate(ctx context.Context, arg db.GoesPlatformConfigFileCreateParams, dryRun bool) (uuid.UUID, error) { - names, err := extractSensorNames(arg.Content) - if err != nil { - return uuid.Nil, err - } - if dryRun { - return uuid.Nil, errors.New("TODO") - } - - tx, err := s.db.Begin(ctx) - if err != nil { - return uuid.Nil, err - } - defer s.TxDo(ctx, tx.Rollback) - qtx := s.WithTx(tx) - - newID, err := qtx.GoesPlatformConfigFileCreate(ctx, arg) - if err != nil { - return uuid.Nil, fmt.Errorf("GoesPlatformConfigFileCreate %w", err) - } - - mm := make([]db.GoesTelemetryConfigMappingsCreateBatchParams, 0, len(names)) - for _, n := range names { - mm = append(mm, db.GoesTelemetryConfigMappingsCreateBatchParams{ - GoesPlatformConfigFileID: newID, - 
PlatformSensorKey: n, - TimeseriesID: nil, - }) - } - - qtx.GoesTelemetryConfigMappingsCreateBatch(ctx, mm).Exec(batchExecErr(&err)) - if err != nil { - return uuid.Nil, fmt.Errorf("GoesTelemetryConfigMappingsCreateBatch %w", err) - } - - return newID, tx.Commit(ctx) -} diff --git a/api/migrations/schema/V1.56.00__goes.sql b/api/migrations/schema/V1.56.00__goes.sql index fe964aa3..769cd24a 100644 --- a/api/migrations/schema/V1.56.00__goes.sql +++ b/api/migrations/schema/V1.56.00__goes.sql @@ -22,7 +22,8 @@ create table goes_platform_config_file ( create table goes_telemetry_config_mappings ( goes_platform_config_file_id uuid not null references goes_platform_config_file(id) on delete cascade, platform_sensor_key text not null, - timeseries_id uuid references timeseries(id) + timeseries_id uuid unique references timeseries(id), + constraint unique_goes_platform_config_file_id_platform_sensor_key unique (goes_platform_config_file_id, platform_sensor_key) ); diff --git a/api/queries/goes.sql b/api/queries/goes.sql index 1c8248ab..18d5945d 100644 --- a/api/queries/goes.sql +++ b/api/queries/goes.sql @@ -29,7 +29,8 @@ delete from goes_platform_config_file where id=$1; -- name: GoesTelemetryConfigMappingsCreateBatch :batchexec insert into goes_telemetry_config_mappings (goes_platform_config_file_id, platform_sensor_key, timeseries_id) -values ($1, $2, $3); +values ($1, $2, $3) +on conflict on constraint unique_goes_platform_config_file_id_platform_sensor_key do nothing; -- name: GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile :exec diff --git a/opendcs/main.go b/opendcs/main.go index e62fffad..e9f331c3 100644 --- a/opendcs/main.go +++ b/opendcs/main.go @@ -15,6 +15,7 @@ import ( "os/signal" "path" "path/filepath" + "sort" "strings" "sync" "syscall" @@ -27,25 +28,30 @@ import ( _ "gocloud.dev/blob/s3blob" ) +const ( + VERSION = "1.0.0" +) + type Config struct { - InstrConfigDir string - ImportDir string - RoutingSpec string - PlatformImportPrefix string - 
AuthToken string - ListenAddr string - LogDir string + InstrConfigDir string + ImportDir string + RoutingSpec string + AuthToken string + ListenAddr string + LogDir string + + DcsToolUserDir string + DecodesProps string + + DataloadS3Root string + AWSEndpointURL string } type Response[T any] struct { Body T } -func NewResponse[T any](body T) *Response[T] { - return &Response[T]{ - Body: body, - } -} +func NewResponse[T any](body T) *Response[T] { return &Response[T]{Body: body} } type KeyQueryParam struct { Key string `query:"key" required:"true" doc:"API key for authentication"` @@ -64,291 +70,233 @@ type ImportResponse struct { Error string `json:"error,omitempty"` } -type RuntimeLogsOutput struct { - Body struct { - Log string `json:"log" doc:"Contents of routing scheduler runtime log"` - } +type TryMutex struct { + ch chan struct{} } -const VERSION = "1.0.0" - -var ( - cfg Config - importMu sync.Mutex - rsCmd *exec.Cmd - rsMu sync.Mutex - - dataloadS3Root string - awsEndpointURL string -) - -func init() { - var level slog.Level - levelText := getenvDefault("LOGLEVEL", "INFO") - if err := level.UnmarshalText([]byte(levelText)); err != nil { - panic(err) - } - logger := slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{Level: level})) - slog.SetDefault(logger) - - cfg = loadConfig() - - dataloadS3Root = os.Getenv("DATALOAD_S3_ROOT") - awsEndpointURL = os.Getenv("AWS_ENDPOINT_URL") +func NewTryMutex() *TryMutex { + m := &TryMutex{ch: make(chan struct{}, 1)} + m.ch <- struct{}{} + return m } -func main() { - if len(os.Args) > 1 && os.Args[1] == "upload" { - if len(os.Args) < 3 { - slog.Error("usage: upload requires file path argument", "argv", os.Args) - os.Exit(1) - } - if err := runUploadCLI(os.Args[2]); err != nil { - slog.Error("upload failed", "err", err) - os.Exit(1) - } - return +func (m *TryMutex) TryLock() bool { + select { + case <-m.ch: + return true + default: + return false } - - slog.Info("starting opendcs wrapper (server mode)", "version", 
VERSION) - slog.Debug("configuration", "cfg", cfg) - - go func() { - if err := startHTTPServer(); err != nil { - slog.Error("http server failed", "err", err) - os.Exit(1) - } - }() - - go handleSignals() - - if err := initialImport(); err != nil { - slog.Error("initial import failed", "err", err) - os.Exit(1) - } - - if err := startRoutingScheduler(); err != nil { - slog.Error("failed to start routing scheduler", "err", err) - os.Exit(1) - } - - if err := rsCmd.Wait(); err != nil { - slog.Error("routing scheduler exited with error", "err", err) - } else { - slog.Info("routing scheduler exited cleanly") - } - - time.Sleep(1 * time.Second) // allow logs to flush } -func loadConfig() Config { - c := Config{ - InstrConfigDir: os.Getenv("INSTRUMENTATION_DCS_CONFIG"), - ImportDir: getenvDefault("OPENDCS_IMPORT_DIR", "/opt/opendcs/import"), - RoutingSpec: getenvDefault("ROUTING_SPEC", "goes"), - AuthToken: os.Getenv("OPENDCS_IMPORT_TOKEN"), - ListenAddr: getenvDefault("OPENDCS_HTTP_ADDR", ":8080"), - LogDir: getenvDefault("OPENDCS_LOG_DIR", "/opendcs_output"), - } - - if c.InstrConfigDir == "" { - slog.Error("INSTRUMENTATION_DCS_CONFIG must be set") - os.Exit(1) +func (m *TryMutex) Unlock() { + select { + case m.ch <- struct{}{}: + default: } - if c.AuthToken == "" { - slog.Error("OPENDCS_IMPORT_TOKEN must be set for secure access") - os.Exit(1) - } - if err := os.MkdirAll(c.ImportDir, 0o775); err != nil { - slog.Error("failed to ensure import dir exists", "dir", c.ImportDir, "err", err) - os.Exit(1) - } - if err := os.MkdirAll(c.LogDir, 0o775); err != nil { - slog.Error("failed to ensure log dir exists", "dir", c.LogDir, "err", err) - os.Exit(1) - } - return c } -func getenvDefault(key, def string) string { - if v := os.Getenv(key); v != "" { - return v - } - return def -} +type CommandRunner struct{} -func initialImport() error { - slog.Info("performing initial import", "dir", cfg.InstrConfigDir) - - files, err := findInitialXMLFiles() - if err != nil { - return 
fmt.Errorf("find initial xml files: %w", err) - } - if len(files) == 0 { - slog.Info("no initial XML files found") - return nil +func (r *CommandRunner) Run(ctx context.Context, name string, args []string, env []string) ([]byte, error) { + slog.Info("exec command", "name", name, "args", strings.Join(args, " ")) + cmd := exec.CommandContext(ctx, name, args...) + if env != nil { + cmd.Env = env } - args := make([]string, len(files)+2) - args[0] = "-l" - args[1] = "/proc/self/fd/1" + var buf bytes.Buffer + cmd.Stdout = &buf + cmd.Stderr = &buf + + err := cmd.Run() + out := buf.Bytes() - for i, f := range files { - args[i+2] = f + if ctx.Err() != nil { + return out, fmt.Errorf("command canceled: %w", ctx.Err()) } - slog.Info("initial dbimport", "command", "dbimport", "args", args) - if _, err := runCommand(context.Background(), "dbimport", args, 0); err != nil { - return fmt.Errorf("dbimport command failed: %w", err) + if err != nil { + return out, fmt.Errorf("%s failed: %w", name, err) } - return nil + return out, nil } -func findInitialXMLFiles() ([]string, error) { - var results []string - - root := cfg.InstrConfigDir +type RouterScheduler struct { + mu sync.Mutex + cmd *exec.Cmd - err := filepath.WalkDir(root, func(pathStr string, d os.DirEntry, err error) error { - if err != nil { - return err - } - if d.IsDir() { - return nil - } + logDir string + routingSpec string +} - if strings.ToLower(filepath.Ext(pathStr)) == ".xml" { - results = append(results, pathStr) - } - return nil - }) - if err != nil { - return nil, err +func NewRouterScheduler(logDir, routingSpec string) *RouterScheduler { + return &RouterScheduler{ + logDir: logDir, + routingSpec: routingSpec, } - return results, nil } -func startRoutingScheduler() error { - rsMu.Lock() - defer rsMu.Unlock() +func (s *RouterScheduler) Start() error { + s.mu.Lock() + defer s.mu.Unlock() - if rsCmd != nil { + if s.cmd != nil { return errors.New("routing scheduler already running") } - runtimeLogPath := 
filepath.Join(cfg.LogDir, "runtime.log") - slog.Info("starting routing scheduler 'rs'", - "spec", cfg.RoutingSpec, - "runtimeLogPath", runtimeLogPath) + runtimeLogPath := filepath.Join(s.logDir, "runtime.log") + slog.Info("starting routing scheduler 'rs'", "spec", s.routingSpec, "runtimeLogPath", runtimeLogPath) - cmd := exec.Command("rs", "-l", runtimeLogPath, cfg.RoutingSpec) + cmd := exec.Command("rs", "-l", runtimeLogPath, s.routingSpec) cmd.Stdout = os.Stdout cmd.Stderr = os.Stderr if err := cmd.Start(); err != nil { return fmt.Errorf("failed to start rs: %w", err) } - rsCmd = cmd + s.cmd = cmd return nil } -func handleSignals() { - sigCh := make(chan os.Signal, 1) - signal.Notify(sigCh, syscall.SIGINT, syscall.SIGTERM) +func (s *RouterScheduler) Stop(ctx context.Context) error { + s.mu.Lock() + cmd := s.cmd + s.mu.Unlock() - sig := <-sigCh - slog.Info("received signal, forwarding to rs and shutting down", "signal", sig) - - rsMu.Lock() - if rsCmd != nil && rsCmd.Process != nil { - _ = rsCmd.Process.Signal(sig) + if cmd == nil || cmd.Process == nil { + return nil } - rsMu.Unlock() -} -func startHTTPServer() error { - router := http.NewServeMux() + slog.Info("stopping routing scheduler") + _ = cmd.Process.Signal(syscall.SIGTERM) - api := humago.New(router, huma.DefaultConfig("OpenDCS Wrapper", VERSION)) + done := make(chan error, 1) + go func() { done <- cmd.Wait() }() - huma.Post(api, "/import", func(ctx context.Context, input *struct { - KeyQueryParam - Body ImportRequest `contentType:"application/json"` - }) (*Response[ImportResponse], error) { - if res := subtle.ConstantTimeCompare([]byte(input.Key), []byte(cfg.AuthToken)); res != 1 { - return nil, huma.NewError(http.StatusUnauthorized, "invalid key") - } - resp := processImport(ctx, input.Body) - return NewResponse(resp), nil - }) + select { + case err := <-done: + s.mu.Lock() + s.cmd = nil + s.mu.Unlock() - type RuntimeLogs struct { - Log string `json:"log" doc:"Contents of routing scheduler runtime log"` 
- } - - huma.Get(api, "/logs/runtime", func(ctx context.Context, input *struct { - KeyQueryParam - }) (*Response[RuntimeLogs], error) { - if res := subtle.ConstantTimeCompare([]byte(input.Key), []byte(cfg.AuthToken)); res != 1 { - return nil, huma.NewError(http.StatusUnauthorized, "invalid key") - } - - runtimeLogPath := filepath.Join(cfg.LogDir, "runtime.log") - data, err := os.ReadFile(runtimeLogPath) if err != nil { - return nil, huma.NewError(http.StatusInternalServerError, fmt.Sprintf("failed to read runtime log: %v", err)) + slog.Warn("routing scheduler exited with error during stop", "err", err) } + return nil + case <-ctx.Done(): + slog.Warn("routing scheduler did not stop in time; sending SIGKILL") + _ = cmd.Process.Kill() + <-done - return NewResponse(RuntimeLogs{ - Log: string(data), - }), nil - }) + s.mu.Lock() + s.cmd = nil + s.mu.Unlock() - router.HandleFunc("/healthz", handleHealth) + return fmt.Errorf("rs stop timeout: %w", ctx.Err()) + } +} - server := &http.Server{ - Addr: cfg.ListenAddr, - Handler: router, - ReadHeaderTimeout: 5 * time.Second, +func (s *RouterScheduler) ForwardSignal(sig os.Signal) { + s.mu.Lock() + defer s.mu.Unlock() + if s.cmd != nil && s.cmd.Process != nil { + _ = s.cmd.Process.Signal(sig.(syscall.Signal)) } +} - slog.Info("http api listening", "addr", cfg.ListenAddr) - return server.ListenAndServe() +type Importer struct { + cfg Config + rs *RouterScheduler + runner CommandRunner } -func handleHealth(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusOK) - _, _ = io.WriteString(w, "ok\n") +func NewImporter(cfg Config, rs *RouterScheduler) *Importer { + return &Importer{cfg: cfg, rs: rs, runner: CommandRunner{}} } -func processImport(ctx context.Context, req ImportRequest) ImportResponse { - resp := ImportResponse{} +func (i *Importer) InitialImport(ctx context.Context) error { + slog.Info("performing initial import", "dir", i.cfg.InstrConfigDir) - files, err := resolveImportFiles(req.Files) + files, err := 
i.findInitialXMLFiles() if err != nil { - resp.Status = "error" - resp.Error = err.Error() - return resp + return fmt.Errorf("find initial xml files: %w", err) } if len(files) == 0 { - resp.Status = "error" - resp.Error = "no xml files found to import" - return resp + slog.Info("no initial XML files found") + return nil } - importMu.Lock() - defer importMu.Unlock() + args := make([]string, 0, len(files)+2) + args = append(args, "-l", "/proc/self/fd/1") + args = append(args, files...) + + slog.Info("initial dbimport", "command", "dbimport", "args", strings.Join(args, " ")) + _, err = i.runner.Run(ctx, "dbimport", args, nil) + if err != nil { + return fmt.Errorf("dbimport command failed: %w", err) + } + return nil +} - ctx, cancel := context.WithTimeout(ctx, 10*time.Minute) +func (i *Importer) ProcessAtomic(ctx context.Context, req ImportRequest) ImportResponse { + ctx, cancel := context.WithTimeout(ctx, 15*time.Minute) defer cancel() - timestamp := time.Now().UTC().Format("20060102-150405") - validateLogPath := filepath.Join(cfg.LogDir, fmt.Sprintf("dbimport-validate-%s.log", timestamp)) - importLogPath := filepath.Join(cfg.LogDir, fmt.Sprintf("dbimport-import-%s.log", timestamp)) + files, err := i.resolveImportFiles(req.Files) + if err != nil { + return ImportResponse{Status: "error", Error: err.Error()} + } + if len(files) == 0 { + return ImportResponse{Status: "error", Error: "no xml files found to import"} + } + + basePropsBytes, err := os.ReadFile(i.cfg.DecodesProps) + if err != nil { + return ImportResponse{Status: "error", Error: fmt.Sprintf("failed to read decodes.properties: %v", err)} + } + baseProps := string(basePropsBytes) + + liveEditDB, err := parseEditDBLocation(baseProps) + if err != nil { + return ImportResponse{Status: "error", Error: err.Error()} + } + + ts := time.Now().UTC().Format("20060102-150405") + + // NOTE: stage on the same filesystem as liveEditDB for atomic rename semantics + stageRoot := filepath.Dir(liveEditDB) + stageUserDir := 
filepath.Join(stageRoot, ".opendcs-userdir-stage-"+ts) + stageEditDB := filepath.Join(stageRoot, ".opendcs-edit-db-stage-"+ts) + backupEditDB := liveEditDB + ".prev-" + ts + + if err := os.MkdirAll(stageUserDir, 0o775); err != nil { + return ImportResponse{Status: "error", Error: fmt.Sprintf("failed to create staging userdir: %v", err)} + } + defer func() { _ = os.RemoveAll(stageUserDir) }() + + if err := copyDir(liveEditDB, stageEditDB); err != nil { + return ImportResponse{Status: "error", Error: fmt.Sprintf("failed to stage edit-db copy: %v", err)} + } + defer func() { _ = os.RemoveAll(stageEditDB) }() + + stageProps := rewriteEditDBLocation(baseProps, stageEditDB) + stagePropsPath := filepath.Join(stageUserDir, "decodes.properties") + if err := os.WriteFile(stagePropsPath, []byte(stageProps), 0o664); err != nil { + return ImportResponse{Status: "error", Error: fmt.Sprintf("failed to write staging decodes.properties: %v", err)} + } + + env := append(os.Environ(), "DCSTOOL_USERDIR="+stageUserDir) + + validateLogPath := filepath.Join(i.cfg.LogDir, fmt.Sprintf("dbimport-validate-%s.log", ts)) + importLogPath := filepath.Join(i.cfg.LogDir, fmt.Sprintf("dbimport-import-%s.log", ts)) valArgs := append([]string{"-v", "-l", validateLogPath}, files...) - valOut, valErr := runCommand(ctx, "dbimport", valArgs, 0) - resp.ValidateLog = readFileOrEmpty(validateLogPath) - resp.CommandOutput = string(valOut) + valOut, valErr := i.runner.Run(ctx, "dbimport", valArgs, env) + + resp := ImportResponse{ + ValidateLog: readFileOrEmpty(validateLogPath), + CommandOutput: string(valOut), + } if valErr != nil { resp.Status = "validation_failed" @@ -362,7 +310,7 @@ func processImport(ctx context.Context, req ImportRequest) ImportResponse { } impArgs := append([]string{"-l", importLogPath}, files...) 
- impOut, impErr := runCommand(ctx, "dbimport", impArgs, 0) + impOut, impErr := i.runner.Run(ctx, "dbimport", impArgs, env) resp.ImportLog = readFileOrEmpty(importLogPath) resp.CommandOutput = string(impOut) @@ -372,21 +320,71 @@ func processImport(ctx context.Context, req ImportRequest) ImportResponse { return resp } + stopCtx, stopCancel := context.WithTimeout(ctx, 30*time.Second) + defer stopCancel() + if err := i.rs.Stop(stopCtx); err != nil { + return ImportResponse{Status: "error", Error: fmt.Sprintf("failed stopping rs: %v", err)} + } + + if err := atomicSwapDir(liveEditDB, stageEditDB, backupEditDB); err != nil { + _ = i.rs.Start() + return ImportResponse{Status: "error", Error: fmt.Sprintf("failed swapping edit-db: %v", err)} + } + + if err := i.rs.Start(); err != nil { + slog.Error("rs failed to start after commit; rolling back", "err", err) + + if rbErr := atomicRollbackDir(liveEditDB, backupEditDB); rbErr != nil { + return ImportResponse{ + Status: "error", + Error: fmt.Sprintf("rs restart failed (%v) and rollback failed (%v)", err, rbErr), + } + } + + _ = i.rs.Start() + return ImportResponse{Status: "error", Error: fmt.Sprintf("rs restart failed; rolled back to previous db: %v", err)} + } + resp.Status = "success" return resp } -func resolveImportFiles(files []string) ([]string, error) { +func (i *Importer) findInitialXMLFiles() ([]string, error) { + var results []string + root := i.cfg.InstrConfigDir + + err := filepath.WalkDir(root, func(pathStr string, d os.DirEntry, err error) error { + if err != nil { + return err + } + if d.IsDir() { + return nil + } + if strings.EqualFold(filepath.Ext(pathStr), ".xml") { + results = append(results, pathStr) + } + return nil + }) + if err != nil { + return nil, err + } + + sort.Strings(results) + return results, nil +} + +func (i *Importer) resolveImportFiles(files []string) ([]string, error) { var resolved []string + if len(files) == 0 { - err := filepath.WalkDir(cfg.ImportDir, func(pathStr string, d 
os.DirEntry, err error) error { + err := filepath.WalkDir(i.cfg.ImportDir, func(pathStr string, d os.DirEntry, err error) error { if err != nil { return err } if d.IsDir() { return nil } - if strings.ToLower(filepath.Ext(pathStr)) == ".xml" { + if strings.EqualFold(filepath.Ext(pathStr), ".xml") { resolved = append(resolved, pathStr) } return nil @@ -394,61 +392,71 @@ func resolveImportFiles(files []string) ([]string, error) { if err != nil { return nil, fmt.Errorf("walk import dir: %w", err) } + sort.Strings(resolved) return resolved, nil } + baseImport := filepath.Clean(i.cfg.ImportDir) + for _, f := range files { if !filepath.IsAbs(f) { - f = filepath.Join(cfg.ImportDir, f) + f = filepath.Join(i.cfg.ImportDir, f) } clean := filepath.Clean(f) - baseImport := filepath.Clean(cfg.ImportDir) if !strings.HasPrefix(clean, baseImport+string(os.PathSeparator)) && clean != baseImport { - return nil, fmt.Errorf("file %q is outside allowed import dir %q", clean, cfg.ImportDir) + return nil, fmt.Errorf("file %q is outside allowed import dir %q", clean, i.cfg.ImportDir) } resolved = append(resolved, clean) } + + sort.Strings(resolved) return resolved, nil } -func runCommand(ctx context.Context, name string, args []string, timeout time.Duration) ([]byte, error) { - if timeout > 0 { - var cancel context.CancelFunc - ctx, cancel = context.WithTimeout(ctx, timeout) - defer cancel() - } +type Uploader struct { + root string + awsEndpoint string - slog.Info("exec command", "name", name, "args", strings.Join(args, " ")) + once sync.Once + b *blob.Bucket + err error +} - cmd := exec.CommandContext(ctx, name, args...) 
- var buf bytes.Buffer - cmd.Stdout = &buf - cmd.Stderr = &buf +func NewUploader(root, endpoint string) *Uploader { + return &Uploader{root: root, awsEndpoint: endpoint} +} - err := cmd.Run() - out := buf.Bytes() - if ctx.Err() == context.DeadlineExceeded { - return out, fmt.Errorf("command timed out: %w", ctx.Err()) - } - if err != nil { - return out, err - } - return out, nil +func (u *Uploader) Bucket(ctx context.Context) (*blob.Bucket, error) { + u.once.Do(func() { + if u.root == "" { + u.err = fmt.Errorf("DATALOAD_S3_ROOT is not set; cannot upload") + return + } + bucketURL, _, err := buildBucketURLFromRoot(u.root, u.awsEndpoint) + if err != nil { + u.err = err + return + } + b, err := blob.OpenBucket(ctx, bucketURL) + if err != nil { + u.err = fmt.Errorf("failed to open bucket %q: %w", bucketURL, err) + return + } + u.b = b + }) + return u.b, u.err } -func readFileOrEmpty(pathStr string) string { - data, err := os.ReadFile(pathStr) - if err != nil { - return "" +func (u *Uploader) Close() { + if u.b != nil { + if err := u.b.Close(); err != nil { + slog.Warn("failed to close bucket", "err", err) + } } - return string(data) } -func runUploadCLI(filePath string) error { - ctx := context.Background() - - root := dataloadS3Root - if root == "" { +func (u *Uploader) RunUploadCLI(ctx context.Context, filePath string) error { + if u.root == "" { return fmt.Errorf("DATALOAD_S3_ROOT is not set; cannot upload") } @@ -458,7 +466,6 @@ func runUploadCLI(filePath string) error { if err != nil { return fmt.Errorf("input file %q does not exist: %w", filePath, err) } - if stat.Size() == 0 { if err := os.Remove(filePath); err != nil && !os.IsNotExist(err) { slog.Warn("failed to remove empty file", "file", filePath, "err", err) @@ -466,21 +473,20 @@ func runUploadCLI(filePath string) error { return nil } - bucketURL, prefix, err := buildBucketURLFromRoot(root, awsEndpointURL) + bucketURL, prefix, err := buildBucketURLFromRoot(u.root, u.awsEndpoint) if err != nil { return err } - 
bucket, err := blob.OpenBucket(ctx, bucketURL) + b, err := u.Bucket(ctx) if err != nil { - return fmt.Errorf("failed to open bucket %q: %w", bucketURL, err) + return err } - defer bucket.Close() base := filepath.Base(filePath) platform := derivePlatformFromFilename(base) - key := path.Join(prefix, app, platform, base) + slog.Info("uploading file to bucket", "file", filePath, "bucketURL", bucketURL, "key", key) f, err := os.Open(filePath) @@ -489,7 +495,7 @@ func runUploadCLI(filePath string) error { } defer f.Close() - w, err := bucket.NewWriter(ctx, key, nil) + w, err := b.NewWriter(ctx, key, nil) if err != nil { return fmt.Errorf("failed to create blob writer: %w", err) } @@ -498,7 +504,6 @@ func runUploadCLI(filePath string) error { _ = w.Close() return fmt.Errorf("failed to stream file to bucket: %w", err) } - if err := w.Close(); err != nil { return fmt.Errorf("failed to finalize blob write: %w", err) } @@ -506,10 +511,387 @@ func runUploadCLI(filePath string) error { if err := os.Remove(filePath); err != nil && !os.IsNotExist(err) { slog.Warn("failed to remove local file after upload", "file", filePath, "err", err) } + return nil +} + +type App struct { + cfg Config + importMu *TryMutex + + rs *RouterScheduler + importer *Importer + uploader *Uploader + + httpServer *http.Server +} + +func NewApp(cfg Config) *App { + rs := NewRouterScheduler(cfg.LogDir, cfg.RoutingSpec) + return &App{ + cfg: cfg, + importMu: NewTryMutex(), + rs: rs, + importer: NewImporter(cfg, rs), + uploader: NewUploader(cfg.DataloadS3Root, cfg.AWSEndpointURL), + } +} + +func (a *App) Close() { + a.uploader.Close() +} + +func (a *App) RunServer(ctx context.Context) error { + router := http.NewServeMux() + api := humago.New(router, huma.DefaultConfig("OpenDCS Wrapper", VERSION)) + + huma.Post(api, "/import", func(ctx context.Context, input *struct { + KeyQueryParam + Body ImportRequest `contentType:"application/json"` + }) (*Response[ImportResponse], error) { + if err := 
a.checkKey(input.Key); err != nil { + return nil, err + } + + if !a.importMu.TryLock() { + return nil, huma.NewError(http.StatusConflict, "import already in progress") + } + defer a.importMu.Unlock() + + resp := a.importer.ProcessAtomic(ctx, input.Body) + if resp.Status == "conflict" { + return nil, huma.NewError(http.StatusConflict, resp.Error) + } + return NewResponse(resp), nil + }) + + type RuntimeLogs struct { + Log string `json:"log" doc:"Contents of routing scheduler runtime log"` + } + huma.Get(api, "/logs/runtime", func(ctx context.Context, input *struct { + KeyQueryParam + }) (*Response[RuntimeLogs], error) { + if err := a.checkKey(input.Key); err != nil { + return nil, err + } + runtimeLogPath := filepath.Join(a.cfg.LogDir, "runtime.log") + data, err := os.ReadFile(runtimeLogPath) + if err != nil { + return nil, huma.NewError(http.StatusInternalServerError, fmt.Sprintf("failed to read runtime log: %v", err)) + } + return NewResponse(RuntimeLogs{Log: string(data)}), nil + }) + + router.HandleFunc("/healthz", handleHealth) + + a.httpServer = &http.Server{ + Addr: a.cfg.ListenAddr, + Handler: router, + ReadHeaderTimeout: 5 * time.Second, + ReadTimeout: 30 * time.Second, + WriteTimeout: 30 * time.Second, + IdleTimeout: 2 * time.Minute, + MaxHeaderBytes: 1 << 20, + } + + errCh := make(chan error, 1) + go func() { + slog.Info("http api listening", "addr", a.cfg.ListenAddr) + if err := a.httpServer.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) { + errCh <- err + return + } + errCh <- nil + }() + select { + case <-ctx.Done(): + shutdownCtx, cancel := context.WithTimeout(context.Background(), 20*time.Second) + defer cancel() + _ = a.httpServer.Shutdown(shutdownCtx) + return ctx.Err() + case err := <-errCh: + return err + } +} + +func (a *App) checkKey(key string) error { + if subtle.ConstantTimeCompare([]byte(key), []byte(a.cfg.AuthToken)) != 1 { + return huma.NewError(http.StatusUnauthorized, "invalid key") + } return nil } +func (a *App) 
Run(ctx context.Context) error { + if err := a.rs.Start(); err != nil { + return err + } + + if err := a.importer.InitialImport(ctx); err != nil { + return err + } + + go func() { + if err := a.RunServer(ctx); err != nil && !errors.Is(err, context.Canceled) { + slog.Error("http server failed", "err", err) + } + }() + + <-ctx.Done() + + stopCtx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + _ = a.rs.Stop(stopCtx) + return ctx.Err() +} + +func main() { + initLogger() + + cfg, err := loadConfig() + if err != nil { + slog.Error("config error", "err", err) + os.Exit(1) + } + + app := NewApp(cfg) + defer app.Close() + + if len(os.Args) > 1 && os.Args[1] == "upload" { + if len(os.Args) < 3 { + slog.Error("usage: upload requires file path argument", "argv", os.Args) + os.Exit(1) + } + if err := app.uploader.RunUploadCLI(context.Background(), os.Args[2]); err != nil { + slog.Error("upload failed", "err", err) + os.Exit(1) + } + return + } + + ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM) + defer stop() + + slog.Info("starting opendcs wrapper (server mode)", "version", VERSION) + + if err := app.Run(ctx); err != nil && !errors.Is(err, context.Canceled) { + slog.Error("app exited with error", "err", err) + os.Exit(1) + } +} + +func initLogger() { + var level slog.Level + levelText := getenvDefault("LOGLEVEL", "INFO") + if err := level.UnmarshalText([]byte(levelText)); err != nil { + panic(err) + } + logger := slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{Level: level})) + slog.SetDefault(logger) +} + +func loadConfig() (Config, error) { + userDir := getenvDefault("DCSTOOL_USERDIR", "/opt/opendcs") + props := getenvDefault("DECODES_PROPERTIES_PATH", filepath.Join(userDir, "decodes.properties")) + + c := Config{ + InstrConfigDir: os.Getenv("INSTRUMENTATION_DCS_CONFIG"), + ImportDir: getenvDefault("OPENDCS_IMPORT_DIR", "/opt/opendcs/import"), + RoutingSpec: 
getenvDefault("ROUTING_SPEC", "goes"), + AuthToken: os.Getenv("OPENDCS_IMPORT_TOKEN"), + ListenAddr: getenvDefault("OPENDCS_HTTP_ADDR", ":8080"), + LogDir: getenvDefault("OPENDCS_LOG_DIR", "/opendcs_output"), + + DcsToolUserDir: userDir, + DecodesProps: props, + + DataloadS3Root: os.Getenv("DATALOAD_S3_ROOT"), + AWSEndpointURL: os.Getenv("AWS_ENDPOINT_URL"), + } + + if c.InstrConfigDir == "" { + return Config{}, fmt.Errorf("INSTRUMENTATION_DCS_CONFIG must be set") + } + if c.AuthToken == "" { + return Config{}, fmt.Errorf("OPENDCS_IMPORT_TOKEN must be set for secure access") + } + if err := os.MkdirAll(c.ImportDir, 0o775); err != nil { + return Config{}, fmt.Errorf("failed to ensure import dir exists (%s): %w", c.ImportDir, err) + } + if err := os.MkdirAll(c.LogDir, 0o775); err != nil { + return Config{}, fmt.Errorf("failed to ensure log dir exists (%s): %w", c.LogDir, err) + } + return c, nil +} + +func getenvDefault(key, def string) string { + if v := os.Getenv(key); v != "" { + return v + } + return def +} + +func handleHealth(w http.ResponseWriter, _ *http.Request) { + w.WriteHeader(http.StatusOK) + _, _ = io.WriteString(w, "ok\n") +} + +func readFileOrEmpty(pathStr string) string { + data, err := os.ReadFile(pathStr) + if err != nil { + return "" + } + return string(data) +} + +func parseEditDBLocation(props string) (string, error) { + lines := strings.SplitSeq(props, "\n") + for line := range lines { + trim := strings.TrimSpace(line) + if trim == "" || strings.HasPrefix(trim, "#") { + continue + } + if v, found := strings.CutPrefix(trim, "EditDatabaseLocation="); found { + v = strings.TrimSpace(v) + if v == "" { + return "", fmt.Errorf("EditDatabaseLocation present but empty in decodes.properties") + } + return v, nil + } + } + return "", fmt.Errorf("EditDatabaseLocation not found in decodes.properties") +} + +func rewriteEditDBLocation(props, newPath string) string { + lines := strings.Split(props, "\n") + out := make([]string, 0, len(lines)+1) + replaced := 
false + + for _, line := range lines { + trim := strings.TrimSpace(line) + if strings.HasPrefix(trim, "EditDatabaseLocation=") { + out = append(out, "EditDatabaseLocation="+newPath) + replaced = true + continue + } + out = append(out, line) + } + if !replaced { + out = append(out, "EditDatabaseLocation="+newPath) + } + return strings.Join(out, "\n") +} + +func atomicSwapDir(liveDir, stagedDir, backupDir string) error { + if _, err := os.Stat(stagedDir); err != nil { + return fmt.Errorf("staged dir missing: %w", err) + } + + if _, err := os.Stat(liveDir); err == nil { + if err := os.Rename(liveDir, backupDir); err != nil { + return fmt.Errorf("failed to move live->backup: %w", err) + } + } + + if err := os.Rename(stagedDir, liveDir); err != nil { + _ = os.Rename(backupDir, liveDir) + return fmt.Errorf("failed to move staged->live: %w", err) + } + return nil +} + +func atomicRollbackDir(liveDir, backupDir string) error { + if _, err := os.Stat(backupDir); err != nil { + return fmt.Errorf("backup dir missing: %w", err) + } + + badDir := liveDir + ".bad-" + time.Now().UTC().Format("20060102-150405") + if _, err := os.Stat(liveDir); err == nil { + _ = os.Rename(liveDir, badDir) + } + + if err := os.Rename(backupDir, liveDir); err != nil { + return fmt.Errorf("failed to restore backup->live: %w", err) + } + return nil +} + +func copyDir(src, dst string) error { + src = filepath.Clean(src) + dst = filepath.Clean(dst) + + info, err := os.Stat(src) + if err != nil { + return err + } + if !info.IsDir() { + return fmt.Errorf("source is not a directory: %s", src) + } + + if err := os.MkdirAll(dst, info.Mode().Perm()); err != nil { + return err + } + + return filepath.WalkDir(src, func(p string, d os.DirEntry, walkErr error) error { + if walkErr != nil { + return walkErr + } + rel, err := filepath.Rel(src, p) + if err != nil { + return err + } + if rel == "." 
{ + return nil + } + target := filepath.Join(dst, rel) + + if d.IsDir() { + di, err := d.Info() + if err != nil { + return err + } + return os.MkdirAll(target, di.Mode().Perm()) + } + + fi, err := d.Info() + if err != nil { + return err + } + + if fi.Mode()&os.ModeSymlink != 0 { + linkTarget, err := os.Readlink(p) + if err != nil { + return err + } + return os.Symlink(linkTarget, target) + } + + return copyFile(p, target, fi.Mode().Perm()) + }) +} + +func copyFile(src, dst string, perm os.FileMode) error { + in, err := os.Open(src) + if err != nil { + return err + } + defer in.Close() + + if err := os.MkdirAll(filepath.Dir(dst), 0o775); err != nil { + return err + } + + out, err := os.OpenFile(dst, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, perm) + if err != nil { + return err + } + defer func() { _ = out.Close() }() + + if _, err := io.Copy(out, in); err != nil { + return err + } + return out.Close() +} + func buildBucketURLFromRoot(root, awsEndpoint string) (bucketURL string, prefix string, err error) { u, err := url.Parse(root) if err != nil { @@ -530,6 +912,7 @@ func buildBucketURLFromRoot(root, awsEndpoint string) (bucketURL string, prefix } u.Path = "" u.RawQuery = v.Encode() + bucketURL = u.Scheme + "://" + u.Host if u.RawQuery != "" { bucketURL += "?" 
+ u.RawQuery diff --git a/opendcs/rsgis/.settings/org.eclipse.core.resources.prefs b/opendcs/rsgis/.settings/org.eclipse.core.resources.prefs deleted file mode 100644 index abdea9ac..00000000 --- a/opendcs/rsgis/.settings/org.eclipse.core.resources.prefs +++ /dev/null @@ -1,4 +0,0 @@ -eclipse.preferences.version=1 -encoding//src/main/java=UTF-8 -encoding//src/main/resources=UTF-8 -encoding/=UTF-8 diff --git a/opendcs/rsgis/.settings/org.eclipse.jdt.apt.core.prefs b/opendcs/rsgis/.settings/org.eclipse.jdt.apt.core.prefs deleted file mode 100644 index d4313d4b..00000000 --- a/opendcs/rsgis/.settings/org.eclipse.jdt.apt.core.prefs +++ /dev/null @@ -1,2 +0,0 @@ -eclipse.preferences.version=1 -org.eclipse.jdt.apt.aptEnabled=false diff --git a/opendcs/rsgis/.settings/org.eclipse.jdt.core.prefs b/opendcs/rsgis/.settings/org.eclipse.jdt.core.prefs deleted file mode 100644 index faca1b3f..00000000 --- a/opendcs/rsgis/.settings/org.eclipse.jdt.core.prefs +++ /dev/null @@ -1,9 +0,0 @@ -eclipse.preferences.version=1 -org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.8 -org.eclipse.jdt.core.compiler.compliance=1.8 -org.eclipse.jdt.core.compiler.problem.enablePreviewFeatures=disabled -org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning -org.eclipse.jdt.core.compiler.problem.reportPreviewFeatures=ignore -org.eclipse.jdt.core.compiler.processAnnotations=disabled -org.eclipse.jdt.core.compiler.release=enabled -org.eclipse.jdt.core.compiler.source=1.8 diff --git a/opendcs/rsgis/.settings/org.eclipse.m2e.core.prefs b/opendcs/rsgis/.settings/org.eclipse.m2e.core.prefs deleted file mode 100644 index f897a7f1..00000000 --- a/opendcs/rsgis/.settings/org.eclipse.m2e.core.prefs +++ /dev/null @@ -1,4 +0,0 @@ -activeProfiles= -eclipse.preferences.version=1 -resolveWorkspaceProjects=true -version=1 From a0b4b0e89dfb474e6c5494f60b669a999a870d00 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Mon, 15 Dec 2025 15:42:19 -0500 Subject: [PATCH 09/22] fix: use slf4j logger with 
logback for newer nightly release chore: do initial dbimport before staring routescheduler chore: configure logback to emit json logs chore: clean up mvn build process chore: move patch script to separate file --- opendcs/Dockerfile | 47 +++---- opendcs/logback.xml | 33 +++++ opendcs/main.go | 4 +- opendcs/patch_opendcs.sh | 31 +++++ opendcs/rsgis/pom.xml | 36 +++--- .../rsgis/consumer/MidasOutputFormatter.java | 118 +++++++++--------- 6 files changed, 162 insertions(+), 107 deletions(-) create mode 100644 opendcs/logback.xml create mode 100755 opendcs/patch_opendcs.sh diff --git a/opendcs/Dockerfile b/opendcs/Dockerfile index eb275d11..8b3fc773 100644 --- a/opendcs/Dockerfile +++ b/opendcs/Dockerfile @@ -1,47 +1,39 @@ ARG OPENDCS_BASE_IMAGE=ghcr.io/opendcs/routingscheduler:7.0-nightly -ARG MAVEN_BUILD_IMAGE=maven:3-eclipse-temurin-8-noble +ARG MAVEN_BUILD_IMAGE=maven:3-eclipse-temurin-17-noble ARG GO_BUILD_IMAGE=golang:1.25-alpine +ARG OPENDCS_VERSION=7.0-nightly FROM ${OPENDCS_BASE_IMAGE} AS opendcs_patched USER root -RUN rm -f /opt/opendcs/dep/commons-net-*.jar \ - /opt/opendcs/dep/jackson-core*.jar \ - /opt/opendcs/dep/jackson-dataformat-toml*.jar \ - /opt/opendcs/dep/commons-vfs2-*.jar \ - /opt/opendcs/dep/javax.el-*.jar \ - /opt/opendcs/dep/jdom-*.jar \ - /opt/opendcs/dep/poi-*.jar \ - /opt/opendcs/dep/postgresql-*.jar \ - /opt/opendcs/dep/jetty-*.jar || true - -RUN wget -qO /opt/opendcs/dep/commons-net-3.11.1.jar \ - "https://repo1.maven.org/maven2/commons-net/commons-net/3.11.1/commons-net-3.11.1.jar" \ - && wget -qO /opt/opendcs/dep/jackson-dataformat-toml-2.18.2.jar \ - "https://repo1.maven.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-toml/2.18.2/jackson-dataformat-toml-2.18.2.jar" \ - && wget -qO /opt/opendcs/dep/jackson-core-2.19.2.jar \ - "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-core/2.19.2/jackson-core-2.19.2.jar" \ - && wget -qO /opt/opendcs/dep/commons-vfs2-2.10.0.jar \ - 
"https://repo1.maven.org/maven2/org/apache/commons/commons-vfs2/2.10.0/commons-vfs2-2.10.0.jar" \ - && wget -qO /opt/opendcs/dep/jdom2-2.0.6.1.jar \ - "https://repo1.maven.org/maven2/org/jdom/jdom2/2.0.6.1/jdom2-2.0.6.1.jar" \ - && wget -qO /opt/opendcs/dep/poi-5.4.1.jar \ - "https://repo1.maven.org/maven2/org/apache/poi/poi/5.4.1/poi-5.4.1.jar" \ - && wget -qO /opt/opendcs/dep/postgresql-42.7.7.jar \ - "https://repo1.maven.org/maven2/org/postgresql/postgresql/42.7.7/postgresql-42.7.7.jar" \ - && chown opendcs:opendcs /opt/opendcs/dep/*.jar +COPY patch_opendcs.sh /patch_opendcs.sh +RUN /patch_opendcs.sh && rm /patch_opendcs.sh FROM opendcs_patched AS opendcs_base FROM ${MAVEN_BUILD_IMAGE} AS maven_builder +ARG OPENDCS_VERSION + +# workaround for issues with cross-compilation +ENV JAVA_TOOL_OPTIONS="-XX:TieredStopAtLevel=1" + COPY ./rsgis /opt/rsgis RUN mkdir -p /opt/rsgis/src/main/resources COPY --from=opendcs_base /opt/opendcs/bin/opendcs.jar /opt/rsgis/src/main/resources/opendcs.jar RUN --mount=type=cache,target=/root/.m2 \ - mvn -f /opt/rsgis/pom.xml clean package + mvn -f /opt/rsgis/pom.xml -q install:install-file \ + -Dfile=/opt/rsgis/src/main/resources/opendcs.jar \ + -DgroupId=org.opendcs \ + -DartifactId=opendcs \ + -Dversion=${OPENDCS_VERSION} \ + -Dpackaging=jar \ + -DgeneratePom=true + +RUN --mount=type=cache,target=/root/.m2 \ + mvn -f /opt/rsgis/pom.xml -Dopendcs.version=${OPENDCS_VERSION} clean package FROM ${GO_BUILD_IMAGE} AS go_builder @@ -78,6 +70,7 @@ RUN apk del py3-cryptography || true COPY --chown=opendcs:opendcs --from=maven_builder /opt/rsgis/target/rsgis.jar ${DCSTOOL_HOME}/dep +COPY --chown=opendcs:opendcs ./logback.xml ${DCSTOOL_HOME}/logback.xml COPY --chown=opendcs:opendcs ./decodes.properties ${DCSTOOL_HOME}/decodes.properties COPY --chown=opendcs:opendcs ./midas_config ${INSTRUMENTATION_DCS_CONFIG} COPY --chown=opendcs:opendcs --from=go_builder /opendcs-wrapper /usr/local/bin/opendcs-wrapper diff --git a/opendcs/logback.xml 
b/opendcs/logback.xml new file mode 100644 index 00000000..b766859e --- /dev/null +++ b/opendcs/logback.xml @@ -0,0 +1,33 @@ + + + + + + time + level + msg + logger + thread + stacktrace + + + UTC + + {"app":"opendcs","component":"java"} + + true + true + + + 200 + 20000 + true + + + + + + + + + diff --git a/opendcs/main.go b/opendcs/main.go index e9f331c3..86d9ffd5 100644 --- a/opendcs/main.go +++ b/opendcs/main.go @@ -622,11 +622,11 @@ func (a *App) checkKey(key string) error { } func (a *App) Run(ctx context.Context) error { - if err := a.rs.Start(); err != nil { + if err := a.importer.InitialImport(ctx); err != nil { return err } - if err := a.importer.InitialImport(ctx); err != nil { + if err := a.rs.Start(); err != nil { return err } diff --git a/opendcs/patch_opendcs.sh b/opendcs/patch_opendcs.sh new file mode 100755 index 00000000..aaf49e6f --- /dev/null +++ b/opendcs/patch_opendcs.sh @@ -0,0 +1,31 @@ +#!/bin/sh + +set -eu + +rm -f /opt/opendcs/dep/commons-net-*.jar \ + /opt/opendcs/dep/jackson-core*.jar \ + /opt/opendcs/dep/jackson-dataformat-toml*.jar \ + /opt/opendcs/dep/commons-vfs2-*.jar \ + /opt/opendcs/dep/javax.el-*.jar \ + /opt/opendcs/dep/jdom-*.jar \ + /opt/opendcs/dep/poi-*.jar \ + /opt/opendcs/dep/postgresql-*.jar \ + /opt/opendcs/dep/jetty-*.jar || true + +wget -qO /opt/opendcs/dep/commons-net-3.11.1.jar \ + "https://repo1.maven.org/maven2/commons-net/commons-net/3.11.1/commons-net-3.11.1.jar" && + wget -qO /opt/opendcs/dep/jackson-dataformat-toml-2.18.2.jar \ + "https://repo1.maven.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-toml/2.18.2/jackson-dataformat-toml-2.18.2.jar" && + wget -qO /opt/opendcs/dep/jackson-core-2.19.2.jar \ + "https://repo1.maven.org/maven2/com/fasterxml/jackson/core/jackson-core/2.19.2/jackson-core-2.19.2.jar" && + wget -qO /opt/opendcs/dep/commons-vfs2-2.10.0.jar \ + "https://repo1.maven.org/maven2/org/apache/commons/commons-vfs2/2.10.0/commons-vfs2-2.10.0.jar" && + wget -qO 
/opt/opendcs/dep/jdom2-2.0.6.1.jar \ + "https://repo1.maven.org/maven2/org/jdom/jdom2/2.0.6.1/jdom2-2.0.6.1.jar" && + wget -qO /opt/opendcs/dep/poi-5.4.1.jar \ + "https://repo1.maven.org/maven2/org/apache/poi/poi/5.4.1/poi-5.4.1.jar" && + wget -qO /opt/opendcs/dep/postgresql-42.7.7.jar \ + "https://repo1.maven.org/maven2/org/postgresql/postgresql/42.7.7/postgresql-42.7.7.jar" && + wget -qO /opt/opendcs/dep/logstash-logback-encoder.jar \ + "https://repo1.maven.org/maven2/net/logstash/logback/logstash-logback-encoder/8.0/logstash-logback-encoder-8.0.jar" && + chown opendcs:opendcs /opt/opendcs/dep/*.jar diff --git a/opendcs/rsgis/pom.xml b/opendcs/rsgis/pom.xml index f85f88ef..0bb326ed 100644 --- a/opendcs/rsgis/pom.xml +++ b/opendcs/rsgis/pom.xml @@ -1,32 +1,40 @@ - + 4.0.0 rsgis.consumer rsgis 1.0-SNAPSHOT - rsgis + jar UTF-8 - 8 + 17 + 7.0-nightly org.opendcs opendcs - 7.0.12 - system - ${project.basedir}/src/main/resources/opendcs.jar + ${opendcs.version} + provided + + + + org.slf4j + slf4j-api + 2.0.16 + provided - jar rsgis + @@ -53,18 +61,6 @@ maven-install-plugin 3.1.2 - - maven-deploy-plugin - 3.1.2 - - - maven-site-plugin - 3.12.1 - - - maven-project-info-reports-plugin - 3.6.1 - diff --git a/opendcs/rsgis/src/main/java/rsgis/consumer/MidasOutputFormatter.java b/opendcs/rsgis/src/main/java/rsgis/consumer/MidasOutputFormatter.java index 152bee96..fb21bf73 100644 --- a/opendcs/rsgis/src/main/java/rsgis/consumer/MidasOutputFormatter.java +++ b/opendcs/rsgis/src/main/java/rsgis/consumer/MidasOutputFormatter.java @@ -12,111 +12,113 @@ import decodes.decoder.Sensor; import decodes.decoder.TimeSeries; import decodes.util.PropertySpec; -import ilex.util.Logger; import ilex.util.PropertiesUtil; import ilex.var.TimedVariable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.text.SimpleDateFormat; import java.util.Iterator; import java.util.Properties; import java.util.TimeZone; -public class MidasOutputFormatter - extends OutputFormatter { - 
private final String module = "MidasOutputFormatter"; +public class MidasOutputFormatter extends OutputFormatter { + private static final Logger log = LoggerFactory.getLogger(MidasOutputFormatter.class); + private String delimiter = " "; private String dateFormat = "yyyy-MM-dd'T'HH:mmZ"; private PropertySpec[] propSpecs = new PropertySpec[] { - new PropertySpec("cwmsOfficeID", "s", "Three letter code for the CWMS office (District/Division)"), - new PropertySpec("delimiter", "s", "Used between columns (default=space)"), - new PropertySpec("justify", "b", "(default=true) Pad with blanks to line up columns."), - new PropertySpec("dateFormat", "s", "(default=yyyy-MM-dd'T'HH:mmZ) Java SimpleDateFormat spec.") }; - private SimpleDateFormat sdf = null; + new PropertySpec("cwmsOfficeID", "s", ""), + new PropertySpec("delimiter", "s", ""), + new PropertySpec("justify", "b", ""), + new PropertySpec("dateFormat", "s", "") + }; + private SimpleDateFormat sdf; - protected void initFormatter(String type, TimeZone timeZone, PresentationGroup presentationGroup, + protected void initFormatter( + String type, + TimeZone timeZone, + PresentationGroup presentationGroup, Properties properties) throws OutputFormatterException { - String s = PropertiesUtil.getIgnoreCase((Properties) properties, (String) "cwmsOfficeID"); - if ((s = PropertiesUtil.getIgnoreCase((Properties) properties, (String) "delimiter")) != null) { - this.delimiter = s; + + String s; + if ((s = PropertiesUtil.getIgnoreCase(properties, "delimiter")) != null) { + delimiter = s; } - if ((s = PropertiesUtil.getIgnoreCase((Properties) properties, (String) "dateFormat")) != null) { - this.dateFormat = s; + if ((s = PropertiesUtil.getIgnoreCase(properties, "dateFormat")) != null) { + dateFormat = s; } - this.sdf = new SimpleDateFormat(this.dateFormat); - this.sdf.setTimeZone(timeZone); + sdf = new SimpleDateFormat(dateFormat); + sdf.setTimeZone(timeZone); } public void shutdown() { } - public void formatMessage(DecodedMessage 
decodedMessage, DataConsumer dataConsumer) + public void formatMessage(DecodedMessage decodedMessage, DataConsumer consumer) throws DataConsumerException, OutputFormatterException { - Platform platform; - dataConsumer.startMessage(decodedMessage); - RawMessage rawMessage = decodedMessage.getRawMessage(); + consumer.startMessage(decodedMessage); + RawMessage raw = decodedMessage.getRawMessage(); + + Platform platform; try { - platform = rawMessage.getPlatform(); - } catch (UnknownPlatformException var23) { - throw new OutputFormatterException(var23.toString()); + platform = raw.getPlatform(); + } catch (UnknownPlatformException e) { + throw new OutputFormatterException(e.toString()); } String platformName = platform.getDisplayName(); String platformFileId = platform.getProperty("fileId"); - Iterator timeSeriesIterator = decodedMessage.getAllTimeSeries(); - while (timeSeriesIterator.hasNext()) { - TimeSeries timeSeries = (TimeSeries) timeSeriesIterator.next(); - Sensor sensor = timeSeries.getSensor(); + Iterator it = decodedMessage.getAllTimeSeries(); + while (it.hasNext()) { + TimeSeries ts = it.next(); + Sensor sensor = ts.getSensor(); if (sensor == null) { - Logger.instance().warning(String.format( - "%s: sensor is null, skipping...; platformName: %s; timeSeries: %s;", - this.module, platformName, timeSeries.getDisplayName())); + log.warn("sensor_null platform={} timeseries={}", platformName, ts.getDisplayName()); continue; } - - if (timeSeries.size() == 0) { - Logger.instance().warning(String.format( - "%s: no timeseries to record, skipping...; platformName: %s; timeSeries: %s;", - this.module, platformName, timeSeries.getDisplayName())); + if (ts.size() == 0) { + log.warn("timeseries_empty platform={} timeseries={}", platformName, ts.getDisplayName()); continue; } - String sensorNameNumber = String.format("%s.%d", sensor.getName(), sensor.getNumber()); - this.processDataOutput(dataConsumer, timeSeries, platformFileId, sensorNameNumber); + String sensorNameNumber 
= sensor.getName() + "." + sensor.getNumber(); + processDataOutput(consumer, ts, platformFileId, sensorNameNumber); - Logger.instance().info(String.format( - "%s: measurements recorded; timeSeries: %s; size: %d;", - this.module, timeSeries.getDisplayName(), timeSeries.size())); + log.info( + "measurements_written platform={} timeseries={} count={}", + platformName, + ts.getDisplayName(), + ts.size()); } - dataConsumer.endMessage(); + consumer.endMessage(); } public void processDataOutput( - DataConsumer dataConsumer, - TimeSeries timeSeries, + DataConsumer consumer, + TimeSeries ts, String platformFileId, String sensorNameNumber) { - StringBuffer sb = new StringBuffer(); - int tsSize = timeSeries.size(); - for (int i = 0; i < tsSize; ++i) { - TimedVariable tv = timeSeries.sampleAt(i); - if ((tv.getFlags() & 0x60000000) != 0) + StringBuilder sb = new StringBuilder(); + for (int i = 0; i < ts.size(); i++) { + TimedVariable tv = ts.sampleAt(i); + if ((tv.getFlags() & 0x60000000) != 0) { continue; + } sb.setLength(0); - sb.append(platformFileId); - sb.append(this.delimiter); - sb.append(sensorNameNumber); - sb.append(this.delimiter); - sb.append(this.sdf.format(tv.getTime())); - sb.append(this.delimiter); - sb.append(timeSeries.formattedSampleAt(i)); - dataConsumer.println(sb.toString()); + sb.append(platformFileId).append(delimiter) + .append(sensorNameNumber).append(delimiter) + .append(sdf.format(tv.getTime())).append(delimiter) + .append(ts.formattedSampleAt(i)); + consumer.println(sb.toString()); } } public PropertySpec[] getSupportedProps() { - return this.propSpecs; + return propSpecs; } } From 0c6ca939e705c1d3e2305a48eda141b493bc318b Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Wed, 17 Dec 2025 09:12:47 -0500 Subject: [PATCH 10/22] chore: update opendcs log format --- opendcs/logback.xml | 41 +++++++++++++++++++---------------------- 1 file changed, 19 insertions(+), 22 deletions(-) diff --git a/opendcs/logback.xml b/opendcs/logback.xml index 
b766859e..ec3699f7 100644 --- a/opendcs/logback.xml +++ b/opendcs/logback.xml @@ -1,32 +1,29 @@ - - - time - level - msg - logger - thread - stacktrace - - UTC - - {"app":"opendcs","component":"java"} - - true - true - - - 200 - 20000 - true - + + + + time + yyyy-MM-dd'T'HH:mm:ss.SSS'Z' + + + + { + "level": "%level", + "thread": "%thread", + "msg": "%message" + } + + + + + - + From 042308cab50a45124dc96ad07d127ae7ed990459 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Wed, 17 Dec 2025 11:45:23 -0500 Subject: [PATCH 11/22] fix: update overlapping migration version --- api/migrations/schema/{V1.56.00__goes.sql => V1.57.00__goes.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename api/migrations/schema/{V1.56.00__goes.sql => V1.57.00__goes.sql} (100%) diff --git a/api/migrations/schema/V1.56.00__goes.sql b/api/migrations/schema/V1.57.00__goes.sql similarity index 100% rename from api/migrations/schema/V1.56.00__goes.sql rename to api/migrations/schema/V1.57.00__goes.sql From e73e1eb329053591ac2cfc8844f9a660b302bd68 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Wed, 17 Dec 2025 13:38:20 -0500 Subject: [PATCH 12/22] chore: make timeseries_id optional in goes mappings request body --- api/internal/dto/goes.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/internal/dto/goes.go b/api/internal/dto/goes.go index 2b4063ad..5d0034a1 100644 --- a/api/internal/dto/goes.go +++ b/api/internal/dto/goes.go @@ -4,5 +4,5 @@ import "github.com/google/uuid" type GoesTelemetryConfigMappingDTO struct { PlatformSensorKey string `json:"platform_sensor_key"` - TimeseriesID *uuid.UUID `json:"timeseries_id"` + TimeseriesID *uuid.UUID `json:"timeseries_id" required:"false"` } From d113cb9ad023e3b7dfe644782003db3a18b0b2de Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Wed, 17 Dec 2025 15:19:46 -0500 Subject: [PATCH 13/22] fix: update overlapping migration version --- api/migrations/schema/{V1.57.00__goes.sql => V1.58.00__goes.sql} | 0 1 file changed, 0 
insertions(+), 0 deletions(-) rename api/migrations/schema/{V1.57.00__goes.sql => V1.58.00__goes.sql} (100%) diff --git a/api/migrations/schema/V1.57.00__goes.sql b/api/migrations/schema/V1.58.00__goes.sql similarity index 100% rename from api/migrations/schema/V1.57.00__goes.sql rename to api/migrations/schema/V1.58.00__goes.sql From 6e656c522c7b8907920dd0e5b5b5c2c13f04f1e6 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Wed, 17 Dec 2025 15:58:26 -0500 Subject: [PATCH 14/22] chore: add "committed" status for goes platform configs --- api/internal/db/batch.go | 53 +++++++++++++++++++ api/internal/db/goes.sql_gen.go | 5 +- api/internal/db/models.go | 2 + api/internal/db/overrides.go | 1 + api/internal/db/querier.go | 1 + .../repeat/0190__views_telemetry.sql | 3 +- api/migrations/schema/V1.58.00__goes.sql | 2 + api/queries/goes.sql | 8 +++ 8 files changed, 73 insertions(+), 2 deletions(-) diff --git a/api/internal/db/batch.go b/api/internal/db/batch.go index cc1c429b..d4e6174c 100644 --- a/api/internal/db/batch.go +++ b/api/internal/db/batch.go @@ -658,9 +658,62 @@ func (b *EvaluationInstrumentCreateBatchBatchResults) Close() error { return b.br.Close() } +const goesPlatformConfigFileCommit = `-- name: GoesPlatformConfigFileCommit :batchexec +update goes_platform_config_file set + committed=true, + committed_at=$2 +where id=$1 +` + +type GoesPlatformConfigFileCommitBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type GoesPlatformConfigFileCommitParams struct { + ID uuid.UUID `json:"id"` + CommittedAt *time.Time `json:"committed_at"` +} + +func (q *Queries) GoesPlatformConfigFileCommit(ctx context.Context, arg []GoesPlatformConfigFileCommitParams) *GoesPlatformConfigFileCommitBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.ID, + a.CommittedAt, + } + batch.Queue(goesPlatformConfigFileCommit, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &GoesPlatformConfigFileCommitBatchResults{br, len(arg), false} +} + +func (b *GoesPlatformConfigFileCommitBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *GoesPlatformConfigFileCommitBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + const goesTelemetryConfigMappingsCreateBatch = `-- name: GoesTelemetryConfigMappingsCreateBatch :batchexec insert into goes_telemetry_config_mappings (goes_platform_config_file_id, platform_sensor_key, timeseries_id) values ($1, $2, $3) +on conflict on constraint unique_goes_platform_config_file_id_platform_sensor_key do nothing ` type GoesTelemetryConfigMappingsCreateBatchBatchResults struct { diff --git a/api/internal/db/goes.sql_gen.go b/api/internal/db/goes.sql_gen.go index 0487e727..4349372c 100644 --- a/api/internal/db/goes.sql_gen.go +++ b/api/internal/db/goes.sql_gen.go @@ -53,7 +53,7 @@ func (q *Queries) GoesPlatformConfigFileDelete(ctx context.Context, id uuid.UUID } const goesPlatformConfigFileGet = `-- name: GoesPlatformConfigFileGet :one -select id, goes_telemetry_source_id, project_id, name, alias, size_bytes, content, created_at, created_by, updated_at, updated_by from goes_platform_config_file where id=$1 +select id, goes_telemetry_source_id, project_id, name, alias, size_bytes, content, committed, committed_at, created_at, created_by, updated_at, updated_by from goes_platform_config_file where id=$1 ` func (q *Queries) GoesPlatformConfigFileGet(ctx context.Context, id uuid.UUID) (GoesPlatformConfigFile, error) { @@ -67,6 +67,8 @@ func (q *Queries) GoesPlatformConfigFileGet(ctx context.Context, id uuid.UUID) ( &i.Alias, &i.SizeBytes, &i.Content, + &i.Committed, + &i.CommittedAt, &i.CreatedAt, &i.CreatedBy, &i.UpdatedAt, @@ -81,6 +83,7 @@ update 
goes_platform_config_file set alias=$2, size_bytes=$3, content=$4::xml, + committed=false, updated_at=$5, updated_by=$6 where id=$7 diff --git a/api/internal/db/models.go b/api/internal/db/models.go index 1f5b30cb..1558b940 100644 --- a/api/internal/db/models.go +++ b/api/internal/db/models.go @@ -651,6 +651,8 @@ type GoesPlatformConfigFile struct { Alias string `json:"alias"` SizeBytes int64 `json:"size_bytes"` Content string `json:"content"` + Committed bool `json:"committed"` + CommittedAt *time.Time `json:"committed_at"` CreatedAt time.Time `json:"created_at"` CreatedBy uuid.UUID `json:"created_by"` UpdatedAt *time.Time `json:"updated_at"` diff --git a/api/internal/db/overrides.go b/api/internal/db/overrides.go index f1800fcb..9981831c 100644 --- a/api/internal/db/overrides.go +++ b/api/internal/db/overrides.go @@ -100,6 +100,7 @@ type VGoesTelemetrySourceFiles struct { ProjectID uuid.UUID `json:"project_id"` Alias string `json:"alias"` SizeBytes int64 `json:"size_bytes"` + Committed bool `json:"committed"` } type InstrumentIDName struct { diff --git a/api/internal/db/querier.go b/api/internal/db/querier.go index ebc5ab7e..7add110e 100644 --- a/api/internal/db/querier.go +++ b/api/internal/db/querier.go @@ -116,6 +116,7 @@ type Querier interface { EvaluationListForProject(ctx context.Context, projectID uuid.UUID) ([]VEvaluation, error) EvaluationListForProjectAlertConfig(ctx context.Context, arg EvaluationListForProjectAlertConfigParams) ([]VEvaluation, error) EvaluationUpdate(ctx context.Context, arg EvaluationUpdateParams) error + GoesPlatformConfigFileCommit(ctx context.Context, arg []GoesPlatformConfigFileCommitParams) *GoesPlatformConfigFileCommitBatchResults GoesPlatformConfigFileCreate(ctx context.Context, arg GoesPlatformConfigFileCreateParams) (uuid.UUID, error) GoesPlatformConfigFileDelete(ctx context.Context, id uuid.UUID) error GoesPlatformConfigFileGet(ctx context.Context, id uuid.UUID) (GoesPlatformConfigFile, error) diff --git 
a/api/migrations/repeat/0190__views_telemetry.sql b/api/migrations/repeat/0190__views_telemetry.sql index b6767b7e..ca3f7908 100644 --- a/api/migrations/repeat/0190__views_telemetry.sql +++ b/api/migrations/repeat/0190__views_telemetry.sql @@ -9,7 +9,8 @@ left join ( 'name', cf.name, 'project_id', cf.project_id, 'alias', cf.alias, - 'size_bytes', cf.size_bytes + 'size_bytes', cf.size_bytes, + 'committed', cf.committed )), '[]'::jsonb) as files from goes_platform_config_file cf ) f on true; diff --git a/api/migrations/schema/V1.58.00__goes.sql b/api/migrations/schema/V1.58.00__goes.sql index 769cd24a..ff9975bb 100644 --- a/api/migrations/schema/V1.58.00__goes.sql +++ b/api/migrations/schema/V1.58.00__goes.sql @@ -12,6 +12,8 @@ create table goes_platform_config_file ( alias text not null, size_bytes bigint not null, content xml not null, + committed boolean not null default false, + committed_at timestamptz, created_at timestamptz not null default now(), created_by uuid not null references profile(id), updated_at timestamptz, diff --git a/api/queries/goes.sql b/api/queries/goes.sql index 18d5945d..93ebf80a 100644 --- a/api/queries/goes.sql +++ b/api/queries/goes.sql @@ -18,11 +18,19 @@ update goes_platform_config_file set alias=sqlc.arg(alias), size_bytes=sqlc.arg(size_bytes), content=sqlc.arg(content)::xml, + committed=false, updated_at=sqlc.arg(updated_at), updated_by=sqlc.arg(updated_by) where id=sqlc.arg(id); +-- name: GoesPlatformConfigFileCommit :batchexec +update goes_platform_config_file set + committed=true, + committed_at=$2 +where id=$1; + + -- name: GoesPlatformConfigFileDelete :exec delete from goes_platform_config_file where id=$1; From 6010aee2688b126c32db7a07ae48311c95c7157a Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Wed, 17 Dec 2025 16:48:29 -0500 Subject: [PATCH 15/22] chore: add stubs for validate, commit, and commit callback (internal app only) --- api/internal/dto/goes.go | 11 +++- api/internal/handler/goes.go | 56 +++++++++++++++++-- 
api/internal/service/goes.go | 105 +++++++++++++++++++++++++++++++---- 3 files changed, 154 insertions(+), 18 deletions(-) diff --git a/api/internal/dto/goes.go b/api/internal/dto/goes.go index 5d0034a1..79e3df69 100644 --- a/api/internal/dto/goes.go +++ b/api/internal/dto/goes.go @@ -1,8 +1,17 @@ package dto -import "github.com/google/uuid" +import ( + "time" + + "github.com/google/uuid" +) type GoesTelemetryConfigMappingDTO struct { PlatformSensorKey string `json:"platform_sensor_key"` TimeseriesID *uuid.UUID `json:"timeseries_id" required:"false"` } + +type GoesPlatformConfigFileCommitDTO struct { + ID uuid.UUID `json:"id"` + CommittedAt time.Time `json:"committed_at"` +} diff --git a/api/internal/handler/goes.go b/api/internal/handler/goes.go index daf4cc47..0872f393 100644 --- a/api/internal/handler/goes.go +++ b/api/internal/handler/goes.go @@ -109,7 +109,7 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { }) huma.Register(api, huma.Operation{ - Middlewares: h.Public, + Middlewares: h.ProjectAdmin, OperationID: "goes-telemetry-config-update", Method: http.MethodPut, Path: "/projects/{project_id}/goes/{telemetry_source_id}/configs/{telemetry_config_id}", @@ -135,8 +135,7 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { if alias == "" { alias = formData.PlatformConfig.Filename } - // TODO: return dbimport response - _, err = h.DBService.GoesPlatformConfigFileUpdate(ctx, db.GoesPlatformConfigFileUpdateParams{ + a, err := h.DBService.GoesPlatformConfigFileUpdate(ctx, db.GoesPlatformConfigFileUpdateParams{ ID: input.TelemetryConfigID.UUID, Name: formData.PlatformConfig.Filename, Alias: alias, @@ -148,7 +147,7 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { if err != nil { return nil, httperr.InternalServerError(err) } - return nil, nil + return NewResponse(a), nil }) huma.Register(api, huma.Operation{ @@ -189,7 +188,7 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { }) huma.Register(api, huma.Operation{ - 
Middlewares: h.ProjectMember, + Middlewares: h.ProjectAdmin, OperationID: "goes-update-mappings", Method: http.MethodPut, Path: "/projects/{project_id}/goes/{telemetry_source_id}/configs/{telemetry_config_id}/mappings", @@ -206,4 +205,51 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { } return nil, nil }) + + huma.Register(api, huma.Operation{ + Middlewares: h.ProjectAdmin, + OperationID: "goes-telemetry-validate", + Method: http.MethodPut, + Path: "/projects/{project_id}/goes/{telemetry_source_id}/validate", + Description: "updates goes telemetry timeseries mappings", + Tags: goesTags, + }, func(ctx context.Context, input *struct { + ProjectIDParam + TelemetrySourceIDParam + }) (*Response[service.DbImportResponse], error) { + // TODO + return NewResponse(service.DbImportResponse{}), nil + }) + + huma.Register(api, huma.Operation{ + Middlewares: h.ProjectAdmin, + OperationID: "goes-telemetry-commit", + Method: http.MethodPost, + Path: "/projects/{project_id}/goes/{telemetry_source_id}/commit", + Description: "starts a commit action to update an opendcs routescheduler", + Tags: goesTags, + }, func(ctx context.Context, input *struct { + ProjectIDParam + TelemetrySourceIDParam + }) (*Response[struct{}], error) { + // TODO + return nil, nil + }) + + huma.Register(api, huma.Operation{ + Middlewares: h.InternalApp, + OperationID: "goes-telemetry-commit-callback", + Method: http.MethodPost, + Path: "/callback/goes/{telemetry_source_id}/commit", + Description: "callback to update API DB state after OpenDCS wrapper commit completes", + Tags: goesTags, + }, func(ctx context.Context, input *struct { + TelemetrySourceIDParam + Body []dto.GoesPlatformConfigFileCommitDTO + }) (*struct{}, error) { + if err := h.DBService.GoesPlatformConfigCommit(ctx, input.Body); err != nil { + return nil, httperr.InternalServerError(err) + } + return nil, nil + }) } diff --git a/api/internal/service/goes.go b/api/internal/service/goes.go index c63045eb..b1542039 100644 --- 
a/api/internal/service/goes.go +++ b/api/internal/service/goes.go @@ -1,12 +1,16 @@ package service import ( + "bytes" "context" "encoding/json" "encoding/xml" "errors" "fmt" + "io" + "net/http" "strings" + "time" "github.com/USACE/instrumentation-api/api/v4/internal/db" "github.com/USACE/instrumentation-api/api/v4/internal/dto" @@ -30,13 +34,11 @@ type ConfigSensor struct { type DbImportCommandType string type DbImportResponse struct { - PlatformFileID uuid.UUID `json:"platform_file_id"` + PlatformFileID *uuid.UUID `json:"platform_file_id,omitempty"` Response json.RawMessage `json:"response"` } // GoesPlatformConfigFileCreate validates and creates a platform configuration file for a given MIDAS project -// -// TODO: This endpoint should return the results of the proxied dbimport validation func (s *DBService) GoesPlatformConfigFileCreate(ctx context.Context, arg db.GoesPlatformConfigFileCreateParams, dryRun bool) (DbImportResponse, error) { names, err := extractSensorNames(arg.Content) if err != nil { @@ -62,7 +64,7 @@ func (s *DBService) GoesPlatformConfigFileCreate(ctx context.Context, arg db.Goe if err != nil { return a, fmt.Errorf("GoesPlatformConfigFileCreate %w", err) } - a.PlatformFileID = newID + a.PlatformFileID = &newID mm := make([]db.GoesTelemetryConfigMappingsCreateBatchParams, 0, len(names)) for _, n := range names { @@ -82,33 +84,34 @@ func (s *DBService) GoesPlatformConfigFileCreate(ctx context.Context, arg db.Goe } // TODO: return validation results -func (s *DBService) GoesPlatformConfigFileUpdate(ctx context.Context, arg db.GoesPlatformConfigFileUpdateParams, dryRun, deleteOldMappings bool) (uuid.UUID, error) { +func (s *DBService) GoesPlatformConfigFileUpdate(ctx context.Context, arg db.GoesPlatformConfigFileUpdateParams, dryRun, deleteOldMappings bool) (DbImportResponse, error) { + var a DbImportResponse names, err := extractSensorNames(arg.Content) if err != nil { - return uuid.Nil, err + return a, err } // TODO: proxy request to opendcs 
service to validate dbimport if dryRun { // TODO: respond with validation result / error - return uuid.Nil, errors.New("TODO") + return a, errors.New("TODO") } tx, err := s.db.Begin(ctx) if err != nil { - return uuid.Nil, err + return a, err } defer s.TxDo(ctx, tx.Rollback) qtx := s.WithTx(tx) if err := qtx.GoesPlatformConfigFileUpdate(ctx, arg); err != nil { - return uuid.Nil, fmt.Errorf("GoesPlatformConfigFileUpdate %w", err) + return a, fmt.Errorf("GoesPlatformConfigFileUpdate %w", err) } if deleteOldMappings { if err := qtx.GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile(ctx, arg.ID); err != nil { - return uuid.Nil, fmt.Errorf("GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile %w", err) + return a, fmt.Errorf("GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile %w", err) } } @@ -123,10 +126,10 @@ func (s *DBService) GoesPlatformConfigFileUpdate(ctx context.Context, arg db.Goe qtx.GoesTelemetryConfigMappingsCreateBatch(ctx, mm).Exec(batchExecErr(&err)) if err != nil { - return uuid.Nil, fmt.Errorf("GoesTelemetryConfigMappingsCreateBatch %w", err) + return a, fmt.Errorf("GoesTelemetryConfigMappingsCreateBatch %w", err) } - return uuid.Nil, tx.Commit(ctx) + return a, tx.Commit(ctx) } func (s *DBService) GoesTelemetryConfigMappingsUpdate(ctx context.Context, cfgID uuid.UUID, mappings []dto.GoesTelemetryConfigMappingDTO) error { @@ -201,3 +204,81 @@ func extractFromPlatforms(platforms []Platform) []string { } return result } + +type OpendcsImportResponse struct { + Status string `json:"status"` + ValidateLog string `json:"validate_log,omitempty"` + ImportLog string `json:"import_log,omitempty"` + CommandOutput string `json:"command_output,omitempty"` + Error string `json:"error,omitempty"` +} + +type opendcsImportRequest struct { + Files []string `json:"files"` + ValidateOnly bool `json:"validate_only"` +} + +type OpendcsImportParams struct { + OpendcsBaseURL string + OpendcsAuthToken string + opendcsImportRequest +} + +func (s *DBService) 
OpendcsImport(ctx context.Context, arg OpendcsImportParams) (json.RawMessage, error) { + if arg.OpendcsBaseURL == "" { + return nil, fmt.Errorf("opendcsBaseURL not configured") + } + if arg.OpendcsAuthToken == "" { + return nil, fmt.Errorf("opendcsAuthToken not configured") + } + + reqBody, err := json.Marshal(opendcsImportRequest{ + Files: arg.Files, + ValidateOnly: arg.ValidateOnly, + }) + if err != nil { + return nil, err + } + + u := strings.TrimRight(arg.OpendcsBaseURL, "/") + "/import" + httpReq, err := http.NewRequestWithContext(ctx, http.MethodPost, u, bytes.NewReader(reqBody)) + if err != nil { + return nil, err + } + + httpReq.Header.Set("content-type", "application/json") + q := httpReq.URL.Query() + q.Set("key", arg.OpendcsAuthToken) + httpReq.URL.RawQuery = q.Encode() + + client := &http.Client{Timeout: 5 * time.Minute} + resp, err := client.Do(httpReq) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + bodyBytes, err := io.ReadAll(resp.Body) + if err != nil { + return nil, err + } + + if resp.StatusCode < 200 || resp.StatusCode >= 300 { + return json.RawMessage(bodyBytes), fmt.Errorf("opendcs wrapper /import failed: status=%d body=%s", resp.StatusCode, string(bodyBytes)) + } + + return json.RawMessage(bodyBytes), nil +} + +func (s *DBService) GoesPlatformConfigCommit(ctx context.Context, arg []dto.GoesPlatformConfigFileCommitDTO) error { + bb := make([]db.GoesPlatformConfigFileCommitParams, len(arg)) + for idx, b := range arg { + bb[idx] = db.GoesPlatformConfigFileCommitParams{ + ID: b.ID, + CommittedAt: &b.CommittedAt, + } + } + var err error + s.Queries.GoesPlatformConfigFileCommit(ctx, bb).Exec(batchExecErr(&err)) + return err +} From 5db2b88ca353177ee5c59af57377423d8d6d9465 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Fri, 16 Jan 2026 15:29:22 -0500 Subject: [PATCH 16/22] feat: imlment validate / commit handlers --- api/internal/config/api.go | 1 + api/internal/db/batch.go | 148 ++- api/internal/db/copyfrom.go | 45 + 
api/internal/db/db.go | 1 + api/internal/db/goes.sql_gen.go | 123 +- api/internal/db/goes_commit.sql_gen.go | 566 +++++++++ api/internal/db/models.go | 64 +- api/internal/db/querier.go | 23 +- api/internal/dto/goes.go | 17 + api/internal/handler/goes.go | 156 ++- api/internal/handler/handler.go | 1 + api/internal/service/db.go | 1 + api/internal/service/goes.go | 339 +++--- api/internal/service/goes_commit.go | 785 +++++++++++++ api/internal/service/goes_xml.go | 255 +++++ api/migrations/schema/V1.58.00__goes.sql | 54 +- .../schema/V1.59.00__goes_project.sql | 27 + api/migrations/seed/V0.17.02__seed_data.sql | 114 +- .../seed/V0.17.19__seed_ts_measurements.sql | 228 ++-- api/migrations/seed/V0.17.22__seed_saa.sql | 110 +- api/migrations/seed/V0.17.23__seed_ipi.sql | 80 +- .../seed/V1.25.01__seed_uploader_config.sql | 114 +- api/queries/goes.sql | 89 +- api/queries/goes_commit.sql | 215 ++++ opendcs/Dockerfile | 3 +- opendcs/app.go | 54 + opendcs/dbimport.go | 189 +++ opendcs/go.mod | 3 +- opendcs/go.sum | 2 + opendcs/logback.xml | 4 +- opendcs/main.go | 1010 +++-------------- opendcs/rsgis/pom.xml | 8 +- .../rsgis/consumer/MidasOutputFormatter.java | 27 +- 33 files changed, 3432 insertions(+), 1424 deletions(-) create mode 100644 api/internal/db/copyfrom.go create mode 100644 api/internal/db/goes_commit.sql_gen.go create mode 100644 api/internal/service/goes_commit.go create mode 100644 api/internal/service/goes_xml.go create mode 100644 api/migrations/schema/V1.59.00__goes_project.sql create mode 100644 api/queries/goes_commit.sql create mode 100644 opendcs/app.go create mode 100644 opendcs/dbimport.go diff --git a/api/internal/config/api.go b/api/internal/config/api.go index e3b76744..b5f60603 100644 --- a/api/internal/config/api.go +++ b/api/internal/config/api.go @@ -23,6 +23,7 @@ type ApiConfig struct { AlertEventFlushWorkers int `env:"ALERT_EVENT_FLUSH_WORKERS" envDefault:"4"` IrisFdsnProxyURL string `env:"IRIS_FDSN_PROXY_URL" 
envDefault:"https://service.iris.edu/fdsnws/station/1/query"` CwmsProxyURL string `env:"CWMS_PROXY_URL" envDefault:"https://cwms-data.usace.army.mil/cwms-data/"` + OpenDCSWrapperURL string `env:"OPENDCS_WRAPPER_URL" envDefault:"http://opendcs:8080"` } // GetConfig returns environment variable config diff --git a/api/internal/db/batch.go b/api/internal/db/batch.go index d4e6174c..ba2838a4 100644 --- a/api/internal/db/batch.go +++ b/api/internal/db/batch.go @@ -660,9 +660,10 @@ func (b *EvaluationInstrumentCreateBatchBatchResults) Close() error { const goesPlatformConfigFileCommit = `-- name: GoesPlatformConfigFileCommit :batchexec update goes_platform_config_file set - committed=true, - committed_at=$2 -where id=$1 + committed = true, + committed_at = $1, + committed_commit_id = $2 +where id = $3 ` type GoesPlatformConfigFileCommitBatchResults struct { @@ -672,16 +673,18 @@ type GoesPlatformConfigFileCommitBatchResults struct { } type GoesPlatformConfigFileCommitParams struct { - ID uuid.UUID `json:"id"` - CommittedAt *time.Time `json:"committed_at"` + CommittedAt *time.Time `json:"committed_at"` + CommittedCommitID *uuid.UUID `json:"committed_commit_id"` + ID uuid.UUID `json:"id"` } func (q *Queries) GoesPlatformConfigFileCommit(ctx context.Context, arg []GoesPlatformConfigFileCommitParams) *GoesPlatformConfigFileCommitBatchResults { batch := &pgx.Batch{} for _, a := range arg { vals := []interface{}{ - a.ID, a.CommittedAt, + a.CommittedCommitID, + a.ID, } batch.Queue(goesPlatformConfigFileCommit, vals...) 
} @@ -710,10 +713,86 @@ func (b *GoesPlatformConfigFileCommitBatchResults) Close() error { return b.br.Close() } +const goesPlatformRegistryUpsert = `-- name: GoesPlatformRegistryUpsert :batchexec +insert into goes_platform_registry ( + platform_key, + project_id, + goes_telemetry_source_id, + platform_id, + site_name, + commit_id, + updated_at +) values ($1, $2, $3, $4, $5, $6, now()) +on conflict (platform_key) do update set + project_id = excluded.project_id, + goes_telemetry_source_id = excluded.goes_telemetry_source_id, + platform_id = excluded.platform_id, + site_name = excluded.site_name, + commit_id = excluded.commit_id, + updated_at = now() +` + +type GoesPlatformRegistryUpsertBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type GoesPlatformRegistryUpsertParams struct { + PlatformKey string `json:"platform_key"` + ProjectID uuid.UUID `json:"project_id"` + GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"` + PlatformID *string `json:"platform_id"` + SiteName *string `json:"site_name"` + CommitID uuid.UUID `json:"commit_id"` +} + +func (q *Queries) GoesPlatformRegistryUpsert(ctx context.Context, arg []GoesPlatformRegistryUpsertParams) *GoesPlatformRegistryUpsertBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.PlatformKey, + a.ProjectID, + a.GoesTelemetrySourceID, + a.PlatformID, + a.SiteName, + a.CommitID, + } + batch.Queue(goesPlatformRegistryUpsert, vals...) 
+ } + br := q.db.SendBatch(ctx, batch) + return &GoesPlatformRegistryUpsertBatchResults{br, len(arg), false} +} + +func (b *GoesPlatformRegistryUpsertBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *GoesPlatformRegistryUpsertBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + const goesTelemetryConfigMappingsCreateBatch = `-- name: GoesTelemetryConfigMappingsCreateBatch :batchexec -insert into goes_telemetry_config_mappings (goes_platform_config_file_id, platform_sensor_key, timeseries_id) -values ($1, $2, $3) -on conflict on constraint unique_goes_platform_config_file_id_platform_sensor_key do nothing +insert into goes_telemetry_config_mappings ( + goes_platform_config_file_id, + platform_sensor_key, + timeseries_id +) values ($1, $2, $3) +on conflict on constraint unique_goes_platform_config_file_id_platform_sensor_key +do update set timeseries_id = excluded.timeseries_id ` type GoesTelemetryConfigMappingsCreateBatchBatchResults struct { @@ -763,6 +842,57 @@ func (b *GoesTelemetryConfigMappingsCreateBatchBatchResults) Close() error { return b.br.Close() } +const goesTelemetryConfigMappingsDeleteBatch = `-- name: GoesTelemetryConfigMappingsDeleteBatch :batchexec +delete from goes_telemetry_config_mappings +where goes_platform_config_file_id = $1 +and platform_sensor_key = $2 +` + +type GoesTelemetryConfigMappingsDeleteBatchBatchResults struct { + br pgx.BatchResults + tot int + closed bool +} + +type GoesTelemetryConfigMappingsDeleteBatchParams struct { + GoesPlatformConfigFileID uuid.UUID `json:"goes_platform_config_file_id"` + PlatformSensorKey string `json:"platform_sensor_key"` +} + +func (q *Queries) GoesTelemetryConfigMappingsDeleteBatch(ctx context.Context, arg []GoesTelemetryConfigMappingsDeleteBatchParams) 
*GoesTelemetryConfigMappingsDeleteBatchBatchResults { + batch := &pgx.Batch{} + for _, a := range arg { + vals := []interface{}{ + a.GoesPlatformConfigFileID, + a.PlatformSensorKey, + } + batch.Queue(goesTelemetryConfigMappingsDeleteBatch, vals...) + } + br := q.db.SendBatch(ctx, batch) + return &GoesTelemetryConfigMappingsDeleteBatchBatchResults{br, len(arg), false} +} + +func (b *GoesTelemetryConfigMappingsDeleteBatchBatchResults) Exec(f func(int, error)) { + defer b.br.Close() + for t := 0; t < b.tot; t++ { + if b.closed { + if f != nil { + f(t, ErrBatchAlreadyClosed) + } + continue + } + _, err := b.br.Exec() + if f != nil { + f(t, err) + } + } +} + +func (b *GoesTelemetryConfigMappingsDeleteBatchBatchResults) Close() error { + b.closed = true + return b.br.Close() +} + const inclOptsCreateBatch = `-- name: InclOptsCreateBatch :batchexec insert into incl_opts (instrument_id, num_segments, bottom_elevation_timeseries_id, initial_time) values ($1, $2, $3, $4) diff --git a/api/internal/db/copyfrom.go b/api/internal/db/copyfrom.go new file mode 100644 index 00000000..a51db827 --- /dev/null +++ b/api/internal/db/copyfrom.go @@ -0,0 +1,45 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.29.0 +// source: copyfrom.go + +package db + +import ( + "context" +) + +// iteratorForGoesMappingSetEntryCreateBatch implements pgx.CopyFromSource. 
+type iteratorForGoesMappingSetEntryCreateBatch struct { + rows []GoesMappingSetEntryCreateBatchParams + skippedFirstNextCall bool +} + +func (r *iteratorForGoesMappingSetEntryCreateBatch) Next() bool { + if len(r.rows) == 0 { + return false + } + if !r.skippedFirstNextCall { + r.skippedFirstNextCall = true + return true + } + r.rows = r.rows[1:] + return len(r.rows) > 0 +} + +func (r iteratorForGoesMappingSetEntryCreateBatch) Values() ([]interface{}, error) { + return []interface{}{ + r.rows[0].MappingSetID, + r.rows[0].GoesPlatformConfigFileID, + r.rows[0].PlatformSensorKey, + r.rows[0].TimeseriesID, + }, nil +} + +func (r iteratorForGoesMappingSetEntryCreateBatch) Err() error { + return nil +} + +func (q *Queries) GoesMappingSetEntryCreateBatch(ctx context.Context, arg []GoesMappingSetEntryCreateBatchParams) (int64, error) { + return q.db.CopyFrom(ctx, []string{"goes_mapping_set_entry"}, []string{"mapping_set_id", "goes_platform_config_file_id", "platform_sensor_key", "timeseries_id"}, &iteratorForGoesMappingSetEntryCreateBatch{rows: arg}) +} diff --git a/api/internal/db/db.go b/api/internal/db/db.go index b2f6bea4..037499be 100644 --- a/api/internal/db/db.go +++ b/api/internal/db/db.go @@ -15,6 +15,7 @@ type DBTX interface { Exec(context.Context, string, ...interface{}) (pgconn.CommandTag, error) Query(context.Context, string, ...interface{}) (pgx.Rows, error) QueryRow(context.Context, string, ...interface{}) pgx.Row + CopyFrom(ctx context.Context, tableName pgx.Identifier, columnNames []string, rowSrc pgx.CopyFromSource) (int64, error) SendBatch(context.Context, *pgx.Batch) pgx.BatchResults } diff --git a/api/internal/db/goes.sql_gen.go b/api/internal/db/goes.sql_gen.go index 4349372c..40a2a1d3 100644 --- a/api/internal/db/goes.sql_gen.go +++ b/api/internal/db/goes.sql_gen.go @@ -7,14 +7,28 @@ package db import ( "context" - "time" "github.com/google/uuid" ) const goesPlatformConfigFileCreate = `-- name: GoesPlatformConfigFileCreate :one -insert into 
goes_platform_config_file (goes_telemetry_source_id, project_id, name, alias, size_bytes, content, created_by) -values ($1, $2, $3, $4, $5, $6::xml, $7) +insert into goes_platform_config_file ( + goes_telemetry_source_id, + project_id, + name, + alias, + size_bytes, + content, + created_by +) values ( + $1, + $2, + $3, + $4, + $5, + $6::xml, + $7 +) returning id ` @@ -44,16 +58,28 @@ func (q *Queries) GoesPlatformConfigFileCreate(ctx context.Context, arg GoesPlat } const goesPlatformConfigFileDelete = `-- name: GoesPlatformConfigFileDelete :exec -delete from goes_platform_config_file where id=$1 +update goes_platform_config_file set + deleted = true, + deleted_at = now(), + deleted_by = $1 +where id = $2 ` -func (q *Queries) GoesPlatformConfigFileDelete(ctx context.Context, id uuid.UUID) error { - _, err := q.db.Exec(ctx, goesPlatformConfigFileDelete, id) +type GoesPlatformConfigFileDeleteParams struct { + DeletedBy *uuid.UUID `json:"deleted_by"` + ID uuid.UUID `json:"id"` +} + +func (q *Queries) GoesPlatformConfigFileDelete(ctx context.Context, arg GoesPlatformConfigFileDeleteParams) error { + _, err := q.db.Exec(ctx, goesPlatformConfigFileDelete, arg.DeletedBy, arg.ID) return err } const goesPlatformConfigFileGet = `-- name: GoesPlatformConfigFileGet :one -select id, goes_telemetry_source_id, project_id, name, alias, size_bytes, content, committed, committed_at, created_at, created_by, updated_at, updated_by from goes_platform_config_file where id=$1 +select id, goes_telemetry_source_id, project_id, name, alias, size_bytes, content, committed, committed_at, created_at, created_by, updated_at, updated_by, committed_content, committed_commit_id, deleted, deleted_at, deleted_by +from goes_platform_config_file +where id = $1 +and not deleted ` func (q *Queries) GoesPlatformConfigFileGet(ctx context.Context, id uuid.UUID) (GoesPlatformConfigFile, error) { @@ -73,30 +99,67 @@ func (q *Queries) GoesPlatformConfigFileGet(ctx context.Context, id uuid.UUID) ( &i.CreatedBy, 
&i.UpdatedAt, &i.UpdatedBy, + &i.CommittedContent, + &i.CommittedCommitID, + &i.Deleted, + &i.DeletedAt, + &i.DeletedBy, + ) + return i, err +} + +const goesPlatformConfigFileListUncommitedForProject = `-- name: GoesPlatformConfigFileListUncommitedForProject :one +select id, goes_telemetry_source_id, project_id, name, alias, size_bytes, content, committed, committed_at, created_at, created_by, updated_at, updated_by, committed_content, committed_commit_id, deleted, deleted_at, deleted_by +from goes_platform_config_file +where project_id = $1 +and not committed +and not deleted +` + +func (q *Queries) GoesPlatformConfigFileListUncommitedForProject(ctx context.Context, projectID uuid.UUID) (GoesPlatformConfigFile, error) { + row := q.db.QueryRow(ctx, goesPlatformConfigFileListUncommitedForProject, projectID) + var i GoesPlatformConfigFile + err := row.Scan( + &i.ID, + &i.GoesTelemetrySourceID, + &i.ProjectID, + &i.Name, + &i.Alias, + &i.SizeBytes, + &i.Content, + &i.Committed, + &i.CommittedAt, + &i.CreatedAt, + &i.CreatedBy, + &i.UpdatedAt, + &i.UpdatedBy, + &i.CommittedContent, + &i.CommittedCommitID, + &i.Deleted, + &i.DeletedAt, + &i.DeletedBy, ) return i, err } const goesPlatformConfigFileUpdate = `-- name: GoesPlatformConfigFileUpdate :exec update goes_platform_config_file set - name=$1, - alias=$2, - size_bytes=$3, - content=$4::xml, - committed=false, - updated_at=$5, - updated_by=$6 -where id=$7 + name = $1, + alias = $2, + size_bytes = $3, + content = $4::xml, + deleted = false, + deleted_at = null, + deleted_by = null +where id = $5 ` type GoesPlatformConfigFileUpdateParams struct { - Name string `json:"name"` - Alias string `json:"alias"` - SizeBytes int64 `json:"size_bytes"` - Content string `json:"content"` - UpdatedAt *time.Time `json:"updated_at"` - UpdatedBy *uuid.UUID `json:"updated_by"` - ID uuid.UUID `json:"id"` + Name string `json:"name"` + Alias string `json:"alias"` + SizeBytes int64 `json:"size_bytes"` + Content string `json:"content"` + ID 
uuid.UUID `json:"id"` } func (q *Queries) GoesPlatformConfigFileUpdate(ctx context.Context, arg GoesPlatformConfigFileUpdateParams) error { @@ -105,15 +168,14 @@ func (q *Queries) GoesPlatformConfigFileUpdate(ctx context.Context, arg GoesPlat arg.Alias, arg.SizeBytes, arg.Content, - arg.UpdatedAt, - arg.UpdatedBy, arg.ID, ) return err } const goesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile = `-- name: GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile :exec -delete from goes_telemetry_config_mappings where goes_platform_config_file_id=$1 +delete from goes_telemetry_config_mappings +where goes_platform_config_file_id = $1 ` func (q *Queries) GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile(ctx context.Context, goesPlatformConfigFileID uuid.UUID) error { @@ -122,7 +184,13 @@ func (q *Queries) GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile(ctx } const goesTelemetryConfigMappingsList = `-- name: GoesTelemetryConfigMappingsList :many -select goes_platform_config_file_id, platform_sensor_key, timeseries_id from goes_telemetry_config_mappings where goes_platform_config_file_id=$1 +select goes_platform_config_file_id, platform_sensor_key, timeseries_id +from goes_telemetry_config_mappings +where goes_platform_config_file_id = $1 +and goes_platform_config_file_id in ( + select id from goes_platform_config_file where deleted = false +) +order by platform_sensor_key ` func (q *Queries) GoesTelemetryConfigMappingsList(ctx context.Context, goesPlatformConfigFileID uuid.UUID) ([]GoesTelemetryConfigMappings, error) { @@ -146,7 +214,8 @@ func (q *Queries) GoesTelemetryConfigMappingsList(ctx context.Context, goesPlatf } const goesTelemetrySourceList = `-- name: GoesTelemetrySourceList :many -select id, name, files from v_goes_telemetry_source +select id, name, files +from v_goes_telemetry_source ` func (q *Queries) GoesTelemetrySourceList(ctx context.Context) ([]VGoesTelemetrySource, error) { diff --git 
a/api/internal/db/goes_commit.sql_gen.go b/api/internal/db/goes_commit.sql_gen.go new file mode 100644 index 00000000..bd68faa7 --- /dev/null +++ b/api/internal/db/goes_commit.sql_gen.go @@ -0,0 +1,566 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.29.0 +// source: goes_commit.sql + +package db + +import ( + "context" + "encoding/json" + + "github.com/google/uuid" +) + +const goesCommitCreatePending = `-- name: GoesCommitCreatePending :one +insert into goes_commit ( + project_id, + goes_telemetry_source_id, + created_by, + status, + previous_commit_id, + idempotency_key, + mapping_set_id +) values ( + $1, $2, $3, 'pending', $4, $5, $6 +) +returning id, project_id, goes_telemetry_source_id, created_at, created_by, status, opendcs_response, previous_commit_id, idempotency_key, mapping_set_id +` + +type GoesCommitCreatePendingParams struct { + ProjectID uuid.UUID `json:"project_id"` + GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"` + CreatedBy uuid.UUID `json:"created_by"` + PreviousCommitID *uuid.UUID `json:"previous_commit_id"` + IdempotencyKey *string `json:"idempotency_key"` + MappingSetID *uuid.UUID `json:"mapping_set_id"` +} + +func (q *Queries) GoesCommitCreatePending(ctx context.Context, arg GoesCommitCreatePendingParams) (GoesCommit, error) { + row := q.db.QueryRow(ctx, goesCommitCreatePending, + arg.ProjectID, + arg.GoesTelemetrySourceID, + arg.CreatedBy, + arg.PreviousCommitID, + arg.IdempotencyKey, + arg.MappingSetID, + ) + var i GoesCommit + err := row.Scan( + &i.ID, + &i.ProjectID, + &i.GoesTelemetrySourceID, + &i.CreatedAt, + &i.CreatedBy, + &i.Status, + &i.OpendcsResponse, + &i.PreviousCommitID, + &i.IdempotencyKey, + &i.MappingSetID, + ) + return i, err +} + +const goesCommitGetActive = `-- name: GoesCommitGetActive :one +select id, project_id, goes_telemetry_source_id, created_at, created_by, status, opendcs_response, previous_commit_id, idempotency_key, mapping_set_id +from goes_commit +where + project_id = 
$1 + and goes_telemetry_source_id = $2 + and status = 'active' +order by created_at desc +limit 1 +` + +type GoesCommitGetActiveParams struct { + ProjectID uuid.UUID `json:"project_id"` + GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"` +} + +func (q *Queries) GoesCommitGetActive(ctx context.Context, arg GoesCommitGetActiveParams) (GoesCommit, error) { + row := q.db.QueryRow(ctx, goesCommitGetActive, arg.ProjectID, arg.GoesTelemetrySourceID) + var i GoesCommit + err := row.Scan( + &i.ID, + &i.ProjectID, + &i.GoesTelemetrySourceID, + &i.CreatedAt, + &i.CreatedBy, + &i.Status, + &i.OpendcsResponse, + &i.PreviousCommitID, + &i.IdempotencyKey, + &i.MappingSetID, + ) + return i, err +} + +const goesCommitGetByID = `-- name: GoesCommitGetByID :one +select id, project_id, goes_telemetry_source_id, created_at, created_by, status, opendcs_response, previous_commit_id, idempotency_key, mapping_set_id +from goes_commit +where id = $1 +` + +func (q *Queries) GoesCommitGetByID(ctx context.Context, id uuid.UUID) (GoesCommit, error) { + row := q.db.QueryRow(ctx, goesCommitGetByID, id) + var i GoesCommit + err := row.Scan( + &i.ID, + &i.ProjectID, + &i.GoesTelemetrySourceID, + &i.CreatedAt, + &i.CreatedBy, + &i.Status, + &i.OpendcsResponse, + &i.PreviousCommitID, + &i.IdempotencyKey, + &i.MappingSetID, + ) + return i, err +} + +const goesCommitMarkActive = `-- name: GoesCommitMarkActive :exec +with target as ( + select + c.id, + c.project_id, + c.goes_telemetry_source_id + from goes_commit c + where c.id = $2 +), +cleared as ( + update goes_commit c + set status = 'inactive' + where c.project_id = (select t.project_id from target t) + and c.goes_telemetry_source_id = (select t.goes_telemetry_source_id from target t) + and c.status = 'active' +) +update goes_commit c +set status = 'active', + opendcs_response = $1::jsonb +where c.id = (select t.id from target t) +` + +type GoesCommitMarkActiveParams struct { + OpendcsResponse json.RawMessage 
`json:"opendcs_response"` + ID uuid.UUID `json:"id"` +} + +func (q *Queries) GoesCommitMarkActive(ctx context.Context, arg GoesCommitMarkActiveParams) error { + _, err := q.db.Exec(ctx, goesCommitMarkActive, arg.OpendcsResponse, arg.ID) + return err +} + +const goesCommitMarkFailed = `-- name: GoesCommitMarkFailed :exec +update goes_commit set status = 'failed', opendcs_response = $1::jsonb +where id = $2 +` + +type GoesCommitMarkFailedParams struct { + OpendcsResponse json.RawMessage `json:"opendcs_response"` + ID uuid.UUID `json:"id"` +} + +func (q *Queries) GoesCommitMarkFailed(ctx context.Context, arg GoesCommitMarkFailedParams) error { + _, err := q.db.Exec(ctx, goesCommitMarkFailed, arg.OpendcsResponse, arg.ID) + return err +} + +const goesMappingSetCreate = `-- name: GoesMappingSetCreate :one +insert into goes_mapping_set (project_id, created_by, content_hash, idempotency_key) +values ($1, $2, $3, $4) +returning id, project_id, created_at, created_by, content_hash, idempotency_key +` + +type GoesMappingSetCreateParams struct { + ProjectID uuid.UUID `json:"project_id"` + CreatedBy uuid.UUID `json:"created_by"` + ContentHash string `json:"content_hash"` + IdempotencyKey *string `json:"idempotency_key"` +} + +func (q *Queries) GoesMappingSetCreate(ctx context.Context, arg GoesMappingSetCreateParams) (GoesMappingSet, error) { + row := q.db.QueryRow(ctx, goesMappingSetCreate, + arg.ProjectID, + arg.CreatedBy, + arg.ContentHash, + arg.IdempotencyKey, + ) + var i GoesMappingSet + err := row.Scan( + &i.ID, + &i.ProjectID, + &i.CreatedAt, + &i.CreatedBy, + &i.ContentHash, + &i.IdempotencyKey, + ) + return i, err +} + +const goesMappingSetEntriesList = `-- name: GoesMappingSetEntriesList :many +select goes_platform_config_file_id, platform_sensor_key, timeseries_id +from goes_mapping_set_entry +where mapping_set_id = $1 +order by goes_platform_config_file_id, platform_sensor_key +` + +type GoesMappingSetEntriesListRow struct { + GoesPlatformConfigFileID uuid.UUID 
`json:"goes_platform_config_file_id"` + PlatformSensorKey string `json:"platform_sensor_key"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` +} + +func (q *Queries) GoesMappingSetEntriesList(ctx context.Context, mappingSetID uuid.UUID) ([]GoesMappingSetEntriesListRow, error) { + rows, err := q.db.Query(ctx, goesMappingSetEntriesList, mappingSetID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GoesMappingSetEntriesListRow{} + for rows.Next() { + var i GoesMappingSetEntriesListRow + if err := rows.Scan(&i.GoesPlatformConfigFileID, &i.PlatformSensorKey, &i.TimeseriesID); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +type GoesMappingSetEntryCreateBatchParams struct { + MappingSetID uuid.UUID `json:"mapping_set_id"` + GoesPlatformConfigFileID uuid.UUID `json:"goes_platform_config_file_id"` + PlatformSensorKey string `json:"platform_sensor_key"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` +} + +const goesPlatformConfigFileCommitArtifactsUpdate = `-- name: GoesPlatformConfigFileCommitArtifactsUpdate :exec +update goes_platform_config_file set + committed_content = $1::xml, + committed = true, + committed_at = now(), + committed_commit_id = $2 +where id = $3 +` + +type GoesPlatformConfigFileCommitArtifactsUpdateParams struct { + CommittedContent string `json:"committed_content"` + CommittedCommitID *uuid.UUID `json:"committed_commit_id"` + ID uuid.UUID `json:"id"` +} + +func (q *Queries) GoesPlatformConfigFileCommitArtifactsUpdate(ctx context.Context, arg GoesPlatformConfigFileCommitArtifactsUpdateParams) error { + _, err := q.db.Exec(ctx, goesPlatformConfigFileCommitArtifactsUpdate, arg.CommittedContent, arg.CommittedCommitID, arg.ID) + return err +} + +const goesPlatformConfigFileRestoreForRollback = `-- name: GoesPlatformConfigFileRestoreForRollback :exec +update goes_platform_config_file set + content = $1::xml, + committed_content 
= $1::xml, + committed = true, + committed_at = now(), + committed_commit_id = $2, + deleted = false, + deleted_at = null, + deleted_by = null +where id = $3 +` + +type GoesPlatformConfigFileRestoreForRollbackParams struct { + Content string `json:"content"` + CommittedCommitID *uuid.UUID `json:"committed_commit_id"` + ID uuid.UUID `json:"id"` +} + +func (q *Queries) GoesPlatformConfigFileRestoreForRollback(ctx context.Context, arg GoesPlatformConfigFileRestoreForRollbackParams) error { + _, err := q.db.Exec(ctx, goesPlatformConfigFileRestoreForRollback, arg.Content, arg.CommittedCommitID, arg.ID) + return err +} + +const goesPlatformConfigFileSoftDeleteNotInSet = `-- name: GoesPlatformConfigFileSoftDeleteNotInSet :exec +update goes_platform_config_file f set + deleted = true, + deleted_at = now(), + deleted_by = $3 +where f.project_id = $1 +and f.goes_telemetry_source_id = $2 +and not (f.id = any($4::uuid[])) +and f.deleted = false +` + +type GoesPlatformConfigFileSoftDeleteNotInSetParams struct { + ProjectID uuid.UUID `json:"project_id"` + GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"` + DeletedBy *uuid.UUID `json:"deleted_by"` + FileIds []uuid.UUID `json:"file_ids"` +} + +func (q *Queries) GoesPlatformConfigFileSoftDeleteNotInSet(ctx context.Context, arg GoesPlatformConfigFileSoftDeleteNotInSetParams) error { + _, err := q.db.Exec(ctx, goesPlatformConfigFileSoftDeleteNotInSet, + arg.ProjectID, + arg.GoesTelemetrySourceID, + arg.DeletedBy, + arg.FileIds, + ) + return err +} + +const goesPlatformConfigFilesListForCommit = `-- name: GoesPlatformConfigFilesListForCommit :many +select id, name, alias, content +from goes_platform_config_file +where project_id = $1 +and goes_telemetry_source_id = $2 +and deleted = false +order by created_at asc +` + +type GoesPlatformConfigFilesListForCommitParams struct { + ProjectID uuid.UUID `json:"project_id"` + GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"` +} + +type 
GoesPlatformConfigFilesListForCommitRow struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Alias string `json:"alias"` + Content string `json:"content"` +} + +func (q *Queries) GoesPlatformConfigFilesListForCommit(ctx context.Context, arg GoesPlatformConfigFilesListForCommitParams) ([]GoesPlatformConfigFilesListForCommitRow, error) { + rows, err := q.db.Query(ctx, goesPlatformConfigFilesListForCommit, arg.ProjectID, arg.GoesTelemetrySourceID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GoesPlatformConfigFilesListForCommitRow{} + for rows.Next() { + var i GoesPlatformConfigFilesListForCommitRow + if err := rows.Scan( + &i.ID, + &i.Name, + &i.Alias, + &i.Content, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const goesPlatformConfigFilesListForCommitByCommitID = `-- name: GoesPlatformConfigFilesListForCommitByCommitID :many +select id, name, alias, committed_content::text as content +from goes_platform_config_file +where project_id = $1 +and goes_telemetry_source_id = $2 +and committed_commit_id = $3 +order by created_at asc +` + +type GoesPlatformConfigFilesListForCommitByCommitIDParams struct { + ProjectID uuid.UUID `json:"project_id"` + GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"` + CommittedCommitID *uuid.UUID `json:"committed_commit_id"` +} + +type GoesPlatformConfigFilesListForCommitByCommitIDRow struct { + ID uuid.UUID `json:"id"` + Name string `json:"name"` + Alias string `json:"alias"` + Content string `json:"content"` +} + +func (q *Queries) GoesPlatformConfigFilesListForCommitByCommitID(ctx context.Context, arg GoesPlatformConfigFilesListForCommitByCommitIDParams) ([]GoesPlatformConfigFilesListForCommitByCommitIDRow, error) { + rows, err := q.db.Query(ctx, goesPlatformConfigFilesListForCommitByCommitID, arg.ProjectID, arg.GoesTelemetrySourceID, arg.CommittedCommitID) + if err != nil 
{ + return nil, err + } + defer rows.Close() + items := []GoesPlatformConfigFilesListForCommitByCommitIDRow{} + for rows.Next() { + var i GoesPlatformConfigFilesListForCommitByCommitIDRow + if err := rows.Scan( + &i.ID, + &i.Name, + &i.Alias, + &i.Content, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const goesPlatformRegistryConflicts = `-- name: GoesPlatformRegistryConflicts :many +select platform_key, project_id +from goes_platform_registry +where goes_telemetry_source_id = $1 +and platform_key = any($3::text[]) +and project_id <> $2 +` + +type GoesPlatformRegistryConflictsParams struct { + GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"` + ProjectID uuid.UUID `json:"project_id"` + PlatformKeys []string `json:"platform_keys"` +} + +type GoesPlatformRegistryConflictsRow struct { + PlatformKey string `json:"platform_key"` + ProjectID uuid.UUID `json:"project_id"` +} + +func (q *Queries) GoesPlatformRegistryConflicts(ctx context.Context, arg GoesPlatformRegistryConflictsParams) ([]GoesPlatformRegistryConflictsRow, error) { + rows, err := q.db.Query(ctx, goesPlatformRegistryConflicts, arg.GoesTelemetrySourceID, arg.ProjectID, arg.PlatformKeys) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GoesPlatformRegistryConflictsRow{} + for rows.Next() { + var i GoesPlatformRegistryConflictsRow + if err := rows.Scan(&i.PlatformKey, &i.ProjectID); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const goesPlatformRegistryDeleteMissing = `-- name: GoesPlatformRegistryDeleteMissing :exec +delete from goes_platform_registry r +where r.project_id = $1 +and r.goes_telemetry_source_id = $2 +and not (r.platform_key = any($3::text[])) +` + +type GoesPlatformRegistryDeleteMissingParams struct { + ProjectID uuid.UUID 
`json:"project_id"` + GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"` + PlatformKeys []string `json:"platform_keys"` +} + +func (q *Queries) GoesPlatformRegistryDeleteMissing(ctx context.Context, arg GoesPlatformRegistryDeleteMissingParams) error { + _, err := q.db.Exec(ctx, goesPlatformRegistryDeleteMissing, arg.ProjectID, arg.GoesTelemetrySourceID, arg.PlatformKeys) + return err +} + +const goesPlatformRegistryListByProject = `-- name: GoesPlatformRegistryListByProject :many +select platform_key, platform_id, site_name +from goes_platform_registry +where project_id = $1 +and goes_telemetry_source_id = $2 +order by platform_key +` + +type GoesPlatformRegistryListByProjectParams struct { + ProjectID uuid.UUID `json:"project_id"` + GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"` +} + +type GoesPlatformRegistryListByProjectRow struct { + PlatformKey string `json:"platform_key"` + PlatformID *string `json:"platform_id"` + SiteName *string `json:"site_name"` +} + +func (q *Queries) GoesPlatformRegistryListByProject(ctx context.Context, arg GoesPlatformRegistryListByProjectParams) ([]GoesPlatformRegistryListByProjectRow, error) { + rows, err := q.db.Query(ctx, goesPlatformRegistryListByProject, arg.ProjectID, arg.GoesTelemetrySourceID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GoesPlatformRegistryListByProjectRow{} + for rows.Next() { + var i GoesPlatformRegistryListByProjectRow + if err := rows.Scan(&i.PlatformKey, &i.PlatformID, &i.SiteName); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const goesTelemetryConfigMappingsListForFiles = `-- name: GoesTelemetryConfigMappingsListForFiles :many +select goes_platform_config_file_id, platform_sensor_key, timeseries_id +from goes_telemetry_config_mappings +where goes_platform_config_file_id = any($1::uuid[]) +order by goes_platform_config_file_id, 
platform_sensor_key +` + +func (q *Queries) GoesTelemetryConfigMappingsListForFiles(ctx context.Context, fileIds []uuid.UUID) ([]GoesTelemetryConfigMappings, error) { + rows, err := q.db.Query(ctx, goesTelemetryConfigMappingsListForFiles, fileIds) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GoesTelemetryConfigMappings{} + for rows.Next() { + var i GoesTelemetryConfigMappings + if err := rows.Scan(&i.GoesPlatformConfigFileID, &i.PlatformSensorKey, &i.TimeseriesID); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const goesTelemetryConfigMappingsReplaceForProjectFromMappingSet = `-- name: GoesTelemetryConfigMappingsReplaceForProjectFromMappingSet :exec +with file_ids as ( + select id + from goes_platform_config_file + where project_id = $1 + and goes_telemetry_source_id = $2 +) +, del as ( + delete from goes_telemetry_config_mappings m + using file_ids f + where m.goes_platform_config_file_id = f.id +) +insert into goes_telemetry_config_mappings ( + goes_platform_config_file_id, + platform_sensor_key, + timeseries_id +) +select + e.goes_platform_config_file_id, + e.platform_sensor_key, + e.timeseries_id +from goes_mapping_set_entry e +join file_ids f on f.id = e.goes_platform_config_file_id +where e.mapping_set_id = $3 +on conflict on constraint unique_goes_platform_config_file_id_platform_sensor_key +do update set timeseries_id = excluded.timeseries_id +` + +type GoesTelemetryConfigMappingsReplaceForProjectFromMappingSetParams struct { + ProjectID uuid.UUID `json:"project_id"` + GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"` + MappingSetID uuid.UUID `json:"mapping_set_id"` +} + +func (q *Queries) GoesTelemetryConfigMappingsReplaceForProjectFromMappingSet(ctx context.Context, arg GoesTelemetryConfigMappingsReplaceForProjectFromMappingSetParams) error { + _, err := q.db.Exec(ctx, 
goesTelemetryConfigMappingsReplaceForProjectFromMappingSet, arg.ProjectID, arg.GoesTelemetrySourceID, arg.MappingSetID) + return err +} diff --git a/api/internal/db/models.go b/api/internal/db/models.go index 1558b940..aa921ebc 100644 --- a/api/internal/db/models.go +++ b/api/internal/db/models.go @@ -643,20 +643,64 @@ type EvaluationInstrument struct { InstrumentID *uuid.UUID `json:"instrument_id"` } -type GoesPlatformConfigFile struct { +type GoesCommit struct { ID uuid.UUID `json:"id"` - GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"` ProjectID uuid.UUID `json:"project_id"` - Name string `json:"name"` - Alias string `json:"alias"` - SizeBytes int64 `json:"size_bytes"` - Content string `json:"content"` - Committed bool `json:"committed"` - CommittedAt *time.Time `json:"committed_at"` + GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"` CreatedAt time.Time `json:"created_at"` CreatedBy uuid.UUID `json:"created_by"` - UpdatedAt *time.Time `json:"updated_at"` - UpdatedBy *uuid.UUID `json:"updated_by"` + Status string `json:"status"` + OpendcsResponse []byte `json:"opendcs_response"` + PreviousCommitID *uuid.UUID `json:"previous_commit_id"` + IdempotencyKey *string `json:"idempotency_key"` + MappingSetID *uuid.UUID `json:"mapping_set_id"` +} + +type GoesMappingSet struct { + ID uuid.UUID `json:"id"` + ProjectID uuid.UUID `json:"project_id"` + CreatedAt time.Time `json:"created_at"` + CreatedBy uuid.UUID `json:"created_by"` + ContentHash string `json:"content_hash"` + IdempotencyKey *string `json:"idempotency_key"` +} + +type GoesMappingSetEntry struct { + MappingSetID uuid.UUID `json:"mapping_set_id"` + GoesPlatformConfigFileID uuid.UUID `json:"goes_platform_config_file_id"` + PlatformSensorKey string `json:"platform_sensor_key"` + TimeseriesID *uuid.UUID `json:"timeseries_id"` +} + +type GoesPlatformConfigFile struct { + ID uuid.UUID `json:"id"` + GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"` + ProjectID uuid.UUID 
`json:"project_id"` + Name string `json:"name"` + Alias string `json:"alias"` + SizeBytes int64 `json:"size_bytes"` + Content string `json:"content"` + Committed bool `json:"committed"` + CommittedAt *time.Time `json:"committed_at"` + CreatedAt time.Time `json:"created_at"` + CreatedBy uuid.UUID `json:"created_by"` + UpdatedAt *time.Time `json:"updated_at"` + UpdatedBy *uuid.UUID `json:"updated_by"` + CommittedContent interface{} `json:"committed_content"` + CommittedCommitID *uuid.UUID `json:"committed_commit_id"` + Deleted bool `json:"deleted"` + DeletedAt *time.Time `json:"deleted_at"` + DeletedBy *uuid.UUID `json:"deleted_by"` +} + +type GoesPlatformRegistry struct { + PlatformKey string `json:"platform_key"` + ProjectID uuid.UUID `json:"project_id"` + GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"` + PlatformID *string `json:"platform_id"` + SiteName *string `json:"site_name"` + CommitID uuid.UUID `json:"commit_id"` + UpdatedAt time.Time `json:"updated_at"` } type GoesTelemetryConfigMappings struct { diff --git a/api/internal/db/querier.go b/api/internal/db/querier.go index 7add110e..be984c61 100644 --- a/api/internal/db/querier.go +++ b/api/internal/db/querier.go @@ -116,14 +116,35 @@ type Querier interface { EvaluationListForProject(ctx context.Context, projectID uuid.UUID) ([]VEvaluation, error) EvaluationListForProjectAlertConfig(ctx context.Context, arg EvaluationListForProjectAlertConfigParams) ([]VEvaluation, error) EvaluationUpdate(ctx context.Context, arg EvaluationUpdateParams) error + GoesCommitCreatePending(ctx context.Context, arg GoesCommitCreatePendingParams) (GoesCommit, error) + GoesCommitGetActive(ctx context.Context, arg GoesCommitGetActiveParams) (GoesCommit, error) + GoesCommitGetByID(ctx context.Context, id uuid.UUID) (GoesCommit, error) + GoesCommitMarkActive(ctx context.Context, arg GoesCommitMarkActiveParams) error + GoesCommitMarkFailed(ctx context.Context, arg GoesCommitMarkFailedParams) error + 
GoesMappingSetCreate(ctx context.Context, arg GoesMappingSetCreateParams) (GoesMappingSet, error) + GoesMappingSetEntriesList(ctx context.Context, mappingSetID uuid.UUID) ([]GoesMappingSetEntriesListRow, error) + GoesMappingSetEntryCreateBatch(ctx context.Context, arg []GoesMappingSetEntryCreateBatchParams) (int64, error) GoesPlatformConfigFileCommit(ctx context.Context, arg []GoesPlatformConfigFileCommitParams) *GoesPlatformConfigFileCommitBatchResults + GoesPlatformConfigFileCommitArtifactsUpdate(ctx context.Context, arg GoesPlatformConfigFileCommitArtifactsUpdateParams) error GoesPlatformConfigFileCreate(ctx context.Context, arg GoesPlatformConfigFileCreateParams) (uuid.UUID, error) - GoesPlatformConfigFileDelete(ctx context.Context, id uuid.UUID) error + GoesPlatformConfigFileDelete(ctx context.Context, arg GoesPlatformConfigFileDeleteParams) error GoesPlatformConfigFileGet(ctx context.Context, id uuid.UUID) (GoesPlatformConfigFile, error) + GoesPlatformConfigFileListUncommitedForProject(ctx context.Context, projectID uuid.UUID) (GoesPlatformConfigFile, error) + GoesPlatformConfigFileRestoreForRollback(ctx context.Context, arg GoesPlatformConfigFileRestoreForRollbackParams) error + GoesPlatformConfigFileSoftDeleteNotInSet(ctx context.Context, arg GoesPlatformConfigFileSoftDeleteNotInSetParams) error GoesPlatformConfigFileUpdate(ctx context.Context, arg GoesPlatformConfigFileUpdateParams) error + GoesPlatformConfigFilesListForCommit(ctx context.Context, arg GoesPlatformConfigFilesListForCommitParams) ([]GoesPlatformConfigFilesListForCommitRow, error) + GoesPlatformConfigFilesListForCommitByCommitID(ctx context.Context, arg GoesPlatformConfigFilesListForCommitByCommitIDParams) ([]GoesPlatformConfigFilesListForCommitByCommitIDRow, error) + GoesPlatformRegistryConflicts(ctx context.Context, arg GoesPlatformRegistryConflictsParams) ([]GoesPlatformRegistryConflictsRow, error) + GoesPlatformRegistryDeleteMissing(ctx context.Context, arg 
GoesPlatformRegistryDeleteMissingParams) error + GoesPlatformRegistryListByProject(ctx context.Context, arg GoesPlatformRegistryListByProjectParams) ([]GoesPlatformRegistryListByProjectRow, error) + GoesPlatformRegistryUpsert(ctx context.Context, arg []GoesPlatformRegistryUpsertParams) *GoesPlatformRegistryUpsertBatchResults GoesTelemetryConfigMappingsCreateBatch(ctx context.Context, arg []GoesTelemetryConfigMappingsCreateBatchParams) *GoesTelemetryConfigMappingsCreateBatchBatchResults + GoesTelemetryConfigMappingsDeleteBatch(ctx context.Context, arg []GoesTelemetryConfigMappingsDeleteBatchParams) *GoesTelemetryConfigMappingsDeleteBatchBatchResults GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile(ctx context.Context, goesPlatformConfigFileID uuid.UUID) error GoesTelemetryConfigMappingsList(ctx context.Context, goesPlatformConfigFileID uuid.UUID) ([]GoesTelemetryConfigMappings, error) + GoesTelemetryConfigMappingsListForFiles(ctx context.Context, fileIds []uuid.UUID) ([]GoesTelemetryConfigMappings, error) + GoesTelemetryConfigMappingsReplaceForProjectFromMappingSet(ctx context.Context, arg GoesTelemetryConfigMappingsReplaceForProjectFromMappingSetParams) error GoesTelemetrySourceList(ctx context.Context) ([]VGoesTelemetrySource, error) HeartbeatCreate(ctx context.Context, argTime time.Time) (time.Time, error) HeartbeatGetLatest(ctx context.Context) (time.Time, error) diff --git a/api/internal/dto/goes.go b/api/internal/dto/goes.go index 79e3df69..e8aad6fa 100644 --- a/api/internal/dto/goes.go +++ b/api/internal/dto/goes.go @@ -3,6 +3,7 @@ package dto import ( "time" + "github.com/danielgtaylor/huma/v2" "github.com/google/uuid" ) @@ -14,4 +15,20 @@ type GoesTelemetryConfigMappingDTO struct { type GoesPlatformConfigFileCommitDTO struct { ID uuid.UUID `json:"id"` CommittedAt time.Time `json:"committed_at"` + CommitID uuid.UUID `json:"commit_id"` } + +type XmlPlatformConfigForm struct { + PlatformConfig huma.FormFile `form:"file" contentType:"text/xml" 
required:"true"` + Alias string `form:"alias"` + DryRun bool `form:"dry_run"` + UpdateType XmlPlatformConfigUpdateType `form:"update_type" enum:"preserve_all,delete_not_found,delete_all" default:"preserve_all"` +} + +type XmlPlatformConfigUpdateType string + +const ( + XmlPlatformConfigUpdateTypePreserveAll XmlPlatformConfigUpdateType = "preserve_all" + XmlPlatformConfigUpdateTypeDeleteNotFound XmlPlatformConfigUpdateType = "delete_not_found" + XmlPlatformConfigUpdateTypeDeleteAll XmlPlatformConfigUpdateType = "delete_all" +) diff --git a/api/internal/handler/goes.go b/api/internal/handler/goes.go index 0872f393..4d6f404e 100644 --- a/api/internal/handler/goes.go +++ b/api/internal/handler/goes.go @@ -2,10 +2,10 @@ package handler import ( "context" + "crypto/subtle" "errors" "io" "net/http" - "time" "github.com/USACE/instrumentation-api/api/v4/internal/ctxkey" "github.com/USACE/instrumentation-api/api/v4/internal/db" @@ -33,8 +33,7 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { Path: "/domains/goes", Description: "list of goes client instances (opendcs)", Tags: goesTags, - }, func(ctx context.Context, input *struct { - }) (*Response[[]db.VGoesTelemetrySource], error) { + }, func(ctx context.Context, input *struct{}) (*Response[[]db.VGoesTelemetrySource], error) { aa, err := h.DBService.GoesTelemetrySourceList(ctx) if err != nil { return nil, httperr.InternalServerError(err) @@ -47,7 +46,7 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { OperationID: "goes-telemetry-config-get", Method: http.MethodGet, Path: "/projects/{project_id}/goes/{telemetry_source_id}/configs/{telemetry_config_id}", - Description: "gets a platform configuraiton xml file", + Description: "gets a platform configuration xml file", Tags: goesTags, }, func(ctx context.Context, input *struct { ProjectIDParam @@ -61,13 +60,6 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { return NewResponse(a), nil }) - type XmlPlatformConfig struct { - PlatformConfig 
huma.FormFile `form:"file" contentType:"text/xml" required:"true"` - Alias string `form:"alias"` - DryRun bool `form:"dry_run"` - DeleteOldMappings bool `form:"delete_old_mappings"` - } - huma.Register(api, huma.Operation{ Middlewares: h.ProjectAdmin, OperationID: "goes-telemetry-config-create", @@ -78,7 +70,7 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { }, func(ctx context.Context, input *struct { ProjectIDParam TelemetrySourceIDParam - RawBody huma.MultipartFormFiles[XmlPlatformConfig] + RawBody huma.MultipartFormFiles[dto.XmlPlatformConfigForm] }) (*Response[service.DbImportResponse], error) { p := ctx.Value(ctxkey.Profile).(db.VProfile) formData := input.RawBody.Data() @@ -86,22 +78,25 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { if err != nil { return nil, httperr.BadRequest(err) } - if xmlDoc == nil { + if len(xmlDoc) == 0 { return nil, httperr.BadRequest(errors.New("uploaded file is empty")) } alias := formData.Alias if alias == "" { alias = formData.PlatformConfig.Filename } - a, err := h.DBService.GoesPlatformConfigFileCreate(ctx, db.GoesPlatformConfigFileCreateParams{ - GoesTelemetrySourceID: input.TelemetrySourceID.UUID, - ProjectID: input.ProjectID.UUID, - Name: formData.PlatformConfig.Filename, - SizeBytes: formData.PlatformConfig.Size, - Alias: alias, - Content: string(xmlDoc), - CreatedBy: p.ID, - }, formData.DryRun) + a, err := h.DBService.GoesPlatformConfigFileCreate(ctx, service.GoesPlatformConfigFileCreateParams{ + DryRun: formData.DryRun, + GoesPlatformConfigFileCreateParams: db.GoesPlatformConfigFileCreateParams{ + GoesTelemetrySourceID: input.TelemetrySourceID.UUID, + ProjectID: input.ProjectID.UUID, + Name: formData.PlatformConfig.Filename, + SizeBytes: formData.PlatformConfig.Size, + Alias: alias, + Content: string(xmlDoc), + CreatedBy: p.ID, + }, + }) if err != nil { return nil, httperr.InternalServerError(err) } @@ -113,40 +108,43 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { OperationID: 
"goes-telemetry-config-update", Method: http.MethodPut, Path: "/projects/{project_id}/goes/{telemetry_source_id}/configs/{telemetry_config_id}", - Description: "lists goes telemetry configurations", + Description: "updates a goes telemetry configuration", Tags: goesTags, }, func(ctx context.Context, input *struct { ProjectIDParam TelemetrySourceIDParam TelemetryConfigIDParam - RawBody huma.MultipartFormFiles[XmlPlatformConfig] + RawBody huma.MultipartFormFiles[dto.XmlPlatformConfigForm] }) (*Response[service.DbImportResponse], error) { - p := ctx.Value(ctxkey.Profile).(db.VProfile) formData := input.RawBody.Data() xmlDoc, err := io.ReadAll(formData.PlatformConfig) if err != nil { return nil, httperr.BadRequest(err) } - if xmlDoc == nil { + if len(xmlDoc) == 0 { return nil, httperr.BadRequest(errors.New("uploaded file is empty")) } - now := time.Now().UTC() + alias := formData.Alias if alias == "" { alias = formData.PlatformConfig.Filename } - a, err := h.DBService.GoesPlatformConfigFileUpdate(ctx, db.GoesPlatformConfigFileUpdateParams{ - ID: input.TelemetryConfigID.UUID, - Name: formData.PlatformConfig.Filename, - Alias: alias, - SizeBytes: formData.PlatformConfig.Size, - Content: string(xmlDoc), - UpdatedBy: &p.ID, - UpdatedAt: &now, - }, formData.DryRun, formData.DeleteOldMappings) + + a, err := h.DBService.GoesPlatformConfigFileUpdate(ctx, service.GoesPlatformConfigFileUpdateParams{ + DryRun: formData.DryRun, + UpdateType: formData.UpdateType, + GoesPlatformConfigFileUpdateParams: db.GoesPlatformConfigFileUpdateParams{ + ID: input.TelemetryConfigID.UUID, + Name: formData.PlatformConfig.Filename, + Alias: alias, + SizeBytes: formData.PlatformConfig.Size, + Content: string(xmlDoc), + }, + }) if err != nil { return nil, httperr.InternalServerError(err) } + return NewResponse(a), nil }) @@ -155,14 +153,18 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { OperationID: "goes-telemetry-config-delete", Method: http.MethodDelete, Path: 
"/projects/{project_id}/goes/{telemetry_source_id}/configs/{telemetry_config_id}", - Description: "delete a goes telemetry configuration", + Description: "soft-delete a goes telemetry configuration (removal will be applied on next commit)", Tags: goesTags, }, func(ctx context.Context, input *struct { ProjectIDParam TelemetrySourceIDParam TelemetryConfigIDParam }) (*Response[struct{}], error) { - if err := h.DBService.GoesPlatformConfigFileDelete(ctx, input.TelemetryConfigID.UUID); err != nil { + p := ctx.Value(ctxkey.Profile).(db.VProfile) + if err := h.DBService.GoesPlatformConfigFileDelete(ctx, db.GoesPlatformConfigFileDeleteParams{ + DeletedBy: &p.ID, + ID: input.TelemetryConfigID.UUID, + }); err != nil { return nil, httperr.InternalServerError(err) } return nil, nil @@ -198,7 +200,7 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { ProjectIDParam TelemetrySourceIDParam TelemetryConfigIDParam - Body []dto.GoesTelemetryConfigMappingDTO + Body []dto.GoesTelemetryConfigMappingDTO `contentType:"application/json"` }) (*Response[struct{}], error) { if err := h.DBService.GoesTelemetryConfigMappingsUpdate(ctx, input.TelemetryConfigID.UUID, input.Body); err != nil { return nil, httperr.InternalServerError(err) @@ -206,34 +208,92 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { return nil, nil }) + // Validate all uncommitted (desired) XML for this project/source. 
huma.Register(api, huma.Operation{ Middlewares: h.ProjectAdmin, OperationID: "goes-telemetry-validate", - Method: http.MethodPut, + Method: http.MethodPost, Path: "/projects/{project_id}/goes/{telemetry_source_id}/validate", - Description: "updates goes telemetry timeseries mappings", + Description: "validates all uncommitted platform xml files for the project", Tags: goesTags, }, func(ctx context.Context, input *struct { ProjectIDParam TelemetrySourceIDParam }) (*Response[service.DbImportResponse], error) { - // TODO - return NewResponse(service.DbImportResponse{}), nil + // Assumes a service method which validates the project desired set + // by calling the OpenDCS wrapper with validate-only. + resp, err := h.DBService.GoesValidateProjectUncommitted(ctx, service.GoesValidateProjectUncommittedParams{ + ProjectID: input.ProjectID.UUID, + GoesTelemetrySourceID: input.TelemetrySourceID.UUID, + HTTPClient: h.HTTPClient, + }) + if err != nil { + return nil, httperr.Message(http.StatusConflict, err.Error()) + } + return NewResponse(resp), nil }) + // Commit desired active set for project/source. 
huma.Register(api, huma.Operation{ Middlewares: h.ProjectAdmin, OperationID: "goes-telemetry-commit", Method: http.MethodPost, Path: "/projects/{project_id}/goes/{telemetry_source_id}/commit", - Description: "starts a commit action to update an opendcs routescheduler", + Description: "commits the desired project configuration to the shared OpenDCS instance", Tags: goesTags, }, func(ctx context.Context, input *struct { ProjectIDParam TelemetrySourceIDParam - }) (*Response[struct{}], error) { - // TODO - return nil, nil + }) (*Response[service.GoesTelemetryCommitResponse], error) { + p := ctx.Value(ctxkey.Profile).(db.VProfile) + + if matches := subtle.ConstantTimeCompare([]byte(h.Config.ApplicationKey), []byte("")); matches == 1 || h.Config.OpenDCSWrapperURL == "" { + return nil, httperr.InternalServerError(errors.New("missing OPENDCS_WRAPPER_URL and/or APPLICATION_KEY")) + } + + a, err := h.DBService.GoesCommitEntireSet(ctx, h.HTTPClient, service.GoesCommitEntireSetParams{ + ProjectID: input.ProjectID.UUID, + SourceID: input.TelemetrySourceID.UUID, + CreatedBy: p.ID, + OpendcsBaseURL: h.Config.OpenDCSWrapperURL, + OpendcsAuthToken: h.Config.ApplicationKey, + }) + if err != nil { + return nil, httperr.Message(http.StatusConflict, err.Error()) + } + + return NewResponse(a), nil + }) + + // Project-scoped rollback to previous commit (restores project mappings + xml set). 
+ huma.Register(api, huma.Operation{ + Middlewares: h.ProjectAdmin, + OperationID: "goes-telemetry-rollback", + Method: http.MethodPost, + Path: "/projects/{project_id}/goes/{telemetry_source_id}/rollback", + Description: "rolls back project configuration (xml + mappings) to the previous commit", + Tags: goesTags, + }, func(ctx context.Context, input *struct { + ProjectIDParam + TelemetrySourceIDParam + }) (*Response[service.GoesTelemetryCommitResponse], error) { + p := ctx.Value(ctxkey.Profile).(db.VProfile) + + if matches := subtle.ConstantTimeCompare([]byte(h.Config.ApplicationKey), []byte("")); matches == 1 || h.Config.OpenDCSWrapperURL == "" { + return nil, httperr.InternalServerError(errors.New("missing OPENDCS_WRAPPER_URL and/or APPLICATION_KEY")) + } + + a, err := h.DBService.GoesRollbackProjectToPrevious(ctx, h.HTTPClient, service.GoesCommitEntireSetParams{ + ProjectID: input.ProjectID.UUID, + SourceID: input.TelemetrySourceID.UUID, + CreatedBy: p.ID, + OpendcsBaseURL: h.Config.OpenDCSWrapperURL, + OpendcsAuthToken: h.Config.ApplicationKey, + }) + if err != nil { + return nil, httperr.Message(http.StatusConflict, err.Error()) + } + return NewResponse(a), nil }) huma.Register(api, huma.Operation{ @@ -245,7 +305,7 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { Tags: goesTags, }, func(ctx context.Context, input *struct { TelemetrySourceIDParam - Body []dto.GoesPlatformConfigFileCommitDTO + Body []dto.GoesPlatformConfigFileCommitDTO `contentType:"application/json"` }) (*struct{}, error) { if err := h.DBService.GoesPlatformConfigCommit(ctx, input.Body); err != nil { return nil, httperr.InternalServerError(err) diff --git a/api/internal/handler/handler.go b/api/internal/handler/handler.go index 303a3a72..421ea384 100644 --- a/api/internal/handler/handler.go +++ b/api/internal/handler/handler.go @@ -1,3 +1,4 @@ +// Package handler provides common types and utilities for HTTP handlers. 
package handler import ( diff --git a/api/internal/service/db.go b/api/internal/service/db.go index 3b3527d3..d10c50fa 100644 --- a/api/internal/service/db.go +++ b/api/internal/service/db.go @@ -1,3 +1,4 @@ +// Package service implements services that use the database. package service import ( diff --git a/api/internal/service/goes.go b/api/internal/service/goes.go index b1542039..c4e28095 100644 --- a/api/internal/service/goes.go +++ b/api/internal/service/goes.go @@ -1,14 +1,13 @@ package service import ( - "bytes" "context" "encoding/json" "encoding/xml" "errors" "fmt" "io" - "net/http" + "sort" "strings" "time" @@ -17,130 +16,204 @@ import ( "github.com/google/uuid" ) -type Platform struct { - XMLName xml.Name `xml:"Platform"` - PlatformConfig PlatformConfig `xml:"PlatformConfig"` +type DbImportResponse struct { + PlatformFileID *uuid.UUID `json:"platform_file_id,omitempty"` + Response json.RawMessage `json:"response"` } -type PlatformConfig struct { - ConfigSensors []ConfigSensor `xml:"ConfigSensor"` +type xmlValidationResult struct { + valid bool `json:"-"` + Valid bool `json:"valid"` + Message string `json:"message,omitempty"` + SensorCount int `json:"sensor_count"` + SensorKeys []string `json:"sensor_keys,omitempty"` + ValidatedAt string `json:"validated_at"` + Warnings []string `json:"warnings,omitempty"` + ParseRootTag string `json:"root_tag,omitempty"` } -type ConfigSensor struct { - SensorName string `xml:"SensorName"` - SensorNumber string `xml:"SensorNumber"` +type GoesPlatformConfigFileCreateParams struct { + DryRun bool + db.GoesPlatformConfigFileCreateParams } -type DbImportCommandType string - -type DbImportResponse struct { - PlatformFileID *uuid.UUID `json:"platform_file_id,omitempty"` - Response json.RawMessage `json:"response"` -} +func (s *DBService) GoesPlatformConfigFileCreate(ctx context.Context, arg GoesPlatformConfigFileCreateParams) (DbImportResponse, error) { + var out DbImportResponse -// GoesPlatformConfigFileCreate validates and 
creates a platform configuration file for a given MIDAS project -func (s *DBService) GoesPlatformConfigFileCreate(ctx context.Context, arg db.GoesPlatformConfigFileCreateParams, dryRun bool) (DbImportResponse, error) { - names, err := extractSensorNames(arg.Content) + root, names, err := extractSensorNames(arg.Content) if err != nil { - return DbImportResponse{}, err + out.Response, _ = json.Marshal(xmlValidationResult{ + Valid: false, + Message: err.Error(), + SensorCount: 0, + SensorKeys: nil, + ValidatedAt: time.Now().UTC().Format(time.RFC3339Nano), + Warnings: []string{"xml is not structurally valid; wrapper validation not attempted"}, + }) + return out, err } - // TODO: proxy request to opendcs service to validate dbimport - var a DbImportResponse - // http.Get... + out.Response, err = buildLocalValidationResponse(root, names) + if err != nil { + return out, err + } - if dryRun { - return a, nil + if arg.DryRun { + return out, nil } tx, err := s.db.Begin(ctx) if err != nil { - return a, err + return out, err } defer s.TxDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - newID, err := qtx.GoesPlatformConfigFileCreate(ctx, arg) + newID, err := qtx.GoesPlatformConfigFileCreate(ctx, arg.GoesPlatformConfigFileCreateParams) if err != nil { - return a, fmt.Errorf("GoesPlatformConfigFileCreate %w", err) + return out, fmt.Errorf("GoesPlatformConfigFileCreate %w", err) } - a.PlatformFileID = &newID + out.PlatformFileID = &newID - mm := make([]db.GoesTelemetryConfigMappingsCreateBatchParams, 0, len(names)) - for _, n := range names { - mm = append(mm, db.GoesTelemetryConfigMappingsCreateBatchParams{ + batch := make([]db.GoesTelemetryConfigMappingsCreateBatchParams, 0, len(names)) + for _, k := range names { + batch = append(batch, db.GoesTelemetryConfigMappingsCreateBatchParams{ GoesPlatformConfigFileID: newID, - PlatformSensorKey: n, + PlatformSensorKey: k, TimeseriesID: nil, }) } - qtx.GoesTelemetryConfigMappingsCreateBatch(ctx, mm).Exec(batchExecErr(&err)) + 
qtx.GoesTelemetryConfigMappingsCreateBatch(ctx, batch).Exec(batchExecErr(&err)) if err != nil { - return a, fmt.Errorf("GoesTelemetryConfigMappingsCreateBatch %w", err) + return out, fmt.Errorf("GoesTelemetryConfigMappingsCreateBatch %w", err) } - return a, tx.Commit(ctx) + return out, tx.Commit(ctx) } -// TODO: return validation results -func (s *DBService) GoesPlatformConfigFileUpdate(ctx context.Context, arg db.GoesPlatformConfigFileUpdateParams, dryRun, deleteOldMappings bool) (DbImportResponse, error) { - var a DbImportResponse - names, err := extractSensorNames(arg.Content) +type GoesPlatformConfigFileUpdateParams struct { + DryRun bool + UpdateType dto.XmlPlatformConfigUpdateType + db.GoesPlatformConfigFileUpdateParams +} + +func (s *DBService) GoesPlatformConfigFileUpdate(ctx context.Context, arg GoesPlatformConfigFileUpdateParams) (DbImportResponse, error) { + var out DbImportResponse + + root, names, err := extractSensorNames(arg.Content) if err != nil { - return a, err + out.Response, _ = json.Marshal(xmlValidationResult{ + Valid: false, + Message: err.Error(), + SensorCount: 0, + SensorKeys: nil, + ValidatedAt: time.Now().UTC().Format(time.RFC3339Nano), + Warnings: []string{"xml is not structurally valid; wrapper validation not attempted"}, + }) + return out, err } - // TODO: proxy request to opendcs service to validate dbimport + out.Response, err = buildLocalValidationResponse(root, names) + if err != nil { + return out, err + } - if dryRun { - // TODO: respond with validation result / error - return a, errors.New("TODO") + if arg.DryRun { + return out, nil } tx, err := s.db.Begin(ctx) if err != nil { - return a, err + return out, err } defer s.TxDo(ctx, tx.Rollback) qtx := s.WithTx(tx) - if err := qtx.GoesPlatformConfigFileUpdate(ctx, arg); err != nil { - return a, fmt.Errorf("GoesPlatformConfigFileUpdate %w", err) + if err := qtx.GoesPlatformConfigFileUpdate(ctx, arg.GoesPlatformConfigFileUpdateParams); err != nil { + return out, 
fmt.Errorf("GoesPlatformConfigFileUpdate %w", err) } - if deleteOldMappings { + if arg.UpdateType == dto.XmlPlatformConfigUpdateTypeDeleteAll { if err := qtx.GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile(ctx, arg.ID); err != nil { - return a, fmt.Errorf("GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile %w", err) + return out, fmt.Errorf("GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile %w", err) + } + + batch := make([]db.GoesTelemetryConfigMappingsCreateBatchParams, 0, len(names)) + for _, k := range names { + batch = append(batch, db.GoesTelemetryConfigMappingsCreateBatchParams{ + GoesPlatformConfigFileID: arg.ID, + PlatformSensorKey: k, + TimeseriesID: nil, + }) } + + qtx.GoesTelemetryConfigMappingsCreateBatch(ctx, batch).Exec(batchExecErr(&err)) + if err != nil { + return out, fmt.Errorf("GoesTelemetryConfigMappingsCreateBatch %w", err) + } + + return out, tx.Commit(ctx) + } + + existing, err := qtx.GoesTelemetryConfigMappingsList(ctx, arg.ID) + if err != nil { + return out, fmt.Errorf("GoesTelemetryConfigMappingsList %w", err) } - mm := make([]db.GoesTelemetryConfigMappingsCreateBatchParams, 0, len(names)) - for _, n := range names { - mm = append(mm, db.GoesTelemetryConfigMappingsCreateBatchParams{ + existingKeys := make(map[string]struct{}, len(existing)) + for _, m := range existing { + existingKeys[m.PlatformSensorKey] = struct{}{} + } + + var newMappings []db.GoesTelemetryConfigMappingsCreateBatchParams + var removedMappings []db.GoesTelemetryConfigMappingsDeleteBatchParams + for _, k := range names { + if _, ok := existingKeys[k]; ok { + delete(existingKeys, k) + continue + } + newMappings = append(newMappings, db.GoesTelemetryConfigMappingsCreateBatchParams{ GoesPlatformConfigFileID: arg.ID, - PlatformSensorKey: n, + PlatformSensorKey: k, TimeseriesID: nil, }) } + if arg.UpdateType == dto.XmlPlatformConfigUpdateTypeDeleteNotFound { + for name := range existingKeys { + removedMappings = append(removedMappings, 
db.GoesTelemetryConfigMappingsDeleteBatchParams{ + GoesPlatformConfigFileID: arg.ID, + PlatformSensorKey: name, + }) + } + } - qtx.GoesTelemetryConfigMappingsCreateBatch(ctx, mm).Exec(batchExecErr(&err)) - if err != nil { - return a, fmt.Errorf("GoesTelemetryConfigMappingsCreateBatch %w", err) + if len(newMappings) > 0 { + qtx.GoesTelemetryConfigMappingsCreateBatch(ctx, newMappings).Exec(batchExecErr(&err)) + if err != nil { + return out, fmt.Errorf("GoesTelemetryConfigMappingsCreateBatch %w", err) + } + } + if len(removedMappings) > 0 { + qtx.GoesTelemetryConfigMappingsDeleteBatch(ctx, removedMappings).Exec(batchExecErr(&err)) + if err != nil { + return out, fmt.Errorf("GoesTelemetryConfigMappingsDeleteBatch %w", err) + } } - return a, tx.Commit(ctx) + return out, tx.Commit(ctx) } func (s *DBService) GoesTelemetryConfigMappingsUpdate(ctx context.Context, cfgID uuid.UUID, mappings []dto.GoesTelemetryConfigMappingDTO) error { - mm := make([]db.GoesTelemetryConfigMappingsCreateBatchParams, len(mappings)) + batch := make([]db.GoesTelemetryConfigMappingsCreateBatchParams, len(mappings)) for i, m := range mappings { - mm[i] = db.GoesTelemetryConfigMappingsCreateBatchParams{ + batch[i] = db.GoesTelemetryConfigMappingsCreateBatchParams{ GoesPlatformConfigFileID: cfgID, PlatformSensorKey: m.PlatformSensorKey, TimeseriesID: m.TimeseriesID, } } + tx, err := s.db.Begin(ctx) if err != nil { return err @@ -152,7 +225,7 @@ func (s *DBService) GoesTelemetryConfigMappingsUpdate(ctx context.Context, cfgID return fmt.Errorf("GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile %w", err) } - qtx.GoesTelemetryConfigMappingsCreateBatch(ctx, mm).Exec(batchExecErr(&err)) + qtx.GoesTelemetryConfigMappingsCreateBatch(ctx, batch).Exec(batchExecErr(&err)) if err != nil { return fmt.Errorf("GoesTelemetryConfigMappingsCreateBatch %w", err) } @@ -160,12 +233,30 @@ func (s *DBService) GoesTelemetryConfigMappingsUpdate(ctx context.Context, cfgID return tx.Commit(ctx) } -func 
extractSensorNames(xmlStr string) ([]string, error) { +type PlatformXML struct { + XMLName xml.Name `xml:"Platform"` + PlatformConfig PlatformConfigXML `xml:"PlatformConfig"` +} + +type PlatformConfigXML struct { + ConfigSensors []ConfigSensorXML `xml:"ConfigSensor"` +} + +type ConfigSensorXML struct { + SensorName string `xml:"SensorName"` + SensorNumber string `xml:"SensorNumber,attr"` +} + +func extractSensorNames(xmlStr string) (string, []string, error) { dec := xml.NewDecoder(strings.NewReader(xmlStr)) + for { tok, err := dec.Token() if err != nil { - return nil, fmt.Errorf("failed to read xml token: %w", err) + if errors.Is(err, io.EOF) { + return "", nil, fmt.Errorf("empty xml document") + } + return "", nil, fmt.Errorf("failed to read xml token: %w", err) } start, ok := tok.(xml.StartElement) @@ -175,110 +266,74 @@ func extractSensorNames(xmlStr string) ([]string, error) { switch start.Name.Local { case "Platform": - var p Platform + var p PlatformXML if err := dec.DecodeElement(&p, &start); err != nil { - return nil, fmt.Errorf("failed to decode Platform: %w", err) + return "Platform", nil, fmt.Errorf("failed to decode PlatformXML: %w", err) } - return extractFromPlatforms([]Platform{p}), nil + return "Platform", normalizeSensorKeys(extractFromPlatforms([]PlatformXML{p})), nil + case "Database": var wrapper struct { - Platforms []Platform `xml:"Platform"` + Platforms []PlatformXML `xml:"Platform"` } if err := dec.DecodeElement(&wrapper, &start); err != nil { - return nil, fmt.Errorf("failed to decode Database: %w", err) + return "Database", nil, fmt.Errorf("failed to decode Database: %w", err) } - return extractFromPlatforms(wrapper.Platforms), nil + return "Database", normalizeSensorKeys(extractFromPlatforms(wrapper.Platforms)), nil default: - return nil, fmt.Errorf("unexpected root element <%s>", start.Name.Local) + return start.Name.Local, nil, fmt.Errorf("unexpected root element <%s>", start.Name.Local) } } } -func extractFromPlatforms(platforms 
[]Platform) []string { - var result []string +func extractFromPlatforms(platforms []PlatformXML) []string { + out := make([]string, 0) for _, platform := range platforms { for _, sensor := range platform.PlatformConfig.ConfigSensors { - result = append(result, sensor.SensorName+"."+sensor.SensorNumber) + name := strings.TrimSpace(sensor.SensorName) + num := strings.TrimSpace(sensor.SensorNumber) + if name == "" || num == "" { + continue + } + out = append(out, name+"."+num) } } - return result -} - -type OpendcsImportResponse struct { - Status string `json:"status"` - ValidateLog string `json:"validate_log,omitempty"` - ImportLog string `json:"import_log,omitempty"` - CommandOutput string `json:"command_output,omitempty"` - Error string `json:"error,omitempty"` -} - -type opendcsImportRequest struct { - Files []string `json:"files"` - ValidateOnly bool `json:"validate_only"` + return out } -type OpendcsImportParams struct { - OpendcsBaseURL string - OpendcsAuthToken string - opendcsImportRequest -} - -func (s *DBService) OpendcsImport(ctx context.Context, arg OpendcsImportParams) (json.RawMessage, error) { - if arg.OpendcsBaseURL == "" { - return nil, fmt.Errorf("opendcsBaseURL not configured") - } - if arg.OpendcsAuthToken == "" { - return nil, fmt.Errorf("opendcsAuthToken not configured") - } - - reqBody, err := json.Marshal(opendcsImportRequest{ - Files: arg.Files, - ValidateOnly: arg.ValidateOnly, - }) - if err != nil { - return nil, err - } +func normalizeSensorKeys(keys []string) []string { + seen := make(map[string]struct{}, len(keys)) + out := make([]string, 0, len(keys)) - u := strings.TrimRight(arg.OpendcsBaseURL, "/") + "/import" - httpReq, err := http.NewRequestWithContext(ctx, http.MethodPost, u, bytes.NewReader(reqBody)) - if err != nil { - return nil, err + for _, k := range keys { + k = strings.TrimSpace(k) + if k == "" { + continue + } + if _, ok := seen[k]; ok { + continue + } + seen[k] = struct{}{} + out = append(out, k) } - 
httpReq.Header.Set("content-type", "application/json") - q := httpReq.URL.Query() - q.Set("key", arg.OpendcsAuthToken) - httpReq.URL.RawQuery = q.Encode() + sort.Strings(out) + return out +} - client := &http.Client{Timeout: 5 * time.Minute} - resp, err := client.Do(httpReq) - if err != nil { - return nil, err +func buildLocalValidationResponse(root string, sensorKeys []string) (json.RawMessage, error) { + res := xmlValidationResult{ + Valid: true, + SensorCount: len(sensorKeys), + SensorKeys: sensorKeys, + ValidatedAt: time.Now().UTC().Format(time.RFC3339Nano), + Warnings: []string{"wrapper dbimport validation not executed for upload/update endpoints"}, + ParseRootTag: root, } - defer resp.Body.Close() - - bodyBytes, err := io.ReadAll(resp.Body) + b, err := json.Marshal(res) if err != nil { return nil, err } - - if resp.StatusCode < 200 || resp.StatusCode >= 300 { - return json.RawMessage(bodyBytes), fmt.Errorf("opendcs wrapper /import failed: status=%d body=%s", resp.StatusCode, string(bodyBytes)) - } - - return json.RawMessage(bodyBytes), nil -} - -func (s *DBService) GoesPlatformConfigCommit(ctx context.Context, arg []dto.GoesPlatformConfigFileCommitDTO) error { - bb := make([]db.GoesPlatformConfigFileCommitParams, len(arg)) - for idx, b := range arg { - bb[idx] = db.GoesPlatformConfigFileCommitParams{ - ID: b.ID, - CommittedAt: &b.CommittedAt, - } - } - var err error - s.Queries.GoesPlatformConfigFileCommit(ctx, bb).Exec(batchExecErr(&err)) - return err + return b, nil } diff --git a/api/internal/service/goes_commit.go b/api/internal/service/goes_commit.go new file mode 100644 index 00000000..e0719c4d --- /dev/null +++ b/api/internal/service/goes_commit.go @@ -0,0 +1,785 @@ +package service + +import ( + "bytes" + "context" + "crypto/sha256" + "encoding/hex" + "encoding/json" + "fmt" + "io" + "mime/multipart" + "net/http" + "sort" + "strings" + "time" + + "github.com/USACE/instrumentation-api/api/v4/internal/db" + 
"github.com/USACE/instrumentation-api/api/v4/internal/dto" + "github.com/google/uuid" +) + +type OpendcsCommitDeleteMode string + +const ( + OpendcsCommitDeleteModeID OpendcsCommitDeleteMode = "id" + OpendcsCommitDeleteModeSite OpendcsCommitDeleteMode = "site" +) + +type OpendcsCommitDelete struct { + Mode OpendcsCommitDeleteMode `json:"mode"` + Value string `json:"value"` // platform id or site name +} + +type OpendcsCommitFile struct { + FileID uuid.UUID `json:"file_id"` + Name string `json:"name"` + Alias string `json:"alias"` + XML string `json:"xml"` + Checksum string `json:"checksum"` +} + +type OpendcsCommitRequest struct { + CommitID uuid.UUID `json:"commit_id"` + ProjectID uuid.UUID `json:"project_id"` + SourceID uuid.UUID `json:"goes_telemetry_source_id"` + Files []OpendcsCommitFile `json:"files"` + Deletes []OpendcsCommitDelete `json:"deletes"` +} + +type OpendcsCommitResponse struct { + Status string `json:"status"` // "ok"|"error" + Log string `json:"log,omitempty"` + Data json.RawMessage `json:"data,omitempty"` +} + +type GoesCommitEntireSetParams struct { + ProjectID uuid.UUID + SourceID uuid.UUID + CreatedBy uuid.UUID + OpendcsBaseURL string + OpendcsAuthToken string +} + +type OpendcsCommitParams struct { + OpendcsBaseURL string + OpendcsAuthToken string + OpendcsCommitRequest OpendcsCommitRequest +} + +type platformIdentity struct { + PlatformID string + SiteName string +} + +func platformKey(platformID, site string) (string, error) { + platformID = strings.TrimSpace(platformID) + site = strings.TrimSpace(site) + if platformID != "" { + return "platform:" + strings.ToLower(platformID), nil + } + if site != "" { + return "site:" + strings.ToLower(site), nil + } + return "", fmt.Errorf("missing platform_id and site") +} + +type GoesTelemetryCommitResponse struct { + CommitID uuid.UUID `json:"commit_id"` + RawResponse json.RawMessage `json:"raw_response"` +} + +func (s *DBService) GoesCommitEntireSet(ctx context.Context, httpClient *http.Client, arg 
GoesCommitEntireSetParams) (GoesTelemetryCommitResponse, error) { + var a GoesTelemetryCommitResponse + tx, err := s.db.Begin(ctx) + if err != nil { + return a, err + } + defer s.TxDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + var prevCommitID *uuid.UUID + prev, err := qtx.GoesCommitGetActive(ctx, db.GoesCommitGetActiveParams{ + ProjectID: arg.ProjectID, + GoesTelemetrySourceID: arg.SourceID, + }) + if err == nil { + prevCommitID = &prev.ID + } + + files, err := qtx.GoesPlatformConfigFilesListForCommit(ctx, db.GoesPlatformConfigFilesListForCommitParams{ + ProjectID: arg.ProjectID, + GoesTelemetrySourceID: arg.SourceID, + }) + if err != nil { + return a, fmt.Errorf("GoesPlatformConfigFilesListForCommit: %w", err) + } + if len(files) == 0 { + return a, fmt.Errorf("no platform config files found for project/source") + } + + desiredKeys := make(map[string]platformIdentity, len(files)) + platformKeys := make([]string, 0, len(files)) + for _, f := range files { + pid, site, err := extractPlatformIDAndSite([]byte(f.Content)) + if err != nil { + return a, fmt.Errorf("extract platform id/site for file %s: %w", f.ID, err) + } + k, err := platformKey(pid, site) + if err != nil { + return a, fmt.Errorf("platform key for file %s: %w", f.ID, err) + } + if _, exists := desiredKeys[k]; !exists { + platformKeys = append(platformKeys, k) + } + desiredKeys[k] = platformIdentity{PlatformID: pid, SiteName: site} + } + + conflicts, err := qtx.GoesPlatformRegistryConflicts(ctx, db.GoesPlatformRegistryConflictsParams{ + GoesTelemetrySourceID: arg.SourceID, + ProjectID: arg.ProjectID, + PlatformKeys: platformKeys, + }) + if err != nil { + return a, fmt.Errorf("GoesPlatformRegistryConflicts: %w", err) + } + if len(conflicts) > 0 { + c := conflicts[0] + return a, fmt.Errorf("platform ownership conflict for %q (owned by project %s)", c.PlatformKey, c.ProjectID) + } + + owned, err := qtx.GoesPlatformRegistryListByProject(ctx, db.GoesPlatformRegistryListByProjectParams{ + ProjectID: 
arg.ProjectID, + GoesTelemetrySourceID: arg.SourceID, + }) + if err != nil { + return a, fmt.Errorf("GoesPlatformRegistryListByProject: %w", err) + } + deletes := make([]OpendcsCommitDelete, 0) + for _, o := range owned { + if _, ok := desiredKeys[o.PlatformKey]; ok { + continue + } + if o.PlatformID != nil && *o.PlatformID != "" { + deletes = append(deletes, OpendcsCommitDelete{Mode: OpendcsCommitDeleteModeID, Value: *o.PlatformID}) + } else if o.SiteName != nil && *o.SiteName != "" { + deletes = append(deletes, OpendcsCommitDelete{Mode: OpendcsCommitDeleteModeSite, Value: *o.SiteName}) + } + } + + fileIDs := make([]uuid.UUID, len(files)) + for i, f := range files { + fileIDs[i] = f.ID + } + + mappings, err := qtx.GoesTelemetryConfigMappingsListForFiles(ctx, fileIDs) + if err != nil { + return a, fmt.Errorf("GoesTelemetryConfigMappingsListForFiles: %w", err) + } + + type mapRow struct { + fileID uuid.UUID + key string + ts uuid.UUID + } + rows := make([]mapRow, 0, len(mappings)) + for _, m := range mappings { + var ts uuid.UUID + if m.TimeseriesID != nil { + ts = *m.TimeseriesID + } + rows = append(rows, mapRow{fileID: m.GoesPlatformConfigFileID, key: m.PlatformSensorKey, ts: ts}) + } + sort.Slice(rows, func(i, j int) bool { + if rows[i].fileID != rows[j].fileID { + return rows[i].fileID.String() < rows[j].fileID.String() + } + if rows[i].key != rows[j].key { + return rows[i].key < rows[j].key + } + return rows[i].ts.String() < rows[j].ts.String() + }) + + h := sha256.New() + for _, r := range rows { + _, _ = h.Write([]byte(r.fileID.String())) + _, _ = h.Write([]byte{0}) + _, _ = h.Write([]byte(r.key)) + _, _ = h.Write([]byte{0}) + _, _ = h.Write([]byte(r.ts.String())) + _, _ = h.Write([]byte{0}) + } + contentHash := hex.EncodeToString(h.Sum(nil)) + + idempotencyKey := uuid.NewString() + + mappingSet, err := qtx.GoesMappingSetCreate(ctx, db.GoesMappingSetCreateParams{ + ProjectID: arg.ProjectID, + CreatedBy: arg.CreatedBy, + ContentHash: contentHash, + 
IdempotencyKey: &idempotencyKey, + }) + if err != nil { + return a, fmt.Errorf("GoesMappingSetCreate: %w", err) + } + + entryParams := make([]db.GoesMappingSetEntryCreateBatchParams, 0, len(mappings)) + for _, m := range mappings { + entryParams = append(entryParams, db.GoesMappingSetEntryCreateBatchParams{ + MappingSetID: mappingSet.ID, + GoesPlatformConfigFileID: m.GoesPlatformConfigFileID, + PlatformSensorKey: m.PlatformSensorKey, + TimeseriesID: m.TimeseriesID, + }) + } + if len(entryParams) > 0 { + if _, err := qtx.GoesMappingSetEntryCreateBatch(ctx, entryParams); err != nil { + return a, fmt.Errorf("GoesMappingSetEntryCreateBatch: %w", err) + } + } + + pending, err := qtx.GoesCommitCreatePending(ctx, db.GoesCommitCreatePendingParams{ + ProjectID: arg.ProjectID, + GoesTelemetrySourceID: arg.SourceID, + CreatedBy: arg.CreatedBy, + PreviousCommitID: prevCommitID, + IdempotencyKey: &idempotencyKey, + MappingSetID: &mappingSet.ID, + }) + if err != nil { + return a, fmt.Errorf("GoesCommitCreatePending: %w", err) + } + + keyToTS := make(map[string]uuid.UUID, len(mappings)) + for _, m := range mappings { + if m.TimeseriesID == nil { + continue + } + keyToTS[m.PlatformSensorKey] = *m.TimeseriesID + } + + committedFiles := make([]OpendcsCommitFile, 0, len(files)) + for _, f := range files { + committedXMLBytes, err := injectTimeseriesIDIntoPlatformXML([]byte(f.Content), keyToTS) + if err != nil { + return a, fmt.Errorf("inject timeseries id for file %s: %w", f.ID, err) + } + committedXML := string(committedXMLBytes) + + if err := qtx.GoesPlatformConfigFileCommitArtifactsUpdate(ctx, db.GoesPlatformConfigFileCommitArtifactsUpdateParams{ + ID: f.ID, + CommittedContent: committedXML, + CommittedCommitID: &pending.ID, + }); err != nil { + return a, fmt.Errorf("GoesPlatformConfigFileCommitArtifactsUpdate: %w", err) + } + + sum := sha256.Sum256(committedXMLBytes) + committedFiles = append(committedFiles, OpendcsCommitFile{ + FileID: f.ID, + Name: f.Name, + Alias: f.Alias, + 
XML: committedXML, + Checksum: hex.EncodeToString(sum[:]), + }) + } + + if err := tx.Commit(ctx); err != nil { + return a, err + } + + req := OpendcsCommitParams{ + OpendcsBaseURL: arg.OpendcsBaseURL, + OpendcsAuthToken: arg.OpendcsAuthToken, + OpendcsCommitRequest: OpendcsCommitRequest{ + CommitID: pending.ID, + ProjectID: arg.ProjectID, + SourceID: arg.SourceID, + Files: committedFiles, + Deletes: deletes, + }, + } + rawResp, callErr := s.opendcsCommit(ctx, httpClient, req) + + a.CommitID = pending.ID + a.RawResponse = rawResp + + tx2, err := s.db.Begin(ctx) + if err != nil { + if callErr != nil { + return a, callErr + } + return a, nil + } + defer s.TxDo(ctx, tx2.Rollback) + qtx2 := s.WithTx(tx2) + + if callErr != nil { + if err := qtx2.GoesCommitMarkFailed(ctx, db.GoesCommitMarkFailedParams{ + ID: pending.ID, + OpendcsResponse: rawResp, + }); err != nil { + s.logger.Error(ctx, "GoesCommitMarkFailed query failed", "error", err) + } + if err := tx2.Commit(ctx); err != nil { + s.logger.Error(ctx, "tx2.Commit failed", "error", err) + } + return a, callErr + } + + if err := qtx2.GoesCommitMarkActive(ctx, db.GoesCommitMarkActiveParams{ + ID: pending.ID, + OpendcsResponse: rawResp, + }); err != nil { + return a, fmt.Errorf("GoesCommitMarkActive: %w", err) + } + + if err := qtx2.GoesPlatformRegistryDeleteMissing(ctx, db.GoesPlatformRegistryDeleteMissingParams{ + ProjectID: arg.ProjectID, + GoesTelemetrySourceID: arg.SourceID, + PlatformKeys: platformKeys, + }); err != nil { + return a, fmt.Errorf("GoesPlatformRegistryDeleteMissing: %w", err) + } + + upParams := make([]db.GoesPlatformRegistryUpsertParams, 0, len(platformKeys)) + for _, k := range platformKeys { + id := desiredKeys[k] + + var pidPtr, sitePtr *string + if id.PlatformID != "" { + v := id.PlatformID + pidPtr = &v + } + if id.SiteName != "" { + v := id.SiteName + sitePtr = &v + } + + upParams = append(upParams, db.GoesPlatformRegistryUpsertParams{ + PlatformKey: k, + ProjectID: arg.ProjectID, + 
GoesTelemetrySourceID: arg.SourceID, + PlatformID: pidPtr, + SiteName: sitePtr, + CommitID: pending.ID, + }) + } + if len(upParams) > 0 { + var upErr error + qtx2.GoesPlatformRegistryUpsert(ctx, upParams).Exec(batchExecErr(&upErr)) + if upErr != nil { + return a, fmt.Errorf("GoesPlatformRegistryUpsert: %w", upErr) + } + } + + if err := tx2.Commit(ctx); err != nil { + return a, err + } + + return a, nil +} + +func (s *DBService) GoesRollbackProjectToPrevious(ctx context.Context, httpClient *http.Client, arg GoesCommitEntireSetParams) (GoesTelemetryCommitResponse, error) { + var a GoesTelemetryCommitResponse + + tx0, err := s.db.Begin(ctx) + if err != nil { + return a, err + } + defer s.TxDo(ctx, tx0.Rollback) + q0 := s.WithTx(tx0) + + active, err := q0.GoesCommitGetActive(ctx, db.GoesCommitGetActiveParams{ + ProjectID: arg.ProjectID, + GoesTelemetrySourceID: arg.SourceID, + }) + if err != nil { + return a, fmt.Errorf("no active commit: %w", err) + } + if active.PreviousCommitID == nil { + return a, fmt.Errorf("active commit has no previous_commit_id") + } + + prev, err := q0.GoesCommitGetByID(ctx, *active.PreviousCommitID) + if err != nil { + return a, fmt.Errorf("previous commit not found: %w", err) + } + if prev.MappingSetID == nil { + return a, fmt.Errorf("previous commit missing mapping_set_id") + } + + prevFiles, err := q0.GoesPlatformConfigFilesListForCommitByCommitID(ctx, db.GoesPlatformConfigFilesListForCommitByCommitIDParams{ + ProjectID: arg.ProjectID, + GoesTelemetrySourceID: arg.SourceID, + CommittedCommitID: &prev.ID, + }) + if err != nil { + return a, fmt.Errorf("GoesPlatformConfigFilesListForCommitByCommitID: %w", err) + } + if len(prevFiles) == 0 { + return a, fmt.Errorf("previous commit has no files") + } + + owned, err := q0.GoesPlatformRegistryListByProject(ctx, db.GoesPlatformRegistryListByProjectParams{ + ProjectID: arg.ProjectID, + GoesTelemetrySourceID: arg.SourceID, + }) + if err != nil { + return a, 
fmt.Errorf("GoesPlatformRegistryListByProject: %w", err) + } + + if err := tx0.Commit(ctx); err != nil { + return a, err + } + + desiredKeys := make(map[string]platformIdentity, len(prevFiles)) + platformKeys := make([]string, 0, len(prevFiles)) + for _, f := range prevFiles { + pid, site, err := extractPlatformIDAndSite([]byte(f.Content)) + if err != nil { + return a, fmt.Errorf("extract platform id/site for file %s: %w", f.ID, err) + } + k, err := platformKey(pid, site) + if err != nil { + return a, fmt.Errorf("platform key for file %s: %w", f.ID, err) + } + if _, exists := desiredKeys[k]; !exists { + platformKeys = append(platformKeys, k) + } + desiredKeys[k] = platformIdentity{PlatformID: pid, SiteName: site} + } + + deletes := make([]OpendcsCommitDelete, 0) + for _, o := range owned { + if _, ok := desiredKeys[o.PlatformKey]; ok { + continue + } + if o.PlatformID != nil && *o.PlatformID != "" { + deletes = append(deletes, OpendcsCommitDelete{Mode: OpendcsCommitDeleteModeID, Value: *o.PlatformID}) + } else if o.SiteName != nil && *o.SiteName != "" { + deletes = append(deletes, OpendcsCommitDelete{Mode: OpendcsCommitDeleteModeSite, Value: *o.SiteName}) + } + } + + tx, err := s.db.Begin(ctx) + if err != nil { + return a, err + } + defer s.TxDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + idempotencyKey := uuid.NewString() + + pending, err := qtx.GoesCommitCreatePending(ctx, db.GoesCommitCreatePendingParams{ + ProjectID: arg.ProjectID, + GoesTelemetrySourceID: arg.SourceID, + CreatedBy: arg.CreatedBy, + PreviousCommitID: &active.ID, + IdempotencyKey: &idempotencyKey, + MappingSetID: prev.MappingSetID, + }) + if err != nil { + return a, fmt.Errorf("GoesCommitCreatePending(rollback): %w", err) + } + + if err := tx.Commit(ctx); err != nil { + return a, err + } + + commitFiles := make([]OpendcsCommitFile, 0, len(prevFiles)) + fileIDs := make([]uuid.UUID, 0, len(prevFiles)) + for _, f := range prevFiles { + fileIDs = append(fileIDs, f.ID) + xb := []byte(f.Content) + sum 
:= sha256.Sum256(xb) + commitFiles = append(commitFiles, OpendcsCommitFile{ + FileID: f.ID, + Name: f.Name, + Alias: f.Alias, + XML: string(xb), + Checksum: hex.EncodeToString(sum[:]), + }) + } + + req := OpendcsCommitParams{ + OpendcsBaseURL: arg.OpendcsBaseURL, + OpendcsAuthToken: arg.OpendcsAuthToken, + OpendcsCommitRequest: OpendcsCommitRequest{ + CommitID: pending.ID, + ProjectID: arg.ProjectID, + SourceID: arg.SourceID, + Files: commitFiles, + Deletes: deletes, + }, + } + rawResp, callErr := s.opendcsCommit(ctx, httpClient, req) + + a.CommitID = pending.ID + a.RawResponse = rawResp + + tx2, err := s.db.Begin(ctx) + if err != nil { + if callErr != nil { + return a, callErr + } + return a, nil + } + defer s.TxDo(ctx, tx2.Rollback) + qtx2 := s.WithTx(tx2) + + if callErr != nil { + _ = qtx2.GoesCommitMarkFailed(ctx, db.GoesCommitMarkFailedParams{ID: pending.ID, OpendcsResponse: rawResp}) + _ = tx2.Commit(ctx) + return a, callErr + } + + if err := qtx2.GoesTelemetryConfigMappingsReplaceForProjectFromMappingSet(ctx, db.GoesTelemetryConfigMappingsReplaceForProjectFromMappingSetParams{ + ProjectID: arg.ProjectID, + GoesTelemetrySourceID: arg.SourceID, + MappingSetID: *prev.MappingSetID, + }); err != nil { + return a, fmt.Errorf("replace mappings from previous mapping_set: %w", err) + } + + for _, f := range prevFiles { + if err := qtx2.GoesPlatformConfigFileRestoreForRollback(ctx, db.GoesPlatformConfigFileRestoreForRollbackParams{ + ID: f.ID, + Content: f.Content, + CommittedCommitID: &pending.ID, + }); err != nil { + return a, fmt.Errorf("restore file %s: %w", f.ID, err) + } + } + + if err := qtx2.GoesPlatformConfigFileSoftDeleteNotInSet(ctx, db.GoesPlatformConfigFileSoftDeleteNotInSetParams{ + ProjectID: arg.ProjectID, + GoesTelemetrySourceID: arg.SourceID, + DeletedBy: &arg.CreatedBy, + FileIds: fileIDs, + }); err != nil { + return a, fmt.Errorf("soft delete files not in previous set: %w", err) + } + + if err := qtx2.GoesCommitMarkActive(ctx, 
db.GoesCommitMarkActiveParams{ID: pending.ID, OpendcsResponse: rawResp}); err != nil { + return a, fmt.Errorf("GoesCommitMarkActive(rollback): %w", err) + } + + if err := qtx2.GoesPlatformRegistryDeleteMissing(ctx, db.GoesPlatformRegistryDeleteMissingParams{ + ProjectID: arg.ProjectID, + GoesTelemetrySourceID: arg.SourceID, + PlatformKeys: platformKeys, + }); err != nil { + return a, fmt.Errorf("GoesPlatformRegistryDeleteMissing: %w", err) + } + + upParams := make([]db.GoesPlatformRegistryUpsertParams, 0, len(platformKeys)) + for _, k := range platformKeys { + id := desiredKeys[k] + var pidPtr, sitePtr *string + if id.PlatformID != "" { + v := id.PlatformID + pidPtr = &v + } + if id.SiteName != "" { + v := id.SiteName + sitePtr = &v + } + upParams = append(upParams, db.GoesPlatformRegistryUpsertParams{ + PlatformKey: k, + ProjectID: arg.ProjectID, + GoesTelemetrySourceID: arg.SourceID, + PlatformID: pidPtr, + SiteName: sitePtr, + CommitID: pending.ID, + }) + } + if len(upParams) > 0 { + var upErr error + qtx2.GoesPlatformRegistryUpsert(ctx, upParams).Exec(batchExecErr(&upErr)) + if upErr != nil { + return a, fmt.Errorf("GoesPlatformRegistryUpsert: %w", upErr) + } + } + + if err := tx2.Commit(ctx); err != nil { + return a, err + } + + return a, nil +} + +func (s *DBService) opendcsCommit(ctx context.Context, httpClient *http.Client, arg OpendcsCommitParams) (json.RawMessage, error) { + if arg.OpendcsBaseURL == "" { + return nil, fmt.Errorf("opendcsBaseURL not configured") + } + if arg.OpendcsAuthToken == "" { + return nil, fmt.Errorf("opendcsAuthToken not configured") + } + + var buf bytes.Buffer + writer := multipart.NewWriter(&buf) + + _ = writer.WriteField("commit_id", arg.OpendcsCommitRequest.CommitID.String()) + _ = writer.WriteField("project_id", arg.OpendcsCommitRequest.ProjectID.String()) + _ = writer.WriteField("goes_telemetry_source_id", arg.OpendcsCommitRequest.SourceID.String()) + + if len(arg.OpendcsCommitRequest.Deletes) > 0 { + deletesBytes, err := 
json.Marshal(arg.OpendcsCommitRequest.Deletes) + if err != nil { + return nil, fmt.Errorf("marshal deletes: %w", err) + } + _ = writer.WriteField("deletes", string(deletesBytes)) + } + + for _, f := range arg.OpendcsCommitRequest.Files { + part, err := writer.CreateFormFile("files", f.FileID.String()+".xml") + if err != nil { + return nil, fmt.Errorf("create form file: %w", err) + } + r := strings.NewReader(f.XML) + if _, err := io.Copy(part, r); err != nil { + return nil, fmt.Errorf("copy file: %w", err) + } + } + + if err := writer.Close(); err != nil { + return nil, fmt.Errorf("close multipart writer: %w", err) + } + + u := strings.TrimRight(arg.OpendcsBaseURL, "/") + "/commit" + httpReq, err := http.NewRequestWithContext(ctx, http.MethodPost, u, &buf) + if err != nil { + return nil, err + } + + httpReq.Header.Set("Content-Type", writer.FormDataContentType()) + q := httpReq.URL.Query() + q.Set("key", arg.OpendcsAuthToken) + httpReq.URL.RawQuery = q.Encode() + + resp, err := httpClient.Do(httpReq) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + bodyBytes, err := io.ReadAll(resp.Body) + if err != nil { + return nil, err + } + + if resp.StatusCode < http.StatusOK || resp.StatusCode >= http.StatusMultipleChoices { + s.logger.Error(ctx, "opendcs commit failed", "status_code", resp.StatusCode, "response_body", string(bodyBytes)) + return bodyBytes, fmt.Errorf("opendcs commit failed: http %d", resp.StatusCode) + } + + return bodyBytes, nil +} + +type GoesValidateProjectUncommittedParams struct { + ProjectID uuid.UUID + GoesTelemetrySourceID uuid.UUID + HTTPClient *http.Client // kept for future wrapper validation; unused with current wrapper API +} + +type GoesPlatformValidation struct { + PlatformFileID uuid.UUID `json:"platform_file_id"` + Name string `json:"name"` + Alias string `json:"alias"` + Valid bool `json:"valid"` + Message string `json:"message,omitempty"` + SensorCount int `json:"sensor_count,omitempty"` + SensorKeys []string 
`json:"sensor_keys,omitempty"` +} + +type GoesProjectValidationResult struct { + Valid bool `json:"valid"` + ValidatedAt string `json:"validated_at"` + Files []GoesPlatformValidation `json:"files"` + Warnings []string `json:"warnings,omitempty"` +} + +func (s *DBService) GoesValidateProjectUncommitted(ctx context.Context, arg GoesValidateProjectUncommittedParams) (DbImportResponse, error) { + var out DbImportResponse + + files, err := s.GoesPlatformConfigFilesListForCommit(ctx, db.GoesPlatformConfigFilesListForCommitParams{ + ProjectID: arg.ProjectID, + GoesTelemetrySourceID: arg.GoesTelemetrySourceID, + }) + if err != nil { + return out, fmt.Errorf("GoesPlatformConfigFilesListForCommit: %w", err) + } + if len(files) == 0 { + result := GoesProjectValidationResult{ + Valid: true, + ValidatedAt: time.Now().UTC().Format(time.RFC3339Nano), + Files: nil, + Warnings: []string{"no platform configuration files found for project/source"}, + } + b, _ := json.Marshal(result) + out.Response = b + return out, nil + } + + res := GoesProjectValidationResult{ + Valid: true, + ValidatedAt: time.Now().UTC().Format(time.RFC3339Nano), + Files: make([]GoesPlatformValidation, 0, len(files)), + Warnings: []string{ + "wrapper dbimport validation is not executed by this endpoint (current wrapper requires file paths or would import)", + }, + } + + var firstErr error + + for _, f := range files { + v := GoesPlatformValidation{ + PlatformFileID: f.ID, + Name: f.Name, + Alias: f.Alias, + Valid: true, + } + + _, keys, e := extractSensorNames(f.Content) + if e != nil { + v.Valid = false + v.Message = e.Error() + res.Valid = false + if firstErr == nil { + firstErr = fmt.Errorf("xml validation failed for %s: %w", f.ID, e) + } + } else { + v.SensorKeys = keys + v.SensorCount = len(keys) + } + + res.Files = append(res.Files, v) + } + + b, err := json.Marshal(res) + if err != nil { + return out, err + } + out.Response = b + + if firstErr != nil { + return out, firstErr + } + return out, nil +} + 
+func (s *DBService) GoesPlatformConfigCommit(ctx context.Context, files []dto.GoesPlatformConfigFileCommitDTO) error { + params := make([]db.GoesPlatformConfigFileCommitParams, len(files)) + for i, f := range files { + params[i] = db.GoesPlatformConfigFileCommitParams{ + ID: f.ID, + CommittedAt: &f.CommittedAt, + CommittedCommitID: &f.CommitID, + } + } + + var batchErr error + s.Queries.GoesPlatformConfigFileCommit(ctx, params).Exec(batchExecErr(&batchErr)) + return batchErr +} diff --git a/api/internal/service/goes_xml.go b/api/internal/service/goes_xml.go new file mode 100644 index 00000000..d9b6dc3a --- /dev/null +++ b/api/internal/service/goes_xml.go @@ -0,0 +1,255 @@ +package service + +import ( + "bytes" + "encoding/xml" + "fmt" + "io" + "strings" + + "github.com/google/uuid" +) + +type PlatformSensor struct { + Key string +} + +func ParsePlatformSensors(xmlBytes []byte) ([]PlatformSensor, error) { + dec := xml.NewDecoder(bytes.NewReader(xmlBytes)) + + var sensors []PlatformSensor + var stack []string + + for { + tok, err := dec.Token() + if err == io.EOF { + break + } + if err != nil { + return nil, err + } + + switch t := tok.(type) { + case xml.StartElement: + stack = append(stack, strings.ToLower(t.Name.Local)) + case xml.EndElement: + if len(stack) > 0 { + stack = stack[:len(stack)-1] + } + case xml.CharData: + if len(stack) < 1 { + continue + } + _ = t + } + } + + return sensors, nil +} + +func extractPlatformIDAndSite(xmlIn []byte) (platformID, site string, err error) { + dec := xml.NewDecoder(bytes.NewReader(xmlIn)) + + var cur string + for { + tok, tokErr := dec.Token() + if tokErr == io.EOF { + break + } + if tokErr != nil { + return "", "", tokErr + } + + switch t := tok.(type) { + case xml.StartElement: + cur = strings.ToLower(t.Name.Local) + case xml.EndElement: + cur = "" + case xml.CharData: + val := strings.TrimSpace(string(t)) + if val == "" || cur == "" { + continue + } + switch cur { + case "platformid", "platform_id", "platform-id": + if 
platformID == "" { + platformID = val + } + case "site", "sitename", "site_name", "site-name": + if site == "" { + site = val + } + } + } + if platformID != "" && site != "" { + break + } + } + return platformID, site, nil +} + +func injectTimeseriesIDIntoPlatformXML(xmlIn []byte, keyToTS map[string]uuid.UUID) ([]byte, error) { + dec := xml.NewDecoder(bytes.NewReader(xmlIn)) + + var out bytes.Buffer + enc := xml.NewEncoder(&out) + + inConfigSensor := false + var sensorName, sensorNumber strings.Builder + readingSensorName := false + readingSensorNumber := false + + sawTimeseriesProp := false + skippingTimeseriesProp := false + skipDepth := 0 + + getTS := func() (uuid.UUID, bool) { + if !inConfigSensor { + return uuid.Nil, false + } + n := strings.TrimSpace(sensorName.String()) + num := strings.TrimSpace(sensorNumber.String()) + if n == "" || num == "" { + return uuid.Nil, false + } + ts, ok := keyToTS[n+"."+num] + if !ok || ts == uuid.Nil { + return uuid.Nil, false + } + return ts, true + } + + for { + tok, err := dec.Token() + if err == io.EOF { + break + } + if err != nil { + return nil, fmt.Errorf("xml decode token: %w", err) + } + + if skippingTimeseriesProp { + switch tok.(type) { + case xml.StartElement: + skipDepth++ + case xml.EndElement: + skipDepth-- + if skipDepth == 0 { + skippingTimeseriesProp = false + } + } + continue + } + + switch t := tok.(type) { + case xml.StartElement: + if t.Name.Local == "ConfigSensor" { + inConfigSensor = true + sensorName.Reset() + sensorNumber.Reset() + readingSensorName = false + readingSensorNumber = false + sawTimeseriesProp = false + } + + if inConfigSensor && t.Name.Local == "SensorName" { + readingSensorName = true + } + if inConfigSensor && t.Name.Local == "SensorNumber" { + readingSensorNumber = true + } + + if inConfigSensor && t.Name.Local == "PlatformSensorProperty" { + var propName string + for _, a := range t.Attr { + if a.Name.Local == "PropertyName" { + propName = a.Value + break + } + } + if 
strings.EqualFold(propName, "timeseries_id") { + sawTimeseriesProp = true + + if err := enc.EncodeToken(t); err != nil { + return nil, fmt.Errorf("xml encode start: %w", err) + } + + if ts, ok := getTS(); ok { + if err := enc.EncodeToken(xml.CharData([]byte(ts.String()))); err != nil { + return nil, fmt.Errorf("xml encode timeseries_id value: %w", err) + } + } + + skippingTimeseriesProp = true + skipDepth = 1 + continue + } + } + + if err := enc.EncodeToken(t); err != nil { + return nil, fmt.Errorf("xml encode start: %w", err) + } + + case xml.CharData: + if inConfigSensor && readingSensorName { + sensorName.Write([]byte(t)) + } + if inConfigSensor && readingSensorNumber { + sensorNumber.Write([]byte(t)) + } + + if err := enc.EncodeToken(t); err != nil { + return nil, fmt.Errorf("xml encode chardata: %w", err) + } + + case xml.EndElement: + if inConfigSensor && t.Name.Local == "SensorName" { + readingSensorName = false + } + if inConfigSensor && t.Name.Local == "SensorNumber" { + readingSensorNumber = false + } + + if inConfigSensor && t.Name.Local == "ConfigSensor" { + if !sawTimeseriesProp { + if ts, ok := getTS(); ok { + start := xml.StartElement{ + Name: xml.Name{Local: "PlatformSensorProperty"}, + Attr: []xml.Attr{ + {Name: xml.Name{Local: "PropertyName"}, Value: "timeseries_id"}, + }, + } + if err := enc.EncodeToken(start); err != nil { + return nil, fmt.Errorf("xml encode inserted prop start: %w", err) + } + if err := enc.EncodeToken(xml.CharData([]byte(ts.String()))); err != nil { + return nil, fmt.Errorf("xml encode inserted prop value: %w", err) + } + if err := enc.EncodeToken(xml.EndElement{Name: start.Name}); err != nil { + return nil, fmt.Errorf("xml encode inserted prop end: %w", err) + } + } + } + + inConfigSensor = false + readingSensorName = false + readingSensorNumber = false + } + + if err := enc.EncodeToken(t); err != nil { + return nil, fmt.Errorf("xml encode end: %w", err) + } + + default: + if err := enc.EncodeToken(tok); err != nil { + 
return nil, fmt.Errorf("xml encode token: %w", err) + } + } + } + + if err := enc.Flush(); err != nil { + return nil, fmt.Errorf("xml encoder flush: %w", err) + } + + return out.Bytes(), nil +} diff --git a/api/migrations/schema/V1.58.00__goes.sql b/api/migrations/schema/V1.58.00__goes.sql index ff9975bb..23c47ac8 100644 --- a/api/migrations/schema/V1.58.00__goes.sql +++ b/api/migrations/schema/V1.58.00__goes.sql @@ -24,9 +24,59 @@ create table goes_platform_config_file ( create table goes_telemetry_config_mappings ( goes_platform_config_file_id uuid not null references goes_platform_config_file(id) on delete cascade, platform_sensor_key text not null, - timeseries_id uuid unique references timeseries(id), - constraint unique_goes_platform_config_file_id_platform_sensor_key unique (goes_platform_config_file_id, platform_sensor_key) + timeseries_id uuid references timeseries(id), + constraint unique_goes_platform_config_file_id_platform_sensor_key unique (goes_platform_config_file_id, platform_sensor_key), + primary key (goes_platform_config_file_id, platform_sensor_key) ); +create unique index if not exists unique_timeseries_id_not_null +on goes_telemetry_config_mappings(timeseries_id) +where timeseries_id is not null; + + +create table goes_commit ( + id uuid primary key default uuid_generate_v4(), + project_id uuid not null references project(id), + goes_telemetry_source_id uuid not null references goes_telemetry_source(id), + created_at timestamptz not null default now(), + created_by uuid not null references profile(id), + status text not null, + opendcs_response jsonb, + previous_commit_id uuid references goes_commit(id), + idempotency_key text, + constraint unique_commit_idempotency unique (project_id, goes_telemetry_source_id, idempotency_key) +); + + +create index goes_commit_active_idx +on goes_commit (project_id, goes_telemetry_source_id) +where status = 'active'; + + +create table goes_mapping_set ( + id uuid primary key default uuid_generate_v4(), + 
project_id uuid not null references project(id), + created_at timestamptz not null default now(), + created_by uuid not null references profile(id), + content_hash text not null, + idempotency_key text, + constraint unique_mapping_idempotency unique (project_id, idempotency_key) +); + + +create table goes_mapping_set_entry ( + mapping_set_id uuid not null references goes_mapping_set(id) on delete cascade, + goes_platform_config_file_id uuid not null references goes_platform_config_file(id) on delete cascade, + platform_sensor_key text not null, + timeseries_id uuid references timeseries(id), + primary key (mapping_set_id, goes_platform_config_file_id, platform_sensor_key) +); + + +alter table goes_platform_config_file +add column committed_content xml, +add column committed_commit_id uuid references goes_commit(id); + + insert into goes_telemetry_source (id, name) values ('666e60ec-2c0a-4446-9eda-6f45cbcd0a60', 'OpenDCS #1'); diff --git a/api/migrations/schema/V1.59.00__goes_project.sql b/api/migrations/schema/V1.59.00__goes_project.sql new file mode 100644 index 00000000..855a95fe --- /dev/null +++ b/api/migrations/schema/V1.59.00__goes_project.sql @@ -0,0 +1,27 @@ +alter table goes_platform_config_file +add column if not exists deleted boolean not null default false, +add column if not exists deleted_at timestamptz, +add column if not exists deleted_by uuid references profile(id); + +create index if not exists goes_platform_config_file_not_deleted_idx +on goes_platform_config_file (project_id, goes_telemetry_source_id) +where deleted = false; + +alter table goes_commit +add column if not exists mapping_set_id uuid references goes_mapping_set(id); + +create index if not exists goes_commit_mapping_set_id_idx +on goes_commit (mapping_set_id); + +create table if not exists goes_platform_registry ( + platform_key text primary key, + project_id uuid not null references project(id), + goes_telemetry_source_id uuid not null references goes_telemetry_source(id), + 
platform_id text, + site_name text, + commit_id uuid not null references goes_commit(id), + updated_at timestamptz not null default now() +); + +create index if not exists goes_platform_registry_project_idx +on goes_platform_registry (project_id, goes_telemetry_source_id); diff --git a/api/migrations/seed/V0.17.02__seed_data.sql b/api/migrations/seed/V0.17.02__seed_data.sql index ba30042e..3b21d3dd 100644 --- a/api/migrations/seed/V0.17.02__seed_data.sql +++ b/api/migrations/seed/V0.17.02__seed_data.sql @@ -110,63 +110,63 @@ INSERT INTO instrument_constants (instrument_id, timeseries_id) VALUES ('a7540f69-c41e-43b3-b655-6e44097edb7e', '14247bc8-b264-4857-836f-182d47ebb39d'); -- Time Series Measurements -INSERT INTO timeseries_measurement (timeseries_id, time, value) VALUES -('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/1/2020' , 13.16), -('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/2/2020' , 13.16), -('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/3/2020' , 13.17), -('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/4/2020' , 13.17), -('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/5/2020' , 13.13), -('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/6/2020' , 13.12), -('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/7/2020' , 13.10), -('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/8/2020' , 13.08), -('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/9/2020' , 13.07), -('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/10/2020', 13.05), -('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/11/2020', 13.16), -('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/12/2020', 13.16), -('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/13/2020', 13.17), -('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/14/2020', 13.17), -('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/15/2020', 13.13), -('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/16/2020', 13.12), -('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/17/2020', 13.10), -('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/18/2020', 13.08), -('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/19/2020', 13.07), 
-('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/20/2020', 13.05), -('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/21/2020', 13.05), -('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/1/2020' , 20.16), -('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/2/2020' , 20.16), -('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/3/2020' , 20.17), -('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/4/2020' , 20.17), -('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/5/2020' , 20.13), -('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/6/2020' , 20.12), -('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/7/2020' , 20.10), -('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/8/2020' , 20.08), -('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/9/2020' , 20.07), -('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/10/2020', 20.05), -('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/1/2020' , 20.16), -('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/2/2020' , 20.16), -('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/3/2020' , 20.17), -('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/4/2020' , 20.17), -('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/5/2020' , 20.13), -('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/6/2020' , 20.12), -('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/7/2020' , 20.10), -('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/8/2020' , 20.08), -('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/9/2020' , 20.07), -('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/10/2020', 20.05), -('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/1/2020' , 20.16), -('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/2/2020' , 20.16), -('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/3/2020' , 20.17), -('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/4/2020' , 20.17), -('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/5/2020' , 20.13), -('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/6/2020' , 20.12), -('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/7/2020' , 20.10), -('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/8/2020' , 20.08), -('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/9/2020' , 20.07), -('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', 
'3/10/2020', 20.05), -('d9697351-3a38-4194-9ac4-41541927e475', '3/10/2015', 40.50), -('d9697351-3a38-4194-9ac4-41541927e475', '6/10/2020', 40.00), -('d9697351-3a38-4194-9ac4-41541927e475', '3/10/2020', 39.50), -('22a734d6-dc24-451d-a462-43a32f335ae8', '3/10/2015', 10.0), -('479d90eb-3454-4f39-be9a-bfd23099a552', '6/21/2021', 20000.0); +-- INSERT INTO timeseries_measurement (timeseries_id, time, value) VALUES +-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/1/2020' , 13.16), +-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/2/2020' , 13.16), +-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/3/2020' , 13.17), +-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/4/2020' , 13.17), +-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/5/2020' , 13.13), +-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/6/2020' , 13.12), +-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/7/2020' , 13.10), +-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/8/2020' , 13.08), +-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/9/2020' , 13.07), +-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/10/2020', 13.05), +-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/11/2020', 13.16), +-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/12/2020', 13.16), +-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/13/2020', 13.17), +-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/14/2020', 13.17), +-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/15/2020', 13.13), +-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/16/2020', 13.12), +-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/17/2020', 13.10), +-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/18/2020', 13.08), +-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/19/2020', 13.07), +-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/20/2020', 13.05), +-- ('869465fc-dc1e-445e-81f4-9979b5fadda9', '1/21/2020', 13.05), +-- ('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/1/2020' , 20.16), +-- ('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/2/2020' , 20.16), +-- ('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/3/2020' , 20.17), +-- 
('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/4/2020' , 20.17), +-- ('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/5/2020' , 20.13), +-- ('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/6/2020' , 20.12), +-- ('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/7/2020' , 20.10), +-- ('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/8/2020' , 20.08), +-- ('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/9/2020' , 20.07), +-- ('9a3864a8-8766-4bfa-bad1-0328b166f6a8', '1/10/2020', 20.05), +-- ('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/1/2020' , 20.16), +-- ('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/2/2020' , 20.16), +-- ('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/3/2020' , 20.17), +-- ('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/4/2020' , 20.17), +-- ('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/5/2020' , 20.13), +-- ('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/6/2020' , 20.12), +-- ('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/7/2020' , 20.10), +-- ('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/8/2020' , 20.08), +-- ('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/9/2020' , 20.07), +-- ('7ee902a3-56d0-4acf-8956-67ac82c03a96', '3/10/2020', 20.05), +-- ('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/1/2020' , 20.16), +-- ('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/2/2020' , 20.16), +-- ('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/3/2020' , 20.17), +-- ('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/4/2020' , 20.17), +-- ('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/5/2020' , 20.13), +-- ('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/6/2020' , 20.12), +-- ('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/7/2020' , 20.10), +-- ('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/8/2020' , 20.08), +-- ('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/9/2020' , 20.07), +-- ('8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', '3/10/2020', 20.05), +-- ('d9697351-3a38-4194-9ac4-41541927e475', '3/10/2015', 40.50), +-- ('d9697351-3a38-4194-9ac4-41541927e475', '6/10/2020', 40.00), +-- ('d9697351-3a38-4194-9ac4-41541927e475', '3/10/2020', 39.50), +-- 
('22a734d6-dc24-451d-a462-43a32f335ae8', '3/10/2015', 10.0), +-- ('479d90eb-3454-4f39-be9a-bfd23099a552', '6/21/2021', 20000.0); -- inclinometers INSERT INTO inclinometer_measurement (timeseries_id, time, creator, create_date, values) VALUES diff --git a/api/migrations/seed/V0.17.19__seed_ts_measurements.sql b/api/migrations/seed/V0.17.19__seed_ts_measurements.sql index cf4378d2..ffee34e8 100644 --- a/api/migrations/seed/V0.17.19__seed_ts_measurements.sql +++ b/api/migrations/seed/V0.17.19__seed_ts_measurements.sql @@ -1,114 +1,114 @@ --- https://www.timescale.com/blog/how-to-shape-sample-data-with-postgresql-generate_series-and-sql/ -create table if not exists seed_data_overrides ( - m_val int not null, - p_inc float4 not null -); - --- provides baseline curve for more reaslistic looking test data -insert into seed_data_overrides(m_val, p_inc) values - (1,1.04), - (2,1), - (3,1), - (4,1), - (5,1), - (6,1.10), - (7,1), - (8,0.09), - (9,1), - (10,1), - (11,1.08), - (12,1.18); - -create or replace function seed_timeseries_measurements( - timeseries_ids uuid[], - begin_time timestamptz, - end_time timestamptz, - intv interval -) returns void language sql volatile as $$ -insert into timeseries_measurement (timeseries_id, time, value) -select - timeseries_id, - m.time, - m.value -from -unnest(timeseries_ids) as timeseries_id, -( - with intv_series as ( - select ts, date(ts) as day, rownum - from generate_series(begin_time, end_time, intv) with ordinality as t(ts, rownum) - ), - intv_value as ( - select ts, day, date_part('month', ts) as m_val, rownum, random() as val - from intv_series - order by day - ), - intv_wave as ( - select - day, - 1 + .2 * cos(rownum * 6.28/180) as p_mod - from intv_series - day - ) - select dv.ts as time, (500 + 20 * val) * p_mod * rownum * p_inc as value - from intv_value dv - inner join intv_wave dw on dv.day=dw.day - inner join seed_data_overrides o on dv.m_val=o.m_val - order by ts -) m -on conflict do nothing; -$$; - -select 
seed_timeseries_measurements(array[ -'869465fc-dc1e-445e-81f4-9979b5fadda9'::uuid, -'9a3864a8-8766-4bfa-bad1-0328b166f6a8'::uuid, -'7ee902a3-56d0-4acf-8956-67ac82c03a96'::uuid, -'8f4ca3a3-5971-4597-bd6f-332d1cf5af7c'::uuid, -'d9697351-3a38-4194-9ac4-41541927e475'::uuid -], '2020-01-01'::timestamptz, now(), '1 day'::interval); - -with ranked as ( - select - timeseries_id, - time, - row_number() over (partition by timeseries_id order by time) as rn, - count(*) over (partition by timeseries_id) as total - from timeseries_measurement - where timeseries_id in ( - '869465fc-dc1e-445e-81f4-9979b5fadda9', - '9a3864a8-8766-4bfa-bad1-0328b166f6a8', - '7ee902a3-56d0-4acf-8956-67ac82c03a96', - '8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', - 'd9697351-3a38-4194-9ac4-41541927e475' - ) -), -chunk_sizes as ( - select - timeseries_id, - floor(total * 0.3) as total_chunk, - floor(total * 0.3 / 3) as chunk_size - from ranked - group by timeseries_id, total -), -chunks as ( - select - timeseries_id, - chunk_size, - 1 as masked_start, - 1 + chunk_size as validated_start, - 1 + 2 * chunk_size as annotation_start - from chunk_sizes -) -insert into timeseries_notes (masked, validated, annotation, timeseries_id, time) -select - case when r.rn between c.masked_start and c.masked_start + c.chunk_size - 1 then true else null end as masked, - case when r.rn between c.validated_start and c.validated_start + c.chunk_size - 1 then true else null end as validated, - case when r.rn between c.annotation_start and c.annotation_start + c.chunk_size - 1 then 'Test annotation' else null end as annotation, - r.timeseries_id, - r.time -from ranked r -join chunks c on r.timeseries_id = c.timeseries_id -where - (r.rn between c.masked_start and c.masked_start + c.chunk_size - 1) - or (r.rn between c.validated_start and c.validated_start + c.chunk_size - 1) - or (r.rn between c.annotation_start and c.annotation_start + c.chunk_size - 1) -on conflict do nothing; +-- -- 
https://www.timescale.com/blog/how-to-shape-sample-data-with-postgresql-generate_series-and-sql/ +-- create table if not exists seed_data_overrides ( +-- m_val int not null, +-- p_inc float4 not null +-- ); +-- +-- -- provides baseline curve for more reaslistic looking test data +-- insert into seed_data_overrides(m_val, p_inc) values +-- (1,1.04), +-- (2,1), +-- (3,1), +-- (4,1), +-- (5,1), +-- (6,1.10), +-- (7,1), +-- (8,0.09), +-- (9,1), +-- (10,1), +-- (11,1.08), +-- (12,1.18); +-- +-- create or replace function seed_timeseries_measurements( +-- timeseries_ids uuid[], +-- begin_time timestamptz, +-- end_time timestamptz, +-- intv interval +-- ) returns void language sql volatile as $$ +-- insert into timeseries_measurement (timeseries_id, time, value) +-- select +-- timeseries_id, +-- m.time, +-- m.value +-- from +-- unnest(timeseries_ids) as timeseries_id, +-- ( +-- with intv_series as ( +-- select ts, date(ts) as day, rownum +-- from generate_series(begin_time, end_time, intv) with ordinality as t(ts, rownum) +-- ), +-- intv_value as ( +-- select ts, day, date_part('month', ts) as m_val, rownum, random() as val +-- from intv_series +-- order by day +-- ), +-- intv_wave as ( +-- select +-- day, +-- 1 + .2 * cos(rownum * 6.28/180) as p_mod +-- from intv_series +-- day +-- ) +-- select dv.ts as time, (500 + 20 * val) * p_mod * rownum * p_inc as value +-- from intv_value dv +-- inner join intv_wave dw on dv.day=dw.day +-- inner join seed_data_overrides o on dv.m_val=o.m_val +-- order by ts +-- ) m +-- on conflict do nothing; +-- $$; +-- +-- select seed_timeseries_measurements(array[ +-- '869465fc-dc1e-445e-81f4-9979b5fadda9'::uuid, +-- '9a3864a8-8766-4bfa-bad1-0328b166f6a8'::uuid, +-- '7ee902a3-56d0-4acf-8956-67ac82c03a96'::uuid, +-- '8f4ca3a3-5971-4597-bd6f-332d1cf5af7c'::uuid, +-- 'd9697351-3a38-4194-9ac4-41541927e475'::uuid +-- ], '2020-01-01'::timestamptz, now(), '1 day'::interval); +-- +-- with ranked as ( +-- select +-- timeseries_id, +-- time, +-- 
row_number() over (partition by timeseries_id order by time) as rn, +-- count(*) over (partition by timeseries_id) as total +-- from timeseries_measurement +-- where timeseries_id in ( +-- '869465fc-dc1e-445e-81f4-9979b5fadda9', +-- '9a3864a8-8766-4bfa-bad1-0328b166f6a8', +-- '7ee902a3-56d0-4acf-8956-67ac82c03a96', +-- '8f4ca3a3-5971-4597-bd6f-332d1cf5af7c', +-- 'd9697351-3a38-4194-9ac4-41541927e475' +-- ) +-- ), +-- chunk_sizes as ( +-- select +-- timeseries_id, +-- floor(total * 0.3) as total_chunk, +-- floor(total * 0.3 / 3) as chunk_size +-- from ranked +-- group by timeseries_id, total +-- ), +-- chunks as ( +-- select +-- timeseries_id, +-- chunk_size, +-- 1 as masked_start, +-- 1 + chunk_size as validated_start, +-- 1 + 2 * chunk_size as annotation_start +-- from chunk_sizes +-- ) +-- insert into timeseries_notes (masked, validated, annotation, timeseries_id, time) +-- select +-- case when r.rn between c.masked_start and c.masked_start + c.chunk_size - 1 then true else null end as masked, +-- case when r.rn between c.validated_start and c.validated_start + c.chunk_size - 1 then true else null end as validated, +-- case when r.rn between c.annotation_start and c.annotation_start + c.chunk_size - 1 then 'Test annotation' else null end as annotation, +-- r.timeseries_id, +-- r.time +-- from ranked r +-- join chunks c on r.timeseries_id = c.timeseries_id +-- where +-- (r.rn between c.masked_start and c.masked_start + c.chunk_size - 1) +-- or (r.rn between c.validated_start and c.validated_start + c.chunk_size - 1) +-- or (r.rn between c.annotation_start and c.annotation_start + c.chunk_size - 1) +-- on conflict do nothing; diff --git a/api/migrations/seed/V0.17.22__seed_saa.sql b/api/migrations/seed/V0.17.22__seed_saa.sql index 78003893..3f344065 100644 --- a/api/migrations/seed/V0.17.22__seed_saa.sql +++ b/api/migrations/seed/V0.17.22__seed_saa.sql @@ -51,16 +51,16 @@ INSERT INTO saa_opts (instrument_id, num_segments, bottom_elevation_timeseries_i 
('eca4040e-aecb-4cd3-bcde-3e308f0356a6', 8, '4affc367-ea0f-41f5-a4bc-5f387b01d7a4', NOW() - INTERVAL '1 month'); -INSERT INTO timeseries_measurement (timeseries_id, time, value) VALUES -('4affc367-ea0f-41f5-a4bc-5f387b01d7a4', NOW() - INTERVAL '1 month', 0), -('cf2f2304-d44e-4363-bc8d-95533222efd6', NOW() - INTERVAL '1 month', 200), -('ff2086ae-0eae-42a8-b598-2e97be2ab3b0', NOW() - INTERVAL '1 month', 200), -('669b63d7-87b2-4aed-9b15-e19ea39789b9', NOW() - INTERVAL '1 month', 200), -('e404e8f4-41c6-4355-9ddb-9d8c635525fc', NOW() - INTERVAL '1 month', 200), -('ccb80fd4-8902-450f-bb3b-cc1e6718b03c', NOW() - INTERVAL '1 month', 200), -('7f98f239-ac1e-4651-9d69-c163b2dc06a6', NOW() - INTERVAL '1 month', 200), -('72bd19f1-23d3-4edb-b16f-9ebb121cf921', NOW() - INTERVAL '1 month', 200), -('df6a9cca-29fc-4ec3-9415-d497fbae1a58', NOW() - INTERVAL '1 month', 200); +-- INSERT INTO timeseries_measurement (timeseries_id, time, value) VALUES +-- ('4affc367-ea0f-41f5-a4bc-5f387b01d7a4', NOW() - INTERVAL '1 month', 0), +-- ('cf2f2304-d44e-4363-bc8d-95533222efd6', NOW() - INTERVAL '1 month', 200), +-- ('ff2086ae-0eae-42a8-b598-2e97be2ab3b0', NOW() - INTERVAL '1 month', 200), +-- ('669b63d7-87b2-4aed-9b15-e19ea39789b9', NOW() - INTERVAL '1 month', 200), +-- ('e404e8f4-41c6-4355-9ddb-9d8c635525fc', NOW() - INTERVAL '1 month', 200), +-- ('ccb80fd4-8902-450f-bb3b-cc1e6718b03c', NOW() - INTERVAL '1 month', 200), +-- ('7f98f239-ac1e-4651-9d69-c163b2dc06a6', NOW() - INTERVAL '1 month', 200), +-- ('72bd19f1-23d3-4edb-b16f-9ebb121cf921', NOW() - INTERVAL '1 month', 200), +-- ('df6a9cca-29fc-4ec3-9415-d497fbae1a58', NOW() - INTERVAL '1 month', 200); INSERT INTO instrument_constants (timeseries_id, instrument_id) VALUES @@ -86,48 +86,48 @@ INSERT INTO saa_segment (instrument_id, id, length_timeseries_id, x_timeseries_i 
('eca4040e-aecb-4cd3-bcde-3e308f0356a6',8,'df6a9cca-29fc-4ec3-9415-d497fbae1a58','fb0795ba-9d80-4a41-abd7-5de140392454','32889a6d-93d0-49f9-b281-44e19e88474c','bcb95c35-08f7-4c5a-83ff-b505b8d76481','54dcd1e1-e9da-4db5-95e5-3c28fab5c03c'); -INSERT INTO timeseries_measurement (timeseries_id, time, value) -SELECT - timeseries_id, - time, - round((random() * (100-3) + 3)::NUMERIC, 4) AS value -FROM - unnest(ARRAY[ - '8b3762ef-a852-4edc-8e87-746a92eaac9d'::uuid, - 'ecfa267b-339b-4bb8-b7ae-eda550257878'::uuid, - 'a31a24c4-aa8e-4e52-9895-43cdb69fe703'::uuid, - 'eec831d1-56a5-47ef-85eb-02c7622d6cb8'::uuid, - 'eb25ab9f-af8b-4383-839a-7d24899e02c4'::uuid, - '8e641473-d7bf-433c-a24b-55fa065ca0c3'::uuid, - '21cfe121-d29d-40a2-b04f-6be71ba479fe'::uuid, - '23bda2f6-c479-48e0-a0c2-db48c3b08c3c'::uuid, - '2598aa5f-cb8f-4ab7-8ebf-6de0c30bce70'::uuid, - '4759bdac-656e-47c3-b403-d3118cf57342'::uuid, - '1f47a1b9-a2bb-4282-8618-42ba1341533e'::uuid, - 'd2dbac06-ad03-45d9-a7ad-1e7fb9d09ce2'::uuid, - 'c22ffd8a-eae3-41cb-a75b-faae36236465'::uuid, - 'd11a0e91-0125-46cc-a3fc-b0252361bd9c'::uuid, - '9fbf2061-cf73-45f3-9e6c-b745ae7f72a1'::uuid, - '0503e693-bc58-49b5-a477-288174dc90ed'::uuid, - '24ad9638-5c5e-48b6-9ad6-a2eb0b93f87c'::uuid, - '8cfaffb4-80b2-411b-be81-776385fc5862'::uuid, - 'ea0f561f-e3f4-4155-a360-17407a0884d4'::uuid, - 'a10e8627-621c-4aa7-8301-a2142a760e0c'::uuid, - '88e22274-021e-4e91-88bb-046b67171a36'::uuid, - 'f684bec8-9cc3-470f-a355-21d65f2be435'::uuid, - '1a8c9bfc-0e65-4f76-aba9-fc32d643748f'::uuid, - '2bf6aecd-3df0-4237-b28b-95731b7e333d'::uuid, - '00f3e1f2-e7ff-4901-abfb-e9bf695802f6'::uuid, - '2ef9b1d9-ee8f-4f2d-a482-2e0f0dd76f80'::uuid, - '00ae950d-5bdd-455e-a72a-56da67dafb85'::uuid, - '3d07cbc0-4aff-4efa-a162-ec1800801665'::uuid, - 'fb0795ba-9d80-4a41-abd7-5de140392454'::uuid, - '32889a6d-93d0-49f9-b281-44e19e88474c'::uuid, - 'bcb95c35-08f7-4c5a-83ff-b505b8d76481'::uuid, - '54dcd1e1-e9da-4db5-95e5-3c28fab5c03c'::uuid - ]) AS timeseries_id, - generate_series( - now() 
- INTERVAL '1 month', - now(), - INTERVAL '1 hour' - ) AS time; +-- INSERT INTO timeseries_measurement (timeseries_id, time, value) +-- SELECT +-- timeseries_id, +-- time, +-- round((random() * (100-3) + 3)::NUMERIC, 4) AS value +-- FROM +-- unnest(ARRAY[ +-- '8b3762ef-a852-4edc-8e87-746a92eaac9d'::uuid, +-- 'ecfa267b-339b-4bb8-b7ae-eda550257878'::uuid, +-- 'a31a24c4-aa8e-4e52-9895-43cdb69fe703'::uuid, +-- 'eec831d1-56a5-47ef-85eb-02c7622d6cb8'::uuid, +-- 'eb25ab9f-af8b-4383-839a-7d24899e02c4'::uuid, +-- '8e641473-d7bf-433c-a24b-55fa065ca0c3'::uuid, +-- '21cfe121-d29d-40a2-b04f-6be71ba479fe'::uuid, +-- '23bda2f6-c479-48e0-a0c2-db48c3b08c3c'::uuid, +-- '2598aa5f-cb8f-4ab7-8ebf-6de0c30bce70'::uuid, +-- '4759bdac-656e-47c3-b403-d3118cf57342'::uuid, +-- '1f47a1b9-a2bb-4282-8618-42ba1341533e'::uuid, +-- 'd2dbac06-ad03-45d9-a7ad-1e7fb9d09ce2'::uuid, +-- 'c22ffd8a-eae3-41cb-a75b-faae36236465'::uuid, +-- 'd11a0e91-0125-46cc-a3fc-b0252361bd9c'::uuid, +-- '9fbf2061-cf73-45f3-9e6c-b745ae7f72a1'::uuid, +-- '0503e693-bc58-49b5-a477-288174dc90ed'::uuid, +-- '24ad9638-5c5e-48b6-9ad6-a2eb0b93f87c'::uuid, +-- '8cfaffb4-80b2-411b-be81-776385fc5862'::uuid, +-- 'ea0f561f-e3f4-4155-a360-17407a0884d4'::uuid, +-- 'a10e8627-621c-4aa7-8301-a2142a760e0c'::uuid, +-- '88e22274-021e-4e91-88bb-046b67171a36'::uuid, +-- 'f684bec8-9cc3-470f-a355-21d65f2be435'::uuid, +-- '1a8c9bfc-0e65-4f76-aba9-fc32d643748f'::uuid, +-- '2bf6aecd-3df0-4237-b28b-95731b7e333d'::uuid, +-- '00f3e1f2-e7ff-4901-abfb-e9bf695802f6'::uuid, +-- '2ef9b1d9-ee8f-4f2d-a482-2e0f0dd76f80'::uuid, +-- '00ae950d-5bdd-455e-a72a-56da67dafb85'::uuid, +-- '3d07cbc0-4aff-4efa-a162-ec1800801665'::uuid, +-- 'fb0795ba-9d80-4a41-abd7-5de140392454'::uuid, +-- '32889a6d-93d0-49f9-b281-44e19e88474c'::uuid, +-- 'bcb95c35-08f7-4c5a-83ff-b505b8d76481'::uuid, +-- '54dcd1e1-e9da-4db5-95e5-3c28fab5c03c'::uuid +-- ]) AS timeseries_id, +-- generate_series( +-- now() - INTERVAL '1 month', +-- now(), +-- INTERVAL '1 hour' +-- ) AS time; diff --git 
a/api/migrations/seed/V0.17.23__seed_ipi.sql b/api/migrations/seed/V0.17.23__seed_ipi.sql index 4ba0ccdf..b9430675 100644 --- a/api/migrations/seed/V0.17.23__seed_ipi.sql +++ b/api/migrations/seed/V0.17.23__seed_ipi.sql @@ -50,17 +50,17 @@ INSERT INTO ipi_segment (instrument_id, id, length_timeseries_id, tilt_timeserie ('01ac435f-fe3c-4af1-9979-f5e00467e7f5',4,'d28efb95-962d-4233-9002-827154bd76ad','3a297a4e-093a-4f9b-b201-1a994e2f4da7', NULL, NULL); -INSERT INTO timeseries_measurement (timeseries_id, time, value) VALUES -('5842c707-b4be-4d10-a89c-1064e282e555', NOW() - INTERVAL '1 month', 0), -('7d515571-d6a2-4990-a1e2-d6d42049d864', NOW() - INTERVAL '1 month', 50), -('bce99683-59bd-4e4b-ad79-64a03553cfdc', NOW() - INTERVAL '1 month', 012), -('e891ca7c-59b2-41bc-9d4a-43995e35b855', NOW() - INTERVAL '1 month', 123), -('18f17db2-4bc8-44cb-a9fa-ba84d13b8444', NOW() - INTERVAL '1 month', 234), -('d5c236cf-dca5-4a35-bc59-a9ecac4d572b', NOW() - INTERVAL '1 month', 345), -('88accf78-6f41-4342-86b5-026a8880cbb4', NOW() - INTERVAL '1 month', 100), -('fc332ef5-55a8-4657-9d6d-b0abeeb985f2', NOW() - INTERVAL '1 month', 200), -('a86c7468-09a7-4090-98e0-f7979103bbcd', NOW() - INTERVAL '1 month', 150), -('d28efb95-962d-4233-9002-827154bd76ad', NOW() - INTERVAL '1 month', 050); +-- INSERT INTO timeseries_measurement (timeseries_id, time, value) VALUES +-- ('5842c707-b4be-4d10-a89c-1064e282e555', NOW() - INTERVAL '1 month', 0), +-- ('7d515571-d6a2-4990-a1e2-d6d42049d864', NOW() - INTERVAL '1 month', 50), +-- ('bce99683-59bd-4e4b-ad79-64a03553cfdc', NOW() - INTERVAL '1 month', 012), +-- ('e891ca7c-59b2-41bc-9d4a-43995e35b855', NOW() - INTERVAL '1 month', 123), +-- ('18f17db2-4bc8-44cb-a9fa-ba84d13b8444', NOW() - INTERVAL '1 month', 234), +-- ('d5c236cf-dca5-4a35-bc59-a9ecac4d572b', NOW() - INTERVAL '1 month', 345), +-- ('88accf78-6f41-4342-86b5-026a8880cbb4', NOW() - INTERVAL '1 month', 100), +-- ('fc332ef5-55a8-4657-9d6d-b0abeeb985f2', NOW() - INTERVAL '1 month', 200), +-- 
('a86c7468-09a7-4090-98e0-f7979103bbcd', NOW() - INTERVAL '1 month', 150), +-- ('d28efb95-962d-4233-9002-827154bd76ad', NOW() - INTERVAL '1 month', 050); INSERT INTO instrument_constants (timeseries_id, instrument_id) VALUES @@ -76,32 +76,32 @@ INSERT INTO instrument_constants (timeseries_id, instrument_id) VALUES ('d28efb95-962d-4233-9002-827154bd76ad','01ac435f-fe3c-4af1-9979-f5e00467e7f5'); -INSERT INTO timeseries_measurement (timeseries_id, time, value) -SELECT - timeseries_id, - time, - round((random() * (100-3) + 3)::NUMERIC, 4) AS value -FROM - unnest(ARRAY[ - 'f7fa0d85-c684-4315-a7c6-e18e60667969'::UUID, - '1bf787e9-8363-4047-8b03-fbaf9ff03eaf'::UUID, - '258a5834-20bf-45fc-a60c-f245b2822592'::UUID, - '4ffcb98f-962a-46ea-8923-8f992ef07c58'::UUID, - '3bd67db5-abd6-4b35-a649-427791f9eeb7'::UUID, - '1db6717b-6cde-4f46-b7fb-bc82b75051d7'::UUID, - 'a3c4254b-1448-4f70-a1b6-d7f5e5c66eb7'::UUID, - '6d90eb76-f292-461e-a82b-0faee9999778'::UUID, - 'b2968456-b26a-4bbb-b8d9-f1217a6147ff'::UUID, - 'afcc8471-c91b-466e-833d-f173cc58797f'::UUID, - '26cb2cfa-910a-46c3-b03f-9dbcf823f8d8'::UUID, - '3a297a4e-093a-4f9b-b201-1a994e2f4da7'::UUID, - '8d10fbd9-2669-4727-b4c1-746361691388'::UUID, - '6044cffb-c241-4b66-9873-068c2bbac451'::UUID, - '98385e5a-c5d8-4441-aa2e-0f6120414352'::UUID, - 'c488fc08-18ff-4e3d-851f-46cfd1257b6c'::UUID -]) AS timeseries_id, - generate_series( - now() - INTERVAL '1 month', - now(), - INTERVAL '1 hour' - ) AS time; +-- INSERT INTO timeseries_measurement (timeseries_id, time, value) +-- SELECT +-- timeseries_id, +-- time, +-- round((random() * (100-3) + 3)::NUMERIC, 4) AS value +-- FROM +-- unnest(ARRAY[ +-- 'f7fa0d85-c684-4315-a7c6-e18e60667969'::UUID, +-- '1bf787e9-8363-4047-8b03-fbaf9ff03eaf'::UUID, +-- '258a5834-20bf-45fc-a60c-f245b2822592'::UUID, +-- '4ffcb98f-962a-46ea-8923-8f992ef07c58'::UUID, +-- '3bd67db5-abd6-4b35-a649-427791f9eeb7'::UUID, +-- '1db6717b-6cde-4f46-b7fb-bc82b75051d7'::UUID, +-- 'a3c4254b-1448-4f70-a1b6-d7f5e5c66eb7'::UUID, +-- 
'6d90eb76-f292-461e-a82b-0faee9999778'::UUID, +-- 'b2968456-b26a-4bbb-b8d9-f1217a6147ff'::UUID, +-- 'afcc8471-c91b-466e-833d-f173cc58797f'::UUID, +-- '26cb2cfa-910a-46c3-b03f-9dbcf823f8d8'::UUID, +-- '3a297a4e-093a-4f9b-b201-1a994e2f4da7'::UUID, +-- '8d10fbd9-2669-4727-b4c1-746361691388'::UUID, +-- '6044cffb-c241-4b66-9873-068c2bbac451'::UUID, +-- '98385e5a-c5d8-4441-aa2e-0f6120414352'::UUID, +-- 'c488fc08-18ff-4e3d-851f-46cfd1257b6c'::UUID +-- ]) AS timeseries_id, +-- generate_series( +-- now() - INTERVAL '1 month', +-- now(), +-- INTERVAL '1 hour' +-- ) AS time; diff --git a/api/migrations/seed/V1.25.01__seed_uploader_config.sql b/api/migrations/seed/V1.25.01__seed_uploader_config.sql index 53bcda78..b7c95bf2 100644 --- a/api/migrations/seed/V1.25.01__seed_uploader_config.sql +++ b/api/migrations/seed/V1.25.01__seed_uploader_config.sql @@ -10,7 +10,7 @@ INSERT INTO project_instrument (project_id, instrument_id) VALUES INSERT INTO timeseries (id, slug, name, instrument_id, parameter_id, unit_id, type) VALUES ('ac3d9cb5-4e7b-420d-82c7-207d85e48f50', 'constant-bottom-elevation', 'Bottom Elevation Constant', 'f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c', '00000000-0000-0000-0000-000000000000', '00000000-0000-0000-0000-000000000000', 'constant'); -INSERT INTO timeseries_measurement (timeseries_id, time, value) values ('ac3d9cb5-4e7b-420d-82c7-207d85e48f50', now(), 100.5); +-- INSERT INTO timeseries_measurement (timeseries_id, time, value) values ('ac3d9cb5-4e7b-420d-82c7-207d85e48f50', now(), 100.5); INSERT INTO timeseries (id, slug, name, instrument_id, type) VALUES ('1e3a1d3c-38e3-4f34-b65f-d2b8287ed591', 'depth-segment-1', 'Depth Segment 1', 'f4a9c03b-9c52-4bb2-8b2b-103df48e6f9c', 'standard'), @@ -72,59 +72,59 @@ INSERT INTO uploader_config_mapping (uploader_config_id, field_name, timeseries_ ('32f69e6f-c4bf-616e-f044-2f86f4e7c2bb', 'UploaderTimeseries4', 'adb08e25-7b0e-482f-cd24-7b3b1a4a6b07', NULL); -insert into timeseries_measurement (timeseries_id, time, value) 
-select - timeseries_id, - m.time, - m.value -from -unnest(array[ - '1e3a1d3c-38e3-4f34-b65f-d2b8287ed591'::uuid, - '229d8f8d-fd60-465e-94c4-c25bc79d4f7e'::uuid, - '74d19174-f911-4234-96e4-fae1a49969e6'::uuid, - 'fd7c720e-7119-45dc-bf7a-44da303a9aa4'::uuid, - 'e3f7d76b-8aa3-4d25-a5f1-4ad715dd13c1'::uuid, - '6d623d88-b6e8-4f0d-86be-d4445b6b6789'::uuid, - '40dfcce3-7f36-475e-969c-2b0b8633c856'::uuid, - '20792b7d-0f87-4f4b-81c6-616a8d76613a'::uuid, - '659d026e-3f47-4efe-899f-4129b5466228'::uuid, - '015a07f9-4005-4d2d-96dc-7f7d611ca51a'::uuid, - '600d164d-432c-40e8-a1f7-b4ebc112939e'::uuid, - 'b5e12c65-acdb-4439-a436-c762096e54d3'::uuid, - '34bfea2d-f312-4c13-bf4f-ac760236484c'::uuid, - '3f53c9ef-0058-49f3-b62c-b8cad5f92d4d'::uuid, - '14a4ae19-a857-44bb-a9e6-3df7e563847d'::uuid, - '1a547f81-ff98-4d2c-8fb4-9151e28b8d7a'::uuid, - '3c4a0e1d-03a1-4d2b-9b6f-4521b52f491d'::uuid, - '4d5b281f-14b8-42d7-bb1e-9c6118da813f'::uuid, - '5e6c3920-25b9-43e9-a58e-28d6e49516b2'::uuid, - '6f7d4a21-36ca-44fb-99e0-59a7e60627c3'::uuid, - '7a8e5b22-47db-45fc-aa91-8a08f71738d4'::uuid, - '8b9f6c23-58ec-460d-ab02-3b19f82849e5'::uuid, - '9caf7d24-69fd-471e-bc13-6a2a09395af6'::uuid, - 'adb08e25-7b0e-482f-cd24-7b3b1a4a6b07'::uuid -]) as timeseries_id, -( - with daily_series as ( - select ts, date(ts) as day, rownum - from generate_series(now() - interval '1 year', now(), '1 day'::interval) with ordinality as t(ts, rownum) - ), - daily_value as ( - select ts, day, date_part('month', ts) as m_val, rownum, random() as val - from daily_series - order by day - ), - daily_wave as ( - select - day, - 1 + .2 * cos(rownum * 6.28/180) as p_mod - from daily_series - day - ) - select dv.ts as time, (500 + 20 * val) * p_mod * rownum * p_inc as value - from daily_value dv - inner join daily_wave dw on dv.day=dw.day - inner join seed_data_overrides o on dv.m_val=o.m_val - order by ts -) m -on conflict do nothing; +-- insert into timeseries_measurement (timeseries_id, time, value) +-- select +-- timeseries_id, 
+-- m.time, +-- m.value +-- from +-- unnest(array[ +-- '1e3a1d3c-38e3-4f34-b65f-d2b8287ed591'::uuid, +-- '229d8f8d-fd60-465e-94c4-c25bc79d4f7e'::uuid, +-- '74d19174-f911-4234-96e4-fae1a49969e6'::uuid, +-- 'fd7c720e-7119-45dc-bf7a-44da303a9aa4'::uuid, +-- 'e3f7d76b-8aa3-4d25-a5f1-4ad715dd13c1'::uuid, +-- '6d623d88-b6e8-4f0d-86be-d4445b6b6789'::uuid, +-- '40dfcce3-7f36-475e-969c-2b0b8633c856'::uuid, +-- '20792b7d-0f87-4f4b-81c6-616a8d76613a'::uuid, +-- '659d026e-3f47-4efe-899f-4129b5466228'::uuid, +-- '015a07f9-4005-4d2d-96dc-7f7d611ca51a'::uuid, +-- '600d164d-432c-40e8-a1f7-b4ebc112939e'::uuid, +-- 'b5e12c65-acdb-4439-a436-c762096e54d3'::uuid, +-- '34bfea2d-f312-4c13-bf4f-ac760236484c'::uuid, +-- '3f53c9ef-0058-49f3-b62c-b8cad5f92d4d'::uuid, +-- '14a4ae19-a857-44bb-a9e6-3df7e563847d'::uuid, +-- '1a547f81-ff98-4d2c-8fb4-9151e28b8d7a'::uuid, +-- '3c4a0e1d-03a1-4d2b-9b6f-4521b52f491d'::uuid, +-- '4d5b281f-14b8-42d7-bb1e-9c6118da813f'::uuid, +-- '5e6c3920-25b9-43e9-a58e-28d6e49516b2'::uuid, +-- '6f7d4a21-36ca-44fb-99e0-59a7e60627c3'::uuid, +-- '7a8e5b22-47db-45fc-aa91-8a08f71738d4'::uuid, +-- '8b9f6c23-58ec-460d-ab02-3b19f82849e5'::uuid, +-- '9caf7d24-69fd-471e-bc13-6a2a09395af6'::uuid, +-- 'adb08e25-7b0e-482f-cd24-7b3b1a4a6b07'::uuid +-- ]) as timeseries_id, +-- ( +-- with daily_series as ( +-- select ts, date(ts) as day, rownum +-- from generate_series(now() - interval '1 year', now(), '1 day'::interval) with ordinality as t(ts, rownum) +-- ), +-- daily_value as ( +-- select ts, day, date_part('month', ts) as m_val, rownum, random() as val +-- from daily_series +-- order by day +-- ), +-- daily_wave as ( +-- select +-- day, +-- 1 + .2 * cos(rownum * 6.28/180) as p_mod +-- from daily_series +-- day +-- ) +-- select dv.ts as time, (500 + 20 * val) * p_mod * rownum * p_inc as value +-- from daily_value dv +-- inner join daily_wave dw on dv.day=dw.day +-- inner join seed_data_overrides o on dv.m_val=o.m_val +-- order by ts +-- ) m +-- on conflict do nothing; diff --git 
a/api/queries/goes.sql b/api/queries/goes.sql index 93ebf80a..0cf7a57e 100644 --- a/api/queries/goes.sql +++ b/api/queries/goes.sql @@ -1,49 +1,90 @@ -- name: GoesTelemetrySourceList :many -select * from v_goes_telemetry_source; +select * +from v_goes_telemetry_source; -- name: GoesPlatformConfigFileCreate :one -insert into goes_platform_config_file (goes_telemetry_source_id, project_id, name, alias, size_bytes, content, created_by) -values (sqlc.arg(goes_telemetry_source_id), sqlc.arg(project_id), sqlc.arg(name), sqlc.arg(alias), sqlc.arg(size_bytes), sqlc.arg(content)::xml, sqlc.arg(created_by)) +insert into goes_platform_config_file ( + goes_telemetry_source_id, + project_id, + name, + alias, + size_bytes, + content, + created_by +) values ( + sqlc.arg(goes_telemetry_source_id), + sqlc.arg(project_id), + sqlc.arg(name), + sqlc.arg(alias), + sqlc.arg(size_bytes), + sqlc.arg(content)::xml, + sqlc.arg(created_by) +) returning id; -- name: GoesPlatformConfigFileGet :one -select * from goes_platform_config_file where id=$1; +select * +from goes_platform_config_file +where id = $1 +and not deleted; --- name: GoesPlatformConfigFileUpdate :exec -update goes_platform_config_file set - name=sqlc.arg(name), - alias=sqlc.arg(alias), - size_bytes=sqlc.arg(size_bytes), - content=sqlc.arg(content)::xml, - committed=false, - updated_at=sqlc.arg(updated_at), - updated_by=sqlc.arg(updated_by) -where id=sqlc.arg(id); +-- name: GoesPlatformConfigFileListUncommitedForProject :one +select * +from goes_platform_config_file +where project_id = $1 +and not committed +and not deleted; --- name: GoesPlatformConfigFileCommit :batchexec +-- name: GoesPlatformConfigFileUpdate :exec update goes_platform_config_file set - committed=true, - committed_at=$2 -where id=$1; + name = sqlc.arg(name), + alias = sqlc.arg(alias), + size_bytes = sqlc.arg(size_bytes), + content = sqlc.arg(content)::xml, + deleted = false, + deleted_at = null, + deleted_by = null +where id = sqlc.arg(id); -- name: 
GoesPlatformConfigFileDelete :exec -delete from goes_platform_config_file where id=$1; +update goes_platform_config_file set + deleted = true, + deleted_at = now(), + deleted_by = sqlc.arg(deleted_by) +where id = sqlc.arg(id); -- name: GoesTelemetryConfigMappingsCreateBatch :batchexec -insert into goes_telemetry_config_mappings (goes_platform_config_file_id, platform_sensor_key, timeseries_id) -values ($1, $2, $3) -on conflict on constraint unique_goes_platform_config_file_id_platform_sensor_key do nothing; +insert into goes_telemetry_config_mappings ( + goes_platform_config_file_id, + platform_sensor_key, + timeseries_id +) values ($1, $2, $3) +on conflict on constraint unique_goes_platform_config_file_id_platform_sensor_key +do update set timeseries_id = excluded.timeseries_id; + + +-- name: GoesTelemetryConfigMappingsDeleteBatch :batchexec +delete from goes_telemetry_config_mappings +where goes_platform_config_file_id = $1 +and platform_sensor_key = $2; -- name: GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile :exec -delete from goes_telemetry_config_mappings where goes_platform_config_file_id=$1; +delete from goes_telemetry_config_mappings +where goes_platform_config_file_id = $1; -- name: GoesTelemetryConfigMappingsList :many -select * from goes_telemetry_config_mappings where goes_platform_config_file_id=$1; +select * +from goes_telemetry_config_mappings +where goes_platform_config_file_id = $1 +and goes_platform_config_file_id in ( + select id from goes_platform_config_file where deleted = false +) +order by platform_sensor_key; diff --git a/api/queries/goes_commit.sql b/api/queries/goes_commit.sql new file mode 100644 index 00000000..a7cea29f --- /dev/null +++ b/api/queries/goes_commit.sql @@ -0,0 +1,215 @@ +-- name: GoesCommitGetActive :one +select * +from goes_commit +where + project_id = $1 + and goes_telemetry_source_id = $2 + and status = 'active' +order by created_at desc +limit 1; + + +-- name: GoesCommitGetByID :one +select * +from 
goes_commit +where id = $1; + + +-- name: GoesCommitCreatePending :one +insert into goes_commit ( + project_id, + goes_telemetry_source_id, + created_by, + status, + previous_commit_id, + idempotency_key, + mapping_set_id +) values ( + $1, $2, $3, 'pending', $4, $5, $6 +) +returning *; + + +-- name: GoesCommitMarkActive :exec +with target as ( + select + c.id, + c.project_id, + c.goes_telemetry_source_id + from goes_commit c + where c.id = sqlc.arg(id) +), +cleared as ( + update goes_commit c + set status = 'inactive' + where c.project_id = (select t.project_id from target t) + and c.goes_telemetry_source_id = (select t.goes_telemetry_source_id from target t) + and c.status = 'active' +) +update goes_commit c +set status = 'active', + opendcs_response = sqlc.arg(opendcs_response)::jsonb +where c.id = (select t.id from target t); + + +-- name: GoesCommitMarkFailed :exec +update goes_commit set status = 'failed', opendcs_response = sqlc.arg(opendcs_response)::jsonb +where id = sqlc.arg(id); + + +-- name: GoesPlatformConfigFilesListForCommit :many +select id, name, alias, content +from goes_platform_config_file +where project_id = $1 +and goes_telemetry_source_id = $2 +and deleted = false +order by created_at asc; + + +-- name: GoesPlatformConfigFilesListForCommitByCommitID :many +select id, name, alias, committed_content::text as content +from goes_platform_config_file +where project_id = $1 +and goes_telemetry_source_id = $2 +and committed_commit_id = $3 +order by created_at asc; + + +-- name: GoesPlatformConfigFileCommitArtifactsUpdate :exec +update goes_platform_config_file set + committed_content = sqlc.arg(committed_content)::xml, + committed = true, + committed_at = now(), + committed_commit_id = sqlc.arg(committed_commit_id) +where id = sqlc.arg(id); + + +-- name: GoesTelemetryConfigMappingsListForFiles :many +select goes_platform_config_file_id, platform_sensor_key, timeseries_id +from goes_telemetry_config_mappings +where goes_platform_config_file_id = 
any(sqlc.arg(file_ids)::uuid[]) +order by goes_platform_config_file_id, platform_sensor_key; + + +-- name: GoesMappingSetCreate :one +insert into goes_mapping_set (project_id, created_by, content_hash, idempotency_key) +values ($1, $2, $3, $4) +returning *; + + +-- name: GoesMappingSetEntryCreateBatch :copyfrom +insert into goes_mapping_set_entry ( + mapping_set_id, + goes_platform_config_file_id, + platform_sensor_key, + timeseries_id +) values ($1, $2, $3, $4); + + +-- name: GoesMappingSetEntriesList :many +select goes_platform_config_file_id, platform_sensor_key, timeseries_id +from goes_mapping_set_entry +where mapping_set_id = $1 +order by goes_platform_config_file_id, platform_sensor_key; + + +-- name: GoesTelemetryConfigMappingsReplaceForProjectFromMappingSet :exec +with file_ids as ( + select id + from goes_platform_config_file + where project_id = $1 + and goes_telemetry_source_id = $2 +) +, del as ( + delete from goes_telemetry_config_mappings m + using file_ids f + where m.goes_platform_config_file_id = f.id +) +insert into goes_telemetry_config_mappings ( + goes_platform_config_file_id, + platform_sensor_key, + timeseries_id +) +select + e.goes_platform_config_file_id, + e.platform_sensor_key, + e.timeseries_id +from goes_mapping_set_entry e +join file_ids f on f.id = e.goes_platform_config_file_id +where e.mapping_set_id = $3 +on conflict on constraint unique_goes_platform_config_file_id_platform_sensor_key +do update set timeseries_id = excluded.timeseries_id; + + +-- name: GoesPlatformRegistryListByProject :many +select platform_key, platform_id, site_name +from goes_platform_registry +where project_id = $1 +and goes_telemetry_source_id = $2 +order by platform_key; + + +-- name: GoesPlatformRegistryConflicts :many +select platform_key, project_id +from goes_platform_registry +where goes_telemetry_source_id = $1 +and platform_key = any(sqlc.arg(platform_keys)::text[]) +and project_id <> $2; + + +-- name: GoesPlatformRegistryUpsert :batchexec +insert 
into goes_platform_registry ( + platform_key, + project_id, + goes_telemetry_source_id, + platform_id, + site_name, + commit_id, + updated_at +) values ($1, $2, $3, $4, $5, $6, now()) +on conflict (platform_key) do update set + project_id = excluded.project_id, + goes_telemetry_source_id = excluded.goes_telemetry_source_id, + platform_id = excluded.platform_id, + site_name = excluded.site_name, + commit_id = excluded.commit_id, + updated_at = now(); + + +-- name: GoesPlatformRegistryDeleteMissing :exec +delete from goes_platform_registry r +where r.project_id = $1 +and r.goes_telemetry_source_id = $2 +and not (r.platform_key = any(sqlc.arg(platform_keys)::text[])); + + +-- name: GoesPlatformConfigFileRestoreForRollback :exec +update goes_platform_config_file set + content = sqlc.arg(content)::xml, + committed_content = sqlc.arg(content)::xml, + committed = true, + committed_at = now(), + committed_commit_id = sqlc.arg(committed_commit_id), + deleted = false, + deleted_at = null, + deleted_by = null +where id = sqlc.arg(id); + + +-- name: GoesPlatformConfigFileSoftDeleteNotInSet :exec +update goes_platform_config_file f set + deleted = true, + deleted_at = now(), + deleted_by = $3 +where f.project_id = $1 +and f.goes_telemetry_source_id = $2 +and not (f.id = any(sqlc.arg(file_ids)::uuid[])) +and f.deleted = false; + + +-- name: GoesPlatformConfigFileCommit :batchexec +update goes_platform_config_file set + committed = true, + committed_at = sqlc.arg(committed_at), + committed_commit_id = sqlc.arg(committed_commit_id) +where id = sqlc.arg(id); diff --git a/opendcs/Dockerfile b/opendcs/Dockerfile index 8b3fc773..c183d016 100644 --- a/opendcs/Dockerfile +++ b/opendcs/Dockerfile @@ -56,7 +56,6 @@ RUN rm -rf /opt/java/openjdk/release RUN apk add --no-cache coreutils ca-certificates -ENV INSTRUMENTATION_DCS_CONFIG=${HOME}/midas_config ENV DCSTOOL_USERDIR=/opt/opendcs ENV DATABASE_URL=/opt/opendcs/edit-db ENV OPENDCS_IMPORT_DIR=/opt/opendcs/import @@ -72,7 +71,7 @@ COPY 
--chown=opendcs:opendcs --from=maven_builder /opt/rsgis/target/rsgis.jar ${ COPY --chown=opendcs:opendcs ./logback.xml ${DCSTOOL_HOME}/logback.xml COPY --chown=opendcs:opendcs ./decodes.properties ${DCSTOOL_HOME}/decodes.properties -COPY --chown=opendcs:opendcs ./midas_config ${INSTRUMENTATION_DCS_CONFIG} +COPY --chown=opendcs:opendcs ./midas_config /usr/local/share/midas_config COPY --chown=opendcs:opendcs --from=go_builder /opendcs-wrapper /usr/local/bin/opendcs-wrapper USER opendcs diff --git a/opendcs/app.go b/opendcs/app.go new file mode 100644 index 00000000..2cde9be9 --- /dev/null +++ b/opendcs/app.go @@ -0,0 +1,54 @@ +package main + +import ( + "bytes" + "context" + "crypto/subtle" + "fmt" + "log/slog" + "net/http" + "os" + "os/exec" + "strings" + "sync" + + "github.com/danielgtaylor/huma/v2" +) + +type App struct { + cfg Config + logger *slog.Logger + dbimport *dbimport + mu sync.Mutex + httpServer *http.Server +} + +func (a *App) checkKey(got string) error { + if subtle.ConstantTimeCompare([]byte(got), []byte(a.cfg.AuthToken)) != 1 { + return huma.NewError(http.StatusUnauthorized, "unauthorized") + } + return nil +} + +func (a *App) runDbUtil(ctx context.Context, mode, value string) (string, error) { + mode = strings.ToLower(strings.TrimSpace(mode)) + value = strings.TrimSpace(value) + if mode != "id" && mode != "site" { + return "", fmt.Errorf("invalid delete mode %q", mode) + } + if value == "" { + return "", fmt.Errorf("empty delete value") + } + + cmd := exec.CommandContext(ctx, "decj", "decodes.tsdb.DbUtil") + cmd.Env = os.Environ() + + input := fmt.Sprintf("delete-platform %s %s\nquit\n", mode, value) + cmd.Stdin = bytes.NewBufferString(input) + + out, err := cmd.CombinedOutput() + if err != nil { + return string(out), fmt.Errorf("dbutil error: %w: %s", err, string(out)) + } + return string(out), nil +} diff --git a/opendcs/dbimport.go b/opendcs/dbimport.go new file mode 100644 index 00000000..3adfabb3 --- /dev/null +++ b/opendcs/dbimport.go @@ -0,0 
+1,189 @@ +package main + +import ( + "bytes" + "context" + "errors" + "fmt" + "io" + "log/slog" + "net/http" + "os" + "os/exec" + "path/filepath" + "slices" + "sort" + "strconv" + "syscall" +) + +type dbimport struct { + cfg Config + logger *slog.Logger +} + +func NewDbimport(ctx context.Context, cfg Config, logger *slog.Logger) (*dbimport, error) { + i := &dbimport{ + cfg: cfg, + logger: logger, + } + files, err := i.resolveImportFiles([]string{midasConfigDir}) + if err != nil { + return nil, fmt.Errorf("unable to resolve initial import files: %w", err) + } + if len(files) == 0 { + return nil, errors.New("no inital import files") + } + + // TODO: we should also query any existing platform configs that exist for this opendcs instance (fetched from the API) + + _, err = i.runDbImport(ctx, files) + if err != nil { + return nil, fmt.Errorf("validation failed: %w", err) + } + return i, nil +} + +type dbimportParams struct { + Files []string `json:"files"` + ValidateOnly bool `json:"validate_only"` +} + +type dbimportOutput struct { + Status int `json:"status"` + Log string `json:"log,omitempty"` +} + +func (i *dbimport) ProcessAtomic(ctx context.Context, req *dbimportParams) (*dbimportOutput, error) { + files, err := i.resolveImportFiles(req.Files) + if err != nil { + return nil, err + } + if len(files) == 0 { + return nil, errors.New("no import files") + } + + dbimportArgs := []string{} + if req.ValidateOnly { + dbimportArgs = append(dbimportArgs, "-v") + } + + logOut, err := i.runDbImport(ctx, files, dbimportArgs...) 
+ if err != nil { + return nil, fmt.Errorf("import failed: %w", err) + } + + if req.ValidateOnly { + return &dbimportOutput{Status: http.StatusOK, Log: logOut}, nil + } + + return &dbimportOutput{Log: logOut}, nil +} + +func (i *dbimport) resolveImportFiles(files []string) ([]string, error) { + var out []string + for _, f := range files { + info, err := os.Stat(f) + if err != nil { + return nil, err + } + if info.IsDir() { + err := filepath.WalkDir(f, func(p string, d os.DirEntry, err error) error { + if err != nil { + return err + } + if d.IsDir() { + return nil + } + out = append(out, p) + return nil + }) + if err != nil { + return nil, err + } + continue + } + out = append(out, f) + } + sort.Strings(out) + return out, nil +} + +func (i *dbimport) runDbImport(ctx context.Context, files []string, extraArgs ...string) (string, error) { + args := slices.Clone(extraArgs) + args = append(args, files...) + cmd := exec.CommandContext(ctx, "dbimport", args...) + cmd.Env = os.Environ() + + var buf bytes.Buffer + mw := io.MultiWriter(os.Stdout, &buf) + cmd.Stdout = mw + cmd.Stderr = mw + + err := cmd.Run() + return buf.String(), err +} + +func (i *dbimport) startRoutingScheduler(ctx context.Context) error { + lockfile := filepath.Join(os.TempDir(), "rs.lock") + + // Ensure lockfile exists (create empty if not) + if _, err := os.Stat(lockfile); os.IsNotExist(err) { + if err := os.WriteFile(lockfile, []byte{}, 0644); err != nil { + return fmt.Errorf("failed to create lockfile: %w", err) + } + } + + // Kill previous rs process if lockfile contains a PID + if pid, err := readLockfilePID(lockfile); err == nil && pid > 0 { + if err := killProcess(pid); err != nil && !errors.Is(err, os.ErrNotExist) { + i.logger.Warn("failed to kill previous rs process", "pid", pid, "error", err) + } + _ = os.Remove(lockfile) + } + + cmd := exec.CommandContext(ctx, "rs", i.cfg.RoutingSpec) + cmd.Env = os.Environ() + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + + if err := cmd.Start(); err != 
nil { + return fmt.Errorf("rs failed to run: %w", err) + } + + return nil +} + +// Helper to read PID from lockfile +func readLockfilePID(lockfile string) (int, error) { + data, err := os.ReadFile(lockfile) + if err != nil { + return 0, err + } + pid, err := strconv.Atoi(string(data)) + if err != nil { + return 0, err + } + return pid, nil +} + +// Helper to kill process by PID +func killProcess(pid int) error { + proc, err := os.FindProcess(pid) + if err != nil { + return err + } + return proc.Signal(syscall.SIGTERM) +} + +type Response[T any] struct { + Body T +} + +func NewResponse[T any](body T) *Response[T] { + return &Response[T]{Body: body} +} + +type KeyQueryParam struct { + Key string `query:"key"` +} diff --git a/opendcs/go.mod b/opendcs/go.mod index 1e7e5c72..b7c8ac6b 100644 --- a/opendcs/go.mod +++ b/opendcs/go.mod @@ -3,7 +3,9 @@ module github.com/USACE/instrumentation-api/opendcs go 1.25.5 require ( + github.com/caarlos0/env/v11 v11.3.1 github.com/danielgtaylor/huma/v2 v2.34.1 + github.com/google/uuid v1.6.0 gocloud.dev v0.44.0 ) @@ -29,7 +31,6 @@ require ( github.com/aws/smithy-go v1.23.2 // indirect github.com/go-logr/logr v1.4.3 // indirect github.com/go-logr/stdr v1.2.2 // indirect - github.com/google/uuid v1.6.0 // indirect github.com/google/wire v0.7.0 // indirect github.com/googleapis/gax-go/v2 v2.15.0 // indirect go.opentelemetry.io/auto/sdk v1.1.0 // indirect diff --git a/opendcs/go.sum b/opendcs/go.sum index bfa4b0ca..390891e2 100644 --- a/opendcs/go.sum +++ b/opendcs/go.sum @@ -58,6 +58,8 @@ github.com/aws/aws-sdk-go-v2/service/sts v1.39.1 h1:mLlUgHn02ue8whiR4BmxxGJLR2gw github.com/aws/aws-sdk-go-v2/service/sts v1.39.1/go.mod h1:E19xDjpzPZC7LS2knI9E6BaRFDK43Eul7vd6rSq2HWk= github.com/aws/smithy-go v1.23.2 h1:Crv0eatJUQhaManss33hS5r40CG3ZFH+21XSkqMrIUM= github.com/aws/smithy-go v1.23.2/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0= +github.com/caarlos0/env/v11 v11.3.1 h1:cArPWC15hWmEt+gWk7YBi7lEXTXCvpaSdCiZE2X5mCA= 
+github.com/caarlos0/env/v11 v11.3.1/go.mod h1:qupehSf/Y0TUTsxKywqRt/vJjN5nz6vauiYEUUr8P4U= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 h1:aQ3y1lwWyqYPiWZThqv1aFbZMiM9vblcSArJRf2Irls= diff --git a/opendcs/logback.xml b/opendcs/logback.xml index ec3699f7..b8169db4 100644 --- a/opendcs/logback.xml +++ b/opendcs/logback.xml @@ -23,7 +23,9 @@ - + + + diff --git a/opendcs/main.go b/opendcs/main.go index 86d9ffd5..795a9666 100644 --- a/opendcs/main.go +++ b/opendcs/main.go @@ -3,928 +3,294 @@ package main import ( "bytes" "context" - "crypto/subtle" + "encoding/json" "errors" "fmt" "io" + "log" "log/slog" "net/http" "net/url" "os" - "os/exec" "os/signal" - "path" "path/filepath" - "sort" "strings" - "sync" "syscall" "time" + "github.com/caarlos0/env/v11" "github.com/danielgtaylor/huma/v2" "github.com/danielgtaylor/huma/v2/adapters/humago" + "github.com/google/uuid" "gocloud.dev/blob" _ "gocloud.dev/blob/s3blob" ) const ( - VERSION = "1.0.0" + VERSION = "1.0.0" + midasConfigDir = "/usr/local/share/midas_config" ) type Config struct { - InstrConfigDir string - ImportDir string - RoutingSpec string - AuthToken string - ListenAddr string - LogDir string - - DcsToolUserDir string - DecodesProps string - - DataloadS3Root string - AWSEndpointURL string + RoutingSpec string `env:"OPENDCS_ROUTING_SPEC" envDefault:"goes"` + AuthToken string `env:"OPENDCS_IMPORT_TOKEN"` + ListenAddr string `env:"OPENDCS_HTTP_ADDR" envDefault:":8080"` + MidasApiHost string `env:"MIDAS_API_HOST" envDefault:"http://api:80"` + S3BucketURL string `env:"S3_LOAD_DATAROOT" envDefault:"s3://corpmsmap-data-incoming/instrumentation/goes"` } -type Response[T any] struct { - Body T -} - -func NewResponse[T any](body T) *Response[T] { return &Response[T]{Body: body} } - -type KeyQueryParam struct { - Key string `query:"key" 
required:"true" doc:"API key for authentication"` -} - -type ImportRequest struct { - Files []string `json:"files"` - ValidateOnly bool `json:"validate_only"` -} - -type ImportResponse struct { - Status string `json:"status"` - ValidateLog string `json:"validate_log,omitempty"` - ImportLog string `json:"import_log,omitempty"` - CommandOutput string `json:"command_output,omitempty"` - Error string `json:"error,omitempty"` -} - -type TryMutex struct { - ch chan struct{} -} - -func NewTryMutex() *TryMutex { - m := &TryMutex{ch: make(chan struct{}, 1)} - m.ch <- struct{}{} - return m -} - -func (m *TryMutex) TryLock() bool { - select { - case <-m.ch: - return true - default: - return false - } -} - -func (m *TryMutex) Unlock() { - select { - case m.ch <- struct{}{}: - default: - } -} - -type CommandRunner struct{} - -func (r *CommandRunner) Run(ctx context.Context, name string, args []string, env []string) ([]byte, error) { - slog.Info("exec command", "name", name, "args", strings.Join(args, " ")) - cmd := exec.CommandContext(ctx, name, args...) 
- if env != nil { - cmd.Env = env - } - - var buf bytes.Buffer - cmd.Stdout = &buf - cmd.Stderr = &buf - - err := cmd.Run() - out := buf.Bytes() - - if ctx.Err() != nil { - return out, fmt.Errorf("command canceled: %w", ctx.Err()) - } - if err != nil { - return out, fmt.Errorf("%s failed: %w", name, err) - } - return out, nil -} - -type RouterScheduler struct { - mu sync.Mutex - cmd *exec.Cmd - - logDir string - routingSpec string -} - -func NewRouterScheduler(logDir, routingSpec string) *RouterScheduler { - return &RouterScheduler{ - logDir: logDir, - routingSpec: routingSpec, - } -} - -func (s *RouterScheduler) Start() error { - s.mu.Lock() - defer s.mu.Unlock() - - if s.cmd != nil { - return errors.New("routing scheduler already running") - } - - runtimeLogPath := filepath.Join(s.logDir, "runtime.log") - slog.Info("starting routing scheduler 'rs'", "spec", s.routingSpec, "runtimeLogPath", runtimeLogPath) - - cmd := exec.Command("rs", "-l", runtimeLogPath, s.routingSpec) - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - if err := cmd.Start(); err != nil { - return fmt.Errorf("failed to start rs: %w", err) - } - s.cmd = cmd - return nil -} - -func (s *RouterScheduler) Stop(ctx context.Context) error { - s.mu.Lock() - cmd := s.cmd - s.mu.Unlock() - - if cmd == nil || cmd.Process == nil { - return nil - } - - slog.Info("stopping routing scheduler") - _ = cmd.Process.Signal(syscall.SIGTERM) - - done := make(chan error, 1) - go func() { done <- cmd.Wait() }() - - select { - case err := <-done: - s.mu.Lock() - s.cmd = nil - s.mu.Unlock() - - if err != nil { - slog.Warn("routing scheduler exited with error during stop", "err", err) - } - return nil - case <-ctx.Done(): - slog.Warn("routing scheduler did not stop in time; sending SIGKILL") - _ = cmd.Process.Kill() - <-done - - s.mu.Lock() - s.cmd = nil - s.mu.Unlock() - - return fmt.Errorf("rs stop timeout: %w", ctx.Err()) - } -} - -func (s *RouterScheduler) ForwardSignal(sig os.Signal) { - s.mu.Lock() - defer 
s.mu.Unlock() - if s.cmd != nil && s.cmd.Process != nil { - _ = s.cmd.Process.Signal(sig.(syscall.Signal)) - } -} - -type Importer struct { - cfg Config - rs *RouterScheduler - runner CommandRunner -} - -func NewImporter(cfg Config, rs *RouterScheduler) *Importer { - return &Importer{cfg: cfg, rs: rs, runner: CommandRunner{}} -} - -func (i *Importer) InitialImport(ctx context.Context) error { - slog.Info("performing initial import", "dir", i.cfg.InstrConfigDir) - - files, err := i.findInitialXMLFiles() - if err != nil { - return fmt.Errorf("find initial xml files: %w", err) - } - if len(files) == 0 { - slog.Info("no initial XML files found") - return nil - } - - args := make([]string, 0, len(files)+2) - args = append(args, "-l", "/proc/self/fd/1") - args = append(args, files...) +func main() { + logger := slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{Level: slog.LevelInfo})) + slog.SetDefault(logger) - slog.Info("initial dbimport", "command", "dbimport", "args", strings.Join(args, " ")) - _, err = i.runner.Run(ctx, "dbimport", args, nil) - if err != nil { - return fmt.Errorf("dbimport command failed: %w", err) + var cfg Config + if err := env.Parse(&cfg); err != nil { + log.Fatalf("error initializing Config from env: %v", err) } - return nil -} -func (i *Importer) ProcessAtomic(ctx context.Context, req ImportRequest) ImportResponse { - ctx, cancel := context.WithTimeout(ctx, 15*time.Minute) + ctx, cancel := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM) defer cancel() - files, err := i.resolveImportFiles(req.Files) - if err != nil { - return ImportResponse{Status: "error", Error: err.Error()} - } - if len(files) == 0 { - return ImportResponse{Status: "error", Error: "no xml files found to import"} - } - - basePropsBytes, err := os.ReadFile(i.cfg.DecodesProps) - if err != nil { - return ImportResponse{Status: "error", Error: fmt.Sprintf("failed to read decodes.properties: %v", err)} - } - baseProps := 
string(basePropsBytes) - - liveEditDB, err := parseEditDBLocation(baseProps) - if err != nil { - return ImportResponse{Status: "error", Error: err.Error()} - } - - ts := time.Now().UTC().Format("20060102-150405") - - // NOTE: stage on the same filesystem as liveEditDB for atomic rename semantics - stageRoot := filepath.Dir(liveEditDB) - stageUserDir := filepath.Join(stageRoot, ".opendcs-userdir-stage-"+ts) - stageEditDB := filepath.Join(stageRoot, ".opendcs-edit-db-stage-"+ts) - backupEditDB := liveEditDB + ".prev-" + ts - - if err := os.MkdirAll(stageUserDir, 0o775); err != nil { - return ImportResponse{Status: "error", Error: fmt.Sprintf("failed to create staging userdir: %v", err)} - } - defer func() { _ = os.RemoveAll(stageUserDir) }() - - if err := copyDir(liveEditDB, stageEditDB); err != nil { - return ImportResponse{Status: "error", Error: fmt.Sprintf("failed to stage edit-db copy: %v", err)} - } - defer func() { _ = os.RemoveAll(stageEditDB) }() - - stageProps := rewriteEditDBLocation(baseProps, stageEditDB) - stagePropsPath := filepath.Join(stageUserDir, "decodes.properties") - if err := os.WriteFile(stagePropsPath, []byte(stageProps), 0o664); err != nil { - return ImportResponse{Status: "error", Error: fmt.Sprintf("failed to write staging decodes.properties: %v", err)} - } - - env := append(os.Environ(), "DCSTOOL_USERDIR="+stageUserDir) - - validateLogPath := filepath.Join(i.cfg.LogDir, fmt.Sprintf("dbimport-validate-%s.log", ts)) - importLogPath := filepath.Join(i.cfg.LogDir, fmt.Sprintf("dbimport-import-%s.log", ts)) - - valArgs := append([]string{"-v", "-l", validateLogPath}, files...) 
- valOut, valErr := i.runner.Run(ctx, "dbimport", valArgs, env) - - resp := ImportResponse{ - ValidateLog: readFileOrEmpty(validateLogPath), - CommandOutput: string(valOut), - } - - if valErr != nil { - resp.Status = "validation_failed" - resp.Error = valErr.Error() - return resp - } - - if req.ValidateOnly { - resp.Status = "validation_ok" - return resp - } - - impArgs := append([]string{"-l", importLogPath}, files...) - impOut, impErr := i.runner.Run(ctx, "dbimport", impArgs, env) - resp.ImportLog = readFileOrEmpty(importLogPath) - resp.CommandOutput = string(impOut) - - if impErr != nil { - resp.Status = "import_failed" - resp.Error = impErr.Error() - return resp - } - - stopCtx, stopCancel := context.WithTimeout(ctx, 30*time.Second) - defer stopCancel() - if err := i.rs.Stop(stopCtx); err != nil { - return ImportResponse{Status: "error", Error: fmt.Sprintf("failed stopping rs: %v", err)} - } - - if err := atomicSwapDir(liveEditDB, stageEditDB, backupEditDB); err != nil { - _ = i.rs.Start() - return ImportResponse{Status: "error", Error: fmt.Sprintf("failed swapping edit-db: %v", err)} - } - - if err := i.rs.Start(); err != nil { - slog.Error("rs failed to start after commit; rolling back", "err", err) - - if rbErr := atomicRollbackDir(liveEditDB, backupEditDB); rbErr != nil { - return ImportResponse{ - Status: "error", - Error: fmt.Sprintf("rs restart failed (%v) and rollback failed (%v)", err, rbErr), - } + if len(os.Args) > 2 && os.Args[1] == "upload" { + filePath := os.Args[2] + if cfg.S3BucketURL == "" { + log.Fatalf("S3_LOAD_DATAROOT env not set") } - _ = i.rs.Start() - return ImportResponse{Status: "error", Error: fmt.Sprintf("rs restart failed; rolled back to previous db: %v", err)} - } - - resp.Status = "success" - return resp -} - -func (i *Importer) findInitialXMLFiles() ([]string, error) { - var results []string - root := i.cfg.InstrConfigDir - - err := filepath.WalkDir(root, func(pathStr string, d os.DirEntry, err error) error { + bucket, err := 
blob.OpenBucket(ctx, cfg.S3BucketURL) if err != nil { - return err - } - if d.IsDir() { - return nil + log.Fatalf("failed to open bucket: %v", err) } - if strings.EqualFold(filepath.Ext(pathStr), ".xml") { - results = append(results, pathStr) - } - return nil - }) - if err != nil { - return nil, err - } - - sort.Strings(results) - return results, nil -} - -func (i *Importer) resolveImportFiles(files []string) ([]string, error) { - var resolved []string + defer bucket.Close() - if len(files) == 0 { - err := filepath.WalkDir(i.cfg.ImportDir, func(pathStr string, d os.DirEntry, err error) error { - if err != nil { - return err - } - if d.IsDir() { - return nil - } - if strings.EqualFold(filepath.Ext(pathStr), ".xml") { - resolved = append(resolved, pathStr) - } - return nil - }) + file, err := os.Open(filePath) if err != nil { - return nil, fmt.Errorf("walk import dir: %w", err) - } - sort.Strings(resolved) - return resolved, nil - } - - baseImport := filepath.Clean(i.cfg.ImportDir) - - for _, f := range files { - if !filepath.IsAbs(f) { - f = filepath.Join(i.cfg.ImportDir, f) + log.Fatalf("failed to open file: %v", err) } - clean := filepath.Clean(f) - if !strings.HasPrefix(clean, baseImport+string(os.PathSeparator)) && clean != baseImport { - return nil, fmt.Errorf("file %q is outside allowed import dir %q", clean, i.cfg.ImportDir) - } - resolved = append(resolved, clean) - } - - sort.Strings(resolved) - return resolved, nil -} - -type Uploader struct { - root string - awsEndpoint string + defer file.Close() - once sync.Once - b *blob.Bucket - err error -} - -func NewUploader(root, endpoint string) *Uploader { - return &Uploader{root: root, awsEndpoint: endpoint} -} - -func (u *Uploader) Bucket(ctx context.Context) (*blob.Bucket, error) { - u.once.Do(func() { - if u.root == "" { - u.err = fmt.Errorf("DATALOAD_S3_ROOT is not set; cannot upload") - return - } - bucketURL, _, err := buildBucketURLFromRoot(u.root, u.awsEndpoint) + key := filepath.Base(filePath) + 
writer, err := bucket.NewWriter(ctx, key, nil) if err != nil { - u.err = err - return + log.Fatalf("failed to create writer: %v", err) } - b, err := blob.OpenBucket(ctx, bucketURL) - if err != nil { - u.err = fmt.Errorf("failed to open bucket %q: %w", bucketURL, err) - return + if _, err := io.Copy(writer, file); err != nil { + log.Fatalf("failed to upload file: %v", err) } - u.b = b - }) - return u.b, u.err -} - -func (u *Uploader) Close() { - if u.b != nil { - if err := u.b.Close(); err != nil { - slog.Warn("failed to close bucket", "err", err) + if err := writer.Close(); err != nil { + log.Fatalf("failed to close writer: %v", err) } + logger.InfoContext(ctx, "File uploaded", "file", filePath, "bucket", cfg.S3BucketURL, "key", key) + return } -} - -func (u *Uploader) RunUploadCLI(ctx context.Context, filePath string) error { - if u.root == "" { - return fmt.Errorf("DATALOAD_S3_ROOT is not set; cannot upload") - } - - const app = "goes" - - stat, err := os.Stat(filePath) - if err != nil { - return fmt.Errorf("input file %q does not exist: %w", filePath, err) - } - if stat.Size() == 0 { - if err := os.Remove(filePath); err != nil && !os.IsNotExist(err) { - slog.Warn("failed to remove empty file", "file", filePath, "err", err) - } - return nil - } - - bucketURL, prefix, err := buildBucketURLFromRoot(u.root, u.awsEndpoint) - if err != nil { - return err - } - - b, err := u.Bucket(ctx) - if err != nil { - return err - } - - base := filepath.Base(filePath) - platform := derivePlatformFromFilename(base) - key := path.Join(prefix, app, platform, base) - - slog.Info("uploading file to bucket", "file", filePath, "bucketURL", bucketURL, "key", key) - - f, err := os.Open(filePath) - if err != nil { - return fmt.Errorf("failed to open file %q: %w", filePath, err) - } - defer f.Close() - w, err := b.NewWriter(ctx, key, nil) + i, err := NewDbimport(ctx, cfg, logger) if err != nil { - return fmt.Errorf("failed to create blob writer: %w", err) + log.Fatalf("NewDbimport: %v", err) 
} - if _, err := io.Copy(w, f); err != nil { - _ = w.Close() - return fmt.Errorf("failed to stream file to bucket: %w", err) - } - if err := w.Close(); err != nil { - return fmt.Errorf("failed to finalize blob write: %w", err) - } - - if err := os.Remove(filePath); err != nil && !os.IsNotExist(err) { - slog.Warn("failed to remove local file after upload", "file", filePath, "err", err) - } - return nil -} - -type App struct { - cfg Config - importMu *TryMutex - - rs *RouterScheduler - importer *Importer - uploader *Uploader - - httpServer *http.Server -} - -func NewApp(cfg Config) *App { - rs := NewRouterScheduler(cfg.LogDir, cfg.RoutingSpec) - return &App{ + app := &App{ cfg: cfg, - importMu: NewTryMutex(), - rs: rs, - importer: NewImporter(cfg, rs), - uploader: NewUploader(cfg.DataloadS3Root, cfg.AWSEndpointURL), + logger: logger, + dbimport: i, } -} -func (a *App) Close() { - a.uploader.Close() -} - -func (a *App) RunServer(ctx context.Context) error { router := http.NewServeMux() api := humago.New(router, huma.DefaultConfig("OpenDCS Wrapper", VERSION)) - huma.Post(api, "/import", func(ctx context.Context, input *struct { - KeyQueryParam - Body ImportRequest `contentType:"application/json"` - }) (*Response[ImportResponse], error) { - if err := a.checkKey(input.Key); err != nil { - return nil, err - } - - if !a.importMu.TryLock() { - return nil, huma.NewError(http.StatusConflict, "import already in progress") - } - defer a.importMu.Unlock() - - resp := a.importer.ProcessAtomic(ctx, input.Body) - if resp.Status == "conflict" { - return nil, huma.NewError(http.StatusConflict, resp.Error) - } - return NewResponse(resp), nil - }) - - type RuntimeLogs struct { - Log string `json:"log" doc:"Contents of routing scheduler runtime log"` + type ValidateForm struct { + Files []huma.FormFile `form:"files" required:"true"` } - huma.Get(api, "/logs/runtime", func(ctx context.Context, input *struct { + + huma.Post(api, "/validate", func(ctx context.Context, input *struct { 
KeyQueryParam - }) (*Response[RuntimeLogs], error) { - if err := a.checkKey(input.Key); err != nil { + RawBody huma.MultipartFormFiles[ValidateForm] + }) (*Response[dbimportOutput], error) { + if err := app.checkKey(input.Key); err != nil { return nil, err } - runtimeLogPath := filepath.Join(a.cfg.LogDir, "runtime.log") - data, err := os.ReadFile(runtimeLogPath) - if err != nil { - return nil, huma.NewError(http.StatusInternalServerError, fmt.Sprintf("failed to read runtime log: %v", err)) - } - return NewResponse(RuntimeLogs{Log: string(data)}), nil - }) - - router.HandleFunc("/healthz", handleHealth) - - a.httpServer = &http.Server{ - Addr: a.cfg.ListenAddr, - Handler: router, - ReadHeaderTimeout: 5 * time.Second, - ReadTimeout: 30 * time.Second, - WriteTimeout: 30 * time.Second, - IdleTimeout: 2 * time.Minute, - MaxHeaderBytes: 1 << 20, - } - - errCh := make(chan error, 1) - go func() { - slog.Info("http api listening", "addr", a.cfg.ListenAddr) - if err := a.httpServer.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) { - errCh <- err - return + if !app.mu.TryLock() { + return nil, huma.NewError(http.StatusConflict, "dbimport validation already in progress") } - errCh <- nil - }() - - select { - case <-ctx.Done(): - shutdownCtx, cancel := context.WithTimeout(context.Background(), 20*time.Second) - defer cancel() - _ = a.httpServer.Shutdown(shutdownCtx) - return ctx.Err() - case err := <-errCh: - return err - } -} - -func (a *App) checkKey(key string) error { - if subtle.ConstantTimeCompare([]byte(key), []byte(a.cfg.AuthToken)) != 1 { - return huma.NewError(http.StatusUnauthorized, "invalid key") - } - return nil -} + defer app.mu.Unlock() -func (a *App) Run(ctx context.Context) error { - if err := a.importer.InitialImport(ctx); err != nil { - return err - } + formData := input.RawBody.Data() + files := make([]string, len(formData.Files)) + tmpDir := os.TempDir() - if err := a.rs.Start(); err != nil { - return err - } + for i, file := range 
formData.Files { + content, err := io.ReadAll(file) + if err != nil { + // TODO handle error appropriately + continue + } + tmpFile, err := os.CreateTemp(tmpDir, "upload-*"+filepath.Ext(file.Filename)) + if err != nil { + // TODO handle error appropriately + continue + } + defer tmpFile.Close() - go func() { - if err := a.RunServer(ctx); err != nil && !errors.Is(err, context.Canceled) { - slog.Error("http server failed", "err", err) + if _, err := tmpFile.Write(content); err != nil { + // TODO handle error appropriately + continue + } + files[i] = tmpFile.Name() } - }() - - <-ctx.Done() - - stopCtx, cancel := context.WithTimeout(context.Background(), 30*time.Second) - defer cancel() - _ = a.rs.Stop(stopCtx) - return ctx.Err() -} - -func main() { - initLogger() - - cfg, err := loadConfig() - if err != nil { - slog.Error("config error", "err", err) - os.Exit(1) - } - - app := NewApp(cfg) - defer app.Close() - if len(os.Args) > 1 && os.Args[1] == "upload" { - if len(os.Args) < 3 { - slog.Error("usage: upload requires file path argument", "argv", os.Args) - os.Exit(1) + dbiout, err := app.dbimport.ProcessAtomic(ctx, &dbimportParams{ + Files: files, + ValidateOnly: true, + }) + if err != nil { + return nil, huma.NewError(http.StatusConflict, err.Error()) } - if err := app.uploader.RunUploadCLI(context.Background(), os.Args[2]); err != nil { - slog.Error("upload failed", "err", err) - os.Exit(1) + if dbiout == nil { + return nil, huma.NewError(http.StatusInternalServerError, "response is nil; this should never happen") } - return - } - - ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM) - defer stop() - - slog.Info("starting opendcs wrapper (server mode)", "version", VERSION) - - if err := app.Run(ctx); err != nil && !errors.Is(err, context.Canceled) { - slog.Error("app exited with error", "err", err) - os.Exit(1) - } -} - -func initLogger() { - var level slog.Level - levelText := getenvDefault("LOGLEVEL", "INFO") - if err := 
level.UnmarshalText([]byte(levelText)); err != nil { - panic(err) - } - logger := slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{Level: level})) - slog.SetDefault(logger) -} - -func loadConfig() (Config, error) { - userDir := getenvDefault("DCSTOOL_USERDIR", "/opt/opendcs") - props := getenvDefault("DECODES_PROPERTIES_PATH", filepath.Join(userDir, "decodes.properties")) - - c := Config{ - InstrConfigDir: os.Getenv("INSTRUMENTATION_DCS_CONFIG"), - ImportDir: getenvDefault("OPENDCS_IMPORT_DIR", "/opt/opendcs/import"), - RoutingSpec: getenvDefault("ROUTING_SPEC", "goes"), - AuthToken: os.Getenv("OPENDCS_IMPORT_TOKEN"), - ListenAddr: getenvDefault("OPENDCS_HTTP_ADDR", ":8080"), - LogDir: getenvDefault("OPENDCS_LOG_DIR", "/opendcs_output"), - - DcsToolUserDir: userDir, - DecodesProps: props, - - DataloadS3Root: os.Getenv("DATALOAD_S3_ROOT"), - AWSEndpointURL: os.Getenv("AWS_ENDPOINT_URL"), - } - - if c.InstrConfigDir == "" { - return Config{}, fmt.Errorf("INSTRUMENTATION_DCS_CONFIG must be set") - } - if c.AuthToken == "" { - return Config{}, fmt.Errorf("OPENDCS_IMPORT_TOKEN must be set for secure access") - } - if err := os.MkdirAll(c.ImportDir, 0o775); err != nil { - return Config{}, fmt.Errorf("failed to ensure import dir exists (%s): %w", c.ImportDir, err) - } - if err := os.MkdirAll(c.LogDir, 0o775); err != nil { - return Config{}, fmt.Errorf("failed to ensure log dir exists (%s): %w", c.LogDir, err) - } - return c, nil -} + return NewResponse(*dbiout), nil + }) -func getenvDefault(key, def string) string { - if v := os.Getenv(key); v != "" { - return v + type GoesCommitDelete struct { + Mode string `json:"mode" enum:"id,site"` + Value string `json:"value"` } - return def -} - -func handleHealth(w http.ResponseWriter, _ *http.Request) { - w.WriteHeader(http.StatusOK) - _, _ = io.WriteString(w, "ok\n") -} -func readFileOrEmpty(pathStr string) string { - data, err := os.ReadFile(pathStr) - if err != nil { - return "" + type GoesCommitForm struct { + 
CommitID string `form:"commit_id"` + ProjectID string `form:"project_id"` + GoesTelemetrySourceID string `form:"goes_telemetry_source_id"` + Files []huma.FormFile `form:"files"` + Deletes []GoesCommitDelete `form:"deletes"` } - return string(data) -} -func parseEditDBLocation(props string) (string, error) { - lines := strings.SplitSeq(props, "\n") - for line := range lines { - trim := strings.TrimSpace(line) - if trim == "" || strings.HasPrefix(trim, "#") { - continue - } - if v, found := strings.CutPrefix(trim, "EditDatabaseLocation="); found { - v = strings.TrimSpace(v) - if v == "" { - return "", fmt.Errorf("EditDatabaseLocation present but empty in decodes.properties") - } - return v, nil + huma.Post(api, "/commit", func(ctx context.Context, input *struct { + KeyQueryParam + RawBody huma.MultipartFormFiles[GoesCommitForm] + }) (*Response[dbimportOutput], error) { + if err := app.checkKey(input.Key); err != nil { + return nil, err } - } - return "", fmt.Errorf("EditDatabaseLocation not found in decodes.properties") -} -func rewriteEditDBLocation(props, newPath string) string { - lines := strings.Split(props, "\n") - out := make([]string, 0, len(lines)+1) - replaced := false - - for _, line := range lines { - trim := strings.TrimSpace(line) - if strings.HasPrefix(trim, "EditDatabaseLocation=") { - out = append(out, "EditDatabaseLocation="+newPath) - replaced = true - continue + if !app.mu.TryLock() { + return nil, huma.NewError(http.StatusConflict, "import already in progress") } - out = append(out, line) - } - if !replaced { - out = append(out, "EditDatabaseLocation="+newPath) - } - return strings.Join(out, "\n") -} + defer app.mu.Unlock() -func atomicSwapDir(liveDir, stagedDir, backupDir string) error { - if _, err := os.Stat(stagedDir); err != nil { - return fmt.Errorf("staged dir missing: %w", err) - } + formData := input.RawBody.Data() - if _, err := os.Stat(liveDir); err == nil { - if err := os.Rename(liveDir, backupDir); err != nil { - return 
fmt.Errorf("failed to move live->backup: %w", err) + for _, d := range formData.Deletes { + if d.Mode == "" || d.Value == "" { + continue + } + if _, err := app.runDbUtil(ctx, d.Mode, d.Value); err != nil { + return nil, huma.NewError(http.StatusConflict, fmt.Sprintf("delete-platform failed (%s %s): %v", d.Mode, d.Value, err)) + } } - } - - if err := os.Rename(stagedDir, liveDir); err != nil { - _ = os.Rename(backupDir, liveDir) - return fmt.Errorf("failed to move staged->live: %w", err) - } - return nil -} - -func atomicRollbackDir(liveDir, backupDir string) error { - if _, err := os.Stat(backupDir); err != nil { - return fmt.Errorf("backup dir missing: %w", err) - } - - badDir := liveDir + ".bad-" + time.Now().UTC().Format("20060102-150405") - if _, err := os.Stat(liveDir); err == nil { - _ = os.Rename(liveDir, badDir) - } - if err := os.Rename(backupDir, liveDir); err != nil { - return fmt.Errorf("failed to restore backup->live: %w", err) - } - return nil -} + files := make([]string, len(formData.Files)) + tmpDir := os.TempDir() -func copyDir(src, dst string) error { - src = filepath.Clean(src) - dst = filepath.Clean(dst) - - info, err := os.Stat(src) - if err != nil { - return err - } - if !info.IsDir() { - return fmt.Errorf("source is not a directory: %s", src) - } + type GoesPlatformConfigFileCommitDTO struct { + ID uuid.UUID `json:"id"` + CommittedAt time.Time `json:"committed_at"` + CommitID string `json:"commit_id"` + } - if err := os.MkdirAll(dst, info.Mode().Perm()); err != nil { - return err - } + commitPayload := make([]GoesPlatformConfigFileCommitDTO, len(formData.Files)) + for i, file := range formData.Files { + content, err := io.ReadAll(file) + if err != nil { + // handle error appropriately + continue + } + tmpFile, err := os.CreateTemp(tmpDir, "upload-*"+filepath.Ext(file.Filename)) + if err != nil { + // handle error appropriately + continue + } + defer tmpFile.Close() - return filepath.WalkDir(src, func(p string, d os.DirEntry, walkErr error) 
error { - if walkErr != nil { - return walkErr + if _, err := tmpFile.Write(content); err != nil { + // handle error appropriately + continue + } + files[i] = tmpFile.Name() + fileBaseName := strings.TrimSuffix(file.Filename, ".xml") + fileID, err := uuid.Parse(fileBaseName) + if err != nil { + // handle error appropriately + continue + } + commitPayload[i] = GoesPlatformConfigFileCommitDTO{ + ID: fileID, + CommittedAt: time.Now().UTC(), + CommitID: formData.CommitID, + } } - rel, err := filepath.Rel(src, p) + + dbiout, err := app.dbimport.ProcessAtomic(ctx, &dbimportParams{ + Files: files, + ValidateOnly: false, + }) if err != nil { - return err + return nil, huma.NewError(http.StatusConflict, fmt.Errorf("dbimport.ProcessAtomic %w", err).Error()) } - if rel == "." { - return nil + if dbiout == nil { + return nil, huma.NewError(http.StatusInternalServerError, "response is nil; this should never happen") } - target := filepath.Join(dst, rel) - if d.IsDir() { - di, err := d.Info() - if err != nil { - return err - } - return os.MkdirAll(target, di.Mode().Perm()) + u, err := url.Parse(cfg.MidasApiHost) + if err != nil { + return nil, huma.NewError(http.StatusInternalServerError, "could not parse bad url base path") } + u.Path = fmt.Sprintf("/v4/callback/goes/%s/commit", formData.GoesTelemetrySourceID) + q := u.Query() + q.Add("key", cfg.AuthToken) + u.RawQuery = q.Encode() - fi, err := d.Info() + body, err := json.Marshal(commitPayload) if err != nil { - return err + return nil, huma.NewError(http.StatusInternalServerError, "failed to marshal callback payload") } - if fi.Mode()&os.ModeSymlink != 0 { - linkTarget, err := os.Readlink(p) - if err != nil { - return err - } - return os.Symlink(linkTarget, target) + resp, err := http.Post(u.String(), "application/json", bytes.NewReader(body)) + if err != nil { + return nil, huma.NewError(http.StatusBadGateway, err.Error()) } + dbiout.Status = resp.StatusCode - return copyFile(p, target, fi.Mode().Perm()) + return 
NewResponse(*dbiout), nil }) -} - -func copyFile(src, dst string, perm os.FileMode) error { - in, err := os.Open(src) - if err != nil { - return err - } - defer in.Close() - - if err := os.MkdirAll(filepath.Dir(dst), 0o775); err != nil { - return err - } - out, err := os.OpenFile(dst, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, perm) - if err != nil { - return err - } - defer func() { _ = out.Close() }() - - if _, err := io.Copy(out, in); err != nil { - return err - } - return out.Close() -} - -func buildBucketURLFromRoot(root, awsEndpoint string) (bucketURL string, prefix string, err error) { - u, err := url.Parse(root) - if err != nil { - return "", "", fmt.Errorf("invalid DATALOAD_S3_ROOT %q: %w", root, err) - } - if u.Scheme != "s3" { - return "", "", fmt.Errorf("DATALOAD_S3_ROOT %q must use s3:// scheme", root) - } - if u.Host == "" { - return "", "", fmt.Errorf("DATALOAD_S3_ROOT %q missing bucket name", root) - } - - prefix = strings.TrimPrefix(u.Path, "/") + router.HandleFunc("/healthz", func(w http.ResponseWriter, _ *http.Request) { + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte("ok")) + }) - v := u.Query() - if awsEndpoint != "" { - v.Set("endpoint", awsEndpoint) + app.httpServer = &http.Server{ + Addr: cfg.ListenAddr, + Handler: router, + ReadHeaderTimeout: 5 * time.Second, } - u.Path = "" - u.RawQuery = v.Encode() - bucketURL = u.Scheme + "://" + u.Host - if u.RawQuery != "" { - bucketURL += "?" 
+ u.RawQuery + if err := i.startRoutingScheduler(ctx); err != nil { + logger.ErrorContext(ctx, "error starting routing scheduler", "error", err) } - return bucketURL, prefix, nil -} + go func() { + <-ctx.Done() + shutdownCtx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + if err := app.httpServer.Shutdown(shutdownCtx); err != nil { + slog.Error("error shutting down httpServer", "error", err) + } + }() -func derivePlatformFromFilename(filename string) string { - name := strings.TrimSuffix(filename, filepath.Ext(filename)) - if idx := strings.LastIndex(name, "-"); idx > 0 { - return name[:idx] + logger.Info("listening", "addr", cfg.ListenAddr) + if err := app.httpServer.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) { + logger.Error("server failed", "error", err) + os.Exit(1) } - return name } diff --git a/opendcs/rsgis/pom.xml b/opendcs/rsgis/pom.xml index 0bb326ed..7533ce09 100644 --- a/opendcs/rsgis/pom.xml +++ b/opendcs/rsgis/pom.xml @@ -27,9 +27,15 @@ org.slf4j slf4j-api - 2.0.16 + 2.0.17 provided + + + ch.qos.logback + logback-classic + 1.5.24 + diff --git a/opendcs/rsgis/src/main/java/rsgis/consumer/MidasOutputFormatter.java b/opendcs/rsgis/src/main/java/rsgis/consumer/MidasOutputFormatter.java index fb21bf73..ec279aa9 100644 --- a/opendcs/rsgis/src/main/java/rsgis/consumer/MidasOutputFormatter.java +++ b/opendcs/rsgis/src/main/java/rsgis/consumer/MidasOutputFormatter.java @@ -65,11 +65,13 @@ public void formatMessage(DecodedMessage decodedMessage, DataConsumer consumer) try { platform = raw.getPlatform(); } catch (UnknownPlatformException e) { - throw new OutputFormatterException(e.toString()); + if (log.isDebugEnabled()) { + log.debug("Unknown platform for raw message: " + e.toString()); + } + return; } String platformName = platform.getDisplayName(); - String platformFileId = platform.getProperty("fileId"); Iterator it = decodedMessage.getAllTimeSeries(); while (it.hasNext()) { @@ -85,8 +87,11 
@@ public void formatMessage(DecodedMessage decodedMessage, DataConsumer consumer) continue; } - String sensorNameNumber = sensor.getName() + "." + sensor.getNumber(); - processDataOutput(consumer, ts, platformFileId, sensorNameNumber); + String midasTsId = sensor.getProperty("timeseries_id"); + if (midasTsId != null) { + this.processDataOutput(consumer, ts, platformName, midasTsId); + continue; + } log.info( "measurements_written platform={} timeseries={} count={}", @@ -102,18 +107,18 @@ public void processDataOutput( DataConsumer consumer, TimeSeries ts, String platformFileId, - String sensorNameNumber) { + String midasTsId) { StringBuilder sb = new StringBuilder(); for (int i = 0; i < ts.size(); i++) { TimedVariable tv = ts.sampleAt(i); - if ((tv.getFlags() & 0x60000000) != 0) { + if ((tv.getFlags() & 0x60000000) != 0) continue; - } sb.setLength(0); - sb.append(platformFileId).append(delimiter) - .append(sensorNameNumber).append(delimiter) - .append(sdf.format(tv.getTime())).append(delimiter) - .append(ts.formattedSampleAt(i)); + sb.append(midasTsId); + sb.append(this.delimiter); + sb.append(this.sdf.format(tv.getTime())); + sb.append(this.delimiter); + sb.append(ts.formattedSampleAt(i)); consumer.println(sb.toString()); } } From f88fc3fb021548c01535e9789b6bb459074257cf Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Fri, 16 Jan 2026 17:17:52 -0500 Subject: [PATCH 17/22] chore: fix more staticcheck warnings after merge from develop --- api/internal/dto/goes.go | 12 ++++++------ api/internal/email/client.go | 6 +++--- api/internal/email/client_test.go | 2 +- api/internal/email/email.go | 1 + api/internal/handler/goes.go | 12 ++++++------ api/internal/service/goes.go | 17 ++++++++--------- api/internal/service/goes_commit.go | 4 ++-- api/migrations/migrations.go | 1 + opendcs/app.go | 2 +- opendcs/dbimport.go | 6 +++--- opendcs/main.go | 6 +++--- 11 files changed, 35 insertions(+), 34 deletions(-) diff --git a/api/internal/dto/goes.go b/api/internal/dto/goes.go 
index e8aad6fa..79eec79e 100644 --- a/api/internal/dto/goes.go +++ b/api/internal/dto/goes.go @@ -18,17 +18,17 @@ type GoesPlatformConfigFileCommitDTO struct { CommitID uuid.UUID `json:"commit_id"` } -type XmlPlatformConfigForm struct { +type XMLPlatformConfigForm struct { PlatformConfig huma.FormFile `form:"file" contentType:"text/xml" required:"true"` Alias string `form:"alias"` DryRun bool `form:"dry_run"` - UpdateType XmlPlatformConfigUpdateType `form:"update_type" enum:"preserve_all,delete_not_found,delete_all" default:"preserve_all"` + UpdateType XMLPlatformConfigUpdateType `form:"update_type" enum:"preserve_all,delete_not_found,delete_all" default:"preserve_all"` } -type XmlPlatformConfigUpdateType string +type XMLPlatformConfigUpdateType string const ( - XmlPlatformConfigUpdateTypePreserveAll XmlPlatformConfigUpdateType = "preserve_all" - XmlPlatformConfigUpdateTypeDeleteNotFound XmlPlatformConfigUpdateType = "delete_not_found" - XmlPlatformConfigUpdateTypeDeleteAll XmlPlatformConfigUpdateType = "delete_all" + XMLPlatformConfigUpdateTypePreserveAll XMLPlatformConfigUpdateType = "preserve_all" + XMLPlatformConfigUpdateTypeDeleteNotFound XMLPlatformConfigUpdateType = "delete_not_found" + XMLPlatformConfigUpdateTypeDeleteAll XMLPlatformConfigUpdateType = "delete_all" ) diff --git a/api/internal/email/client.go b/api/internal/email/client.go index 063e07c7..06e4e477 100644 --- a/api/internal/email/client.go +++ b/api/internal/email/client.go @@ -26,11 +26,11 @@ func (c *Client) Send(ctx context.Context, subject, textBody string, bcc []strin // URLOpenerFunc turns a URL into a *Client. type URLOpenerFunc func(ctx context.Context, u *url.URL) (*Client, error) -var UrlOpeners = map[string]URLOpenerFunc{} +var URLOpeners = map[string]URLOpenerFunc{} // RegisterURLScheme registers an opener for scheme. 
func RegisterURLScheme(sch string, opener URLOpenerFunc) { - UrlOpeners[sch] = opener + URLOpeners[sch] = opener } // OpenURL parses rawurl, looks up its scheme, and invokes the opener. @@ -39,7 +39,7 @@ func OpenURL(ctx context.Context, rawurl string) (*Client, error) { if err != nil { return nil, fmt.Errorf("email: parse URL: %w", err) } - opener, ok := UrlOpeners[u.Scheme] + opener, ok := URLOpeners[u.Scheme] if !ok { return nil, fmt.Errorf("email: unsupported scheme %q", u.Scheme) } diff --git a/api/internal/email/client_test.go b/api/internal/email/client_test.go index 56f443c1..244a7605 100644 --- a/api/internal/email/client_test.go +++ b/api/internal/email/client_test.go @@ -70,7 +70,7 @@ func TestOpenURL_RegisterAndOpen_Success(t *testing.T) { // Register and ensure clean-up email.RegisterURLScheme(scheme, opener) - defer delete(email.UrlOpeners, scheme) + defer delete(email.URLOpeners, scheme) raw := scheme + "://foo.bar/baz?x=1" cli, err := email.OpenURL(context.Background(), raw) diff --git a/api/internal/email/email.go b/api/internal/email/email.go index 9d2929a6..fdfda752 100644 --- a/api/internal/email/email.go +++ b/api/internal/email/email.go @@ -1,3 +1,4 @@ +// Package email provides structures and methods for formatting email content using templates. 
package email import ( diff --git a/api/internal/handler/goes.go b/api/internal/handler/goes.go index 4d6f404e..82083847 100644 --- a/api/internal/handler/goes.go +++ b/api/internal/handler/goes.go @@ -25,7 +25,7 @@ type TelemetryConfigIDParam struct { TelemetryConfigID UUID `path:"telemetry_config_id"` } -func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { +func (h *APIHandler) RegisterGoesTelemetry(api huma.API) { huma.Register(api, huma.Operation{ Middlewares: h.Public, OperationID: "goes-telemetry-client-list", @@ -70,8 +70,8 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { }, func(ctx context.Context, input *struct { ProjectIDParam TelemetrySourceIDParam - RawBody huma.MultipartFormFiles[dto.XmlPlatformConfigForm] - }) (*Response[service.DbImportResponse], error) { + RawBody huma.MultipartFormFiles[dto.XMLPlatformConfigForm] + }) (*Response[service.DBImportResponse], error) { p := ctx.Value(ctxkey.Profile).(db.VProfile) formData := input.RawBody.Data() xmlDoc, err := io.ReadAll(formData.PlatformConfig) @@ -114,8 +114,8 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { ProjectIDParam TelemetrySourceIDParam TelemetryConfigIDParam - RawBody huma.MultipartFormFiles[dto.XmlPlatformConfigForm] - }) (*Response[service.DbImportResponse], error) { + RawBody huma.MultipartFormFiles[dto.XMLPlatformConfigForm] + }) (*Response[service.DBImportResponse], error) { formData := input.RawBody.Data() xmlDoc, err := io.ReadAll(formData.PlatformConfig) if err != nil { @@ -219,7 +219,7 @@ func (h *ApiHandler) RegisterGoesTelemetry(api huma.API) { }, func(ctx context.Context, input *struct { ProjectIDParam TelemetrySourceIDParam - }) (*Response[service.DbImportResponse], error) { + }) (*Response[service.DBImportResponse], error) { // Assumes a service method which validates the project desired set // by calling the OpenDCS wrapper with validate-only. 
resp, err := h.DBService.GoesValidateProjectUncommitted(ctx, service.GoesValidateProjectUncommittedParams{ diff --git a/api/internal/service/goes.go b/api/internal/service/goes.go index c4e28095..d4fe89d3 100644 --- a/api/internal/service/goes.go +++ b/api/internal/service/goes.go @@ -16,13 +16,12 @@ import ( "github.com/google/uuid" ) -type DbImportResponse struct { +type DBImportResponse struct { PlatformFileID *uuid.UUID `json:"platform_file_id,omitempty"` Response json.RawMessage `json:"response"` } type xmlValidationResult struct { - valid bool `json:"-"` Valid bool `json:"valid"` Message string `json:"message,omitempty"` SensorCount int `json:"sensor_count"` @@ -37,8 +36,8 @@ type GoesPlatformConfigFileCreateParams struct { db.GoesPlatformConfigFileCreateParams } -func (s *DBService) GoesPlatformConfigFileCreate(ctx context.Context, arg GoesPlatformConfigFileCreateParams) (DbImportResponse, error) { - var out DbImportResponse +func (s *DBService) GoesPlatformConfigFileCreate(ctx context.Context, arg GoesPlatformConfigFileCreateParams) (DBImportResponse, error) { + var out DBImportResponse root, names, err := extractSensorNames(arg.Content) if err != nil { @@ -94,12 +93,12 @@ func (s *DBService) GoesPlatformConfigFileCreate(ctx context.Context, arg GoesPl type GoesPlatformConfigFileUpdateParams struct { DryRun bool - UpdateType dto.XmlPlatformConfigUpdateType + UpdateType dto.XMLPlatformConfigUpdateType db.GoesPlatformConfigFileUpdateParams } -func (s *DBService) GoesPlatformConfigFileUpdate(ctx context.Context, arg GoesPlatformConfigFileUpdateParams) (DbImportResponse, error) { - var out DbImportResponse +func (s *DBService) GoesPlatformConfigFileUpdate(ctx context.Context, arg GoesPlatformConfigFileUpdateParams) (DBImportResponse, error) { + var out DBImportResponse root, names, err := extractSensorNames(arg.Content) if err != nil { @@ -134,7 +133,7 @@ func (s *DBService) GoesPlatformConfigFileUpdate(ctx context.Context, arg GoesPl return out, 
fmt.Errorf("GoesPlatformConfigFileUpdate %w", err) } - if arg.UpdateType == dto.XmlPlatformConfigUpdateTypeDeleteAll { + if arg.UpdateType == dto.XMLPlatformConfigUpdateTypeDeleteAll { if err := qtx.GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile(ctx, arg.ID); err != nil { return out, fmt.Errorf("GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile %w", err) } @@ -179,7 +178,7 @@ func (s *DBService) GoesPlatformConfigFileUpdate(ctx context.Context, arg GoesPl TimeseriesID: nil, }) } - if arg.UpdateType == dto.XmlPlatformConfigUpdateTypeDeleteNotFound { + if arg.UpdateType == dto.XMLPlatformConfigUpdateTypeDeleteNotFound { for name := range existingKeys { removedMappings = append(removedMappings, db.GoesTelemetryConfigMappingsDeleteBatchParams{ GoesPlatformConfigFileID: arg.ID, diff --git a/api/internal/service/goes_commit.go b/api/internal/service/goes_commit.go index e0719c4d..111edb29 100644 --- a/api/internal/service/goes_commit.go +++ b/api/internal/service/goes_commit.go @@ -700,8 +700,8 @@ type GoesProjectValidationResult struct { Warnings []string `json:"warnings,omitempty"` } -func (s *DBService) GoesValidateProjectUncommitted(ctx context.Context, arg GoesValidateProjectUncommittedParams) (DbImportResponse, error) { - var out DbImportResponse +func (s *DBService) GoesValidateProjectUncommitted(ctx context.Context, arg GoesValidateProjectUncommittedParams) (DBImportResponse, error) { + var out DBImportResponse files, err := s.GoesPlatformConfigFilesListForCommit(ctx, db.GoesPlatformConfigFilesListForCommitParams{ ProjectID: arg.ProjectID, diff --git a/api/migrations/migrations.go b/api/migrations/migrations.go index 669b8fa8..0a1689cf 100644 --- a/api/migrations/migrations.go +++ b/api/migrations/migrations.go @@ -1,3 +1,4 @@ +// Package migrations provides access to the embedded database migration files. 
package migrations import "embed" diff --git a/opendcs/app.go b/opendcs/app.go index 2cde9be9..b909cbc7 100644 --- a/opendcs/app.go +++ b/opendcs/app.go @@ -30,7 +30,7 @@ func (a *App) checkKey(got string) error { return nil } -func (a *App) runDbUtil(ctx context.Context, mode, value string) (string, error) { +func (a *App) runDBUtil(ctx context.Context, mode, value string) (string, error) { mode = strings.ToLower(strings.TrimSpace(mode)) value = strings.TrimSpace(value) if mode != "id" && mode != "site" { diff --git a/opendcs/dbimport.go b/opendcs/dbimport.go index 3adfabb3..1e5414bc 100644 --- a/opendcs/dbimport.go +++ b/opendcs/dbimport.go @@ -37,7 +37,7 @@ func NewDbimport(ctx context.Context, cfg Config, logger *slog.Logger) (*dbimpor // TODO: we should also query any existing platform configs that exist for this opendcs instance (fetched from the API) - _, err = i.runDbImport(ctx, files) + _, err = i.runDBImport(ctx, files) if err != nil { return nil, fmt.Errorf("validation failed: %w", err) } @@ -68,7 +68,7 @@ func (i *dbimport) ProcessAtomic(ctx context.Context, req *dbimportParams) (*dbi dbimportArgs = append(dbimportArgs, "-v") } - logOut, err := i.runDbImport(ctx, files, dbimportArgs...) + logOut, err := i.runDBImport(ctx, files, dbimportArgs...) if err != nil { return nil, fmt.Errorf("import failed: %w", err) } @@ -109,7 +109,7 @@ func (i *dbimport) resolveImportFiles(files []string) ([]string, error) { return out, nil } -func (i *dbimport) runDbImport(ctx context.Context, files []string, extraArgs ...string) (string, error) { +func (i *dbimport) runDBImport(ctx context.Context, files []string, extraArgs ...string) (string, error) { args := slices.Clone(extraArgs) args = append(args, files...) cmd := exec.CommandContext(ctx, "dbimport", args...) 
diff --git a/opendcs/main.go b/opendcs/main.go index 795a9666..b60ca7b8 100644 --- a/opendcs/main.go +++ b/opendcs/main.go @@ -36,7 +36,7 @@ type Config struct { RoutingSpec string `env:"OPENDCS_ROUTING_SPEC" envDefault:"goes"` AuthToken string `env:"OPENDCS_IMPORT_TOKEN"` ListenAddr string `env:"OPENDCS_HTTP_ADDR" envDefault:":8080"` - MidasApiHost string `env:"MIDAS_API_HOST" envDefault:"http://api:80"` + MidasAPIHost string `env:"MIDAS_API_HOST" envDefault:"http://api:80"` S3BucketURL string `env:"S3_LOAD_DATAROOT" envDefault:"s3://corpmsmap-data-incoming/instrumentation/goes"` } @@ -184,7 +184,7 @@ func main() { if d.Mode == "" || d.Value == "" { continue } - if _, err := app.runDbUtil(ctx, d.Mode, d.Value); err != nil { + if _, err := app.runDBUtil(ctx, d.Mode, d.Value); err != nil { return nil, huma.NewError(http.StatusConflict, fmt.Sprintf("delete-platform failed (%s %s): %v", d.Mode, d.Value, err)) } } @@ -241,7 +241,7 @@ func main() { return nil, huma.NewError(http.StatusInternalServerError, "response is nil; this should never happen") } - u, err := url.Parse(cfg.MidasApiHost) + u, err := url.Parse(cfg.MidasAPIHost) if err != nil { return nil, huma.NewError(http.StatusInternalServerError, "could not parse bad url base path") } From 0f4c1161e956e00644df0c983a0abeb229888383 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Tue, 20 Jan 2026 10:51:52 -0500 Subject: [PATCH 18/22] fix: incorrect xml platform sensor lookup for injection --- api/internal/db/goes.sql_gen.go | 105 ++++++--- api/internal/db/goes_commit.sql_gen.go | 50 ++-- api/internal/db/querier.go | 6 +- api/internal/handler/goes.go | 87 ++++--- api/internal/service/goes.go | 4 + api/internal/service/goes_commit.go | 88 ++----- api/internal/service/goes_xml.go | 283 +++++++++++++++-------- api/queries/goes.sql | 19 +- api/queries/goes_commit.sql | 8 +- docker-compose.yaml | 1 + env_files/opendcs.env | 7 +- opendcs/app.go | 30 +-- opendcs/dbimport.go | 8 +- opendcs/main.go | 172 ++++++++++---- 
opendcs/midas_config/routing/goes.xml | 2 +- opendcs/midas_config/routing/monitor.xml | 2 +- report/src/main.ts | 18 +- 17 files changed, 552 insertions(+), 338 deletions(-) diff --git a/api/internal/db/goes.sql_gen.go b/api/internal/db/goes.sql_gen.go index 40a2a1d3..3b0556a7 100644 --- a/api/internal/db/goes.sql_gen.go +++ b/api/internal/db/goes.sql_gen.go @@ -11,6 +11,34 @@ import ( "github.com/google/uuid" ) +const goesPlatformConfigFileCommittedContentListCommitedForTelemetrySource = `-- name: GoesPlatformConfigFileCommittedContentListCommitedForTelemetrySource :many +select committed_content::xml +from goes_platform_config_file +where goes_telemetry_source_id = $1 +and committed +and not deleted +` + +func (q *Queries) GoesPlatformConfigFileCommittedContentListCommitedForTelemetrySource(ctx context.Context, goesTelemetrySourceID uuid.UUID) ([]string, error) { + rows, err := q.db.Query(ctx, goesPlatformConfigFileCommittedContentListCommitedForTelemetrySource, goesTelemetrySourceID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []string{} + for rows.Next() { + var committed_content string + if err := rows.Scan(&committed_content); err != nil { + return nil, err + } + items = append(items, committed_content) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const goesPlatformConfigFileCreate = `-- name: GoesPlatformConfigFileCreate :one insert into goes_platform_config_file ( goes_telemetry_source_id, @@ -61,7 +89,8 @@ const goesPlatformConfigFileDelete = `-- name: GoesPlatformConfigFileDelete :exe update goes_platform_config_file set deleted = true, deleted_at = now(), - deleted_by = $1 + deleted_by = $1, + committed = false where id = $2 ` @@ -108,7 +137,7 @@ func (q *Queries) GoesPlatformConfigFileGet(ctx context.Context, id uuid.UUID) ( return i, err } -const goesPlatformConfigFileListUncommitedForProject = `-- name: GoesPlatformConfigFileListUncommitedForProject :one +const 
goesPlatformConfigFileListUncommittedForProject = `-- name: GoesPlatformConfigFileListUncommittedForProject :many select id, goes_telemetry_source_id, project_id, name, alias, size_bytes, content, committed, committed_at, created_at, created_by, updated_at, updated_by, committed_content, committed_commit_id, deleted, deleted_at, deleted_by from goes_platform_config_file where project_id = $1 @@ -116,30 +145,43 @@ and not committed and not deleted ` -func (q *Queries) GoesPlatformConfigFileListUncommitedForProject(ctx context.Context, projectID uuid.UUID) (GoesPlatformConfigFile, error) { - row := q.db.QueryRow(ctx, goesPlatformConfigFileListUncommitedForProject, projectID) - var i GoesPlatformConfigFile - err := row.Scan( - &i.ID, - &i.GoesTelemetrySourceID, - &i.ProjectID, - &i.Name, - &i.Alias, - &i.SizeBytes, - &i.Content, - &i.Committed, - &i.CommittedAt, - &i.CreatedAt, - &i.CreatedBy, - &i.UpdatedAt, - &i.UpdatedBy, - &i.CommittedContent, - &i.CommittedCommitID, - &i.Deleted, - &i.DeletedAt, - &i.DeletedBy, - ) - return i, err +func (q *Queries) GoesPlatformConfigFileListUncommittedForProject(ctx context.Context, projectID uuid.UUID) ([]GoesPlatformConfigFile, error) { + rows, err := q.db.Query(ctx, goesPlatformConfigFileListUncommittedForProject, projectID) + if err != nil { + return nil, err + } + defer rows.Close() + items := []GoesPlatformConfigFile{} + for rows.Next() { + var i GoesPlatformConfigFile + if err := rows.Scan( + &i.ID, + &i.GoesTelemetrySourceID, + &i.ProjectID, + &i.Name, + &i.Alias, + &i.SizeBytes, + &i.Content, + &i.Committed, + &i.CommittedAt, + &i.CreatedAt, + &i.CreatedBy, + &i.UpdatedAt, + &i.UpdatedBy, + &i.CommittedContent, + &i.CommittedCommitID, + &i.Deleted, + &i.DeletedAt, + &i.DeletedBy, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil } const goesPlatformConfigFileUpdate = `-- name: GoesPlatformConfigFileUpdate :exec @@ -213,6 
+255,17 @@ func (q *Queries) GoesTelemetryConfigMappingsList(ctx context.Context, goesPlatf return items, nil } +const goesTelemetryConfigSetUncommitted = `-- name: GoesTelemetryConfigSetUncommitted :exec +update goes_platform_config_file set + committed = false +where id = $1 +` + +func (q *Queries) GoesTelemetryConfigSetUncommitted(ctx context.Context, id uuid.UUID) error { + _, err := q.db.Exec(ctx, goesTelemetryConfigSetUncommitted, id) + return err +} + const goesTelemetrySourceList = `-- name: GoesTelemetrySourceList :many select id, name, files from v_goes_telemetry_source diff --git a/api/internal/db/goes_commit.sql_gen.go b/api/internal/db/goes_commit.sql_gen.go index bd68faa7..b989584f 100644 --- a/api/internal/db/goes_commit.sql_gen.go +++ b/api/internal/db/goes_commit.sql_gen.go @@ -310,36 +310,37 @@ func (q *Queries) GoesPlatformConfigFileSoftDeleteNotInSet(ctx context.Context, return err } -const goesPlatformConfigFilesListForCommit = `-- name: GoesPlatformConfigFilesListForCommit :many -select id, name, alias, content +const goesPlatformConfigFilesListForCommitByCommitID = `-- name: GoesPlatformConfigFilesListForCommitByCommitID :many +select id, name, alias, committed_content::text as content from goes_platform_config_file where project_id = $1 and goes_telemetry_source_id = $2 -and deleted = false +and committed_commit_id = $3 order by created_at asc ` -type GoesPlatformConfigFilesListForCommitParams struct { - ProjectID uuid.UUID `json:"project_id"` - GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"` +type GoesPlatformConfigFilesListForCommitByCommitIDParams struct { + ProjectID uuid.UUID `json:"project_id"` + GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"` + CommittedCommitID *uuid.UUID `json:"committed_commit_id"` } -type GoesPlatformConfigFilesListForCommitRow struct { +type GoesPlatformConfigFilesListForCommitByCommitIDRow struct { ID uuid.UUID `json:"id"` Name string `json:"name"` Alias string `json:"alias"` 
Content string `json:"content"` } -func (q *Queries) GoesPlatformConfigFilesListForCommit(ctx context.Context, arg GoesPlatformConfigFilesListForCommitParams) ([]GoesPlatformConfigFilesListForCommitRow, error) { - rows, err := q.db.Query(ctx, goesPlatformConfigFilesListForCommit, arg.ProjectID, arg.GoesTelemetrySourceID) +func (q *Queries) GoesPlatformConfigFilesListForCommitByCommitID(ctx context.Context, arg GoesPlatformConfigFilesListForCommitByCommitIDParams) ([]GoesPlatformConfigFilesListForCommitByCommitIDRow, error) { + rows, err := q.db.Query(ctx, goesPlatformConfigFilesListForCommitByCommitID, arg.ProjectID, arg.GoesTelemetrySourceID, arg.CommittedCommitID) if err != nil { return nil, err } defer rows.Close() - items := []GoesPlatformConfigFilesListForCommitRow{} + items := []GoesPlatformConfigFilesListForCommitByCommitIDRow{} for rows.Next() { - var i GoesPlatformConfigFilesListForCommitRow + var i GoesPlatformConfigFilesListForCommitByCommitIDRow if err := rows.Scan( &i.ID, &i.Name, @@ -356,42 +357,43 @@ func (q *Queries) GoesPlatformConfigFilesListForCommit(ctx context.Context, arg return items, nil } -const goesPlatformConfigFilesListForCommitByCommitID = `-- name: GoesPlatformConfigFilesListForCommitByCommitID :many -select id, name, alias, committed_content::text as content +const goesPlatformConfigFilesListUncommitted = `-- name: GoesPlatformConfigFilesListUncommitted :many +select id, name, alias, content, deleted from goes_platform_config_file where project_id = $1 and goes_telemetry_source_id = $2 -and committed_commit_id = $3 -order by created_at asc +and not committed +order by deleted desc, created_at asc ` -type GoesPlatformConfigFilesListForCommitByCommitIDParams struct { - ProjectID uuid.UUID `json:"project_id"` - GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"` - CommittedCommitID *uuid.UUID `json:"committed_commit_id"` +type GoesPlatformConfigFilesListUncommittedParams struct { + ProjectID uuid.UUID `json:"project_id"` + 
GoesTelemetrySourceID uuid.UUID `json:"goes_telemetry_source_id"` } -type GoesPlatformConfigFilesListForCommitByCommitIDRow struct { +type GoesPlatformConfigFilesListUncommittedRow struct { ID uuid.UUID `json:"id"` Name string `json:"name"` Alias string `json:"alias"` Content string `json:"content"` + Deleted bool `json:"deleted"` } -func (q *Queries) GoesPlatformConfigFilesListForCommitByCommitID(ctx context.Context, arg GoesPlatformConfigFilesListForCommitByCommitIDParams) ([]GoesPlatformConfigFilesListForCommitByCommitIDRow, error) { - rows, err := q.db.Query(ctx, goesPlatformConfigFilesListForCommitByCommitID, arg.ProjectID, arg.GoesTelemetrySourceID, arg.CommittedCommitID) +func (q *Queries) GoesPlatformConfigFilesListUncommitted(ctx context.Context, arg GoesPlatformConfigFilesListUncommittedParams) ([]GoesPlatformConfigFilesListUncommittedRow, error) { + rows, err := q.db.Query(ctx, goesPlatformConfigFilesListUncommitted, arg.ProjectID, arg.GoesTelemetrySourceID) if err != nil { return nil, err } defer rows.Close() - items := []GoesPlatformConfigFilesListForCommitByCommitIDRow{} + items := []GoesPlatformConfigFilesListUncommittedRow{} for rows.Next() { - var i GoesPlatformConfigFilesListForCommitByCommitIDRow + var i GoesPlatformConfigFilesListUncommittedRow if err := rows.Scan( &i.ID, &i.Name, &i.Alias, &i.Content, + &i.Deleted, ); err != nil { return nil, err } diff --git a/api/internal/db/querier.go b/api/internal/db/querier.go index be984c61..c7e53983 100644 --- a/api/internal/db/querier.go +++ b/api/internal/db/querier.go @@ -126,15 +126,16 @@ type Querier interface { GoesMappingSetEntryCreateBatch(ctx context.Context, arg []GoesMappingSetEntryCreateBatchParams) (int64, error) GoesPlatformConfigFileCommit(ctx context.Context, arg []GoesPlatformConfigFileCommitParams) *GoesPlatformConfigFileCommitBatchResults GoesPlatformConfigFileCommitArtifactsUpdate(ctx context.Context, arg GoesPlatformConfigFileCommitArtifactsUpdateParams) error + 
GoesPlatformConfigFileCommittedContentListCommitedForTelemetrySource(ctx context.Context, goesTelemetrySourceID uuid.UUID) ([]string, error) GoesPlatformConfigFileCreate(ctx context.Context, arg GoesPlatformConfigFileCreateParams) (uuid.UUID, error) GoesPlatformConfigFileDelete(ctx context.Context, arg GoesPlatformConfigFileDeleteParams) error GoesPlatformConfigFileGet(ctx context.Context, id uuid.UUID) (GoesPlatformConfigFile, error) - GoesPlatformConfigFileListUncommitedForProject(ctx context.Context, projectID uuid.UUID) (GoesPlatformConfigFile, error) + GoesPlatformConfigFileListUncommittedForProject(ctx context.Context, projectID uuid.UUID) ([]GoesPlatformConfigFile, error) GoesPlatformConfigFileRestoreForRollback(ctx context.Context, arg GoesPlatformConfigFileRestoreForRollbackParams) error GoesPlatformConfigFileSoftDeleteNotInSet(ctx context.Context, arg GoesPlatformConfigFileSoftDeleteNotInSetParams) error GoesPlatformConfigFileUpdate(ctx context.Context, arg GoesPlatformConfigFileUpdateParams) error - GoesPlatformConfigFilesListForCommit(ctx context.Context, arg GoesPlatformConfigFilesListForCommitParams) ([]GoesPlatformConfigFilesListForCommitRow, error) GoesPlatformConfigFilesListForCommitByCommitID(ctx context.Context, arg GoesPlatformConfigFilesListForCommitByCommitIDParams) ([]GoesPlatformConfigFilesListForCommitByCommitIDRow, error) + GoesPlatformConfigFilesListUncommitted(ctx context.Context, arg GoesPlatformConfigFilesListUncommittedParams) ([]GoesPlatformConfigFilesListUncommittedRow, error) GoesPlatformRegistryConflicts(ctx context.Context, arg GoesPlatformRegistryConflictsParams) ([]GoesPlatformRegistryConflictsRow, error) GoesPlatformRegistryDeleteMissing(ctx context.Context, arg GoesPlatformRegistryDeleteMissingParams) error GoesPlatformRegistryListByProject(ctx context.Context, arg GoesPlatformRegistryListByProjectParams) ([]GoesPlatformRegistryListByProjectRow, error) @@ -145,6 +146,7 @@ type Querier interface { 
GoesTelemetryConfigMappingsList(ctx context.Context, goesPlatformConfigFileID uuid.UUID) ([]GoesTelemetryConfigMappings, error) GoesTelemetryConfigMappingsListForFiles(ctx context.Context, fileIds []uuid.UUID) ([]GoesTelemetryConfigMappings, error) GoesTelemetryConfigMappingsReplaceForProjectFromMappingSet(ctx context.Context, arg GoesTelemetryConfigMappingsReplaceForProjectFromMappingSetParams) error + GoesTelemetryConfigSetUncommitted(ctx context.Context, id uuid.UUID) error GoesTelemetrySourceList(ctx context.Context) ([]VGoesTelemetrySource, error) HeartbeatCreate(ctx context.Context, argTime time.Time) (time.Time, error) HeartbeatGetLatest(ctx context.Context) (time.Time, error) diff --git a/api/internal/handler/goes.go b/api/internal/handler/goes.go index 82083847..c55d9ac7 100644 --- a/api/internal/handler/goes.go +++ b/api/internal/handler/goes.go @@ -6,12 +6,14 @@ import ( "errors" "io" "net/http" + "net/url" "github.com/USACE/instrumentation-api/api/v4/internal/ctxkey" "github.com/USACE/instrumentation-api/api/v4/internal/db" "github.com/USACE/instrumentation-api/api/v4/internal/dto" "github.com/USACE/instrumentation-api/api/v4/internal/httperr" "github.com/USACE/instrumentation-api/api/v4/internal/service" + "github.com/USACE/instrumentation-api/api/v4/internal/util" "github.com/danielgtaylor/huma/v2" ) @@ -259,42 +261,49 @@ func (h *APIHandler) RegisterGoesTelemetry(api huma.API) { OpendcsAuthToken: h.Config.ApplicationKey, }) if err != nil { + var urlErr *url.Error + if errors.As(err, &urlErr) { + u, _ := url.Parse(urlErr.URL) + util.RedactQueryParams(u, "key") + urlErr.URL = u.String() + err = urlErr + } return nil, httperr.Message(http.StatusConflict, err.Error()) } return NewResponse(a), nil }) - // Project-scoped rollback to previous commit (restores project mappings + xml set). 
- huma.Register(api, huma.Operation{ - Middlewares: h.ProjectAdmin, - OperationID: "goes-telemetry-rollback", - Method: http.MethodPost, - Path: "/projects/{project_id}/goes/{telemetry_source_id}/rollback", - Description: "rolls back project configuration (xml + mappings) to the previous commit", - Tags: goesTags, - }, func(ctx context.Context, input *struct { - ProjectIDParam - TelemetrySourceIDParam - }) (*Response[service.GoesTelemetryCommitResponse], error) { - p := ctx.Value(ctxkey.Profile).(db.VProfile) - - if matches := subtle.ConstantTimeCompare([]byte(h.Config.ApplicationKey), []byte("")); matches != 1 || h.Config.OpenDCSWrapperURL == "" { - return nil, httperr.InternalServerError(errors.New("missing OPENDCS_WRAPPER_URL and/or APPLICATION_KEY")) - } - - a, err := h.DBService.GoesRollbackProjectToPrevious(ctx, h.HTTPClient, service.GoesCommitEntireSetParams{ - ProjectID: input.ProjectID.UUID, - SourceID: input.TelemetrySourceID.UUID, - CreatedBy: p.ID, - OpendcsBaseURL: h.Config.OpenDCSWrapperURL, - OpendcsAuthToken: h.Config.ApplicationKey, - }) - if err != nil { - return nil, httperr.Message(http.StatusConflict, err.Error()) - } - return NewResponse(a), nil - }) + // // Project-scoped rollback to previous commit (restores project mappings + xml set). 
+ // huma.Register(api, huma.Operation{ + // Middlewares: h.ProjectAdmin, + // OperationID: "goes-telemetry-rollback", + // Method: http.MethodPost, + // Path: "/projects/{project_id}/goes/{telemetry_source_id}/rollback", + // Description: "rolls back project configuration (xml + mappings) to the previous commit", + // Tags: goesTags, + // }, func(ctx context.Context, input *struct { + // ProjectIDParam + // TelemetrySourceIDParam + // }) (*Response[service.GoesTelemetryCommitResponse], error) { + // p := ctx.Value(ctxkey.Profile).(db.VProfile) + // + // if matches := subtle.ConstantTimeCompare([]byte(h.Config.ApplicationKey), []byte("")); matches != 1 || h.Config.OpenDCSWrapperURL == "" { + // return nil, httperr.InternalServerError(errors.New("missing OPENDCS_WRAPPER_URL and/or APPLICATION_KEY")) + // } + // + // a, err := h.DBService.GoesRollbackProjectToPrevious(ctx, h.HTTPClient, service.GoesCommitEntireSetParams{ + // ProjectID: input.ProjectID.UUID, + // SourceID: input.TelemetrySourceID.UUID, + // CreatedBy: p.ID, + // OpendcsBaseURL: h.Config.OpenDCSWrapperURL, + // OpendcsAuthToken: h.Config.ApplicationKey, + // }) + // if err != nil { + // return nil, httperr.Message(http.StatusConflict, err.Error()) + // } + // return NewResponse(a), nil + // }) huma.Register(api, huma.Operation{ Middlewares: h.InternalApp, @@ -312,4 +321,22 @@ func (h *APIHandler) RegisterGoesTelemetry(api huma.API) { } return nil, nil }) + + huma.Register(api, huma.Operation{ + Middlewares: h.InternalApp, + OperationID: "goes-files-list-for-telemetry-source", + Method: http.MethodGet, + Path: "/goes/{telemetry_source_id}/configs/committed", + Description: "callback to update API DB state after OpenDCS wrapper commit completes", + Tags: goesTags, + }, func(ctx context.Context, input *struct { + TelemetrySourceIDParam + }) (*Response[[]string], error) { + aa, err := h.DBService.GoesPlatformConfigFileCommittedContentListCommitedForTelemetrySource(ctx, input.TelemetrySourceID.UUID) + if 
err != nil { + return nil, httperr.InternalServerError(err) + } + return NewResponse(aa), nil + }) + } diff --git a/api/internal/service/goes.go b/api/internal/service/goes.go index d4fe89d3..d2d24670 100644 --- a/api/internal/service/goes.go +++ b/api/internal/service/goes.go @@ -220,6 +220,10 @@ func (s *DBService) GoesTelemetryConfigMappingsUpdate(ctx context.Context, cfgID defer s.TxDo(ctx, tx.Rollback) qtx := s.WithTx(tx) + if err := qtx.GoesTelemetryConfigSetUncommitted(ctx, cfgID); err != nil { + return fmt.Errorf("GoesTelemetryConfigSetUncommitted %w", err) + } + if err := qtx.GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile(ctx, cfgID); err != nil { return fmt.Errorf("GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile %w", err) } diff --git a/api/internal/service/goes_commit.go b/api/internal/service/goes_commit.go index 111edb29..078fec79 100644 --- a/api/internal/service/goes_commit.go +++ b/api/internal/service/goes_commit.go @@ -19,18 +19,6 @@ import ( "github.com/google/uuid" ) -type OpendcsCommitDeleteMode string - -const ( - OpendcsCommitDeleteModeID OpendcsCommitDeleteMode = "id" - OpendcsCommitDeleteModeSite OpendcsCommitDeleteMode = "site" -) - -type OpendcsCommitDelete struct { - Mode OpendcsCommitDeleteMode `json:"mode"` - Value string `json:"value"` // platform id or site name -} - type OpendcsCommitFile struct { FileID uuid.UUID `json:"file_id"` Name string `json:"name"` @@ -40,11 +28,10 @@ type OpendcsCommitFile struct { } type OpendcsCommitRequest struct { - CommitID uuid.UUID `json:"commit_id"` - ProjectID uuid.UUID `json:"project_id"` - SourceID uuid.UUID `json:"goes_telemetry_source_id"` - Files []OpendcsCommitFile `json:"files"` - Deletes []OpendcsCommitDelete `json:"deletes"` + CommitID uuid.UUID `json:"commit_id"` + ProjectID uuid.UUID `json:"project_id"` + SourceID uuid.UUID `json:"goes_telemetry_source_id"` + Files []OpendcsCommitFile `json:"files"` } type OpendcsCommitResponse struct { @@ -107,7 +94,7 @@ func (s 
*DBService) GoesCommitEntireSet(ctx context.Context, httpClient *http.Cl prevCommitID = &prev.ID } - files, err := qtx.GoesPlatformConfigFilesListForCommit(ctx, db.GoesPlatformConfigFilesListForCommitParams{ + files, err := qtx.GoesPlatformConfigFilesListUncommitted(ctx, db.GoesPlatformConfigFilesListUncommittedParams{ ProjectID: arg.ProjectID, GoesTelemetrySourceID: arg.SourceID, }) @@ -121,6 +108,9 @@ func (s *DBService) GoesCommitEntireSet(ctx context.Context, httpClient *http.Cl desiredKeys := make(map[string]platformIdentity, len(files)) platformKeys := make([]string, 0, len(files)) for _, f := range files { + if f.Deleted { + continue + } pid, site, err := extractPlatformIDAndSite([]byte(f.Content)) if err != nil { return a, fmt.Errorf("extract platform id/site for file %s: %w", f.ID, err) @@ -148,25 +138,6 @@ func (s *DBService) GoesCommitEntireSet(ctx context.Context, httpClient *http.Cl return a, fmt.Errorf("platform ownership conflict for %q (owned by project %s)", c.PlatformKey, c.ProjectID) } - owned, err := qtx.GoesPlatformRegistryListByProject(ctx, db.GoesPlatformRegistryListByProjectParams{ - ProjectID: arg.ProjectID, - GoesTelemetrySourceID: arg.SourceID, - }) - if err != nil { - return a, fmt.Errorf("GoesPlatformRegistryListByProject: %w", err) - } - deletes := make([]OpendcsCommitDelete, 0) - for _, o := range owned { - if _, ok := desiredKeys[o.PlatformKey]; ok { - continue - } - if o.PlatformID != nil && *o.PlatformID != "" { - deletes = append(deletes, OpendcsCommitDelete{Mode: OpendcsCommitDeleteModeID, Value: *o.PlatformID}) - } else if o.SiteName != nil && *o.SiteName != "" { - deletes = append(deletes, OpendcsCommitDelete{Mode: OpendcsCommitDeleteModeSite, Value: *o.SiteName}) - } - } - fileIDs := make([]uuid.UUID, len(files)) for i, f := range files { fileIDs[i] = f.ID @@ -260,6 +231,17 @@ func (s *DBService) GoesCommitEntireSet(ctx context.Context, httpClient *http.Cl committedFiles := make([]OpendcsCommitFile, 0, len(files)) for _, f := 
range files { + if f.Deleted { + sum := sha256.Sum256([]byte(f.Content)) + committedFiles = append(committedFiles, OpendcsCommitFile{ + FileID: f.ID, + Name: f.Name, + Alias: f.Alias, + XML: f.Content, // raw content to override injected + Checksum: hex.EncodeToString(sum[:]), + }) + } + committedXMLBytes, err := injectTimeseriesIDIntoPlatformXML([]byte(f.Content), keyToTS) if err != nil { return a, fmt.Errorf("inject timeseries id for file %s: %w", f.ID, err) @@ -296,7 +278,6 @@ func (s *DBService) GoesCommitEntireSet(ctx context.Context, httpClient *http.Cl ProjectID: arg.ProjectID, SourceID: arg.SourceID, Files: committedFiles, - Deletes: deletes, }, } rawResp, callErr := s.opendcsCommit(ctx, httpClient, req) @@ -421,14 +402,6 @@ func (s *DBService) GoesRollbackProjectToPrevious(ctx context.Context, httpClien return a, fmt.Errorf("previous commit has no files") } - owned, err := q0.GoesPlatformRegistryListByProject(ctx, db.GoesPlatformRegistryListByProjectParams{ - ProjectID: arg.ProjectID, - GoesTelemetrySourceID: arg.SourceID, - }) - if err != nil { - return a, fmt.Errorf("GoesPlatformRegistryListByProject: %w", err) - } - if err := tx0.Commit(ctx); err != nil { return a, err } @@ -450,18 +423,6 @@ func (s *DBService) GoesRollbackProjectToPrevious(ctx context.Context, httpClien desiredKeys[k] = platformIdentity{PlatformID: pid, SiteName: site} } - deletes := make([]OpendcsCommitDelete, 0) - for _, o := range owned { - if _, ok := desiredKeys[o.PlatformKey]; ok { - continue - } - if o.PlatformID != nil && *o.PlatformID != "" { - deletes = append(deletes, OpendcsCommitDelete{Mode: OpendcsCommitDeleteModeID, Value: *o.PlatformID}) - } else if o.SiteName != nil && *o.SiteName != "" { - deletes = append(deletes, OpendcsCommitDelete{Mode: OpendcsCommitDeleteModeSite, Value: *o.SiteName}) - } - } - tx, err := s.db.Begin(ctx) if err != nil { return a, err @@ -510,7 +471,6 @@ func (s *DBService) GoesRollbackProjectToPrevious(ctx context.Context, httpClien ProjectID: 
arg.ProjectID, SourceID: arg.SourceID, Files: commitFiles, - Deletes: deletes, }, } rawResp, callErr := s.opendcsCommit(ctx, httpClient, req) @@ -624,14 +584,6 @@ func (s *DBService) opendcsCommit(ctx context.Context, httpClient *http.Client, _ = writer.WriteField("project_id", arg.OpendcsCommitRequest.ProjectID.String()) _ = writer.WriteField("goes_telemetry_source_id", arg.OpendcsCommitRequest.SourceID.String()) - if len(arg.OpendcsCommitRequest.Deletes) > 0 { - deletesBytes, err := json.Marshal(arg.OpendcsCommitRequest.Deletes) - if err != nil { - return nil, fmt.Errorf("marshal deletes: %w", err) - } - _ = writer.WriteField("deletes", string(deletesBytes)) - } - for _, f := range arg.OpendcsCommitRequest.Files { part, err := writer.CreateFormFile("files", f.FileID.String()+".xml") if err != nil { @@ -703,7 +655,7 @@ type GoesProjectValidationResult struct { func (s *DBService) GoesValidateProjectUncommitted(ctx context.Context, arg GoesValidateProjectUncommittedParams) (DBImportResponse, error) { var out DBImportResponse - files, err := s.GoesPlatformConfigFilesListForCommit(ctx, db.GoesPlatformConfigFilesListForCommitParams{ + files, err := s.GoesPlatformConfigFilesListUncommitted(ctx, db.GoesPlatformConfigFilesListUncommittedParams{ ProjectID: arg.ProjectID, GoesTelemetrySourceID: arg.GoesTelemetrySourceID, }) diff --git a/api/internal/service/goes_xml.go b/api/internal/service/goes_xml.go index d9b6dc3a..cb2b9e19 100644 --- a/api/internal/service/goes_xml.go +++ b/api/internal/service/goes_xml.go @@ -89,53 +89,147 @@ func extractPlatformIDAndSite(xmlIn []byte) (platformID, site string, err error) } func injectTimeseriesIDIntoPlatformXML(xmlIn []byte, keyToTS map[string]uuid.UUID) ([]byte, error) { - dec := xml.NewDecoder(bytes.NewReader(xmlIn)) + perPlatform := map[string]map[string]string{} + + { + dec := xml.NewDecoder(bytes.NewReader(xmlIn)) + + var ( + platformID string + inConfigSensor bool + cfgNum string + readingName bool + nameBuf strings.Builder + 
) + + for { + tok, err := dec.Token() + if err == io.EOF { + break + } + if err != nil { + return nil, fmt.Errorf("pass1 decode: %w", err) + } - var out bytes.Buffer - enc := xml.NewEncoder(&out) + switch t := tok.(type) { + case xml.StartElement: + switch t.Name.Local { + case "Platform": + platformID = strings.TrimSpace(attr(t.Attr, "PlatformId")) + if platformID == "" { + platformID = "__no_platform_id__" + } + if perPlatform[platformID] == nil { + perPlatform[platformID] = map[string]string{} + } + case "ConfigSensor": + inConfigSensor = true + cfgNum = strings.TrimSpace(attr(t.Attr, "SensorNumber")) + nameBuf.Reset() + readingName = false + case "SensorName": + if inConfigSensor { + readingName = true + } + } - inConfigSensor := false - var sensorName, sensorNumber strings.Builder - readingSensorName := false - readingSensorNumber := false + case xml.CharData: + if inConfigSensor && readingName { + nameBuf.Write([]byte(t)) + } - sawTimeseriesProp := false - skippingTimeseriesProp := false - skipDepth := 0 + case xml.EndElement: + switch t.Name.Local { + case "SensorName": + readingName = false + case "ConfigSensor": + if inConfigSensor { + n := strings.TrimSpace(nameBuf.String()) + if platformID != "" && cfgNum != "" && n != "" { + perPlatform[platformID][cfgNum] = n + } + inConfigSensor = false + cfgNum = "" + } + case "Platform": + platformID = "" + } + } + } + } - getTS := func() (uuid.UUID, bool) { - if !inConfigSensor { + lookupTS := func(platformID, sensorNum string) (uuid.UUID, bool) { + m := perPlatform[platformID] + if m == nil { return uuid.Nil, false } - n := strings.TrimSpace(sensorName.String()) - num := strings.TrimSpace(sensorNumber.String()) - if n == "" || num == "" { + num := strings.TrimSpace(sensorNum) + name := strings.TrimSpace(m[num]) + if name == "" || num == "" { return uuid.Nil, false } - ts, ok := keyToTS[n+"."+num] + ts, ok := keyToTS[name+"."+num] if !ok || ts == uuid.Nil { return uuid.Nil, false } return ts, true } + dec := 
xml.NewDecoder(bytes.NewReader(xmlIn)) + var out bytes.Buffer + enc := xml.NewEncoder(&out) + + var ( + platformID string + + inPS bool + psNum string + sawPSProp bool + + inSS bool + ssNum string + sawSSProp bool + + replacing bool + repDepth int + repName string + ) + + writeProp := func(elemLocal string, ts uuid.UUID) error { + start := xml.StartElement{ + Name: xml.Name{Local: elemLocal}, + Attr: []xml.Attr{{Name: xml.Name{Local: "PropertyName"}, Value: "timeseries_id"}}, + } + if err := enc.EncodeToken(start); err != nil { + return err + } + if err := enc.EncodeToken(xml.CharData([]byte(ts.String()))); err != nil { + return err + } + return enc.EncodeToken(xml.EndElement{Name: start.Name}) + } + for { tok, err := dec.Token() if err == io.EOF { break } if err != nil { - return nil, fmt.Errorf("xml decode token: %w", err) + return nil, fmt.Errorf("pass2 decode: %w", err) } - if skippingTimeseriesProp { + if replacing { switch tok.(type) { case xml.StartElement: - skipDepth++ + repDepth++ case xml.EndElement: - skipDepth-- - if skipDepth == 0 { - skippingTimeseriesProp = false + repDepth-- + if repDepth == 0 { + if err := enc.EncodeToken(xml.EndElement{Name: xml.Name{Local: repName}}); err != nil { + return nil, err + } + replacing = false + repName = "" } } continue @@ -143,113 +237,114 @@ func injectTimeseriesIDIntoPlatformXML(xmlIn []byte, keyToTS map[string]uuid.UUI switch t := tok.(type) { case xml.StartElement: - if t.Name.Local == "ConfigSensor" { - inConfigSensor = true - sensorName.Reset() - sensorNumber.Reset() - readingSensorName = false - readingSensorNumber = false - sawTimeseriesProp = false - } + switch t.Name.Local { + case "Platform": + platformID = strings.TrimSpace(attr(t.Attr, "PlatformId")) + if platformID == "" { + platformID = "__no_platform_id__" + } - if inConfigSensor && t.Name.Local == "SensorName" { - readingSensorName = true - } - if inConfigSensor && t.Name.Local == "SensorNumber" { - readingSensorNumber = true - } + case 
"PlatformSensor": + inPS = true + psNum = strings.TrimSpace(attr(t.Attr, "SensorNumber")) + sawPSProp = false - if inConfigSensor && t.Name.Local == "PlatformSensorProperty" { - var propName string - for _, a := range t.Attr { - if a.Name.Local == "PropertyName" { - propName = a.Value - break + case "ScriptSensor": + inSS = true + ssNum = strings.TrimSpace(attr(t.Attr, "SensorNumber")) + sawSSProp = false + + case "PlatformSensorProperty": + if inPS && strings.EqualFold(attr(t.Attr, "PropertyName"), "timeseries_id") { + sawPSProp = true + if err := enc.EncodeToken(t); err != nil { + return nil, err + } + if ts, ok := lookupTS(platformID, psNum); ok { + if err := enc.EncodeToken(xml.CharData([]byte(ts.String()))); err != nil { + return nil, err + } } + replacing = true + repDepth = 1 + repName = "PlatformSensorProperty" + continue } - if strings.EqualFold(propName, "timeseries_id") { - sawTimeseriesProp = true + case "SensorProperty": + if inSS && strings.EqualFold(attr(t.Attr, "PropertyName"), "timeseries_id") { + sawSSProp = true if err := enc.EncodeToken(t); err != nil { - return nil, fmt.Errorf("xml encode start: %w", err) + return nil, err } - - if ts, ok := getTS(); ok { + if ts, ok := lookupTS(platformID, ssNum); ok { if err := enc.EncodeToken(xml.CharData([]byte(ts.String()))); err != nil { - return nil, fmt.Errorf("xml encode timeseries_id value: %w", err) + return nil, err } } - - skippingTimeseriesProp = true - skipDepth = 1 + replacing = true + repDepth = 1 + repName = "SensorProperty" continue } } if err := enc.EncodeToken(t); err != nil { - return nil, fmt.Errorf("xml encode start: %w", err) - } - - case xml.CharData: - if inConfigSensor && readingSensorName { - sensorName.Write([]byte(t)) - } - if inConfigSensor && readingSensorNumber { - sensorNumber.Write([]byte(t)) - } - - if err := enc.EncodeToken(t); err != nil { - return nil, fmt.Errorf("xml encode chardata: %w", err) + return nil, err } case xml.EndElement: - if inConfigSensor && t.Name.Local 
== "SensorName" { - readingSensorName = false - } - if inConfigSensor && t.Name.Local == "SensorNumber" { - readingSensorNumber = false - } - - if inConfigSensor && t.Name.Local == "ConfigSensor" { - if !sawTimeseriesProp { - if ts, ok := getTS(); ok { - start := xml.StartElement{ - Name: xml.Name{Local: "PlatformSensorProperty"}, - Attr: []xml.Attr{ - {Name: xml.Name{Local: "PropertyName"}, Value: "timeseries_id"}, - }, - } - if err := enc.EncodeToken(start); err != nil { - return nil, fmt.Errorf("xml encode inserted prop start: %w", err) - } - if err := enc.EncodeToken(xml.CharData([]byte(ts.String()))); err != nil { - return nil, fmt.Errorf("xml encode inserted prop value: %w", err) + switch t.Name.Local { + case "PlatformSensor": + if inPS && !sawPSProp { + if ts, ok := lookupTS(platformID, psNum); ok { + if err := writeProp("PlatformSensorProperty", ts); err != nil { + return nil, err } - if err := enc.EncodeToken(xml.EndElement{Name: start.Name}); err != nil { - return nil, fmt.Errorf("xml encode inserted prop end: %w", err) + } + } + inPS = false + psNum = "" + sawPSProp = false + + case "ScriptSensor": + if inSS && !sawSSProp { + if ts, ok := lookupTS(platformID, ssNum); ok { + if err := writeProp("SensorProperty", ts); err != nil { + return nil, err } } } + inSS = false + ssNum = "" + sawSSProp = false - inConfigSensor = false - readingSensorName = false - readingSensorNumber = false + case "Platform": + platformID = "" } if err := enc.EncodeToken(t); err != nil { - return nil, fmt.Errorf("xml encode end: %w", err) + return nil, err } default: if err := enc.EncodeToken(tok); err != nil { - return nil, fmt.Errorf("xml encode token: %w", err) + return nil, err } } } if err := enc.Flush(); err != nil { - return nil, fmt.Errorf("xml encoder flush: %w", err) + return nil, err } - return out.Bytes(), nil } + +func attr(attrs []xml.Attr, local string) string { + for _, a := range attrs { + if a.Name.Local == local { + return a.Value + } + } + return "" +} diff 
--git a/api/queries/goes.sql b/api/queries/goes.sql index 0cf7a57e..e46b1652 100644 --- a/api/queries/goes.sql +++ b/api/queries/goes.sql @@ -31,7 +31,7 @@ where id = $1 and not deleted; --- name: GoesPlatformConfigFileListUncommitedForProject :one +-- name: GoesPlatformConfigFileListUncommittedForProject :many select * from goes_platform_config_file where project_id = $1 @@ -39,6 +39,14 @@ and not committed and not deleted; +-- name: GoesPlatformConfigFileCommittedContentListCommitedForTelemetrySource :many +select committed_content::xml +from goes_platform_config_file +where goes_telemetry_source_id = $1 +and committed +and not deleted; + + -- name: GoesPlatformConfigFileUpdate :exec update goes_platform_config_file set name = sqlc.arg(name), @@ -55,7 +63,8 @@ where id = sqlc.arg(id); update goes_platform_config_file set deleted = true, deleted_at = now(), - deleted_by = sqlc.arg(deleted_by) + deleted_by = sqlc.arg(deleted_by), + committed = false where id = sqlc.arg(id); @@ -75,6 +84,12 @@ where goes_platform_config_file_id = $1 and platform_sensor_key = $2; +-- name: GoesTelemetryConfigSetUncommitted :exec +update goes_platform_config_file set + committed = false +where id = sqlc.arg(id); + + -- name: GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile :exec delete from goes_telemetry_config_mappings where goes_platform_config_file_id = $1; diff --git a/api/queries/goes_commit.sql b/api/queries/goes_commit.sql index a7cea29f..ea964919 100644 --- a/api/queries/goes_commit.sql +++ b/api/queries/goes_commit.sql @@ -57,13 +57,13 @@ update goes_commit set status = 'failed', opendcs_response = sqlc.arg(opendcs_re where id = sqlc.arg(id); --- name: GoesPlatformConfigFilesListForCommit :many -select id, name, alias, content +-- name: GoesPlatformConfigFilesListUncommitted :many +select id, name, alias, content, deleted from goes_platform_config_file where project_id = $1 and goes_telemetry_source_id = $2 -and deleted = false -order by created_at asc; +and not 
committed +order by deleted desc, created_at asc; -- name: GoesPlatformConfigFilesListForCommitByCommitID :many diff --git a/docker-compose.yaml b/docker-compose.yaml index 990d16b4..a4d16aca 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -200,6 +200,7 @@ services: required: true - path: .env required: true + restart: unless-stopped localstack: image: localstack/localstack:4 diff --git a/env_files/opendcs.env b/env_files/opendcs.env index a4b77198..adf4227a 100644 --- a/env_files/opendcs.env +++ b/env_files/opendcs.env @@ -1,6 +1,9 @@ +AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE +AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY + AWS_ENDPOINT_URL="http://localstack:4566" -DATALOAD_S3_ROOT="s3://corpsmap-data-incoming/instrumentation" -OPENDCS_IMPORT_TOKEN="appkey" +DATALOAD_S3_ROOT="s3://corpsmap-data-incoming?region=us-east-1&prefix=instrumentation/goes/&endpoint=http://localstack:4566&use_path_style=true&awssdk=v2" +PLATFORM_IMPORT_PREFIX=appkey CDADATA_USERNAME= CDADATA_PASSWORD= CDABACKUP_USERNAME= diff --git a/opendcs/app.go b/opendcs/app.go index b909cbc7..18c8763e 100644 --- a/opendcs/app.go +++ b/opendcs/app.go @@ -1,15 +1,9 @@ package main import ( - "bytes" - "context" "crypto/subtle" - "fmt" "log/slog" "net/http" - "os" - "os/exec" - "strings" "sync" "github.com/danielgtaylor/huma/v2" @@ -21,6 +15,7 @@ type App struct { dbimport *dbimport mu sync.Mutex httpServer *http.Server + httpClient *http.Client } func (a *App) checkKey(got string) error { @@ -29,26 +24,3 @@ func (a *App) checkKey(got string) error { } return nil } - -func (a *App) runDBUtil(ctx context.Context, mode, value string) (string, error) { - mode = strings.ToLower(strings.TrimSpace(mode)) - value = strings.TrimSpace(value) - if mode != "id" && mode != "site" { - return "", fmt.Errorf("invalid delete mode %q", mode) - } - if value == "" { - return "", fmt.Errorf("empty delete value") - } - - cmd := exec.CommandContext(ctx, "decj", "decodes.tsdb.DbUtil") - cmd.Env = 
os.Environ() - - input := fmt.Sprintf("delete-platform %s %s\nquit\n", mode, value) - cmd.Stdin = bytes.NewBufferString(input) - - out, err := cmd.CombinedOutput() - if err != nil { - return string(out), fmt.Errorf("dbutil error: %w: %s", err, string(out)) - } - return string(out), nil -} diff --git a/opendcs/dbimport.go b/opendcs/dbimport.go index 1e5414bc..ba363be8 100644 --- a/opendcs/dbimport.go +++ b/opendcs/dbimport.go @@ -34,9 +34,6 @@ func NewDbimport(ctx context.Context, cfg Config, logger *slog.Logger) (*dbimpor if len(files) == 0 { return nil, errors.New("no inital import files") } - - // TODO: we should also query any existing platform configs that exist for this opendcs instance (fetched from the API) - _, err = i.runDBImport(ctx, files) if err != nil { return nil, fmt.Errorf("validation failed: %w", err) @@ -50,8 +47,9 @@ type dbimportParams struct { } type dbimportOutput struct { - Status int `json:"status"` - Log string `json:"log,omitempty"` + Status int `json:"status"` + Log string `json:"log,omitempty"` + ParsingErrors []string `json:"parsing_errors,omitempty"` } func (i *dbimport) ProcessAtomic(ctx context.Context, req *dbimportParams) (*dbimportOutput, error) { diff --git a/opendcs/main.go b/opendcs/main.go index b60ca7b8..36d7b461 100644 --- a/opendcs/main.go +++ b/opendcs/main.go @@ -33,11 +33,14 @@ const ( ) type Config struct { - RoutingSpec string `env:"OPENDCS_ROUTING_SPEC" envDefault:"goes"` - AuthToken string `env:"OPENDCS_IMPORT_TOKEN"` - ListenAddr string `env:"OPENDCS_HTTP_ADDR" envDefault:":8080"` - MidasAPIHost string `env:"MIDAS_API_HOST" envDefault:"http://api:80"` - S3BucketURL string `env:"S3_LOAD_DATAROOT" envDefault:"s3://corpmsmap-data-incoming/instrumentation/goes"` + // TODO: use build or default values while container mappings not set in environment + RoutingSpec string `env:"OPENDCS_ROUTING_SPEC" envDefault:"goes"` + ListenAddr string `env:"OPENDCS_HTTP_ADDR" envDefault:":8080"` + MidasAPIHost string 
`env:"MIDAS_API_HOST" envDefault:"http://api:80"` // TODO: make sure to remove this + TelemetrySourceID uuid.UUID `env:"TELEMETRY_SOURCE_ID" envDefault:"666e60ec-2c0a-4446-9eda-6f45cbcd0a60"` + + AuthToken string `env:"PLATFORM_IMPORT_PREFIX"` // TODO: update this name instead of reusing existing env var mapping + S3BucketURL string `env:"DATALOAD_S3_ROOT"` // TODO: rename } func main() { @@ -85,27 +88,82 @@ func main() { return } - i, err := NewDbimport(ctx, cfg, logger) + dbi, err := NewDbimport(ctx, cfg, logger) if err != nil { log.Fatalf("NewDbimport: %v", err) } - app := &App{ - cfg: cfg, - logger: logger, - dbimport: i, + { + u, err := url.Parse(cfg.MidasAPIHost) + if err != nil { + log.Fatalf("could not parse bad url base path: %v", err) + } + u.Path = fmt.Sprintf("/v4/goes/%s/configs/committed", cfg.TelemetrySourceID) + q := u.Query() + q.Add("key", cfg.AuthToken) + u.RawQuery = q.Encode() + + res, err := http.Get(u.String()) + if err != nil { + var urlErr *url.Error + if errors.As(err, &urlErr) { + if u, parseErr := url.Parse(urlErr.URL); parseErr == nil { + redactQueryParams(u, "key") + urlErr.URL = u.String() + err = urlErr + } else { + err = errors.New("failed to redact query param when parsing error") + } + } + log.Fatal("unable to reach api for existing committed platform config files", "error", err) + } + defer res.Body.Close() + + resBody, err := io.ReadAll(res.Body) + if err != nil { + log.Fatal("unable to reach api for existing committed platform config files", "error", err) + } + var platformConfigs []string + if err := json.Unmarshal(resBody, &platformConfigs); err != nil { + log.Fatal("unable to reach api for existing committed platform config files", "error", err) + } + var files []string + tmpDir := os.TempDir() + for _, pc := range platformConfigs { + tmpFile, err := os.CreateTemp(tmpDir, "platform-*.xml") + if err != nil { + log.Fatal("failed to create temp file for platform config", "error", err) + } + if _, err := 
tmpFile.Write([]byte(pc)); err != nil { + log.Fatal("failed to write platform config to temp file", "error", err) + } + files = append(files, tmpFile.Name()) + tmpFile.Close() + } + if len(files) > 0 { + if _, err := dbi.ProcessAtomic(ctx, &dbimportParams{ + Files: files, + ValidateOnly: false, + }); err != nil { + log.Fatal("failed to load platform configs into dbimport", "error", err) + } + } } router := http.NewServeMux() api := humago.New(router, huma.DefaultConfig("OpenDCS Wrapper", VERSION)) - type ValidateForm struct { - Files []huma.FormFile `form:"files" required:"true"` + app := &App{ + cfg: cfg, + logger: logger, + dbimport: dbi, } huma.Post(api, "/validate", func(ctx context.Context, input *struct { KeyQueryParam - RawBody huma.MultipartFormFiles[ValidateForm] + RawBody huma.MultipartFormFiles[struct { + Files []huma.FormFile `form:"files" required:"true"` + }] }) (*Response[dbimportOutput], error) { if err := app.checkKey(input.Key); err != nil { return nil, err @@ -119,21 +177,28 @@ func main() { files := make([]string, len(formData.Files)) tmpDir := os.TempDir() + var parsingErrors []string for i, file := range formData.Files { content, err := io.ReadAll(file) if err != nil { - // TODO handle error appropriately + msg := fmt.Sprintf("failed to read uploaded file %q", file.Filename) + parsingErrors = append(parsingErrors, msg) + logger.ErrorContext(ctx, msg, "filename", file.Filename, "error", err) continue } tmpFile, err := os.CreateTemp(tmpDir, "upload-*"+filepath.Ext(file.Filename)) if err != nil { - // TODO handle error appropriately + msg := fmt.Sprintf("failed to create temp file for uploaded file %q", file.Filename) + parsingErrors = append(parsingErrors, msg) + logger.ErrorContext(ctx, msg, "filename", file.Filename, "error", err) continue } defer tmpFile.Close() if _, err := tmpFile.Write(content); err != nil { - // TODO handle error appropriately + msg := fmt.Sprintf("failed to write uploaded file %q to temp file", file.Filename) + 
parsingErrors = append(parsingErrors, msg) + logger.ErrorContext(ctx, msg, "filename", file.Filename, "error", err) continue } files[i] = tmpFile.Name() @@ -143,6 +208,7 @@ func main() { Files: files, ValidateOnly: true, }) + dbiout.ParsingErrors = parsingErrors if err != nil { return nil, huma.NewError(http.StatusConflict, err.Error()) } @@ -152,22 +218,14 @@ func main() { return NewResponse(*dbiout), nil }) - type GoesCommitDelete struct { - Mode string `json:"mode" enum:"id,site"` - Value string `json:"value"` - } - - type GoesCommitForm struct { - CommitID string `form:"commit_id"` - ProjectID string `form:"project_id"` - GoesTelemetrySourceID string `form:"goes_telemetry_source_id"` - Files []huma.FormFile `form:"files"` - Deletes []GoesCommitDelete `form:"deletes"` - } - huma.Post(api, "/commit", func(ctx context.Context, input *struct { KeyQueryParam - RawBody huma.MultipartFormFiles[GoesCommitForm] + RawBody huma.MultipartFormFiles[struct { + CommitID string `form:"commit_id"` + ProjectID string `form:"project_id"` + GoesTelemetrySourceID string `form:"goes_telemetry_source_id"` + Files []huma.FormFile `form:"files"` + }] }) (*Response[dbimportOutput], error) { if err := app.checkKey(input.Key); err != nil { return nil, err @@ -180,15 +238,6 @@ func main() { formData := input.RawBody.Data() - for _, d := range formData.Deletes { - if d.Mode == "" || d.Value == "" { - continue - } - if _, err := app.runDBUtil(ctx, d.Mode, d.Value); err != nil { - return nil, huma.NewError(http.StatusConflict, fmt.Sprintf("delete-platform failed (%s %s): %v", d.Mode, d.Value, err)) - } - } - files := make([]string, len(formData.Files)) tmpDir := os.TempDir() @@ -198,29 +247,38 @@ func main() { CommitID string `json:"commit_id"` } + var parsingErrors []string commitPayload := make([]GoesPlatformConfigFileCommitDTO, len(formData.Files)) for i, file := range formData.Files { content, err := io.ReadAll(file) if err != nil { - // handle error appropriately + msg := 
fmt.Sprintf("failed to read uploaded file %q", file.Filename) + parsingErrors = append(parsingErrors, msg) + logger.ErrorContext(ctx, msg, "filename", file.Filename, "error", err) continue } tmpFile, err := os.CreateTemp(tmpDir, "upload-*"+filepath.Ext(file.Filename)) if err != nil { - // handle error appropriately + msg := fmt.Sprintf("failed to create temp file for uploaded file %q", file.Filename) + parsingErrors = append(parsingErrors, msg) + logger.ErrorContext(ctx, msg, "filename", file.Filename, "error", err) continue } defer tmpFile.Close() if _, err := tmpFile.Write(content); err != nil { - // handle error appropriately + msg := fmt.Sprintf("failed to write uploaded file %q to temp file", file.Filename) + parsingErrors = append(parsingErrors, msg) + logger.ErrorContext(ctx, msg, "filename", file.Filename, "error", err) continue } files[i] = tmpFile.Name() fileBaseName := strings.TrimSuffix(file.Filename, ".xml") fileID, err := uuid.Parse(fileBaseName) if err != nil { - // handle error appropriately + msg := fmt.Sprintf("failed to parse uuid from filename %q", file.Filename) + parsingErrors = append(parsingErrors, msg) + logger.ErrorContext(ctx, msg, "filename", file.Filename, "error", err) continue } commitPayload[i] = GoesPlatformConfigFileCommitDTO{ @@ -254,12 +312,24 @@ func main() { if err != nil { return nil, huma.NewError(http.StatusInternalServerError, "failed to marshal callback payload") } - resp, err := http.Post(u.String(), "application/json", bytes.NewReader(body)) if err != nil { + var urlErr *url.Error + if errors.As(err, &urlErr) { + if u, parseErr := url.Parse(urlErr.URL); parseErr == nil { + redactQueryParams(u, "key") + urlErr.URL = u.String() + err = urlErr + } else { + err = errors.New("failed to redact query param when parsing error") + } + } return nil, huma.NewError(http.StatusBadGateway, err.Error()) } + defer resp.Body.Close() + dbiout.Status = resp.StatusCode + dbiout.ParsingErrors = parsingErrors return NewResponse(*dbiout), nil 
}) @@ -275,8 +345,8 @@ func main() { ReadHeaderTimeout: 5 * time.Second, } - if err := i.startRoutingScheduler(ctx); err != nil { - logger.ErrorContext(ctx, "error starting routing scheduler", "error", err) + if err := dbi.startRoutingScheduler(ctx); err != nil { + log.Fatalf("error starting routing scheduler: %v", err) } go func() { @@ -294,3 +364,13 @@ func main() { os.Exit(1) } } + +func redactQueryParams(u *url.URL, queryParams ...string) { + q := u.Query() + for _, p := range queryParams { + if q.Has(p) { + q.Set(p, "REDACTED") + } + } + u.RawQuery = q.Encode() +} diff --git a/opendcs/midas_config/routing/goes.xml b/opendcs/midas_config/routing/goes.xml index c2b4bddb..4fe54f76 100644 --- a/opendcs/midas_config/routing/goes.xml +++ b/opendcs/midas_config/routing/goes.xml @@ -50,6 +50,6 @@ l - /opendcs-wrapper upload ${java.FILENAME} + /usr/local/bin/opendcs-wrapper upload ${java.FILENAME} diff --git a/opendcs/midas_config/routing/monitor.xml b/opendcs/midas_config/routing/monitor.xml index efa0e196..16026a78 100644 --- a/opendcs/midas_config/routing/monitor.xml +++ b/opendcs/midas_config/routing/monitor.xml @@ -46,6 +46,6 @@ l - /opendcs-wrapper upload ${java.FILENAME} + /usr/local/bin/opendcs-wrapper upload ${java.FILENAME} diff --git a/report/src/main.ts b/report/src/main.ts index 7c690ec3..87d90a09 100644 --- a/report/src/main.ts +++ b/report/src/main.ts @@ -414,9 +414,17 @@ async function upload( key: string, bucket: string, ): Promise { + const tomorrow = new Date( + new Date().getTime() + FILE_EXPIRY_DURATION_HOURS * 60 * 60 * 1000, + ); const uploader = new Upload({ client: s3Client, - params: { Bucket: bucket, Key: key, Body: buf }, + params: { + Bucket: bucket, + Key: key, + Body: buf, + Expires: tomorrow, + }, }); const s3UploaderResponse = await uploader.done(); const statusCode = s3UploaderResponse.$metadata.httpStatusCode; @@ -470,13 +478,15 @@ async function updateJob( throw new Error(JSON.stringify(failData ?? 
failErr)); } + const tomorrow = new Date( + new Date().getTime() + FILE_EXPIRY_DURATION_HOURS * 60 * 60 * 1000, + ); + const body: ReportDownloadJobDTO = { status: "SUCCESS", progress: 100, file_key: fileKey, - file_expiry: new Date( - new Date().getTime() + FILE_EXPIRY_DURATION_HOURS * 60 * 60 * 1000, - ).toISOString(), + file_expiry: tomorrow.toISOString(), }; const { data, error } = await apiClient.PUT("/report_jobs/{job_id}", { From 5de25add47e10d28eba260c6473badbc98432626 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Wed, 21 Jan 2026 15:05:06 -0500 Subject: [PATCH 19/22] fix: uncommitted deleted files should be listed until hard-delete chore: hide internal endpoint from openapi doc chore: fix typos chore: fix staticcheck nested Queries warning --- api/internal/db/goes.sql_gen.go | 53 ++----------------- api/internal/db/querier.go | 3 +- api/internal/handler/goes.go | 4 +- api/internal/service/aware.go | 2 +- api/internal/service/goes.go | 19 +++++++ .../repeat/0190__views_telemetry.sql | 1 + api/queries/goes.sql | 10 +--- 7 files changed, 29 insertions(+), 63 deletions(-) diff --git a/api/internal/db/goes.sql_gen.go b/api/internal/db/goes.sql_gen.go index 3b0556a7..6e9cabd1 100644 --- a/api/internal/db/goes.sql_gen.go +++ b/api/internal/db/goes.sql_gen.go @@ -11,7 +11,7 @@ import ( "github.com/google/uuid" ) -const goesPlatformConfigFileCommittedContentListCommitedForTelemetrySource = `-- name: GoesPlatformConfigFileCommittedContentListCommitedForTelemetrySource :many +const goesPlatformConfigFileCommittedContentListCommittedForTelemetrySource = `-- name: GoesPlatformConfigFileCommittedContentListCommittedForTelemetrySource :many select committed_content::xml from goes_platform_config_file where goes_telemetry_source_id = $1 @@ -19,8 +19,8 @@ and committed and not deleted ` -func (q *Queries) GoesPlatformConfigFileCommittedContentListCommitedForTelemetrySource(ctx context.Context, goesTelemetrySourceID uuid.UUID) ([]string, error) { - rows, err := 
q.db.Query(ctx, goesPlatformConfigFileCommittedContentListCommitedForTelemetrySource, goesTelemetrySourceID) +func (q *Queries) GoesPlatformConfigFileCommittedContentListCommittedForTelemetrySource(ctx context.Context, goesTelemetrySourceID uuid.UUID) ([]string, error) { + rows, err := q.db.Query(ctx, goesPlatformConfigFileCommittedContentListCommittedForTelemetrySource, goesTelemetrySourceID) if err != nil { return nil, err } @@ -137,53 +137,6 @@ func (q *Queries) GoesPlatformConfigFileGet(ctx context.Context, id uuid.UUID) ( return i, err } -const goesPlatformConfigFileListUncommittedForProject = `-- name: GoesPlatformConfigFileListUncommittedForProject :many -select id, goes_telemetry_source_id, project_id, name, alias, size_bytes, content, committed, committed_at, created_at, created_by, updated_at, updated_by, committed_content, committed_commit_id, deleted, deleted_at, deleted_by -from goes_platform_config_file -where project_id = $1 -and not committed -and not deleted -` - -func (q *Queries) GoesPlatformConfigFileListUncommittedForProject(ctx context.Context, projectID uuid.UUID) ([]GoesPlatformConfigFile, error) { - rows, err := q.db.Query(ctx, goesPlatformConfigFileListUncommittedForProject, projectID) - if err != nil { - return nil, err - } - defer rows.Close() - items := []GoesPlatformConfigFile{} - for rows.Next() { - var i GoesPlatformConfigFile - if err := rows.Scan( - &i.ID, - &i.GoesTelemetrySourceID, - &i.ProjectID, - &i.Name, - &i.Alias, - &i.SizeBytes, - &i.Content, - &i.Committed, - &i.CommittedAt, - &i.CreatedAt, - &i.CreatedBy, - &i.UpdatedAt, - &i.UpdatedBy, - &i.CommittedContent, - &i.CommittedCommitID, - &i.Deleted, - &i.DeletedAt, - &i.DeletedBy, - ); err != nil { - return nil, err - } - items = append(items, i) - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil -} - const goesPlatformConfigFileUpdate = `-- name: GoesPlatformConfigFileUpdate :exec update goes_platform_config_file set name = $1, diff --git 
a/api/internal/db/querier.go b/api/internal/db/querier.go index c7e53983..746c6a4f 100644 --- a/api/internal/db/querier.go +++ b/api/internal/db/querier.go @@ -126,11 +126,10 @@ type Querier interface { GoesMappingSetEntryCreateBatch(ctx context.Context, arg []GoesMappingSetEntryCreateBatchParams) (int64, error) GoesPlatformConfigFileCommit(ctx context.Context, arg []GoesPlatformConfigFileCommitParams) *GoesPlatformConfigFileCommitBatchResults GoesPlatformConfigFileCommitArtifactsUpdate(ctx context.Context, arg GoesPlatformConfigFileCommitArtifactsUpdateParams) error - GoesPlatformConfigFileCommittedContentListCommitedForTelemetrySource(ctx context.Context, goesTelemetrySourceID uuid.UUID) ([]string, error) + GoesPlatformConfigFileCommittedContentListCommittedForTelemetrySource(ctx context.Context, goesTelemetrySourceID uuid.UUID) ([]string, error) GoesPlatformConfigFileCreate(ctx context.Context, arg GoesPlatformConfigFileCreateParams) (uuid.UUID, error) GoesPlatformConfigFileDelete(ctx context.Context, arg GoesPlatformConfigFileDeleteParams) error GoesPlatformConfigFileGet(ctx context.Context, id uuid.UUID) (GoesPlatformConfigFile, error) - GoesPlatformConfigFileListUncommittedForProject(ctx context.Context, projectID uuid.UUID) ([]GoesPlatformConfigFile, error) GoesPlatformConfigFileRestoreForRollback(ctx context.Context, arg GoesPlatformConfigFileRestoreForRollbackParams) error GoesPlatformConfigFileSoftDeleteNotInSet(ctx context.Context, arg GoesPlatformConfigFileSoftDeleteNotInSetParams) error GoesPlatformConfigFileUpdate(ctx context.Context, arg GoesPlatformConfigFileUpdateParams) error diff --git a/api/internal/handler/goes.go b/api/internal/handler/goes.go index c55d9ac7..d4039083 100644 --- a/api/internal/handler/goes.go +++ b/api/internal/handler/goes.go @@ -306,6 +306,7 @@ func (h *APIHandler) RegisterGoesTelemetry(api huma.API) { // }) huma.Register(api, huma.Operation{ + Hidden: true, Middlewares: h.InternalApp, OperationID: 
"goes-telemetry-commit-callback", Method: http.MethodPost, @@ -323,6 +324,7 @@ func (h *APIHandler) RegisterGoesTelemetry(api huma.API) { }) huma.Register(api, huma.Operation{ + Hidden: true, Middlewares: h.InternalApp, OperationID: "goes-files-list-for-telemetry-source", Method: http.MethodGet, @@ -332,7 +334,7 @@ func (h *APIHandler) RegisterGoesTelemetry(api huma.API) { }, func(ctx context.Context, input *struct { TelemetrySourceIDParam }) (*Response[[]string], error) { - aa, err := h.DBService.GoesPlatformConfigFileCommittedContentListCommitedForTelemetrySource(ctx, input.TelemetrySourceID.UUID) + aa, err := h.DBService.GoesPlatformConfigFileCommittedContentListCommittedForTelemetrySource(ctx, input.TelemetrySourceID.UUID) if err != nil { return nil, httperr.InternalServerError(err) } diff --git a/api/internal/service/aware.go b/api/internal/service/aware.go index cda075c1..a56ec4cc 100644 --- a/api/internal/service/aware.go +++ b/api/internal/service/aware.go @@ -14,7 +14,7 @@ type AwarePlatformParameterConfig struct { func (s *DBService) AwarePlatformParameterConfigList(ctx context.Context) ([]AwarePlatformParameterConfig, error) { aa := make([]AwarePlatformParameterConfig, 0) - ee, err := s.Queries.AwarePlatformParameterListEnabled(ctx) + ee, err := s.AwarePlatformParameterListEnabled(ctx) if err != nil { return aa, err } diff --git a/api/internal/service/goes.go b/api/internal/service/goes.go index d2d24670..6072cf2d 100644 --- a/api/internal/service/goes.go +++ b/api/internal/service/goes.go @@ -203,6 +203,25 @@ func (s *DBService) GoesPlatformConfigFileUpdate(ctx context.Context, arg GoesPl return out, tx.Commit(ctx) } +func (s *DBService) GoesPlatformConfigFileDelete(ctx context.Context, arg db.GoesPlatformConfigFileDeleteParams) error { + tx, err := s.db.Begin(ctx) + if err != nil { + return err + } + defer s.TxDo(ctx, tx.Rollback) + qtx := s.WithTx(tx) + + if err := qtx.GoesPlatformConfigFileDelete(ctx, arg); err != nil { + return 
fmt.Errorf("GoesPlatformConfigFileDelete %w", err) + } + + if err := qtx.GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile(ctx, arg.ID); err != nil { + return fmt.Errorf("GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile %w", err) + } + + return tx.Commit(ctx) + } + func (s *DBService) GoesTelemetryConfigMappingsUpdate(ctx context.Context, cfgID uuid.UUID, mappings []dto.GoesTelemetryConfigMappingDTO) error { batch := make([]db.GoesTelemetryConfigMappingsCreateBatchParams, len(mappings)) for i, m := range mappings { diff --git a/api/migrations/repeat/0190__views_telemetry.sql b/api/migrations/repeat/0190__views_telemetry.sql index ca3f7908..279e4d83 100644 --- a/api/migrations/repeat/0190__views_telemetry.sql +++ b/api/migrations/repeat/0190__views_telemetry.sql @@ -13,4 +13,5 @@ left join ( 'committed', cf.committed )), '[]'::jsonb) as files from goes_platform_config_file cf + where not (cf.deleted and cf.committed) ) f on true; diff --git a/api/queries/goes.sql b/api/queries/goes.sql index e46b1652..7a2078a5 100644 --- a/api/queries/goes.sql +++ b/api/queries/goes.sql @@ -31,15 +31,7 @@ where id = $1 and not deleted; --- name: GoesPlatformConfigFileListUncommittedForProject :many -select * -from goes_platform_config_file -where project_id = $1 -and not committed -and not deleted; - - --- name: GoesPlatformConfigFileCommittedContentListCommitedForTelemetrySource :many +-- name: GoesPlatformConfigFileCommittedContentListCommittedForTelemetrySource :many select committed_content::xml from goes_platform_config_file where goes_telemetry_source_id = $1 From ca67f30f48598f6eb12e7bcca06a89da22b993e0 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Wed, 21 Jan 2026 16:11:09 -0500 Subject: [PATCH 20/22] chore: add to_be_deleted flag for uncommitted deleted files --- api/internal/db/overrides.go | 9 +++++---- api/migrations/repeat/0190__views_telemetry.sql | 3 ++- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/api/internal/db/overrides.go 
b/api/internal/db/overrides.go index 1fe56be2..af68eedc 100644 --- a/api/internal/db/overrides.go +++ b/api/internal/db/overrides.go @@ -98,10 +98,11 @@ type IDName struct { type VGoesTelemetrySourceFiles struct { IDName - ProjectID uuid.UUID `json:"project_id"` - Alias string `json:"alias"` - SizeBytes int64 `json:"size_bytes"` - Committed bool `json:"committed"` + ProjectID uuid.UUID `json:"project_id"` + Alias string `json:"alias"` + SizeBytes int64 `json:"size_bytes"` + Committed bool `json:"committed"` + ToBeDeleted bool `json:"to_be_deleted"` } type InstrumentIDName struct { diff --git a/api/migrations/repeat/0190__views_telemetry.sql b/api/migrations/repeat/0190__views_telemetry.sql index 279e4d83..3e4bf77a 100644 --- a/api/migrations/repeat/0190__views_telemetry.sql +++ b/api/migrations/repeat/0190__views_telemetry.sql @@ -10,7 +10,8 @@ left join ( 'project_id', cf.project_id, 'alias', cf.alias, 'size_bytes', cf.size_bytes, - 'committed', cf.committed + 'committed', cf.committed, + 'to_be_deleted', cf.deleted )), '[]'::jsonb) as files from goes_platform_config_file cf where not (cf.deleted and cf.committed) From 5979fda69f286602cb1b54fe1bc667bd8b23a2b7 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Thu, 22 Jan 2026 17:06:39 -0500 Subject: [PATCH 21/22] chore: add endpoint to update platform config file metadata --- api/internal/db/goes.sql_gen.go | 16 ++++++++++++++++ api/internal/db/querier.go | 1 + api/internal/dto/goes.go | 6 +++++- api/internal/handler/goes.go | 26 ++++++++++++++++++++++++++ api/queries/goes.sql | 6 ++++++ 5 files changed, 54 insertions(+), 1 deletion(-) diff --git a/api/internal/db/goes.sql_gen.go b/api/internal/db/goes.sql_gen.go index 6e9cabd1..122d4556 100644 --- a/api/internal/db/goes.sql_gen.go +++ b/api/internal/db/goes.sql_gen.go @@ -168,6 +168,22 @@ func (q *Queries) GoesPlatformConfigFileUpdate(ctx context.Context, arg GoesPlat return err } +const goesPlatformConfigFileUpdateMetadata = `-- name: 
GoesPlatformConfigFileUpdateMetadata :exec +update goes_platform_config_file set + alias = $1 +where id = $2 +` + +type GoesPlatformConfigFileUpdateMetadataParams struct { + Alias string `json:"alias"` + ID uuid.UUID `json:"id"` +} + +func (q *Queries) GoesPlatformConfigFileUpdateMetadata(ctx context.Context, arg GoesPlatformConfigFileUpdateMetadataParams) error { + _, err := q.db.Exec(ctx, goesPlatformConfigFileUpdateMetadata, arg.Alias, arg.ID) + return err +} + const goesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile = `-- name: GoesTelemetryConfigMappingsDeleteForGoesPlatformConfigFile :exec delete from goes_telemetry_config_mappings where goes_platform_config_file_id = $1 diff --git a/api/internal/db/querier.go b/api/internal/db/querier.go index 746c6a4f..a960f9c1 100644 --- a/api/internal/db/querier.go +++ b/api/internal/db/querier.go @@ -133,6 +133,7 @@ type Querier interface { GoesPlatformConfigFileRestoreForRollback(ctx context.Context, arg GoesPlatformConfigFileRestoreForRollbackParams) error GoesPlatformConfigFileSoftDeleteNotInSet(ctx context.Context, arg GoesPlatformConfigFileSoftDeleteNotInSetParams) error GoesPlatformConfigFileUpdate(ctx context.Context, arg GoesPlatformConfigFileUpdateParams) error + GoesPlatformConfigFileUpdateMetadata(ctx context.Context, arg GoesPlatformConfigFileUpdateMetadataParams) error GoesPlatformConfigFilesListForCommitByCommitID(ctx context.Context, arg GoesPlatformConfigFilesListForCommitByCommitIDParams) ([]GoesPlatformConfigFilesListForCommitByCommitIDRow, error) GoesPlatformConfigFilesListUncommitted(ctx context.Context, arg GoesPlatformConfigFilesListUncommittedParams) ([]GoesPlatformConfigFilesListUncommittedRow, error) GoesPlatformRegistryConflicts(ctx context.Context, arg GoesPlatformRegistryConflictsParams) ([]GoesPlatformRegistryConflictsRow, error) diff --git a/api/internal/dto/goes.go b/api/internal/dto/goes.go index 79eec79e..ce87fca8 100644 --- a/api/internal/dto/goes.go +++ 
b/api/internal/dto/goes.go @@ -18,11 +18,15 @@ type GoesPlatformConfigFileCommitDTO struct { CommitID uuid.UUID `json:"commit_id"` } +type GoesPlatformConfigFilesMetadataDTO struct { + Alias string `form:"alias"` +} + type XMLPlatformConfigForm struct { PlatformConfig huma.FormFile `form:"file" contentType:"text/xml" required:"true"` - Alias string `form:"alias"` DryRun bool `form:"dry_run"` UpdateType XMLPlatformConfigUpdateType `form:"update_type" enum:"preserve_all,delete_not_found,delete_all" default:"preserve_all"` + GoesPlatformConfigFilesMetadataDTO } type XMLPlatformConfigUpdateType string diff --git a/api/internal/handler/goes.go b/api/internal/handler/goes.go index d4039083..03d69de0 100644 --- a/api/internal/handler/goes.go +++ b/api/internal/handler/goes.go @@ -150,6 +150,32 @@ func (h *APIHandler) RegisterGoesTelemetry(api huma.API) { return NewResponse(a), nil }) + huma.Register(api, huma.Operation{ + Middlewares: h.ProjectAdmin, + OperationID: "goes-telemetry-config-update-metadata", + Method: http.MethodPut, + Path: "/projects/{project_id}/goes/{telemetry_source_id}/configs/{telemetry_config_id}/metadata", + Description: "updates metadata for a goes telemetry configuration", + Tags: goesTags, + }, func(ctx context.Context, input *struct { + ProjectIDParam + TelemetrySourceIDParam + TelemetryConfigIDParam + Body dto.XMLPlatformConfigForm + }) (*Response[service.DBImportResponse], error) { + alias := input.Body.Alias + if alias == "" { + return nil, httperr.BadRequest(errors.New("alias is required")) + } + if err := h.DBService.GoesPlatformConfigFileUpdateMetadata(ctx, db.GoesPlatformConfigFileUpdateMetadataParams{ + ID: input.TelemetryConfigID.UUID, + Alias: alias, + }); err != nil { + return nil, httperr.InternalServerError(err) + } + return nil, nil + }) + huma.Register(api, huma.Operation{ Middlewares: h.ProjectAdmin, OperationID: "goes-telemetry-config-delete", diff --git a/api/queries/goes.sql b/api/queries/goes.sql index 7a2078a5..151512a8 
100644 --- a/api/queries/goes.sql +++ b/api/queries/goes.sql @@ -51,6 +51,12 @@ update goes_platform_config_file set where id = sqlc.arg(id); +-- name: GoesPlatformConfigFileUpdateMetadata :exec +update goes_platform_config_file set + alias = sqlc.arg(alias) +where id = sqlc.arg(id); + + -- name: GoesPlatformConfigFileDelete :exec update goes_platform_config_file set deleted = true, From 123588806f842c8858e173ca67691aa717ab2ee0 Mon Sep 17 00:00:00 2001 From: Dennis Smith Date: Fri, 23 Jan 2026 12:28:44 -0500 Subject: [PATCH 22/22] fix: set committed to false when updating source file fix: incorrect request body validation when updating file alias --- api/internal/db/goes.sql_gen.go | 1 + api/internal/handler/goes.go | 2 +- api/queries/goes.sql | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/api/internal/db/goes.sql_gen.go b/api/internal/db/goes.sql_gen.go index 122d4556..3210a517 100644 --- a/api/internal/db/goes.sql_gen.go +++ b/api/internal/db/goes.sql_gen.go @@ -143,6 +143,7 @@ update goes_platform_config_file set alias = $2, size_bytes = $3, content = $4::xml, + committed = false, deleted = false, deleted_at = null, deleted_by = null diff --git a/api/internal/handler/goes.go b/api/internal/handler/goes.go index 03d69de0..f8ae9e1f 100644 --- a/api/internal/handler/goes.go +++ b/api/internal/handler/goes.go @@ -161,7 +161,7 @@ func (h *APIHandler) RegisterGoesTelemetry(api huma.API) { ProjectIDParam TelemetrySourceIDParam TelemetryConfigIDParam - Body dto.XMLPlatformConfigForm + Body dto.GoesPlatformConfigFilesMetadataDTO }) (*Response[service.DBImportResponse], error) { alias := input.Body.Alias if alias == "" { diff --git a/api/queries/goes.sql b/api/queries/goes.sql index 151512a8..b8a0443b 100644 --- a/api/queries/goes.sql +++ b/api/queries/goes.sql @@ -45,6 +45,7 @@ update goes_platform_config_file set alias = sqlc.arg(alias), size_bytes = sqlc.arg(size_bytes), content = sqlc.arg(content)::xml, + committed = false, deleted = false, 
deleted_at = null, deleted_by = null