diff --git a/.envrc b/.envrc index 32fd448864a..beb0b08ea1f 100644 --- a/.envrc +++ b/.envrc @@ -100,7 +100,23 @@ export APPLICATION=app # Migration Path export MIGRATION_PATH="file://${MYMOVE_DIR}/migrations/app/schema;file://${MYMOVE_DIR}/migrations/app/secure" -export MIGRATION_MANIFEST="${MYMOVE_DIR}/migrations/app/migrations_manifest.txt" +export MIGRATION_MANIFEST="${MYMOVE_DIR}/migrations/app/migrations_manifest.txt" # deprecated +export DML_MIGRATION_MANIFEST="${MYMOVE_DIR}/migrations/app/dml_migrations_manifest.txt" + +# DDL Migrations +export DDL_TYPES_MIGRATION_PATH="file://${MYMOVE_DIR}/migrations/app/ddl_migrations/ddl_types" +export DDL_TYPES_MIGRATION_MANIFEST="${MYMOVE_DIR}/migrations/app/ddl_types_manifest.txt" + +export DDL_TABLES_MIGRATION_PATH="file://${MYMOVE_DIR}/migrations/app/ddl_migrations/ddl_tables" +export DDL_TABLES_MIGRATION_MANIFEST="${MYMOVE_DIR}/migrations/app/ddl_tables_manifest.txt" + +export DDL_VIEWS_MIGRATION_PATH="file://${MYMOVE_DIR}/migrations/app/ddl_migrations/ddl_views" +export DDL_VIEWS_MIGRATION_MANIFEST="${MYMOVE_DIR}/migrations/app/ddl_views_manifest.txt" + +export DDL_FUNCTIONS_MIGRATION_PATH="file://${MYMOVE_DIR}/migrations/app/ddl_migrations/ddl_functions" +export DDL_FUNCTIONS_MIGRATION_MANIFEST="${MYMOVE_DIR}/migrations/app/ddl_functions_manifest.txt" + + # Default DB configuration export DB_PASSWORD=mysecretpassword @@ -232,19 +248,22 @@ export TZ="UTC" # AWS development access # -# To use S3/SES for local builds, you'll need to uncomment the following. +# To use S3/SES or SNS & SQS for local builds, you'll need to uncomment the following. # Do not commit the change: # # export STORAGE_BACKEND=s3 # export EMAIL_BACKEND=ses +# export RECEIVER_BACKEND=sns_sqs # # Instructions for using S3 storage backend here: https://dp3.atlassian.net/wiki/spaces/MT/pages/1470955567/How+to+test+storing+data+in+S3+locally # Instructions for using SES email backend here: https://dp3.atlassian.net/wiki/spaces/MT/pages/1467973894/How+to+test+sending+email+locally +# Instructions for using the SNS & SQS receiver backend here: https://dp3.atlassian.net/wiki/spaces/MT/pages/2793242625/How+to+test+notifications+receiver+locally # # The default and equivalent to not being set is: # # export STORAGE_BACKEND=local # export EMAIL_BACKEND=local +# export RECEIVER_BACKEND=local # # Setting region and profile conditionally while we migrate from com to govcloud.
if [ "$STORAGE_BACKEND" == "s3" ]; then @@ -258,6 +277,13 @@ export AWS_S3_KEY_NAMESPACE=$USER export AWS_SES_DOMAIN="devlocal.dp3.us" export AWS_SES_REGION="us-gov-west-1" +if [ "$RECEIVER_BACKEND" == "sns_sqs" ]; then + export SNS_TAGS_UPDATED_TOPIC="app_s3_tag_events" + export SNS_REGION="us-gov-west-1" +# cleanup flag false by default, only used at server startup to wipe receiver artifacts from previous runs +# export RECEIVER_CLEANUP_ON_START=false +fi + # To use s3 links aws-bucketname/xx/user/ for local builds, # you'll need to add the following to your .envrc.local: # @@ -444,4 +470,4 @@ then fi # Check that all required environment variables are set -check_required_variables \ No newline at end of file +check_required_variables diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index f83d084326b..38013700a29 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -220,7 +220,16 @@ stages: export DB_HOST=localhost export DB_PORT=5432 export MIGRATION_MANIFEST='/builds/milmove/mymove/migrations/app/migrations_manifest.txt' + export DML_MIGRATION_MANIFEST='/builds/milmove/mymove/migrations/app/dml_migrations_manifest.txt' + export DDL_TYPES_MIGRATION_MANIFEST='/builds/milmove/mymove/migrations/app/ddl_types_manifest.txt' + export DDL_TABLES_MIGRATION_MANIFEST='/builds/milmove/mymove/migrations/app/ddl_types_manifest.txt' + export DDL_VIEWS_MIGRATION_MANIFEST='/builds/milmove/mymove/migrations/app/ddl_types_manifest.txt' + export DDL_FUNCTIONS_MIGRATION_MANIFEST='/builds/milmove/mymove/migrations/app/ddl_types_manifest.txt' export MIGRATION_PATH='file:///builds/milmove/mymove/migrations/app/schema;file:///builds/milmove/mymove/migrations/app/secure' + export DDL_TYPES_MIGRATION_PATH='file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_types' + export DDL_TABLES_MIGRATION_PATH='file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_tables' + export DDL_VIEWS_MIGRATION_PATH='file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_views' + export DDL_FUNCTIONS_MIGRATION_PATH='file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_functions' export EIA_KEY=db2522a43820268a41a802a16ae9fd26 .setup_devseed_env_variables: &setup_devseed_env_variables @@ -789,7 +798,16 @@ server_test: DB_NAME_TEST: test_db DTOD_USE_MOCK: 'true' MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/migrations_manifest.txt' + DML_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/dml_migrations_manifest.txt' + DDL_TYPES_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/ddl_types_manifest.txt' + DDL_TABLES_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/ddl_tables_manifest.txt' + DDL_VIEWS_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/ddl_views_manifest.txt' + DDL_FUNCTIONS_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/ddl_functions_manifest.txt' MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/schema;file:///builds/milmove/mymove/migrations/app/secure' + DDL_TYPES_MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_types' + DDL_TABLES_MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_tables' + DDL_VIEWS_MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_views' + DDL_FUNCTIONS_MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_functions' EIA_KEY: db2522a43820268a41a802a16ae9fd26 # dummy key generated with openssl rand -hex 16 ENV: test ENVIRONMENT: test @@ -948,7 +966,16 @@ 
integration_test_devseed: DB_NAME: dev_db DB_NAME_DEV: dev_db MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/migrations_manifest.txt' + DML_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/dml_migrations_manifest.txt' + DDL_TYPES_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/ddl_types_manifest.txt' + DDL_TABLES_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/ddl_tables_manifest.txt' + DDL_VIEWS_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/ddl_views_manifest.txt' + DDL_FUNCTIONS_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/ddl_functions_manifest.txt' MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/schema;file:///builds/milmove/mymove/migrations/app/secure' + DDL_TYPES_MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_types' + DDL_TABLES_MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_tables' + DDL_VIEWS_MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_views' + DDL_FUNCTIONS_MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_functions' EIA_KEY: db2522a43820268a41a802a16ae9fd26 # dummy key generated with openssl rand -hex 16 ENVIRONMENT: development DOD_CA_PACKAGE: /builds/milmove/mymove/config/tls/milmove-cert-bundle.p7b @@ -1023,7 +1050,16 @@ integration_test_mtls: DB_NAME: dev_db DB_NAME_DEV: dev_db MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/migrations_manifest.txt' + DML_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/dml_migrations_manifest.txt' + DDL_TYPES_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/ddl_types_manifest.txt' + DDL_TABLES_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/ddl_tables_manifest.txt' + DDL_VIEWS_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/ddl_views_manifest.txt' + DDL_FUNCTIONS_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/ddl_functions_manifest.txt' MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/schema;file:///builds/milmove/mymove/migrations/app/secure' + DDL_TYPES_MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_types' + DDL_TABLES_MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_tables' + DDL_VIEWS_MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_views' + DDL_FUNCTIONS_MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_functions' EIA_KEY: db2522a43820268a41a802a16ae9fd26 # dummy key generated with openssl rand -hex 16 ENVIRONMENT: development DOD_CA_PACKAGE: /builds/milmove/mymove/config/tls/milmove-cert-bundle.p7b @@ -1076,7 +1112,16 @@ integration_test_admin: DB_NAME: dev_db DB_NAME_DEV: dev_db MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/migrations_manifest.txt' + DML_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/dml_migrations_manifest.txt' + DDL_TYPES_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/ddl_types_manifest.txt' + DDL_TABLES_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/ddl_tables_manifest.txt' + DDL_VIEWS_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/ddl_views_manifest.txt' + DDL_FUNCTIONS_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/ddl_functions_manifest.txt' MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/schema;file:///builds/milmove/mymove/migrations/app/secure' + DDL_TYPES_MIGRATION_PATH: 
'file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_types' + DDL_TABLES_MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_tables' + DDL_VIEWS_MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_views' + DDL_FUNCTIONS_MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_functions' EIA_KEY: db2522a43820268a41a802a16ae9fd26 # dummy key generated with openssl rand -hex 16 ENVIRONMENT: development DOD_CA_PACKAGE: /builds/milmove/mymove/config/tls/milmove-cert-bundle.p7b @@ -1134,7 +1179,16 @@ integration_test_my: DB_NAME: dev_db DB_NAME_DEV: dev_db MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/migrations_manifest.txt' + DML_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/dml_migrations_manifest.txt' + DDL_TYPES_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/ddl_types_manifest.txt' + DDL_TABLES_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/ddl_tables_manifest.txt' + DDL_VIEWS_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/ddl_views_manifest.txt' + DDL_FUNCTIONS_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/ddl_functions_manifest.txt' MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/schema;file:///builds/milmove/mymove/migrations/app/secure' + DDL_TYPES_MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_types' + DDL_TABLES_MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_tables' + DDL_VIEWS_MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_views' + DDL_FUNCTIONS_MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_functions' EIA_KEY: db2522a43820268a41a802a16ae9fd26 # dummy key generated with openssl rand -hex 16 ENVIRONMENT: development DOD_CA_PACKAGE: /builds/milmove/mymove/config/tls/milmove-cert-bundle.p7b @@ -1193,7 +1247,16 @@ integration_test_office: DB_NAME: dev_db DB_NAME_DEV: dev_db MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/migrations_manifest.txt' + DML_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/dml_migrations_manifest.txt' + DDL_TYPES_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/ddl_types_manifest.txt' + DDL_TABLES_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/ddl_tables_manifest.txt' + DDL_VIEWS_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/ddl_views_manifest.txt' + DDL_FUNCTIONS_MIGRATION_MANIFEST: '/builds/milmove/mymove/migrations/app/ddl_functions_manifest.txt' MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/schema;file:///builds/milmove/mymove/migrations/app/secure' + DDL_TYPES_MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_types' + DDL_TABLES_MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_tables' + DDL_VIEWS_MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_views' + DDL_FUNCTIONS_MIGRATION_PATH: 'file:///builds/milmove/mymove/migrations/app/ddl_migrations/ddl_functions' EIA_KEY: db2522a43820268a41a802a16ae9fd26 # dummy key generated with openssl rand -hex 16 ENVIRONMENT: development DOD_CA_PACKAGE: /builds/milmove/mymove/config/tls/milmove-cert-bundle.p7b diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a3a309e7285..50e4b31b0ec 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -159,18 +159,10 @@ repos: rev: v1.1.1 hooks: - id: gen-docs - args: 
['docs/adr'] + args: ["docs/adr"] - id: markdown-toc - id: hadolint - - repo: local - hooks: - - id: migrations-manifest - name: migrations manifest - entry: scripts/update-migrations-manifest - language: script - pass_filenames: false - - repo: local hooks: - id: scripts-docs diff --git a/Dockerfile.migrations b/Dockerfile.migrations index 021428de322..f6d81bfd325 100644 --- a/Dockerfile.migrations +++ b/Dockerfile.migrations @@ -20,6 +20,15 @@ COPY bin/rds-ca-2019-root.pem /bin/rds-ca-2019-root.pem COPY bin/milmove /bin/milmove COPY migrations/app/schema /migrate/schema +COPY migrations/app/ddl_migrations/ddl_types /migrate/ddl_migrations/ddl_types +COPY migrations/app/ddl_migrations/ddl_tables /migrate/ddl_migrations/ddl_tables +COPY migrations/app/ddl_migrations/ddl_views /migrate/ddl_migrations/ddl_views +COPY migrations/app/ddl_migrations/ddl_functions /migrate/ddl_migrations/ddl_functions COPY migrations/app/migrations_manifest.txt /migrate/migrations_manifest.txt - -ENTRYPOINT ["/bin/milmove", "migrate", "-p", "file:///migrate/migrations", "-m", "/migrate/migrations_manifest.txt"] \ No newline at end of file +COPY migrations/app/dml_migrations_manifest.txt /migrate/dml_migrations_manifest.txt +COPY migrations/app/ddl_types_manifest.txt /migrate/ddl_types_manifest.txt +COPY migrations/app/ddl_tables_manifest.txt /migrate/ddl_tables_manifest.txt +COPY migrations/app/ddl_views_manifest.txt /migrate/ddl_views_manifest.txt +COPY migrations/app/ddl_functions_manifest.txt /migrate/ddl_functions_manifest.txt +# hadolint ignore=DL3025 +ENTRYPOINT ["/bin/milmove", "migrate", "-p", "file:///migrate/migrations", "-m", "/migrate/migrations_manifest.txt", "-d", "/migrate/dml_migrations_manifest.txt", "--ddl-types-migration-path", "file:///migrate/ddl_migrations/ddl_types", "--ddl-types-migration-manifest", "/migrate/ddl_types_manifest.txt", "--ddl-tables-migration-path", "file:///migrate/ddl_migrations/ddl_tables", "--ddl-tables-migration-manifest", "/migrate/ddl_tables_manifest.txt", "--ddl-views-migration-path", "file:///migrate/ddl_migrations/ddl_views", "--ddl-views-migration-manifest", "/migrate/ddl_views_manifest.txt", "--ddl-functions-migration-path", "file:///migrate/ddl_migrations/ddl_functions", "--ddl-functions-migration-manifest", "/migrate/ddl_functions_manifest.txt"] \ No newline at end of file diff --git a/cmd/milmove/gen_migration.go b/cmd/milmove/gen_migration.go index a3df776d8ce..9fb3f48a279 100644 --- a/cmd/milmove/gen_migration.go +++ b/cmd/milmove/gen_migration.go @@ -69,7 +69,7 @@ func genMigrationFunction(cmd *cobra.Command, args []string) error { } migrationPath := v.GetString(cli.MigrationGenPathFlag) - migrationManifest := v.GetString(cli.MigrationManifestFlag) + migrationManifest := v.GetString(cli.DMLMigrationManifestFlag) migrationVersion := v.GetString(cli.MigrationVersionFlag) migrationName := v.GetString(cli.MigrationNameFlag) migrationType := v.GetString(cli.MigrationTypeFlag) diff --git a/cmd/milmove/migrate.go b/cmd/milmove/migrate.go index 507c747bf00..8e3abb98e78 100644 --- a/cmd/milmove/migrate.go +++ b/cmd/milmove/migrate.go @@ -174,7 +174,6 @@ func migrateFunction(cmd *cobra.Command, args []string) error { // Remove any extra quotes around path trimmedMigrationPaths := strings.Trim(v.GetString(cli.MigrationPathFlag), "\"") migrationPaths := expandPaths(strings.Split(trimmedMigrationPaths, ";")) - logger.Info(fmt.Sprintf("using migration paths %q", migrationPaths)) logger.Info("migration Path from s3") @@ -308,5 +307,136 @@ func migrateFunction(cmd *cobra.Command, args []string) error { return errors.Wrap(errUp, "error running migrations") } + // Begin DDL migrations + ddlTypesManifest := expandPath(v.GetString(cli.DDLTypesMigrationManifestFlag)) + ddlTypesManifestPath := expandPath(v.GetString(cli.DDLTypesMigrationPathFlag)) + + ddlTablesManifest := expandPath(v.GetString(cli.DDLTablesMigrationManifestFlag)) + ddlTablesPath :=
expandPath(v.GetString(cli.DDLTablesMigrationPathFlag)) + + ddlViewsManifest := expandPath(v.GetString(cli.DDLViewsMigrationManifestFlag)) + ddlViewsPath := expandPath(v.GetString(cli.DDLViewsMigrationPathFlag)) + + ddlFunctionsManifest := expandPath(v.GetString(cli.DDLFunctionsMigrationManifestFlag)) + ddlFunctionsPath := expandPath(v.GetString(cli.DDLFunctionsMigrationPathFlag)) + + ddlObjects := []struct { + name string + manifest string + path string + }{ + {"DDL Types", ddlTypesManifest, ddlTypesManifestPath}, + {"DDL Tables", ddlTablesManifest, ddlTablesPath}, + {"DDL Views", ddlViewsManifest, ddlViewsPath}, + {"DDL Functions", ddlFunctionsManifest, ddlFunctionsPath}, + } + + for _, ddlObj := range ddlObjects { + logger.Info(fmt.Sprintf("=== Processing %s ===", ddlObj.name)) + logger.Info(fmt.Sprintf("Using manifest %q", ddlObj.manifest)) + filenames, errListFiles := fileHelper.ListFiles(ddlObj.path, s3Client) + if errListFiles != nil { + logger.Fatal(fmt.Sprintf("Error listing %s directory %s", ddlObj.name, ddlObj.path), zap.Error(errListFiles)) + } + + ddlMigrationFiles := map[string][]string{ + ddlObj.path: filenames, + } + + manifest, err := os.Open(ddlObj.manifest[len("file://"):]) + if err != nil { + return errors.Wrap(err, fmt.Sprintf("error reading %s manifest", ddlObj.name)) + } + + scanner := bufio.NewScanner(manifest) + for scanner.Scan() { + target := scanner.Text() + if strings.HasPrefix(target, "#") { + continue + } + + uri := "" + for dir, files := range ddlMigrationFiles { + for _, filename := range files { + if target == filename { + uri = fmt.Sprintf("%s/%s", dir, filename) + break + } + } + } + + if len(uri) == 0 { + return errors.Errorf("Error finding %s migration for filename %q", ddlObj.name, target) + } + + m, err := pop.ParseMigrationFilename(target) + if err != nil { + return errors.Wrapf(err, "error parsing %s migration filename %q", ddlObj.name, uri) + } + + b := &migrate.Builder{Match: m, Path: uri} + migration, errCompile := b.Compile(s3Client, wait, logger) + if errCompile != nil { + return errors.Wrap(errCompile, fmt.Sprintf("Error compiling %s migration", ddlObj.name)) + } + + if err := migration.Run(dbConnection); err != nil { + return errors.Wrap(err, fmt.Sprintf("error executing %s migration", ddlObj.name)) + } + + logger.Info(fmt.Sprintf("Successfully executed %s: %s", ddlObj.name, target)) + } + manifest.Close() + } + + // After DDL migrations, process DML migrations + dmlManifest := expandPath(v.GetString(cli.DMLMigrationManifestFlag)) + logger.Info(fmt.Sprintf("using DML migration manifest %q", dmlManifest)) + + // Create a new migrator for DML migrations + dmlMigrator := pop.NewMigrator(dbConnection) + + manifest, err = os.Open(dmlManifest[len("file://"):]) + if err != nil { + return errors.Wrap(err, "error reading DML manifest") + } + + scanner = bufio.NewScanner(manifest) + for scanner.Scan() { + target := scanner.Text() + if strings.HasPrefix(target, "#") { + continue + } + uri := "" + for dir, filenames := range migrationFiles { + for _, filename := range filenames { + if target == filename { + uri = fmt.Sprintf("%s/%s", dir, filename) + break + } + } + } + if len(uri) == 0 { + return errors.Errorf("Error finding DML migration for filename %q", target) + } + m, err := pop.ParseMigrationFilename(target) + if err != nil { + return errors.Wrapf(err, "error parsing DML migration filename %q", uri) + } + b := &migrate.Builder{Match: m, Path: uri} + migration, errCompile := b.Compile(s3Client, wait, logger) + if errCompile != nil { + return 
errors.Wrap(errCompile, "Error compiling DML migration") + } + + dmlMigrator.UpMigrations.Migrations = append(dmlMigrator.UpMigrations.Migrations, *migration) + } + + // Run DML migrations and track versions + errUp = dmlMigrator.Up() + if errUp != nil { + return errors.Wrap(errUp, "error running DML migrations") + } + return nil } diff --git a/cmd/milmove/serve.go b/cmd/milmove/serve.go index 505936d3868..4f05b86beaa 100644 --- a/cmd/milmove/serve.go +++ b/cmd/milmove/serve.go @@ -478,6 +478,13 @@ func buildRoutingConfig(appCtx appcontext.AppContext, v *viper.Viper, redisPool appCtx.Logger().Fatal("notification sender sending not enabled", zap.Error(err)) } + // Notification Receiver + runReceiverCleanup := v.GetBool(cli.ReceiverCleanupOnStartFlag) // Cleanup aws artifacts left over from previous runs + notificationReceiver, err := notifications.InitReceiver(v, appCtx.Logger(), runReceiverCleanup) + if err != nil { + appCtx.Logger().Fatal("notification receiver not enabled", zap.Error(err)) + } + routingConfig.BuildRoot = v.GetString(cli.BuildRootFlag) sendProductionInvoice := v.GetBool(cli.GEXSendProdInvoiceFlag) @@ -567,6 +574,7 @@ func buildRoutingConfig(appCtx appcontext.AppContext, v *viper.Viper, redisPool dtodRoutePlanner, fileStorer, notificationSender, + notificationReceiver, iwsPersonLookup, sendProductionInvoice, gexSender, diff --git a/config/env/demo.migrations.env b/config/env/demo.migrations.env index 346e85f8d21..b407f1c498c 100644 --- a/config/env/demo.migrations.env +++ b/config/env/demo.migrations.env @@ -6,3 +6,8 @@ DB_SSL_MODE=verify-full DB_SSL_ROOT_CERT=/bin/rds-ca-rsa4096-g1.pem DB_USER=ecs_user MIGRATION_MANIFEST=/migrate/migrations_manifest.txt +DML_MIGRATION_MANIFEST=/migrate/dml_migrations_manifest.txt +DDL_TYPES_MIGRATION_MANIFEST=/migrate/ddl_types_manifest.txt +DDL_TABLES_MIGRATION_MANIFEST=/migrate/ddl_tables_manifest.txt +DDL_VIEWS_MIGRATION_MANIFEST=/migrate/ddl_views_manifest.txt +DDL_FUNCTIONS_MIGRATION_MANIFEST=/migrate/ddl_functions_manifest.txt \ No newline at end of file diff --git a/config/env/exp.migrations.env b/config/env/exp.migrations.env index 346e85f8d21..d215d9e7797 100644 --- a/config/env/exp.migrations.env +++ b/config/env/exp.migrations.env @@ -6,3 +6,8 @@ DB_SSL_MODE=verify-full DB_SSL_ROOT_CERT=/bin/rds-ca-rsa4096-g1.pem DB_USER=ecs_user MIGRATION_MANIFEST=/migrate/migrations_manifest.txt +DML_MIGRATION_MANIFEST=/migrate/dml_migrations_manifest.txt +DDL_TYPES_MIGRATION_MANIFEST=/migrate/ddl_types_manifest.txt +DDL_TABLES_MIGRATION_MANIFEST=/migrate/ddl_tables_manifest.txt +DDL_VIEWS_MIGRATION_MANIFEST=/migrate/ddl_views_manifest.txt +DDL_FUNCTIONS_MIGRATION_MANIFEST=/migrate/ddl_functions_manifest.txt diff --git a/config/env/loadtest.migrations.env b/config/env/loadtest.migrations.env index 346e85f8d21..b407f1c498c 100644 --- a/config/env/loadtest.migrations.env +++ b/config/env/loadtest.migrations.env @@ -6,3 +6,8 @@ DB_SSL_MODE=verify-full DB_SSL_ROOT_CERT=/bin/rds-ca-rsa4096-g1.pem DB_USER=ecs_user MIGRATION_MANIFEST=/migrate/migrations_manifest.txt +DML_MIGRATION_MANIFEST=/migrate/dml_migrations_manifest.txt +DDL_TYPES_MIGRATION_MANIFEST=/migrate/ddl_types_manifest.txt +DDL_TABLES_MIGRATION_MANIFEST=/migrate/ddl_tables_manifest.txt +DDL_VIEWS_MIGRATION_MANIFEST=/migrate/ddl_views_manifest.txt +DDL_FUNCTIONS_MIGRATION_MANIFEST=/migrate/ddl_functions_manifest.txt \ No newline at end of file diff --git a/config/env/prd.migrations.env b/config/env/prd.migrations.env index 346e85f8d21..b407f1c498c 100644 --- 
a/config/env/prd.migrations.env +++ b/config/env/prd.migrations.env @@ -6,3 +6,8 @@ DB_SSL_MODE=verify-full DB_SSL_ROOT_CERT=/bin/rds-ca-rsa4096-g1.pem DB_USER=ecs_user MIGRATION_MANIFEST=/migrate/migrations_manifest.txt +DML_MIGRATION_MANIFEST=/migrate/dml_migrations_manifest.txt +DDL_TYPES_MIGRATION_MANIFEST=/migrate/ddl_types_manifest.txt +DDL_TABLES_MIGRATION_MANIFEST=/migrate/ddl_tables_manifest.txt +DDL_VIEWS_MIGRATION_MANIFEST=/migrate/ddl_views_manifest.txt +DDL_FUNCTIONS_MIGRATION_MANIFEST=/migrate/ddl_functions_manifest.txt \ No newline at end of file diff --git a/config/env/stg.migrations.env b/config/env/stg.migrations.env index 346e85f8d21..b407f1c498c 100644 --- a/config/env/stg.migrations.env +++ b/config/env/stg.migrations.env @@ -6,3 +6,8 @@ DB_SSL_MODE=verify-full DB_SSL_ROOT_CERT=/bin/rds-ca-rsa4096-g1.pem DB_USER=ecs_user MIGRATION_MANIFEST=/migrate/migrations_manifest.txt +DML_MIGRATION_MANIFEST=/migrate/dml_migrations_manifest.txt +DDL_TYPES_MIGRATION_MANIFEST=/migrate/ddl_types_manifest.txt +DDL_TABLES_MIGRATION_MANIFEST=/migrate/ddl_tables_manifest.txt +DDL_VIEWS_MIGRATION_MANIFEST=/migrate/ddl_views_manifest.txt +DDL_FUNCTIONS_MIGRATION_MANIFEST=/migrate/ddl_functions_manifest.txt \ No newline at end of file diff --git a/go.mod b/go.mod index c20f9d25bfd..b01ccab8cf7 100644 --- a/go.mod +++ b/go.mod @@ -21,6 +21,8 @@ require ( github.com/aws/aws-sdk-go-v2/service/rds v1.78.2 github.com/aws/aws-sdk-go-v2/service/s3 v1.59.0 github.com/aws/aws-sdk-go-v2/service/ses v1.25.3 + github.com/aws/aws-sdk-go-v2/service/sns v1.31.8 + github.com/aws/aws-sdk-go-v2/service/sqs v1.34.6 github.com/aws/aws-sdk-go-v2/service/ssm v1.52.8 github.com/aws/aws-sdk-go-v2/service/sts v1.30.7 github.com/aws/smithy-go v1.20.4 @@ -278,4 +280,4 @@ require ( pault.ag/go/piv v0.0.0-20190320181422-d9d61c70919c // indirect ) -replace github.com/pdfcpu/pdfcpu => github.com/transcom/pdfcpu v0.0.0-20250131173611-4b416bd62126 +replace github.com/pdfcpu/pdfcpu => github.com/transcom/pdfcpu v0.0.0-20250225161110-ce2f81788248 diff --git a/go.sum b/go.sum index f6cf27c293a..79d05fbcb36 100644 --- a/go.sum +++ b/go.sum @@ -82,6 +82,10 @@ github.com/aws/aws-sdk-go-v2/service/s3 v1.59.0 h1:Cso4Ev/XauMVsbwdhYEoxg8rxZWw4 github.com/aws/aws-sdk-go-v2/service/s3 v1.59.0/go.mod h1:BSPI0EfnYUuNHPS0uqIo5VrRwzie+Fp+YhQOUs16sKI= github.com/aws/aws-sdk-go-v2/service/ses v1.25.3 h1:wcfUsE2nqsXhEj68gxr7MnGXNPcBPKx0RW2DzBVgVlM= github.com/aws/aws-sdk-go-v2/service/ses v1.25.3/go.mod h1:6Ul/Ir8oOCsI3dFN0prULK9fvpxP+WTYmlHDkFzaAVA= +github.com/aws/aws-sdk-go-v2/service/sns v1.31.8 h1:vRSk062d1SmaEVbiqFePkvYuhCTnW2JnPkUdt19nqeY= +github.com/aws/aws-sdk-go-v2/service/sns v1.31.8/go.mod h1:wjhxA9hlVu75dCL/5Wcx8Cwmszvu6t0i8WEDypcB4+s= +github.com/aws/aws-sdk-go-v2/service/sqs v1.34.6 h1:DbjODDHumQBdJ3T+EO7AXVoFUeUhAsJYOdjStH5Ws4A= +github.com/aws/aws-sdk-go-v2/service/sqs v1.34.6/go.mod h1:7idt3XszF6sE9WPS1GqZRiDJOxw4oPtlRBXodWnCGjU= github.com/aws/aws-sdk-go-v2/service/ssm v1.52.8 h1:7cjN4Wp3U3cud17TsnUxSomTwKzKQGUWdq/N1aWqgMk= github.com/aws/aws-sdk-go-v2/service/ssm v1.52.8/go.mod h1:nUSNPaG8mv5rIu7EclHnFqZOjhreEUwRKENtKTtJ9aw= github.com/aws/aws-sdk-go-v2/service/sso v1.22.7 h1:pIaGg+08llrP7Q5aiz9ICWbY8cqhTkyy+0SHvfzQpTc= @@ -629,8 +633,8 @@ github.com/tiaguinho/gosoap v1.4.4 h1:4XZlaqf/y2UAbCPFGcZS4uLKrEvnMr+5pccIyQAUVg github.com/tiaguinho/gosoap v1.4.4/go.mod h1:4vv86Jl19UkOeoJW/aawihXYNJ/Iy2NHkhgmBUJ2Ibk= github.com/toqueteos/webbrowser v1.2.0 h1:tVP/gpK69Fx+qMJKsLE7TD8LuGWPnEV71wBN9rrstGQ= 
github.com/toqueteos/webbrowser v1.2.0/go.mod h1:XWoZq4cyp9WeUeak7w7LXRUQf1F1ATJMir8RTqb4ayM= -github.com/transcom/pdfcpu v0.0.0-20250131173611-4b416bd62126 h1:XbLtbZvPTc5bY6DuXF2ZHPLPmE3GVe3T/o8PzfmITCA= -github.com/transcom/pdfcpu v0.0.0-20250131173611-4b416bd62126/go.mod h1:8EAma3IBIS7ssMiPlcNIPWwISTuP31WToXfGvc327vI= +github.com/transcom/pdfcpu v0.0.0-20250225161110-ce2f81788248 h1:G1EenmQJPQ5EO1U2iOi3olQxpM0bW+AsPWFpJhnfL1w= +github.com/transcom/pdfcpu v0.0.0-20250225161110-ce2f81788248/go.mod h1:8EAma3IBIS7ssMiPlcNIPWwISTuP31WToXfGvc327vI= github.com/urfave/cli v1.22.10 h1:p8Fspmz3iTctJstry1PYS3HVdllxnEzTEsgIgtxTrCk= github.com/urfave/cli v1.22.10/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= github.com/vektra/mockery/v2 v2.45.1 h1:6HpdnKiLCjVtzlRLQPUNIM0u7yrvAoZ7VWF1TltJvTM= diff --git a/migrations/app/ddl_functions_manifest.txt b/migrations/app/ddl_functions_manifest.txt new file mode 100644 index 00000000000..237796e829e --- /dev/null +++ b/migrations/app/ddl_functions_manifest.txt @@ -0,0 +1,3 @@ +# This is the functions (procedures) migrations manifest. +# If a migration is not recorded here, the migration run will error. +# Naming convention: fn_some_function.up.sql; running the migration generator will create this file. diff --git a/migrations/app/ddl_migrations/ddl_functions/README.md b/migrations/app/ddl_migrations/ddl_functions/README.md new file mode 100644 index 00000000000..e69de29bb2d diff --git a/migrations/app/ddl_migrations/ddl_tables/README.md b/migrations/app/ddl_migrations/ddl_tables/README.md new file mode 100644 index 00000000000..e69de29bb2d diff --git a/migrations/app/ddl_migrations/ddl_types/README.md b/migrations/app/ddl_migrations/ddl_types/README.md new file mode 100644 index 00000000000..e69de29bb2d diff --git a/migrations/app/ddl_migrations/ddl_views/README.md b/migrations/app/ddl_migrations/ddl_views/README.md new file mode 100644 index 00000000000..e69de29bb2d diff --git a/migrations/app/ddl_tables_manifest.txt b/migrations/app/ddl_tables_manifest.txt new file mode 100644 index 00000000000..8fd6841c337 --- /dev/null +++ b/migrations/app/ddl_tables_manifest.txt @@ -0,0 +1,3 @@ +# This is the tables migrations manifest. +# If a migration is not recorded here, the migration run will error. +# Naming convention: tbl_some_table.up.sql; running the migration generator will create this file. diff --git a/migrations/app/ddl_types_manifest.txt b/migrations/app/ddl_types_manifest.txt new file mode 100644 index 00000000000..9229c96f599 --- /dev/null +++ b/migrations/app/ddl_types_manifest.txt @@ -0,0 +1,3 @@ +# This is the types migrations manifest. +# If a migration is not recorded here, the migration run will error. +# Naming convention: ty_some_type.up.sql; running the migration generator will create this file. diff --git a/migrations/app/ddl_views_manifest.txt b/migrations/app/ddl_views_manifest.txt new file mode 100644 index 00000000000..939945b6618 --- /dev/null +++ b/migrations/app/ddl_views_manifest.txt @@ -0,0 +1,3 @@ +# This is the views migrations manifest. +# If a migration is not recorded here, the migration run will error. +# Naming convention: vw_some_view.up.sql; running the migration generator will create this file. diff --git a/migrations/app/dml_migrations_manifest.txt b/migrations/app/dml_migrations_manifest.txt new file mode 100644 index 00000000000..570749e1cfa --- /dev/null +++ b/migrations/app/dml_migrations_manifest.txt @@ -0,0 +1,3 @@ +# This is the DML migrations manifest. +# If a migration is not recorded here, the migration run will error. +# Naming convention: 202502201325_B-123456_update_some_table.up.sql; running the migration generator will create this file.
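For reference, the manifests and directories introduced above are all consumed by a single migrate run. A hypothetical local invocation, shown only as a sketch: the long flag names come from pkg/cli/migration.go below, and the /migrate file layout mirrors Dockerfile.migrations above.

    milmove migrate \
      -p 'file:///migrate/migrations' \
      -m /migrate/migrations_manifest.txt \
      -d /migrate/dml_migrations_manifest.txt \
      --ddl-types-migration-path 'file:///migrate/ddl_migrations/ddl_types' \
      --ddl-types-migration-manifest /migrate/ddl_types_manifest.txt \
      --ddl-tables-migration-path 'file:///migrate/ddl_migrations/ddl_tables' \
      --ddl-tables-migration-manifest /migrate/ddl_tables_manifest.txt \
      --ddl-views-migration-path 'file:///migrate/ddl_migrations/ddl_views' \
      --ddl-views-migration-manifest /migrate/ddl_views_manifest.txt \
      --ddl-functions-migration-path 'file:///migrate/ddl_migrations/ddl_functions' \
      --ddl-functions-migration-manifest /migrate/ddl_functions_manifest.txt

Per the migrate.go changes in this diff, the legacy schema manifest runs first, then the DDL manifests in order (types, tables, views, functions), then the DML manifest; manifest lines beginning with # are skipped.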
diff --git a/migrations/app/migrations_manifest.txt b/migrations/app/migrations_manifest.txt index eb8aa1d8fa8..a044a514cad 100644 --- a/migrations/app/migrations_manifest.txt +++ b/migrations/app/migrations_manifest.txt @@ -1091,6 +1091,10 @@ 20250121153007_update_pricing_proc_to_handle_international_shuttle.up.sql 20250121184450_upd_duty_loc_B-22242.up.sql 20250123173216_add_destination_queue_db_func_and_gbloc_view.up.sql +20250123210535_update_re_intl_transit_times_for_ak_hhg.up.sql 20250204162411_updating_create_accessorial_service_item_proc_for_crating.up.sql 20250206173204_add_hawaii_data.up.sql +20250207153450_add_fetch_documents_func.up.sql 20250210175754_B22451_update_dest_queue_to_consider_sit_extensions.up.sql +20250213151815_fix_spacing_fetch_documents.up.sql +# Nothing should be added below this line. This file is archived and is only needed for rebuilding the db locally; it is run prior to the new migrations process to keep the current state. diff --git a/migrations/app/schema/20250106202424_update_duty_locs.up.sql b/migrations/app/schema/20250106202424_update_duty_locs.up.sql index d3bc09560e5..2ecfaf6a399 100644 --- a/migrations/app/schema/20250106202424_update_duty_locs.up.sql +++ b/migrations/app/schema/20250106202424_update_duty_locs.up.sql @@ -24,11 +24,11 @@ BEGIN SELECT '8d613f71-b80e-4ad4-95e7-00781b084c7c'::uuid, 'n/a', NULL, 'NAS NORTH ISLAND', 'CA', '39125', now(), now(), NULL, 'SAN DIEGO', false, '791899e6-cd77-46f2-981b-176ecb8d7098'::uuid, '191165db-d30a-414d-862b-54afdfc7aeb9'::uuid WHERE NOT EXISTS (select * from addresses where id = '8d613f71-b80e-4ad4-95e7-00781b084c7c'); - INSERT INTO duty_locations (id,"name",affiliation,address_id,created_at,updated_at,transportation_office_id,provides_services_counseling) + INSERT INTO duty_locations (id,"name",affiliation,address_id,created_at,updated_at,transportation_office_id,provides_services_counseling) SELECT '56255626-bbbe-4834-8324-1c08f011f2f6'::uuid,'NAS N Island, CA 92135',NULL,'3d617fab-bf6f-4f07-8ab5-f7652b8e7f3e'::uuid,now(),now(),null,true WHERE NOT EXISTS (select * from duty_locations where id = '56255626-bbbe-4834-8324-1c08f011f2f6'); - - INSERT INTO duty_locations (id,"name",affiliation,address_id,created_at,updated_at,transportation_office_id,provides_services_counseling) + + INSERT INTO duty_locations (id,"name",affiliation,address_id,created_at,updated_at,transportation_office_id,provides_services_counseling) SELECT '7156098f-13cf-4455-bcd5-eb829d57c714'::uuid,'NAS North Island, CA 92135',NULL,'8d613f71-b80e-4ad4-95e7-00781b084c7c'::uuid,now(),now(),null,true WHERE NOT EXISTS (select * from duty_locations where id = '7156098f-13cf-4455-bcd5-eb829d57c714'); END $$; @@ -42,7 +42,7 @@ BEGIN SELECT 'fb90a7df-6494-4974-a0ce-4bdbcaff80c0'::uuid, 'n/a', NULL, 'CANNON AFB', 'NM', '88101', now(), now(), NULL, 'CURRY', false, '791899e6-cd77-46f2-981b-176ecb8d7098'::uuid, '68393e10-1aed-4a51-85a0-559a0a5b0e3f'::uuid WHERE NOT EXISTS (select * from addresses where id = 'fb90a7df-6494-4974-a0ce-4bdbcaff80c0'); - INSERT INTO duty_locations (id,"name",affiliation,address_id,created_at,updated_at,transportation_office_id,provides_services_counseling) + INSERT INTO duty_locations (id,"name",affiliation,address_id,created_at,updated_at,transportation_office_id,provides_services_counseling) SELECT '98beab3c-f8ce-4e3c-b78e-8db614721621'::uuid, 'Cannon AFB, NM 88101',null, 'fb90a7df-6494-4974-a0ce-4bdbcaff80c0'::uuid,now(),now(),'80796bc4-e494-4b19-bb16-cdcdba187829',true WHERE NOT EXISTS (select * from duty_locations where id =
'98beab3c-f8ce-4e3c-b78e-8db614721621'); END $$; diff --git a/migrations/app/schema/20250121184450_upd_duty_loc_B-22242.up.sql b/migrations/app/schema/20250121184450_upd_duty_loc_B-22242.up.sql index c21c04f6a81..70928fff219 100644 --- a/migrations/app/schema/20250121184450_upd_duty_loc_B-22242.up.sql +++ b/migrations/app/schema/20250121184450_upd_duty_loc_B-22242.up.sql @@ -1,30 +1,30 @@ DO $$ BEGIN - + --remove duty loc Johnston City, TN 37602 IF EXISTS (SELECT 1 FROM duty_locations WHERE id = 'd3a1be10-dcd7-4720-bcbe-7ba76d243687') THEN - - + + update orders set origin_duty_location_id = 'cd0c7325-15bb-45c7-a690-26c56c903ed7' where origin_duty_location_id = 'd3a1be10-dcd7-4720-bcbe-7ba76d243687'; update orders set new_duty_location_id = 'cd0c7325-15bb-45c7-a690-26c56c903ed7' where new_duty_location_id = 'd3a1be10-dcd7-4720-bcbe-7ba76d243687'; - + delete from duty_locations where id = 'd3a1be10-dcd7-4720-bcbe-7ba76d243687'; - + END IF; END $$; DO $$ BEGIN - + --remove duty loc Oceanside, CA 92052 IF EXISTS (SELECT 1 FROM duty_locations WHERE id = '54ca99b7-3c2a-42b0-aa1a-ad071ac580de') THEN - + update orders set origin_duty_location_id = 'a6993e7b-4600-44b9-b288-04ca011143f0' where origin_duty_location_id = '54ca99b7-3c2a-42b0-aa1a-ad071ac580de'; update orders set new_duty_location_id = 'a6993e7b-4600-44b9-b288-04ca011143f0' where new_duty_location_id = '54ca99b7-3c2a-42b0-aa1a-ad071ac580de'; - + delete from duty_locations where id = '54ca99b7-3c2a-42b0-aa1a-ad071ac580de'; - + END IF; END $$; @@ -34,9 +34,9 @@ BEGIN --remove duty loc Albuquerque, NM 87103 IF EXISTS (SELECT 1 FROM duty_locations WHERE id = '2cc57072-19fa-438b-a44b-e349dff11763') THEN - + update orders set new_duty_location_id = '54acfb0e-222b-49eb-b94b-ccb00c6f529c' where new_duty_location_id = '2cc57072-19fa-438b-a44b-e349dff11763'; - + delete from duty_locations where id = '2cc57072-19fa-438b-a44b-e349dff11763'; END IF; @@ -45,45 +45,45 @@ END $$; DO $$ BEGIN - + --remove duty loc August, GA 30917 IF EXISTS (SELECT 1 FROM duty_locations WHERE id = '109ac405-47fb-4e1e-9efb-58290453ac09') THEN - + update orders set origin_duty_location_id = '595363c2-14ee-48e0-b318-e76ab0016453' where origin_duty_location_id = '109ac405-47fb-4e1e-9efb-58290453ac09'; update orders set new_duty_location_id = '595363c2-14ee-48e0-b318-e76ab0016453' where new_duty_location_id = '109ac405-47fb-4e1e-9efb-58290453ac09'; - + delete from duty_locations where id = '109ac405-47fb-4e1e-9efb-58290453ac09'; - + END IF; END $$; DO $$ BEGIN - + --remove duty loc Frankfort, KY 40602 IF EXISTS (SELECT 1 FROM duty_locations WHERE id = 'c7fadaa2-902f-4302-a7cd-108c525b96d4') THEN - + update orders set origin_duty_location_id = '1a973257-cd15-42a9-86be-a14796c014bc' where origin_duty_location_id = 'c7fadaa2-902f-4302-a7cd-108c525b96d4'; update orders set new_duty_location_id = '1a973257-cd15-42a9-86be-a14796c014bc' where new_duty_location_id = 'c7fadaa2-902f-4302-a7cd-108c525b96d4'; - + delete from duty_locations where id = 'c7fadaa2-902f-4302-a7cd-108c525b96d4'; - + END IF; END $$; DO $$ BEGIN - + --remove duty loc Seattle, WA 98111 IF EXISTS (SELECT 1 FROM duty_locations WHERE id = '2fb3e898-d6de-4be7-8576-7c7b10c2a706') THEN - + update orders set origin_duty_location_id = 'e7fdae4f-6be7-4264-99f8-03ee8541499c' where origin_duty_location_id = '2fb3e898-d6de-4be7-8576-7c7b10c2a706'; update orders set new_duty_location_id = 'e7fdae4f-6be7-4264-99f8-03ee8541499c' where new_duty_location_id = '2fb3e898-d6de-4be7-8576-7c7b10c2a706'; - + delete from duty_locations 
where id = '2fb3e898-d6de-4be7-8576-7c7b10c2a706'; - + END IF; END $$; diff --git a/migrations/app/schema/20250123210535_update_re_intl_transit_times_for_ak_hhg.up.sql b/migrations/app/schema/20250123210535_update_re_intl_transit_times_for_ak_hhg.up.sql new file mode 100644 index 00000000000..fb67d5fee8b --- /dev/null +++ b/migrations/app/schema/20250123210535_update_re_intl_transit_times_for_ak_hhg.up.sql @@ -0,0 +1,9 @@ +UPDATE re_intl_transit_times + SET hhg_transit_time = 10 +WHERE origin_rate_area_id IN ('b80a00d4-f829-4051-961a-b8945c62c37d','5a27e806-21d4-4672-aa5e-29518f10c0aa') + OR destination_rate_area_id IN ('b80a00d4-f829-4051-961a-b8945c62c37d','5a27e806-21d4-4672-aa5e-29518f10c0aa'); + +update re_intl_transit_times + SET hhg_transit_time = 20 +WHERE origin_rate_area_id IN ('9bb87311-1b29-4f29-8561-8a4c795654d4','635e4b79-342c-4cfc-8069-39c408a2decd') + OR destination_rate_area_id IN ('9bb87311-1b29-4f29-8561-8a4c795654d4','635e4b79-342c-4cfc-8069-39c408a2decd'); \ No newline at end of file diff --git a/migrations/app/schema/20250207153450_add_fetch_documents_func.up.sql b/migrations/app/schema/20250207153450_add_fetch_documents_func.up.sql new file mode 100644 index 00000000000..2bc71695066 --- /dev/null +++ b/migrations/app/schema/20250207153450_add_fetch_documents_func.up.sql @@ -0,0 +1,25 @@ +CREATE OR REPLACE FUNCTION public.fetch_documents(docCursor refcursor, useruploadCursor refcursor, uploadCursor refcursor, _docID uuid) RETURNS setof refcursor AS $$ +BEGIN + OPEN $1 FOR + SELECT documents.created_at, documents.deleted_at, documents.id, documents.service_member_id, documents.updated_at + FROM documents AS documents + WHERE documents.id = _docID and documents.deleted_at is null + LIMIT 1; + RETURN NEXT $1; + OPEN $2 FOR + SELECT user_uploads.created_at, user_uploads.deleted_at, user_uploads.document_id, user_uploads.id, user_uploads.updated_at, + user_uploads.upload_id, user_uploads.uploader_id + FROM user_uploads AS user_uploads + WHERE user_uploads.deleted_at is null and user_uploads.document_id = _docID + ORDER BY created_at asc; + RETURN NEXT $2; + OPEN $3 FOR + SELECT uploads.id, uploads.bytes, uploads.checksum, uploads.content_type, uploads.created_at, uploads.deleted_at, uploads.filename, + uploads.rotation, uploads.storage_key, uploads.updated_at, uploads.upload_type + FROM uploads AS uploads, user_uploads + WHERE uploads.deleted_at is null + and uploads.id = user_uploads.upload_id + and user_uploads.deleted_at is null and user_uploads.document_id = _docID; + RETURN NEXT $3; +END; +$$ LANGUAGE plpgsql; diff --git a/migrations/app/schema/20250213151815_fix_spacing_fetch_documents.up.sql b/migrations/app/schema/20250213151815_fix_spacing_fetch_documents.up.sql new file mode 100644 index 00000000000..e5dd6537ee8 --- /dev/null +++ b/migrations/app/schema/20250213151815_fix_spacing_fetch_documents.up.sql @@ -0,0 +1,22 @@ +CREATE OR REPLACE FUNCTION public.fetch_documents(docCursor refcursor, useruploadCursor refcursor, uploadCursor refcursor, _docID uuid) RETURNS setof refcursor AS $$ +BEGIN + OPEN $1 FOR + SELECT documents.created_at, documents.deleted_at, documents.id, documents.service_member_id, documents.updated_at + FROM documents AS documents + WHERE documents.id = _docID and documents.deleted_at is null + LIMIT 1; + RETURN NEXT $1; + OPEN $2 FOR + SELECT user_uploads.created_at, user_uploads.deleted_at, user_uploads.document_id, user_uploads.id, user_uploads.updated_at, + user_uploads.upload_id, user_uploads.uploader_id + FROM user_uploads AS user_uploads + 
WHERE user_uploads.deleted_at is null and user_uploads.document_id = _docID + ORDER BY created_at asc; + RETURN NEXT $2; + OPEN $3 FOR + SELECT uploads.id, uploads.bytes, uploads.checksum, uploads.content_type, uploads.created_at, uploads.deleted_at, uploads.filename, + uploads.rotation, uploads.storage_key, uploads.updated_at, uploads.upload_type FROM uploads AS uploads , user_uploads + WHERE uploads.deleted_at is null and uploads.id = user_uploads.upload_id and user_uploads.deleted_at is null and user_uploads.document_id = _docID; + RETURN NEXT $3; +END; +$$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/pkg/cli/migration.go b/pkg/cli/migration.go index b540516beb0..bf54e72ab55 100644 --- a/pkg/cli/migration.go +++ b/pkg/cli/migration.go @@ -10,9 +10,22 @@ import ( const ( // MigrationManifestFlag is the migration manifest flag - MigrationManifestFlag string = "migration-manifest" + MigrationManifestFlag string = "migration-manifest" // deprecated + DMLMigrationManifestFlag string = "dml-migration-manifest" // MigrationWaitFlag is the migration wait flag MigrationWaitFlag string = "migration-wait" + + DDLTablesMigrationPathFlag = "ddl-tables-migration-path" + DDLTablesMigrationManifestFlag = "ddl-tables-migration-manifest" + + DDLTypesMigrationPathFlag = "ddl-types-migration-path" + DDLTypesMigrationManifestFlag = "ddl-types-migration-manifest" + + DDLViewsMigrationPathFlag = "ddl-views-migration-path" + DDLViewsMigrationManifestFlag = "ddl-views-migration-manifest" + + DDLFunctionsMigrationPathFlag = "ddl-functions-migration-path" + DDLFunctionsMigrationManifestFlag = "ddl-functions-migration-manifest" ) var ( @@ -22,7 +35,17 @@ // InitMigrationFlags initializes the Migration command line flags func InitMigrationFlags(flag *pflag.FlagSet) { flag.StringP(MigrationManifestFlag, "m", "migrations/app/migrations_manifest.txt", "Path to the manifest") + flag.StringP(DMLMigrationManifestFlag, "d", "migrations/app/dml_migrations_manifest.txt", "Path to the DML migrations manifest") flag.DurationP(MigrationWaitFlag, "w", time.Millisecond*10, "duration to wait when polling for new data from migration file") + flag.String(DDLTablesMigrationPathFlag, "", "Path to DDL tables migrations directory") + flag.String(DDLTablesMigrationManifestFlag, "", "Path to DDL tables migrations manifest") + flag.String(DDLTypesMigrationPathFlag, "", "Path to DDL types migrations directory") + flag.String(DDLTypesMigrationManifestFlag, "", "Path to DDL types migrations manifest") + flag.String(DDLViewsMigrationPathFlag, "", "Path to DDL views migrations directory") + flag.String(DDLViewsMigrationManifestFlag, "", "Path to DDL views migrations manifest") + flag.String(DDLFunctionsMigrationPathFlag, "", "Path to DDL functions migrations directory") + flag.String(DDLFunctionsMigrationManifestFlag, "", "Path to DDL functions migrations manifest") + } // CheckMigration validates migration command line flags @@ -34,5 +57,20 @@ if len(MigrationManifestFlag) == 0 { return errMissingMigrationManifest } + if len(v.GetString(DMLMigrationManifestFlag)) == 0 { + return errMissingMigrationManifest + } + if len(v.GetString(DDLTypesMigrationManifestFlag)) == 0 { + return errMissingMigrationManifest + } + if len(v.GetString(DDLTablesMigrationManifestFlag)) == 0 { + return errMissingMigrationManifest + } + if len(v.GetString(DDLViewsMigrationManifestFlag)) == 0 { + return errMissingMigrationManifest + } + if len(v.GetString(DDLFunctionsMigrationManifestFlag)) == 0 { + return errMissingMigrationManifest + } return nil } diff --git a/pkg/cli/receiver.go
b/pkg/cli/receiver.go new file mode 100644 index 00000000000..ed71d45d209 --- /dev/null +++ b/pkg/cli/receiver.go @@ -0,0 +1,61 @@ +package cli + +import ( + "fmt" + + "github.com/spf13/pflag" + "github.com/spf13/viper" +) + +const ( + // ReceiverBackendFlag is the Receiver Backend Flag + ReceiverBackendFlag string = "receiver-backend" + // SNSTagsUpdatedTopicFlag is the SNS Tags Updated Topic Flag + SNSTagsUpdatedTopicFlag string = "sns-tags-updated-topic" + // SNSRegionFlag is the SNS Region flag + SNSRegionFlag string = "sns-region" + // SNSAccountId is the application's AWS account id + SNSAccountId string = "aws-account-id" + // ReceiverCleanupOnStartFlag is the Receiver Cleanup On Start Flag + ReceiverCleanupOnStartFlag string = "receiver-cleanup-on-start" +) + +// InitReceiverFlags initializes Receiver command line flags +func InitReceiverFlags(flag *pflag.FlagSet) { + flag.String(ReceiverBackendFlag, "local", "Receiver backend to use, either local or sns_sqs.") + flag.String(SNSTagsUpdatedTopicFlag, "", "SNS Topic for receiving event messages") + flag.String(SNSRegionFlag, "", "Region used for SNS and SQS") + flag.String(SNSAccountId, "", "SNS account Id") + flag.Bool(ReceiverCleanupOnStartFlag, false, "Receiver will clean up previous AWS artifacts on start.") +} + +// CheckReceiver validates Receiver command line flags +func CheckReceiver(v *viper.Viper) error { + + receiverBackend := v.GetString(ReceiverBackendFlag) + if !stringSliceContains([]string{"local", "sns_sqs"}, receiverBackend) { + return fmt.Errorf("invalid receiver_backend %s, expecting local or sns_sqs", receiverBackend) + } + + receiverCleanupOnStart := v.GetString(ReceiverCleanupOnStartFlag) + if !stringSliceContains([]string{"true", "false"}, receiverCleanupOnStart) { + return fmt.Errorf("invalid receiver_cleanup_on_start %s, expecting true or false", receiverCleanupOnStart) + } + + if receiverBackend == "sns_sqs" { + r := v.GetString(SNSRegionFlag) + if r == "" { + return fmt.Errorf("invalid value for %s: %s", SNSRegionFlag, r) + } + topic := v.GetString(SNSTagsUpdatedTopicFlag) + if topic == "" { + return fmt.Errorf("invalid value for %s: %s", SNSTagsUpdatedTopicFlag, topic) + } + accountId := v.GetString(SNSAccountId) + if accountId == "" { + return fmt.Errorf("invalid value for %s: %s", SNSAccountId, accountId) + } + } + + return nil +} diff --git a/pkg/cli/receiver_test.go b/pkg/cli/receiver_test.go new file mode 100644 index 00000000000..7095a672f5f --- /dev/null +++ b/pkg/cli/receiver_test.go @@ -0,0 +1,6 @@ +package cli + +func (suite *cliTestSuite) TestConfigReceiver() { + suite.Setup(InitReceiverFlags, []string{}) + suite.NoError(CheckReceiver(suite.viper)) +} diff --git a/pkg/factory/address_factory.go b/pkg/factory/address_factory.go index 27d92999d00..ad4ce46507f 100644 --- a/pkg/factory/address_factory.go +++ b/pkg/factory/address_factory.go @@ -201,3 +201,75 @@ func GetTraitAddress4() []Customization { }, } } + +// GetTraitAddressAKZone1 is an address in Zone 1 of Alaska +func GetTraitAddressAKZone1() []Customization { + + return []Customization{ + { + Model: models.Address{ + StreetAddress1: "82 Joe Gibbs Rd", + StreetAddress2: models.StringPointer("P.O.
Box 1234"), + StreetAddress3: models.StringPointer("c/o Another Person"), + City: "ANCHORAGE", + State: "AK", + PostalCode: "99695", + IsOconus: models.BoolPointer(true), + }, + }, + } +} + +// GetTraitAddressAKZone2 is an address in Zone 2 of Alaska +func GetTraitAddressAKZone2() []Customization { + + return []Customization{ + { + Model: models.Address{ + StreetAddress1: "44 John Riggins Rd", + StreetAddress2: models.StringPointer("P.O. Box 1234"), + StreetAddress3: models.StringPointer("c/o Another Person"), + City: "FAIRBANKS", + State: "AK", + PostalCode: "99703", + IsOconus: models.BoolPointer(true), + }, + }, + } +} + +// GetTraitAddressAKZone3 is an address in Zone 3 of Alaska +func GetTraitAddressAKZone3() []Customization { + + return []Customization{ + { + Model: models.Address{ + StreetAddress1: "26 Clinton Portis Rd", + StreetAddress2: models.StringPointer("P.O. Box 1234"), + StreetAddress3: models.StringPointer("c/o Another Person"), + City: "KODIAK", + State: "AK", + PostalCode: "99697", + IsOconus: models.BoolPointer(true), + }, + }, + } +} + +// GetTraitAddressAKZone4 is an address in Zone 4 of Alaska +func GetTraitAddressAKZone4() []Customization { + + return []Customization{ + { + Model: models.Address{ + StreetAddress1: "8 Alex Ovechkin Rd", + StreetAddress2: models.StringPointer("P.O. Box 1234"), + StreetAddress3: models.StringPointer("c/o Another Person"), + City: "JUNEAU", + State: "AK", + PostalCode: "99801", + IsOconus: models.BoolPointer(true), + }, + }, + } +} diff --git a/pkg/gen/ghcapi/configure_mymove.go b/pkg/gen/ghcapi/configure_mymove.go index 415ee1c890d..432343d4dab 100644 --- a/pkg/gen/ghcapi/configure_mymove.go +++ b/pkg/gen/ghcapi/configure_mymove.go @@ -4,6 +4,7 @@ package ghcapi import ( "crypto/tls" + "io" "net/http" "github.com/go-openapi/errors" @@ -64,6 +65,9 @@ func configureAPI(api *ghcoperations.MymoveAPI) http.Handler { api.BinProducer = runtime.ByteStreamProducer() api.JSONProducer = runtime.JSONProducer() + api.TextEventStreamProducer = runtime.ProducerFunc(func(w io.Writer, data interface{}) error { + return errors.NotImplemented("textEventStream producer has not yet been implemented") + }) // You may change here the memory limit for this multipart form parser. Below is the default (32 MB). 
// uploads.CreateUploadMaxParseMemory = 32 << 20 @@ -402,6 +406,11 @@ func configureAPI(api *ghcoperations.MymoveAPI) http.Handler { return middleware.NotImplemented("operation uploads.GetUpload has not yet been implemented") }) } + if api.UploadsGetUploadStatusHandler == nil { + api.UploadsGetUploadStatusHandler = uploads.GetUploadStatusHandlerFunc(func(params uploads.GetUploadStatusParams) middleware.Responder { + return middleware.NotImplemented("operation uploads.GetUploadStatus has not yet been implemented") + }) + } if api.CalendarIsDateWeekendHolidayHandler == nil { api.CalendarIsDateWeekendHolidayHandler = calendar.IsDateWeekendHolidayHandlerFunc(func(params calendar.IsDateWeekendHolidayParams) middleware.Responder { return middleware.NotImplemented("operation calendar.IsDateWeekendHoliday has not yet been implemented") diff --git a/pkg/gen/ghcapi/doc.go b/pkg/gen/ghcapi/doc.go index 24f788c8fb2..24ba756c211 100644 --- a/pkg/gen/ghcapi/doc.go +++ b/pkg/gen/ghcapi/doc.go @@ -21,6 +21,7 @@ // Produces: // - application/pdf // - application/json +// - text/event-stream // // swagger:meta package ghcapi diff --git a/pkg/gen/ghcapi/embedded_spec.go b/pkg/gen/ghcapi/embedded_spec.go index bb69f484f51..3d41db0769c 100644 --- a/pkg/gen/ghcapi/embedded_spec.go +++ b/pkg/gen/ghcapi/embedded_spec.go @@ -6690,6 +6690,58 @@ func init() { } } }, + "/uploads/{uploadID}/status": { + "get": { + "description": "Returns status of an upload based on antivirus run", + "produces": [ + "text/event-stream" + ], + "tags": [ + "uploads" + ], + "summary": "Returns status of an upload", + "operationId": "getUploadStatus", + "parameters": [ + { + "type": "string", + "format": "uuid", + "description": "UUID of the upload to return status of", + "name": "uploadID", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "the requested upload status", + "schema": { + "type": "string", + "enum": [ + "INFECTED", + "CLEAN", + "PROCESSING" + ], + "readOnly": true + } + }, + "400": { + "description": "invalid request", + "schema": { + "$ref": "#/definitions/InvalidRequestResponsePayload" + } + }, + "403": { + "description": "not authorized" + }, + "404": { + "description": "not found" + }, + "500": { + "description": "server error" + } + } + } + }, "/uploads/{uploadID}/update": { "patch": { "description": "Uploads represent a single digital file, such as a JPEG or PDF. 
The rotation is relevant to how it is displayed on the page.", @@ -24152,6 +24204,58 @@ func init() { } } }, + "/uploads/{uploadID}/status": { + "get": { + "description": "Returns status of an upload based on antivirus run", + "produces": [ + "text/event-stream" + ], + "tags": [ + "uploads" + ], + "summary": "Returns status of an upload", + "operationId": "getUploadStatus", + "parameters": [ + { + "type": "string", + "format": "uuid", + "description": "UUID of the upload to return status of", + "name": "uploadID", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "the requested upload status", + "schema": { + "type": "string", + "enum": [ + "INFECTED", + "CLEAN", + "PROCESSING" + ], + "readOnly": true + } + }, + "400": { + "description": "invalid request", + "schema": { + "$ref": "#/definitions/InvalidRequestResponsePayload" + } + }, + "403": { + "description": "not authorized" + }, + "404": { + "description": "not found" + }, + "500": { + "description": "server error" + } + } + } + }, "/uploads/{uploadID}/update": { "patch": { "description": "Uploads represent a single digital file, such as a JPEG or PDF. The rotation is relevant to how it is displayed on the page.", diff --git a/pkg/gen/ghcapi/ghcoperations/mymove_api.go b/pkg/gen/ghcapi/ghcoperations/mymove_api.go index c7668464a5d..ee86793406f 100644 --- a/pkg/gen/ghcapi/ghcoperations/mymove_api.go +++ b/pkg/gen/ghcapi/ghcoperations/mymove_api.go @@ -7,6 +7,7 @@ package ghcoperations import ( "fmt" + "io" "net/http" "strings" @@ -70,6 +71,9 @@ func NewMymoveAPI(spec *loads.Document) *MymoveAPI { BinProducer: runtime.ByteStreamProducer(), JSONProducer: runtime.JSONProducer(), + TextEventStreamProducer: runtime.ProducerFunc(func(w io.Writer, data interface{}) error { + return errors.NotImplemented("textEventStream producer has not yet been implemented") + }), OrderAcknowledgeExcessUnaccompaniedBaggageWeightRiskHandler: order.AcknowledgeExcessUnaccompaniedBaggageWeightRiskHandlerFunc(func(params order.AcknowledgeExcessUnaccompaniedBaggageWeightRiskParams) middleware.Responder { return middleware.NotImplemented("operation order.AcknowledgeExcessUnaccompaniedBaggageWeightRisk has not yet been implemented") @@ -269,6 +273,9 @@ func NewMymoveAPI(spec *loads.Document) *MymoveAPI { UploadsGetUploadHandler: uploads.GetUploadHandlerFunc(func(params uploads.GetUploadParams) middleware.Responder { return middleware.NotImplemented("operation uploads.GetUpload has not yet been implemented") }), + UploadsGetUploadStatusHandler: uploads.GetUploadStatusHandlerFunc(func(params uploads.GetUploadStatusParams) middleware.Responder { + return middleware.NotImplemented("operation uploads.GetUploadStatus has not yet been implemented") + }), CalendarIsDateWeekendHolidayHandler: calendar.IsDateWeekendHolidayHandlerFunc(func(params calendar.IsDateWeekendHolidayParams) middleware.Responder { return middleware.NotImplemented("operation calendar.IsDateWeekendHoliday has not yet been implemented") }), @@ -449,6 +456,9 @@ type MymoveAPI struct { // JSONProducer registers a producer for the following mime types: // - application/json JSONProducer runtime.Producer + // TextEventStreamProducer registers a producer for the following mime types: + // - text/event-stream + TextEventStreamProducer runtime.Producer // OrderAcknowledgeExcessUnaccompaniedBaggageWeightRiskHandler sets the operation handler for the acknowledge excess unaccompanied baggage weight risk operation OrderAcknowledgeExcessUnaccompaniedBaggageWeightRiskHandler 
order.AcknowledgeExcessUnaccompaniedBaggageWeightRiskHandler @@ -582,6 +592,8 @@ type MymoveAPI struct { TransportationOfficeGetTransportationOfficesOpenHandler transportation_office.GetTransportationOfficesOpenHandler // UploadsGetUploadHandler sets the operation handler for the get upload operation UploadsGetUploadHandler uploads.GetUploadHandler + // UploadsGetUploadStatusHandler sets the operation handler for the get upload status operation + UploadsGetUploadStatusHandler uploads.GetUploadStatusHandler // CalendarIsDateWeekendHolidayHandler sets the operation handler for the is date weekend holiday operation CalendarIsDateWeekendHolidayHandler calendar.IsDateWeekendHolidayHandler // MtoServiceItemListMTOServiceItemsHandler sets the operation handler for the list m t o service items operation @@ -754,6 +766,9 @@ func (o *MymoveAPI) Validate() error { if o.JSONProducer == nil { unregistered = append(unregistered, "JSONProducer") } + if o.TextEventStreamProducer == nil { + unregistered = append(unregistered, "TextEventStreamProducer") + } if o.OrderAcknowledgeExcessUnaccompaniedBaggageWeightRiskHandler == nil { unregistered = append(unregistered, "order.AcknowledgeExcessUnaccompaniedBaggageWeightRiskHandler") @@ -953,6 +968,9 @@ func (o *MymoveAPI) Validate() error { if o.UploadsGetUploadHandler == nil { unregistered = append(unregistered, "uploads.GetUploadHandler") } + if o.UploadsGetUploadStatusHandler == nil { + unregistered = append(unregistered, "uploads.GetUploadStatusHandler") + } if o.CalendarIsDateWeekendHolidayHandler == nil { unregistered = append(unregistered, "calendar.IsDateWeekendHolidayHandler") } @@ -1140,6 +1158,8 @@ func (o *MymoveAPI) ProducersFor(mediaTypes []string) map[string]runtime.Produce result["application/pdf"] = o.BinProducer case "application/json": result["application/json"] = o.JSONProducer + case "text/event-stream": + result["text/event-stream"] = o.TextEventStreamProducer } if p, ok := o.customProducers[mt]; ok { @@ -1447,6 +1467,10 @@ func (o *MymoveAPI) initHandlerCache() { if o.handlers["GET"] == nil { o.handlers["GET"] = make(map[string]http.Handler) } + o.handlers["GET"]["/uploads/{uploadID}/status"] = uploads.NewGetUploadStatus(o.context, o.UploadsGetUploadStatusHandler) + if o.handlers["GET"] == nil { + o.handlers["GET"] = make(map[string]http.Handler) + } o.handlers["GET"]["/calendar/{countryCode}/is-weekend-holiday/{date}"] = calendar.NewIsDateWeekendHoliday(o.context, o.CalendarIsDateWeekendHolidayHandler) if o.handlers["GET"] == nil { o.handlers["GET"] = make(map[string]http.Handler) diff --git a/pkg/gen/ghcapi/ghcoperations/uploads/get_upload_status.go b/pkg/gen/ghcapi/ghcoperations/uploads/get_upload_status.go new file mode 100644 index 00000000000..b893657d488 --- /dev/null +++ b/pkg/gen/ghcapi/ghcoperations/uploads/get_upload_status.go @@ -0,0 +1,58 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package uploads + +// This file was generated by the swagger tool. 
+// Editing this file might prove futile when you re-run the generate command + +import ( + "net/http" + + "github.com/go-openapi/runtime/middleware" +) + +// GetUploadStatusHandlerFunc turns a function with the right signature into a get upload status handler +type GetUploadStatusHandlerFunc func(GetUploadStatusParams) middleware.Responder + +// Handle executing the request and returning a response +func (fn GetUploadStatusHandlerFunc) Handle(params GetUploadStatusParams) middleware.Responder { + return fn(params) +} + +// GetUploadStatusHandler interface for that can handle valid get upload status params +type GetUploadStatusHandler interface { + Handle(GetUploadStatusParams) middleware.Responder +} + +// NewGetUploadStatus creates a new http.Handler for the get upload status operation +func NewGetUploadStatus(ctx *middleware.Context, handler GetUploadStatusHandler) *GetUploadStatus { + return &GetUploadStatus{Context: ctx, Handler: handler} +} + +/* + GetUploadStatus swagger:route GET /uploads/{uploadID}/status uploads getUploadStatus + +# Returns status of an upload + +Returns status of an upload based on antivirus run +*/ +type GetUploadStatus struct { + Context *middleware.Context + Handler GetUploadStatusHandler +} + +func (o *GetUploadStatus) ServeHTTP(rw http.ResponseWriter, r *http.Request) { + route, rCtx, _ := o.Context.RouteInfo(r) + if rCtx != nil { + *r = *rCtx + } + var Params = NewGetUploadStatusParams() + if err := o.Context.BindValidRequest(r, route, &Params); err != nil { // bind params + o.Context.Respond(rw, r, route.Produces, route, err) + return + } + + res := o.Handler.Handle(Params) // actually handle the request + o.Context.Respond(rw, r, route.Produces, route, res) + +} diff --git a/pkg/gen/ghcapi/ghcoperations/uploads/get_upload_status_parameters.go b/pkg/gen/ghcapi/ghcoperations/uploads/get_upload_status_parameters.go new file mode 100644 index 00000000000..fa1b3ef9329 --- /dev/null +++ b/pkg/gen/ghcapi/ghcoperations/uploads/get_upload_status_parameters.go @@ -0,0 +1,91 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package uploads + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "net/http" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime/middleware" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/validate" +) + +// NewGetUploadStatusParams creates a new GetUploadStatusParams object +// +// There are no default values defined in the spec. +func NewGetUploadStatusParams() GetUploadStatusParams { + + return GetUploadStatusParams{} +} + +// GetUploadStatusParams contains all the bound params for the get upload status operation +// typically these are obtained from a http.Request +// +// swagger:parameters getUploadStatus +type GetUploadStatusParams struct { + + // HTTP Request Object + HTTPRequest *http.Request `json:"-"` + + /*UUID of the upload to return status of + Required: true + In: path + */ + UploadID strfmt.UUID +} + +// BindRequest both binds and validates a request, it assumes that complex things implement a Validatable(strfmt.Registry) error interface +// for simple values it will use straight method calls. +// +// To ensure default values, the struct must have been initialized with NewGetUploadStatusParams() beforehand. 
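+//
+// Usage sketch (illustrative, not generated by go-swagger): BindRequest below binds uploadID from the
+// matched route and validates it as a UUID; a malformed ID is collected into a CompositeValidationError
+// and the request is rejected during binding, before the handler runs. A client can consume the
+// resulting endpoint as a plain SSE stream, for example (the host is a placeholder):
+//
+//	curl -N -H "Accept: text/event-stream" https://<host>/ghc/v1/uploads/<uploadID>/status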
+func (o *GetUploadStatusParams) BindRequest(r *http.Request, route *middleware.MatchedRoute) error { + var res []error + + o.HTTPRequest = r + + rUploadID, rhkUploadID, _ := route.Params.GetOK("uploadID") + if err := o.bindUploadID(rUploadID, rhkUploadID, route.Formats); err != nil { + res = append(res, err) + } + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +// bindUploadID binds and validates parameter UploadID from path. +func (o *GetUploadStatusParams) bindUploadID(rawData []string, hasKey bool, formats strfmt.Registry) error { + var raw string + if len(rawData) > 0 { + raw = rawData[len(rawData)-1] + } + + // Required: true + // Parameter is provided by construction from the route + + // Format: uuid + value, err := formats.Parse("uuid", raw) + if err != nil { + return errors.InvalidType("uploadID", "path", "strfmt.UUID", raw) + } + o.UploadID = *(value.(*strfmt.UUID)) + + if err := o.validateUploadID(formats); err != nil { + return err + } + + return nil +} + +// validateUploadID carries on validations for parameter UploadID +func (o *GetUploadStatusParams) validateUploadID(formats strfmt.Registry) error { + + if err := validate.FormatOf("uploadID", "path", "uuid", o.UploadID.String(), formats); err != nil { + return err + } + return nil +} diff --git a/pkg/gen/ghcapi/ghcoperations/uploads/get_upload_status_responses.go b/pkg/gen/ghcapi/ghcoperations/uploads/get_upload_status_responses.go new file mode 100644 index 00000000000..894980d6a2b --- /dev/null +++ b/pkg/gen/ghcapi/ghcoperations/uploads/get_upload_status_responses.go @@ -0,0 +1,177 @@ +// Code generated by go-swagger; DO NOT EDIT. + +package uploads + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "net/http" + + "github.com/go-openapi/runtime" + + "github.com/transcom/mymove/pkg/gen/ghcmessages" +) + +// GetUploadStatusOKCode is the HTTP code returned for type GetUploadStatusOK +const GetUploadStatusOKCode int = 200 + +/* +GetUploadStatusOK the requested upload status + +swagger:response getUploadStatusOK +*/ +type GetUploadStatusOK struct { + + /* + In: Body + */ + Payload string `json:"body,omitempty"` +} + +// NewGetUploadStatusOK creates GetUploadStatusOK with default headers values +func NewGetUploadStatusOK() *GetUploadStatusOK { + + return &GetUploadStatusOK{} +} + +// WithPayload adds the payload to the get upload status o k response +func (o *GetUploadStatusOK) WithPayload(payload string) *GetUploadStatusOK { + o.Payload = payload + return o +} + +// SetPayload sets the payload to the get upload status o k response +func (o *GetUploadStatusOK) SetPayload(payload string) { + o.Payload = payload +} + +// WriteResponse to the client +func (o *GetUploadStatusOK) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) { + + rw.WriteHeader(200) + payload := o.Payload + if err := producer.Produce(rw, payload); err != nil { + panic(err) // let the recovery middleware deal with this + } +} + +// GetUploadStatusBadRequestCode is the HTTP code returned for type GetUploadStatusBadRequest +const GetUploadStatusBadRequestCode int = 400 + +/* +GetUploadStatusBadRequest invalid request + +swagger:response getUploadStatusBadRequest +*/ +type GetUploadStatusBadRequest struct { + + /* + In: Body + */ + Payload *ghcmessages.InvalidRequestResponsePayload `json:"body,omitempty"` +} + +// NewGetUploadStatusBadRequest creates GetUploadStatusBadRequest with default headers values +func 
NewGetUploadStatusBadRequest() *GetUploadStatusBadRequest { + + return &GetUploadStatusBadRequest{} +} + +// WithPayload adds the payload to the get upload status bad request response +func (o *GetUploadStatusBadRequest) WithPayload(payload *ghcmessages.InvalidRequestResponsePayload) *GetUploadStatusBadRequest { + o.Payload = payload + return o +} + +// SetPayload sets the payload to the get upload status bad request response +func (o *GetUploadStatusBadRequest) SetPayload(payload *ghcmessages.InvalidRequestResponsePayload) { + o.Payload = payload +} + +// WriteResponse to the client +func (o *GetUploadStatusBadRequest) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) { + + rw.WriteHeader(400) + if o.Payload != nil { + payload := o.Payload + if err := producer.Produce(rw, payload); err != nil { + panic(err) // let the recovery middleware deal with this + } + } +} + +// GetUploadStatusForbiddenCode is the HTTP code returned for type GetUploadStatusForbidden +const GetUploadStatusForbiddenCode int = 403 + +/* +GetUploadStatusForbidden not authorized + +swagger:response getUploadStatusForbidden +*/ +type GetUploadStatusForbidden struct { +} + +// NewGetUploadStatusForbidden creates GetUploadStatusForbidden with default headers values +func NewGetUploadStatusForbidden() *GetUploadStatusForbidden { + + return &GetUploadStatusForbidden{} +} + +// WriteResponse to the client +func (o *GetUploadStatusForbidden) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) { + + rw.Header().Del(runtime.HeaderContentType) //Remove Content-Type on empty responses + + rw.WriteHeader(403) +} + +// GetUploadStatusNotFoundCode is the HTTP code returned for type GetUploadStatusNotFound +const GetUploadStatusNotFoundCode int = 404 + +/* +GetUploadStatusNotFound not found + +swagger:response getUploadStatusNotFound +*/ +type GetUploadStatusNotFound struct { +} + +// NewGetUploadStatusNotFound creates GetUploadStatusNotFound with default headers values +func NewGetUploadStatusNotFound() *GetUploadStatusNotFound { + + return &GetUploadStatusNotFound{} +} + +// WriteResponse to the client +func (o *GetUploadStatusNotFound) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) { + + rw.Header().Del(runtime.HeaderContentType) //Remove Content-Type on empty responses + + rw.WriteHeader(404) +} + +// GetUploadStatusInternalServerErrorCode is the HTTP code returned for type GetUploadStatusInternalServerError +const GetUploadStatusInternalServerErrorCode int = 500 + +/* +GetUploadStatusInternalServerError server error + +swagger:response getUploadStatusInternalServerError +*/ +type GetUploadStatusInternalServerError struct { +} + +// NewGetUploadStatusInternalServerError creates GetUploadStatusInternalServerError with default headers values +func NewGetUploadStatusInternalServerError() *GetUploadStatusInternalServerError { + + return &GetUploadStatusInternalServerError{} +} + +// WriteResponse to the client +func (o *GetUploadStatusInternalServerError) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) { + + rw.Header().Del(runtime.HeaderContentType) //Remove Content-Type on empty responses + + rw.WriteHeader(500) +} diff --git a/pkg/gen/ghcapi/ghcoperations/uploads/get_upload_status_urlbuilder.go b/pkg/gen/ghcapi/ghcoperations/uploads/get_upload_status_urlbuilder.go new file mode 100644 index 00000000000..edd3c2fd6f8 --- /dev/null +++ b/pkg/gen/ghcapi/ghcoperations/uploads/get_upload_status_urlbuilder.go @@ -0,0 +1,101 @@ +// Code generated by go-swagger; DO NOT EDIT. 
+ +package uploads + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the generate command + +import ( + "errors" + "net/url" + golangswaggerpaths "path" + "strings" + + "github.com/go-openapi/strfmt" +) + +// GetUploadStatusURL generates an URL for the get upload status operation +type GetUploadStatusURL struct { + UploadID strfmt.UUID + + _basePath string + // avoid unkeyed usage + _ struct{} +} + +// WithBasePath sets the base path for this url builder, only required when it's different from the +// base path specified in the swagger spec. +// When the value of the base path is an empty string +func (o *GetUploadStatusURL) WithBasePath(bp string) *GetUploadStatusURL { + o.SetBasePath(bp) + return o +} + +// SetBasePath sets the base path for this url builder, only required when it's different from the +// base path specified in the swagger spec. +// When the value of the base path is an empty string +func (o *GetUploadStatusURL) SetBasePath(bp string) { + o._basePath = bp +} + +// Build a url path and query string +func (o *GetUploadStatusURL) Build() (*url.URL, error) { + var _result url.URL + + var _path = "/uploads/{uploadID}/status" + + uploadID := o.UploadID.String() + if uploadID != "" { + _path = strings.Replace(_path, "{uploadID}", uploadID, -1) + } else { + return nil, errors.New("uploadId is required on GetUploadStatusURL") + } + + _basePath := o._basePath + if _basePath == "" { + _basePath = "/ghc/v1" + } + _result.Path = golangswaggerpaths.Join(_basePath, _path) + + return &_result, nil +} + +// Must is a helper function to panic when the url builder returns an error +func (o *GetUploadStatusURL) Must(u *url.URL, err error) *url.URL { + if err != nil { + panic(err) + } + if u == nil { + panic("url can't be nil") + } + return u +} + +// String returns the string representation of the path with query string +func (o *GetUploadStatusURL) String() string { + return o.Must(o.Build()).String() +} + +// BuildFull builds a full url with scheme, host, path and query string +func (o *GetUploadStatusURL) BuildFull(scheme, host string) (*url.URL, error) { + if scheme == "" { + return nil, errors.New("scheme is required for a full url on GetUploadStatusURL") + } + if host == "" { + return nil, errors.New("host is required for a full url on GetUploadStatusURL") + } + + base, err := o.Build() + if err != nil { + return nil, err + } + + base.Scheme = scheme + base.Host = host + return base, nil +} + +// StringFull returns the string representation of a complete url +func (o *GetUploadStatusURL) StringFull(scheme, host string) string { + return o.Must(o.BuildFull(scheme, host)).String() +} diff --git a/pkg/handlers/apitests.go b/pkg/handlers/apitests.go index a84a6627f2c..a540d37e1f3 100644 --- a/pkg/handlers/apitests.go +++ b/pkg/handlers/apitests.go @@ -9,6 +9,7 @@ import ( "path" "path/filepath" "runtime/debug" + "strings" "time" "github.com/go-openapi/runtime" @@ -148,6 +149,11 @@ func (suite *BaseHandlerTestSuite) TestNotificationSender() notifications.Notifi return suite.notificationSender } +// TestNotificationReceiver returns the stub notification receiver to use in the suite +func (suite *BaseHandlerTestSuite) TestNotificationReceiver() notifications.NotificationReceiver { + return notifications.NewStubNotificationReceiver() +} + // HasWebhookNotification checks that there's a record on the WebhookNotifications table for the object and trace IDs func (suite *BaseHandlerTestSuite) HasWebhookNotification(objectID uuid.UUID, traceID
uuid.UUID) { notification := &models.WebhookNotification{} @@ -277,8 +283,12 @@ func (suite *BaseHandlerTestSuite) Fixture(name string) *runtime.File { if err != nil { suite.T().Error(err) } + cdRouting := "" + if strings.Contains(cwd, "routing") { + cdRouting = ".." + } - fixturePath := path.Join(cwd, "..", "..", fixtureDir, name) + fixturePath := path.Join(cwd, "..", "..", cdRouting, fixtureDir, name) file, err := os.Open(filepath.Clean(fixturePath)) if err != nil { diff --git a/pkg/handlers/authentication/auth.go b/pkg/handlers/authentication/auth.go index a01f499de5e..8e59132c750 100644 --- a/pkg/handlers/authentication/auth.go +++ b/pkg/handlers/authentication/auth.go @@ -221,6 +221,7 @@ var allowedRoutes = map[string]bool{ "uploads.deleteUpload": true, "users.showLoggedInUser": true, "okta_profile.showOktaInfo": true, + "uploads.getUploadStatus": true, } // checkIfRouteIsAllowed checks to see if the route is one of the ones that should be allowed through without stricter diff --git a/pkg/handlers/config.go b/pkg/handlers/config.go index b4bb2026915..50d45ee1978 100644 --- a/pkg/handlers/config.go +++ b/pkg/handlers/config.go @@ -39,6 +39,7 @@ type HandlerConfig interface { ) http.Handler FileStorer() storage.FileStorer NotificationSender() notifications.NotificationSender + NotificationReceiver() notifications.NotificationReceiver HHGPlanner() route.Planner DTODPlanner() route.Planner CookieSecret() string @@ -66,6 +67,7 @@ type Config struct { dtodPlanner route.Planner storage storage.FileStorer notificationSender notifications.NotificationSender + notificationReceiver notifications.NotificationReceiver iwsPersonLookup iws.PersonLookup sendProductionInvoice bool senderToGex services.GexSender @@ -86,6 +88,7 @@ func NewHandlerConfig( dtodPlanner route.Planner, storage storage.FileStorer, notificationSender notifications.NotificationSender, + notificationReceiver notifications.NotificationReceiver, iwsPersonLookup iws.PersonLookup, sendProductionInvoice bool, senderToGex services.GexSender, @@ -103,6 +106,7 @@ func NewHandlerConfig( dtodPlanner: dtodPlanner, storage: storage, notificationSender: notificationSender, + notificationReceiver: notificationReceiver, iwsPersonLookup: iwsPersonLookup, sendProductionInvoice: sendProductionInvoice, senderToGex: senderToGex, @@ -247,6 +251,16 @@ func (c *Config) SetNotificationSender(sender notifications.NotificationSender) c.notificationSender = sender } +// NotificationReceiver returns the receiver to use in the current context +func (c *Config) NotificationReceiver() notifications.NotificationReceiver { + return c.notificationReceiver +} + +// SetNotificationReceiver is a simple setter for the AWS SQS notification receiver private field +func (c *Config) SetNotificationReceiver(receiver notifications.NotificationReceiver) { + c.notificationReceiver = receiver +} + // SetPlanner is a simple setter for the route.Planner private field func (c *Config) SetPlanner(planner route.Planner) { c.planner = planner diff --git a/pkg/handlers/config_test.go b/pkg/handlers/config_test.go index 26595daea29..85c9ccbff7c 100644 --- a/pkg/handlers/config_test.go +++ b/pkg/handlers/config_test.go @@ -30,7 +30,7 @@ func (suite *ConfigSuite) TestConfigHandler() { appCtx := suite.AppContextForTest() sessionManagers := auth.SetupSessionManagers(nil, false, time.Duration(180*time.Second), time.Duration(180*time.Second)) - handler := NewHandlerConfig(appCtx.DB(), nil, "", nil, nil, nil, nil, nil, false, nil, nil, false, ApplicationTestServername(), sessionManagers, nil) + handler :=
NewHandlerConfig(appCtx.DB(), nil, "", nil, nil, nil, nil, nil, nil, false, nil, nil, false, ApplicationTestServername(), sessionManagers, nil) req, err := http.NewRequest("GET", "/", nil) suite.NoError(err) myMethodCalled := false diff --git a/pkg/handlers/ghcapi/api.go b/pkg/handlers/ghcapi/api.go index 4119b8b3564..4402e196f67 100644 --- a/pkg/handlers/ghcapi/api.go +++ b/pkg/handlers/ghcapi/api.go @@ -4,6 +4,7 @@ import ( "log" "github.com/go-openapi/loads" + "github.com/go-openapi/runtime" "github.com/transcom/mymove/pkg/gen/ghcapi" ghcops "github.com/transcom/mymove/pkg/gen/ghcapi/ghcoperations" @@ -725,6 +726,8 @@ func NewGhcAPIHandler(handlerConfig handlers.HandlerConfig) *ghcops.MymoveAPI { ghcAPI.UploadsCreateUploadHandler = CreateUploadHandler{handlerConfig} ghcAPI.UploadsUpdateUploadHandler = UpdateUploadHandler{handlerConfig, upload.NewUploadInformationFetcher()} ghcAPI.UploadsDeleteUploadHandler = DeleteUploadHandler{handlerConfig, upload.NewUploadInformationFetcher()} + ghcAPI.UploadsGetUploadStatusHandler = GetUploadStatusHandler{handlerConfig, upload.NewUploadInformationFetcher()} + ghcAPI.TextEventStreamProducer = runtime.ByteStreamProducer() // GetUploadStatus produces Event Stream ghcAPI.CustomerSearchCustomersHandler = SearchCustomersHandler{ HandlerConfig: handlerConfig, diff --git a/pkg/handlers/ghcapi/documents.go b/pkg/handlers/ghcapi/documents.go index b150eb2a5d3..bdbd0ad05cf 100644 --- a/pkg/handlers/ghcapi/documents.go +++ b/pkg/handlers/ghcapi/documents.go @@ -53,7 +53,7 @@ func (h GetDocumentHandler) Handle(params documentop.GetDocumentParams) middlewa return handlers.ResponseForError(appCtx.Logger(), err), err } - document, err := models.FetchDocument(appCtx.DB(), appCtx.Session(), documentID, true) + document, err := models.FetchDocument(appCtx.DB(), appCtx.Session(), documentID) if err != nil { return handlers.ResponseForError(appCtx.Logger(), err), err } diff --git a/pkg/handlers/ghcapi/internal/payloads/model_to_payload.go b/pkg/handlers/ghcapi/internal/payloads/model_to_payload.go index 7e9e05e90fe..d384f326414 100644 --- a/pkg/handlers/ghcapi/internal/payloads/model_to_payload.go +++ b/pkg/handlers/ghcapi/internal/payloads/model_to_payload.go @@ -2075,10 +2075,10 @@ func Upload(storer storage.FileStorer, upload models.Upload, url string) *ghcmes } tags, err := storer.Tags(upload.StorageKey) - if err != nil || len(tags) == 0 { - uploadPayload.Status = "PROCESSING" + if err != nil { + uploadPayload.Status = string(models.AVStatusPROCESSING) } else { - uploadPayload.Status = tags["av-status"] + uploadPayload.Status = string(models.GetAVStatusFromTags(tags)) } return uploadPayload } @@ -2097,10 +2097,10 @@ func WeightTicketUpload(storer storage.FileStorer, upload models.Upload, url str IsWeightTicket: isWeightTicket, } tags, err := storer.Tags(upload.StorageKey) - if err != nil || len(tags) == 0 { - uploadPayload.Status = "PROCESSING" + if err != nil { + uploadPayload.Status = string(models.AVStatusPROCESSING) } else { - uploadPayload.Status = tags["av-status"] + uploadPayload.Status = string(models.GetAVStatusFromTags(tags)) } return uploadPayload } @@ -2153,10 +2153,10 @@ func PayloadForUploadModel( } tags, err := storer.Tags(upload.StorageKey) - if err != nil || len(tags) == 0 { - uploadPayload.Status = "PROCESSING" + if err != nil { + uploadPayload.Status = string(models.AVStatusPROCESSING) } else { - uploadPayload.Status = tags["av-status"] + uploadPayload.Status = string(models.GetAVStatusFromTags(tags)) } return uploadPayload } diff --git 
a/pkg/handlers/ghcapi/move.go b/pkg/handlers/ghcapi/move.go index aaf96dde91e..f4abb0b549a 100644 --- a/pkg/handlers/ghcapi/move.go +++ b/pkg/handlers/ghcapi/move.go @@ -429,10 +429,10 @@ func payloadForUploadModelFromAdditionalDocumentsUpload(storer storage.FileStore UpdatedAt: strfmt.DateTime(upload.UpdatedAt), } tags, err := storer.Tags(upload.StorageKey) - if err != nil || len(tags) == 0 { - uploadPayload.Status = "PROCESSING" + if err != nil { + uploadPayload.Status = string(models.AVStatusPROCESSING) } else { - uploadPayload.Status = tags["av-status"] + uploadPayload.Status = string(models.GetAVStatusFromTags(tags)) } return uploadPayload, nil } diff --git a/pkg/handlers/ghcapi/orders.go b/pkg/handlers/ghcapi/orders.go index 8a8ca3cafcf..5604477ffad 100644 --- a/pkg/handlers/ghcapi/orders.go +++ b/pkg/handlers/ghcapi/orders.go @@ -959,10 +959,10 @@ func payloadForUploadModelFromAmendedOrdersUpload(storer storage.FileStorer, upl UpdatedAt: strfmt.DateTime(upload.UpdatedAt), } tags, err := storer.Tags(upload.StorageKey) - if err != nil || len(tags) == 0 { - uploadPayload.Status = "PROCESSING" + if err != nil { + uploadPayload.Status = string(models.AVStatusPROCESSING) } else { - uploadPayload.Status = tags["av-status"] + uploadPayload.Status = string(models.GetAVStatusFromTags(tags)) } return uploadPayload, nil } diff --git a/pkg/handlers/ghcapi/uploads.go b/pkg/handlers/ghcapi/uploads.go index a74e5d48498..24708064e19 100644 --- a/pkg/handlers/ghcapi/uploads.go +++ b/pkg/handlers/ghcapi/uploads.go @@ -1,9 +1,16 @@ package ghcapi import ( + "context" + "fmt" + "net/http" + "time" + "github.com/go-openapi/runtime" "github.com/go-openapi/runtime/middleware" "github.com/gofrs/uuid" + "github.com/pkg/errors" "go.uber.org/zap" "github.com/transcom/mymove/pkg/appcontext" @@ -12,8 +19,10 @@ import ( "github.com/transcom/mymove/pkg/handlers" "github.com/transcom/mymove/pkg/handlers/ghcapi/internal/payloads" "github.com/transcom/mymove/pkg/models" + "github.com/transcom/mymove/pkg/notifications" "github.com/transcom/mymove/pkg/services" "github.com/transcom/mymove/pkg/services/upload" + "github.com/transcom/mymove/pkg/storage" uploaderpkg "github.com/transcom/mymove/pkg/uploader" ) @@ -50,7 +59,7 @@ func (h CreateUploadHandler) Handle(params uploadop.CreateUploadParams) middlewa } // Fetch document to ensure user has access to it - document, docErr := models.FetchDocument(appCtx.DB(), appCtx.Session(), documentID, true) + document, docErr := models.FetchDocument(appCtx.DB(), appCtx.Session(), documentID) if docErr != nil { return handlers.ResponseForError(appCtx.Logger(), docErr), rollbackErr } @@ -157,3 +166,189 @@ func (h DeleteUploadHandler) Handle(params uploadop.DeleteUploadParams) middlewa }) } + +// GetUploadStatusHandler returns the status of an upload +type GetUploadStatusHandler struct { + handlers.HandlerConfig + services.UploadInformationFetcher +} + +// CustomGetUploadStatusResponse is a custom responder that streams anti-virus status events for an upload +type CustomGetUploadStatusResponse struct { + params uploadop.GetUploadStatusParams + storageKey string + appCtx appcontext.AppContext + receiver notifications.NotificationReceiver + storer storage.FileStorer +} + +func (o *CustomGetUploadStatusResponse) writeEventStreamMessage(rw http.ResponseWriter, producer runtime.Producer, id int, event string, data string) { + resProcess := []byte(fmt.Sprintf("id: %d\nevent: %s\ndata: %s\n\n", id, event, data)) + if produceErr := producer.Produce(rw, resProcess); produceErr != nil { + o.appCtx.Logger().Error(produceErr.Error()) + } + if f, ok := rw.(http.Flusher); ok {
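+ // Flush after each event so buffered writers push the SSE frame to the client immediately rather than when the handler returns.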
f.Flush() + } +} + +func (o *CustomGetUploadStatusResponse) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) { + + // Check current tag before event-driven wait for anti-virus + tags, err := o.storer.Tags(o.storageKey) + var uploadStatus models.AVStatusType + if err != nil { + uploadStatus = models.AVStatusPROCESSING + } else { + uploadStatus = models.GetAVStatusFromTags(tags) + } + + // Limitation: once the status code header has been written (first response), we are not able to update the status for subsequent responses. + // Standard 200 OK used with common SSE paradigm + rw.WriteHeader(http.StatusOK) + if uploadStatus == models.AVStatusCLEAN || uploadStatus == models.AVStatusINFECTED { + o.writeEventStreamMessage(rw, producer, 0, "message", string(uploadStatus)) + o.writeEventStreamMessage(rw, producer, 1, "close", "Connection closed") + return // skip notification loop since object already tagged from anti-virus + } else { + o.writeEventStreamMessage(rw, producer, 0, "message", string(uploadStatus)) + } + + // Start waiting for tag updates + topicName, err := o.receiver.GetDefaultTopic() + if err != nil { + o.appCtx.Logger().Error(err.Error()) + } + + filterPolicy := fmt.Sprintf(`{ + "detail": { + "object": { + "key": [ + {"suffix": "%s"} + ] + } + } + }`, o.params.UploadID) + + notificationParams := notifications.NotificationQueueParams{ + SubscriptionTopicName: topicName, + NamePrefix: notifications.QueuePrefixObjectTagsAdded, + FilterPolicy: filterPolicy, + } + + queueUrl, err := o.receiver.CreateQueueWithSubscription(o.appCtx, notificationParams) + if err != nil { + o.appCtx.Logger().Error(err.Error()) + } + + idCounter := 1 + + // Overall 120-second window; cancelling this context breaks the receive loop below + totalReceiverContext, totalReceiverContextCancelFunc := context.WithTimeout(context.Background(), 120*time.Second) + defer func() { + idCounter++ + o.writeEventStreamMessage(rw, producer, idCounter, "close", "Connection closed") + totalReceiverContextCancelFunc() + }() + + // Cleanup if client closes connection + go func() { + <-o.params.HTTPRequest.Context().Done() + totalReceiverContextCancelFunc() + }() + + // Cleanup at end of work + go func() { + <-totalReceiverContext.Done() + _ = o.receiver.CloseoutQueue(o.appCtx, queueUrl) + }() + + for { + o.appCtx.Logger().Info("Receiving Messages...") + messages, errs := o.receiver.ReceiveMessages(o.appCtx, queueUrl, totalReceiverContext) + + if errors.Is(errs, context.Canceled) || errors.Is(errs, context.DeadlineExceeded) { + return + } + if errs != nil { + o.appCtx.Logger().Error(errs.Error()) + return + } + + if len(messages) != 0 { + errTransaction := o.appCtx.NewTransaction(func(txnAppCtx appcontext.AppContext) error { + + tags, err := o.storer.Tags(o.storageKey) + + if err != nil { + uploadStatus = models.AVStatusPROCESSING + } else { + uploadStatus = models.GetAVStatusFromTags(tags) + } + + o.writeEventStreamMessage(rw, producer, idCounter, "message", string(uploadStatus)) + + if uploadStatus == models.AVStatusCLEAN || uploadStatus == models.AVStatusINFECTED { + return errors.New("connection_closed") + } + + return err + }) + + if errTransaction != nil && errTransaction.Error() == "connection_closed" { + return + } + + if errTransaction != nil { + o.appCtx.Logger().Error(errTransaction.Error()) + return + } + } + idCounter++ + + select { + case <-totalReceiverContext.Done(): + return + default: + time.Sleep(1 * time.Second) // Throttle to avoid hammering the SDK with receive calls + continue + } + } +} + +// Handle
returns status of an upload +func (h GetUploadStatusHandler) Handle(params uploadop.GetUploadStatusParams) middleware.Responder { + return h.AuditableAppContextFromRequestWithErrors(params.HTTPRequest, + func(appCtx appcontext.AppContext) (middleware.Responder, error) { + + handleError := func(err error) (middleware.Responder, error) { + appCtx.Logger().Error("GetUploadStatusHandler error", zap.Error(err)) + switch errors.Cause(err) { + case models.ErrFetchForbidden: + return uploadop.NewGetUploadStatusForbidden(), err + case models.ErrFetchNotFound: + return uploadop.NewGetUploadStatusNotFound(), err + default: + return uploadop.NewGetUploadStatusInternalServerError(), err + } + } + + uploadId := params.UploadID.String() + uploadUUID, err := uuid.FromString(uploadId) + if err != nil { + return handleError(err) + } + + uploaded, err := models.FetchUserUploadFromUploadID(appCtx.DB(), appCtx.Session(), uploadUUID) + if err != nil { + return handleError(err) + } + + return &CustomGetUploadStatusResponse{ + params: params, + storageKey: uploaded.Upload.StorageKey, + appCtx: h.AppContextFromRequest(params.HTTPRequest), + receiver: h.NotificationReceiver(), + storer: h.FileStorer(), + }, nil + }) +} diff --git a/pkg/handlers/ghcapi/uploads_test.go b/pkg/handlers/ghcapi/uploads_test.go index 94830bdb5bf..0a22ea6b87a 100644 --- a/pkg/handlers/ghcapi/uploads_test.go +++ b/pkg/handlers/ghcapi/uploads_test.go @@ -4,13 +4,17 @@ import ( "net/http" "github.com/go-openapi/runtime/middleware" + "github.com/go-openapi/strfmt" "github.com/gofrs/uuid" "github.com/transcom/mymove/pkg/factory" uploadop "github.com/transcom/mymove/pkg/gen/ghcapi/ghcoperations/uploads" "github.com/transcom/mymove/pkg/handlers" "github.com/transcom/mymove/pkg/models" + "github.com/transcom/mymove/pkg/notifications" + "github.com/transcom/mymove/pkg/services/upload" storageTest "github.com/transcom/mymove/pkg/storage/test" + "github.com/transcom/mymove/pkg/uploader" ) const FixturePDF = "test.pdf" @@ -156,3 +160,127 @@ func (suite *HandlerSuite) TestCreateUploadsHandlerFailure() { t.Fatalf("Wrong number of uploads in database: expected %d, got %d", currentCount, count) } } + +func (suite *HandlerSuite) TestGetUploadStatusHandlerSuccess() { + fakeS3 := storageTest.NewFakeS3Storage(true) + localReceiver := notifications.StubNotificationReceiver{} + + orders := factory.BuildOrder(suite.DB(), nil, nil) + uploadUser1 := factory.BuildUserUpload(suite.DB(), []factory.Customization{ + { + Model: orders.UploadedOrders, + LinkOnly: true, + }, + { + Model: models.Upload{ + Filename: "FileName", + Bytes: int64(15), + ContentType: uploader.FileTypePDF, + }, + }, + }, nil) + + file := suite.Fixture(FixturePDF) + _, err := fakeS3.Store(uploadUser1.Upload.StorageKey, file.Data, "somehash", nil) + suite.NoError(err) + + params := uploadop.NewGetUploadStatusParams() + params.UploadID = strfmt.UUID(uploadUser1.Upload.ID.String()) + + req := &http.Request{} + req = suite.AuthenticateRequest(req, uploadUser1.Document.ServiceMember) + params.HTTPRequest = req + + handlerConfig := suite.HandlerConfig() + handlerConfig.SetFileStorer(fakeS3) + handlerConfig.SetNotificationReceiver(localReceiver) + uploadInformationFetcher := upload.NewUploadInformationFetcher() + handler := GetUploadStatusHandler{handlerConfig, uploadInformationFetcher} + + response := handler.Handle(params) + _, ok := response.(*CustomGetUploadStatusResponse) + suite.True(ok) + + queriedUpload := models.Upload{} + err = suite.DB().Find(&queriedUpload, uploadUser1.Upload.ID) + 
suite.NoError(err) +} + +func (suite *HandlerSuite) TestGetUploadStatusHandlerFailure() { + suite.Run("Error on no match for uploadId", func() { + orders := factory.BuildOrder(suite.DB(), factory.GetTraitActiveServiceMemberUser(), nil) + + uploadUUID := uuid.Must(uuid.NewV4()) + + params := uploadop.NewGetUploadStatusParams() + params.UploadID = strfmt.UUID(uploadUUID.String()) + + req := &http.Request{} + req = suite.AuthenticateRequest(req, orders.ServiceMember) + params.HTTPRequest = req + + fakeS3 := storageTest.NewFakeS3Storage(true) + localReceiver := notifications.StubNotificationReceiver{} + + handlerConfig := suite.HandlerConfig() + handlerConfig.SetFileStorer(fakeS3) + handlerConfig.SetNotificationReceiver(localReceiver) + uploadInformationFetcher := upload.NewUploadInformationFetcher() + handler := GetUploadStatusHandler{handlerConfig, uploadInformationFetcher} + + response := handler.Handle(params) + _, ok := response.(*uploadop.GetUploadStatusNotFound) + suite.True(ok) + + queriedUpload := models.Upload{} + err := suite.DB().Find(&queriedUpload, uploadUUID) + suite.Error(err) + }) + + suite.Run("Error when attempting access to another service member's upload", func() { + fakeS3 := storageTest.NewFakeS3Storage(true) + localReceiver := notifications.StubNotificationReceiver{} + + otherServiceMember := factory.BuildServiceMember(suite.DB(), nil, nil) + + orders := factory.BuildOrder(suite.DB(), nil, nil) + uploadUser1 := factory.BuildUserUpload(suite.DB(), []factory.Customization{ + { + Model: orders.UploadedOrders, + LinkOnly: true, + }, + { + Model: models.Upload{ + Filename: "FileName", + Bytes: int64(15), + ContentType: uploader.FileTypePDF, + }, + }, + }, nil) + + file := suite.Fixture(FixturePDF) + _, err := fakeS3.Store(uploadUser1.Upload.StorageKey, file.Data, "somehash", nil) + suite.NoError(err) + + params := uploadop.NewGetUploadStatusParams() + params.UploadID = strfmt.UUID(uploadUser1.Upload.ID.String()) + + req := &http.Request{} + req = suite.AuthenticateRequest(req, otherServiceMember) + params.HTTPRequest = req + + handlerConfig := suite.HandlerConfig() + handlerConfig.SetFileStorer(fakeS3) + handlerConfig.SetNotificationReceiver(localReceiver) + uploadInformationFetcher := upload.NewUploadInformationFetcher() + handler := GetUploadStatusHandler{handlerConfig, uploadInformationFetcher} + + response := handler.Handle(params) + _, ok := response.(*uploadop.GetUploadStatusForbidden) + suite.True(ok) + + queriedUpload := models.Upload{} + err = suite.DB().Find(&queriedUpload, uploadUser1.Upload.ID) + suite.NoError(err) + }) +} diff --git a/pkg/handlers/internalapi/documents.go b/pkg/handlers/internalapi/documents.go index 2c648661725..0562ee39200 100644 --- a/pkg/handlers/internalapi/documents.go +++ b/pkg/handlers/internalapi/documents.go @@ -73,7 +73,7 @@ func (h ShowDocumentHandler) Handle(params documentop.ShowDocumentParams) middle return handlers.ResponseForError(appCtx.Logger(), err), err } - document, err := models.FetchDocument(appCtx.DB(), appCtx.Session(), documentID, false) + document, err := models.FetchDocument(appCtx.DB(), appCtx.Session(), documentID) if err != nil { return handlers.ResponseForError(appCtx.Logger(), err), err } diff --git a/pkg/handlers/internalapi/internal/payloads/model_to_payload.go b/pkg/handlers/internalapi/internal/payloads/model_to_payload.go index 68e9cd5b576..26b25349e02 100644 --- a/pkg/handlers/internalapi/internal/payloads/model_to_payload.go +++ b/pkg/handlers/internalapi/internal/payloads/model_to_payload.go @@ -453,12 
+453,14 @@ func PayloadForUploadModel( CreatedAt: strfmt.DateTime(upload.CreatedAt), UpdatedAt: strfmt.DateTime(upload.UpdatedAt), } + tags, err := storer.Tags(upload.StorageKey) - if err != nil || len(tags) == 0 { - uploadPayload.Status = "PROCESSING" + if err != nil { + uploadPayload.Status = string(models.AVStatusPROCESSING) } else { - uploadPayload.Status = tags["av-status"] + uploadPayload.Status = string(models.GetAVStatusFromTags(tags)) } + return uploadPayload } diff --git a/pkg/handlers/internalapi/moves.go b/pkg/handlers/internalapi/moves.go index 891c990e15e..f431da62850 100644 --- a/pkg/handlers/internalapi/moves.go +++ b/pkg/handlers/internalapi/moves.go @@ -588,10 +588,10 @@ func payloadForUploadModelFromAdditionalDocumentsUpload(storer storage.FileStore UpdatedAt: strfmt.DateTime(upload.UpdatedAt), } tags, err := storer.Tags(upload.StorageKey) - if err != nil || len(tags) == 0 { - uploadPayload.Status = "PROCESSING" + if err != nil { + uploadPayload.Status = string(models.AVStatusPROCESSING) } else { - uploadPayload.Status = tags["av-status"] + uploadPayload.Status = string(models.GetAVStatusFromTags(tags)) } return uploadPayload, nil } diff --git a/pkg/handlers/internalapi/orders.go b/pkg/handlers/internalapi/orders.go index 0097e94b7e3..4aca3b2ae82 100644 --- a/pkg/handlers/internalapi/orders.go +++ b/pkg/handlers/internalapi/orders.go @@ -35,10 +35,10 @@ func payloadForUploadModelFromAmendedOrdersUpload(storer storage.FileStorer, upl UpdatedAt: strfmt.DateTime(upload.UpdatedAt), } tags, err := storer.Tags(upload.StorageKey) - if err != nil || len(tags) == 0 { - uploadPayload.Status = "PROCESSING" + if err != nil { + uploadPayload.Status = string(models.AVStatusPROCESSING) } else { - uploadPayload.Status = tags["av-status"] + uploadPayload.Status = string(models.GetAVStatusFromTags(tags)) } return uploadPayload, nil } diff --git a/pkg/handlers/internalapi/uploads.go b/pkg/handlers/internalapi/uploads.go index 4167d7ed2b8..4d248598ed6 100644 --- a/pkg/handlers/internalapi/uploads.go +++ b/pkg/handlers/internalapi/uploads.go @@ -70,7 +70,7 @@ func (h CreateUploadHandler) Handle(params uploadop.CreateUploadParams) middlewa } // Fetch document to ensure user has access to it - document, docErr := models.FetchDocument(appCtx.DB(), appCtx.Session(), documentID, true) + document, docErr := models.FetchDocument(appCtx.DB(), appCtx.Session(), documentID) if docErr != nil { return handlers.ResponseForError(appCtx.Logger(), docErr), rollbackErr } @@ -267,7 +267,7 @@ func (h CreatePPMUploadHandler) Handle(params ppmop.CreatePPMUploadParams) middl documentID := uuid.FromStringOrNil(params.DocumentID.String()) // Fetch document to ensure user has access to it - document, docErr := models.FetchDocument(appCtx.DB(), appCtx.Session(), documentID, true) + document, docErr := models.FetchDocument(appCtx.DB(), appCtx.Session(), documentID) if docErr != nil { docNotFoundErr := fmt.Errorf("documentId %q was not found for this user", documentID) return ppmop.NewCreatePPMUploadNotFound().WithPayload(payloads.ClientError(handlers.NotFoundMessage, docNotFoundErr.Error(), h.GetTraceIDFromRequest(params.HTTPRequest))), docNotFoundErr diff --git a/pkg/handlers/primeapi/mto_service_item_test.go b/pkg/handlers/primeapi/mto_service_item_test.go index 319b3223705..8f58d9a096b 100644 --- a/pkg/handlers/primeapi/mto_service_item_test.go +++ b/pkg/handlers/primeapi/mto_service_item_test.go @@ -1040,8 +1040,8 @@ func (suite *HandlerSuite) TestCreateMTOServiceItemOriginSITHandlerWithDOFSITWit }, }, nil) 
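// Note (illustrative comment): the corrected fixture dates below put the SIT entry date (Feb 27) before the SIT departure date (Feb 28), the ordering this DOFSIT scenario expects.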
factory.FetchReServiceByCode(suite.DB(), models.ReServiceCodeDOFSIT) - sitEntryDate := time.Date(2024, time.February, 28, 0, 0, 0, 0, time.UTC) - sitDepartureDate := time.Date(2024, time.February, 27, 0, 0, 0, 0, time.UTC) + sitEntryDate := time.Date(2024, time.February, 27, 0, 0, 0, 0, time.UTC) + sitDepartureDate := time.Date(2024, time.February, 28, 0, 0, 0, 0, time.UTC) sitPostalCode := "00000" // Original customer pickup address diff --git a/pkg/handlers/routing/base_routing_suite.go b/pkg/handlers/routing/base_routing_suite.go index 23e538792b7..77049e33664 100644 --- a/pkg/handlers/routing/base_routing_suite.go +++ b/pkg/handlers/routing/base_routing_suite.go @@ -85,6 +85,7 @@ func (suite *BaseRoutingSuite) RoutingConfig() *Config { handlerConfig := suite.BaseHandlerTestSuite.HandlerConfig() handlerConfig.SetAppNames(handlers.ApplicationTestServername()) handlerConfig.SetNotificationSender(suite.TestNotificationSender()) + handlerConfig.SetNotificationReceiver(suite.TestNotificationReceiver()) // Need this for any requests that will either retrieve or save files or their info. fakeS3 := storageTest.NewFakeS3Storage(true) diff --git a/pkg/handlers/routing/ghcapi_test/uploads_test.go b/pkg/handlers/routing/ghcapi_test/uploads_test.go new file mode 100644 index 00000000000..5eb27758d00 --- /dev/null +++ b/pkg/handlers/routing/ghcapi_test/uploads_test.go @@ -0,0 +1,85 @@ +package ghcapi_test + +import ( + "net/http" + "net/http/httptest" + + "github.com/transcom/mymove/pkg/factory" + "github.com/transcom/mymove/pkg/models" + "github.com/transcom/mymove/pkg/models/roles" + storageTest "github.com/transcom/mymove/pkg/storage/test" + "github.com/transcom/mymove/pkg/uploader" +) + +func (suite *GhcAPISuite) TestUploads() { + + suite.Run("Received status for upload, read tag without event queue", func() { + orders := factory.BuildOrder(suite.DB(), factory.GetTraitActiveServiceMemberUser(), nil) + uploadUser1 := factory.BuildUserUpload(suite.DB(), []factory.Customization{ + { + Model: orders.UploadedOrders, + LinkOnly: true, + }, + { + Model: models.Upload{ + Filename: "FileName", + Bytes: int64(15), + ContentType: uploader.FileTypePDF, + }, + }, + }, nil) + file := suite.Fixture("test.pdf") + _, err := suite.HandlerConfig().FileStorer().Store(uploadUser1.Upload.StorageKey, file.Data, "somehash", nil) + suite.NoError(err) + + officeUser := factory.BuildOfficeUserWithRoles(suite.DB(), factory.GetTraitActiveOfficeUser(), + []roles.RoleType{roles.RoleTypeTOO}) + req := suite.NewAuthenticatedOfficeRequest("GET", "/ghc/v1/uploads/"+uploadUser1.Upload.ID.String()+"/status", nil, officeUser) + rr := httptest.NewRecorder() + + suite.SetupSiteHandler().ServeHTTP(rr, req) + + suite.Equal(http.StatusOK, rr.Code) + suite.Equal("text/event-stream", rr.Header().Get("content-type")) + suite.Equal("id: 0\nevent: message\ndata: CLEAN\n\nid: 1\nevent: close\ndata: Connection closed\n\n", rr.Body.String()) + }) + + suite.Run("Received statuses for upload, receiving multiple statuses with event queue", func() { + orders := factory.BuildOrder(suite.DB(), factory.GetTraitActiveServiceMemberUser(), nil) + uploadUser1 := factory.BuildUserUpload(suite.DB(), []factory.Customization{ + { + Model: orders.UploadedOrders, + LinkOnly: true, + }, + { + Model: models.Upload{ + Filename: "FileName", + Bytes: int64(15), + ContentType: uploader.FileTypePDF, + }, + }, + }, nil) + file := suite.Fixture("test.pdf") + _, err := suite.HandlerConfig().FileStorer().Store(uploadUser1.Upload.StorageKey, file.Data, "somehash", nil) + 
suite.NoError(err) + + officeUser := factory.BuildOfficeUserWithRoles(suite.DB(), factory.GetTraitActiveOfficeUser(), + []roles.RoleType{roles.RoleTypeTOO}) + req := suite.NewAuthenticatedOfficeRequest("GET", "/ghc/v1/uploads/"+uploadUser1.Upload.ID.String()+"/status", nil, officeUser) + rr := httptest.NewRecorder() + + fakeS3, ok := suite.HandlerConfig().FileStorer().(*storageTest.FakeS3Storage) + suite.True(ok) + suite.NotNil(fakeS3, "FileStorer should be fakeS3") + + fakeS3.EmptyTags = true + suite.SetupSiteHandler().ServeHTTP(rr, req) + + suite.Equal(http.StatusOK, rr.Code) + suite.Equal("text/event-stream", rr.Header().Get("content-type")) + + suite.Contains(rr.Body.String(), "PROCESSING") + suite.Contains(rr.Body.String(), "CLEAN") + suite.Contains(rr.Body.String(), "Connection closed") + }) +} diff --git a/pkg/models/address.go b/pkg/models/address.go index 29a6bedbcbb..b4ed2723750 100644 --- a/pkg/models/address.go +++ b/pkg/models/address.go @@ -9,6 +9,7 @@ import ( "github.com/gobuffalo/validate/v3" "github.com/gobuffalo/validate/v3/validators" "github.com/gofrs/uuid" + "github.com/pkg/errors" "go.uber.org/zap" "go.uber.org/zap/zapcore" @@ -146,6 +147,13 @@ func (a *Address) LineDisplayFormat() string { return fmt.Sprintf("%s%s%s, %s, %s %s", a.StreetAddress1, optionalStreetAddress2, optionalStreetAddress3, a.City, a.State, a.PostalCode) } +func (a *Address) IsAddressAlaska() (bool, error) { + if a == nil { + return false, errors.New("address is nil") + } + return a.State == "AK", nil +} + // NotImplementedCountryCode is the default for unimplemented country code lookup type NotImplementedCountryCode struct { message string diff --git a/pkg/models/address_test.go b/pkg/models/address_test.go index 9dfe5a7fa1c..c7ef3c1053b 100644 --- a/pkg/models/address_test.go +++ b/pkg/models/address_test.go @@ -385,3 +385,34 @@ func (suite *ModelSuite) Test_FetchDutyLocationGblocForAK() { suite.Equal(string(*gbloc), "MAPK") }) } + +func (suite *ModelSuite) TestIsAddressAlaska() { + var address *m.Address + bool1, err := address.IsAddressAlaska() + suite.Error(err) + suite.Equal("address is nil", err.Error()) + suite.Equal(false, bool1) + + address = &m.Address{ + StreetAddress1: "street 1", + StreetAddress2: m.StringPointer("street 2"), + StreetAddress3: m.StringPointer("street 3"), + City: "city", + PostalCode: "90210", + County: m.StringPointer("County"), + } + + bool2, err := address.IsAddressAlaska() + suite.NoError(err) + suite.Equal(m.BoolPointer(false), &bool2) + + address.State = "MT" + bool3, err := address.IsAddressAlaska() + suite.NoError(err) + suite.Equal(m.BoolPointer(false), &bool3) + + address.State = "AK" + bool4, err := address.IsAddressAlaska() + suite.NoError(err) + suite.Equal(m.BoolPointer(true), &bool4) +} diff --git a/pkg/models/document.go b/pkg/models/document.go index 6392434a6ef..d47e2544105 100644 --- a/pkg/models/document.go +++ b/pkg/models/document.go @@ -1,6 +1,7 @@ package models import ( + "fmt" "time" "github.com/gobuffalo/pop/v6" @@ -40,28 +41,66 @@ func (d *Document) Validate(_ *pop.Connection) (*validate.Errors, error) { } // FetchDocument returns a document if the user has access to that document -func FetchDocument(db *pop.Connection, session *auth.Session, id uuid.UUID, includeDeletedDocs bool) (Document, error) { - return fetchDocumentWithAccessibilityCheck(db, session, id, includeDeletedDocs, true) +func FetchDocument(db *pop.Connection, session *auth.Session, id uuid.UUID) (Document, error) { + return fetchDocumentWithAccessibilityCheck(db, session, 
id, true) } // FetchDocumentWithNoRestrictions returns a document regardless of whether the user has access to it -func FetchDocumentWithNoRestrictions(db *pop.Connection, session *auth.Session, id uuid.UUID, includeDeletedDocs bool) (Document, error) { - return fetchDocumentWithAccessibilityCheck(db, session, id, includeDeletedDocs, false) +func FetchDocumentWithNoRestrictions(db *pop.Connection, session *auth.Session, id uuid.UUID) (Document, error) { + return fetchDocumentWithAccessibilityCheck(db, session, id, false) } // fetchDocumentWithAccessibilityCheck returns a document, optionally checking that the user has access to it -func fetchDocumentWithAccessibilityCheck(db *pop.Connection, session *auth.Session, id uuid.UUID, includeDeletedDocs bool, checkUserAccessiability bool) (Document, error) { +func fetchDocumentWithAccessibilityCheck(db *pop.Connection, session *auth.Session, id uuid.UUID, checkUserAccessiability bool) (Document, error) { var document Document + var uploads []Upload query := db.Q() + // Give the cursors explicit names to be used when they are opened in the database function. + // Doing so lets us reference the specific cursor we want by its defined name rather than an auto-generated one, + // which causes syntax errors when used in the FETCH ALL IN query. + documentCursor := "documentcursor" + userUploadCursor := "useruploadcursor" + uploadCursor := "uploadcursor" - if !includeDeletedDocs { - query = query.Where("documents.deleted_at is null and u.deleted_at is null") + documentsQuery := `SELECT fetch_documents(?, ?, ?, ?);` + + err := query.RawQuery(documentsQuery, documentCursor, userUploadCursor, uploadCursor, id).Exec() + + if err != nil { + if errors.Cause(err).Error() == RecordNotFoundErrorString { + return Document{}, ErrFetchNotFound + } + // Otherwise, it's an unexpected err so we return that. + return Document{}, err + } + + // Since we know the name of the cursor we can fetch the specific one we are interested in + // using FETCH ALL IN and populate the appropriate model + fetchDocument := `FETCH ALL IN ` + documentCursor + `;` + fetchUserUploads := `FETCH ALL IN ` + userUploadCursor + `;` + fetchUploads := `FETCH ALL IN ` + uploadCursor + `;` + + err = query.RawQuery(fetchDocument).First(&document) + + if err != nil { + if errors.Cause(err).Error() == RecordNotFoundErrorString { + return Document{}, ErrFetchNotFound + } + // Otherwise, it's an unexpected err so we return that. + return Document{}, err + } + + err = query.RawQuery(fetchUserUploads).All(&document.UserUploads) + + if err != nil { + if errors.Cause(err).Error() == RecordNotFoundErrorString { + return Document{}, ErrFetchNotFound + } + // Otherwise, it's an unexpected err so we return that. + return Document{}, err } - err := query.Eager("UserUploads.Upload"). - LeftJoin("user_uploads as uu", "documents.id = uu.document_id"). - LeftJoin("uploads as u", "uu.upload_id = u.id"). - Find(&document, id) + err = query.RawQuery(fetchUploads).All(&uploads) if err != nil { if errors.Cause(err).Error() == RecordNotFoundErrorString { @@ -71,10 +110,37 @@ func fetchDocumentWithAccessibilityCheck(db *pop.Connection, session *auth.Sessi return Document{}, err } - // encountered issues trying to filter userUploads using pop. - // going with the option to filter userUploads after the query.
- if !includeDeletedDocs { - document.UserUploads = document.UserUploads.FilterDeleted() + // We have an array of UserUploads inside the Document model; to populate each Upload model we need to loop and apply + // the resulting uploads into the appropriate UserUpload.Upload model by matching the upload ids + for i := 0; i < len(document.UserUploads); i++ { + for j := 0; j < len(uploads); j++ { + if document.UserUploads[i].UploadID == uploads[j].ID { + document.UserUploads[i].Upload = uploads[j] + } + } + } + + // We close all the cursors we opened during the fetch_documents call + closeDocCursor := `CLOSE ` + documentCursor + `;` + closeUserCursor := `CLOSE ` + userUploadCursor + `;` + closeUploadCursor := `CLOSE ` + uploadCursor + `;` + + closeErr := query.RawQuery(closeDocCursor).Exec() + + if closeErr != nil { + return Document{}, fmt.Errorf("error closing documents cursor: %w", closeErr) + } + + closeErr = query.RawQuery(closeUserCursor).Exec() + + if closeErr != nil { + return Document{}, fmt.Errorf("error closing user uploads cursor: %w", closeErr) + } + + closeErr = query.RawQuery(closeUploadCursor).Exec() + + if closeErr != nil { + return Document{}, fmt.Errorf("error closing uploads cursor: %w", closeErr) } if checkUserAccessiability { diff --git a/pkg/models/document_test.go b/pkg/models/document_test.go index 19e4e21b8c2..d013e4ab802 100644 --- a/pkg/models/document_test.go +++ b/pkg/models/document_test.go @@ -64,7 +64,7 @@ func (suite *ModelSuite) TestFetchDocument() { t.Errorf("did not expect validation errors: %v", verrs) } - doc, _ := models.FetchDocument(suite.DB(), &session, document.ID, false) + doc, _ := models.FetchDocument(suite.DB(), &session, document.ID) suite.Equal(doc.ID, document.ID) suite.Equal(0, len(doc.UserUploads)) } @@ -103,16 +103,9 @@ func (suite *ModelSuite) TestFetchDeletedDocument() { t.Errorf("did not expect validation errors: %v", verrs) } - doc, _ := models.FetchDocument(suite.DB(), &session, document.ID, false) + doc, _ := models.FetchDocument(suite.DB(), &session, document.ID) - // fetches a nil document + // FetchDocument should not return the document since it was deleted suite.Equal(doc.ID, uuid.Nil) suite.Equal(doc.ServiceMemberID, uuid.Nil) - - doc2, _ := models.FetchDocument(suite.DB(), &session, document.ID, true) - - // fetches a nil document - suite.Equal(doc2.ID, document.ID) - suite.Equal(doc2.ServiceMemberID, serviceMember.ID) - suite.Equal(1, len(doc2.UserUploads)) } diff --git a/pkg/models/upload.go b/pkg/models/upload.go index d6afc2d0d4a..c03c4ec2bd2 100644 --- a/pkg/models/upload.go +++ b/pkg/models/upload.go @@ -13,6 +13,26 @@ import ( "github.com/transcom/mymove/pkg/db/utilities" ) +// Used tangentially in association with an Upload to provide the status of an anti-virus scan. +// AVStatusType represents the type of the anti-virus status, whether it is still processing, clean or infected +type AVStatusType string + +const ( + // AVStatusPROCESSING string PROCESSING + AVStatusPROCESSING AVStatusType = "PROCESSING" + // AVStatusCLEAN string CLEAN + AVStatusCLEAN AVStatusType = "CLEAN" + // AVStatusINFECTED string INFECTED + AVStatusINFECTED AVStatusType = "INFECTED" +) + +// GetAVStatusFromTags returns the anti-virus status recorded in the storage tags, defaulting to PROCESSING when no av-status tag is present yet +func GetAVStatusFromTags(tags map[string]string) AVStatusType { + if status, exists := tags["av-status"]; exists { + return AVStatusType(status) + } + return AVStatusPROCESSING +} + // UploadType represents the type of upload this is, whether is it uploaded for a User or for the Prime type UploadType string diff --git a/pkg/models/user_upload.go
b/pkg/models/user_upload.go index 49ef6bf845a..e3826d9aacb 100644 --- a/pkg/models/user_upload.go +++ b/pkg/models/user_upload.go @@ -102,7 +102,7 @@ func FetchUserUpload(db *pop.Connection, session *auth.Session, id uuid.UUID) (U // If there's a document, check permissions. Otherwise user must // have been the uploader if userUpload.DocumentID != nil { - _, docErr := FetchDocument(db, session, *userUpload.DocumentID, false) + _, docErr := FetchDocument(db, session, *userUpload.DocumentID) if docErr != nil { return UserUpload{}, docErr } @@ -129,7 +129,7 @@ func FetchUserUploadFromUploadID(db *pop.Connection, session *auth.Session, uplo // If there's a document, check permissions. Otherwise user must // have been the uploader if userUpload.DocumentID != nil { - _, docErr := FetchDocument(db, session, *userUpload.DocumentID, false) + _, docErr := FetchDocument(db, session, *userUpload.DocumentID) if docErr != nil { return UserUpload{}, docErr } diff --git a/pkg/notifications/notification_receiver.go b/pkg/notifications/notification_receiver.go new file mode 100644 index 00000000000..6dfab1b5d74 --- /dev/null +++ b/pkg/notifications/notification_receiver.go @@ -0,0 +1,334 @@ +package notifications + +import ( + "context" + "errors" + "fmt" + "strings" + + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/config" + "github.com/aws/aws-sdk-go-v2/service/sns" + "github.com/aws/aws-sdk-go-v2/service/sqs" + "github.com/gofrs/uuid" + "go.uber.org/zap" + + "github.com/transcom/mymove/pkg/appcontext" + "github.com/transcom/mymove/pkg/cli" +) + +// NotificationQueueParams stores the params for queue creation +type NotificationQueueParams struct { + SubscriptionTopicName string + NamePrefix QueuePrefixType + FilterPolicy string +} + +// NotificationReceiver is an interface for receiving notifications +type NotificationReceiver interface { + CreateQueueWithSubscription(appCtx appcontext.AppContext, params NotificationQueueParams) (string, error) + ReceiveMessages(appCtx appcontext.AppContext, queueUrl string, timerContext context.Context) ([]ReceivedMessage, error) + CloseoutQueue(appCtx appcontext.AppContext, queueUrl string) error + GetDefaultTopic() (string, error) +} + +// NotificationReceiverContext provides context to a notification receiver.
Maps use queueUrl for key +type NotificationReceiverContext struct { + viper ViperType + snsService SnsClient + sqsService SqsClient + awsRegion string + awsAccountId string + queueSubscriptionMap map[string]string + receiverCancelMap map[string]context.CancelFunc +} + +// QueuePrefixType represents a prefix identifier given to a name of dynamic notification queues +type QueuePrefixType string + +const ( + QueuePrefixObjectTagsAdded QueuePrefixType = "ObjectTagsAdded" +) + +//go:generate mockery --name SnsClient --output ./receiverMocks +type SnsClient interface { + Subscribe(ctx context.Context, params *sns.SubscribeInput, optFns ...func(*sns.Options)) (*sns.SubscribeOutput, error) + Unsubscribe(ctx context.Context, params *sns.UnsubscribeInput, optFns ...func(*sns.Options)) (*sns.UnsubscribeOutput, error) + ListSubscriptionsByTopic(context.Context, *sns.ListSubscriptionsByTopicInput, ...func(*sns.Options)) (*sns.ListSubscriptionsByTopicOutput, error) +} + +//go:generate mockery --name SqsClient --output ./receiverMocks +type SqsClient interface { + CreateQueue(ctx context.Context, params *sqs.CreateQueueInput, optFns ...func(*sqs.Options)) (*sqs.CreateQueueOutput, error) + ReceiveMessage(ctx context.Context, params *sqs.ReceiveMessageInput, optFns ...func(*sqs.Options)) (*sqs.ReceiveMessageOutput, error) + DeleteMessage(ctx context.Context, params *sqs.DeleteMessageInput, optFns ...func(*sqs.Options)) (*sqs.DeleteMessageOutput, error) + DeleteQueue(ctx context.Context, params *sqs.DeleteQueueInput, optFns ...func(*sqs.Options)) (*sqs.DeleteQueueOutput, error) + ListQueues(ctx context.Context, params *sqs.ListQueuesInput, optFns ...func(*sqs.Options)) (*sqs.ListQueuesOutput, error) +} + +//go:generate mockery --name ViperType --output ./receiverMocks +type ViperType interface { + GetString(string) string + SetEnvKeyReplacer(*strings.Replacer) +} + +// ReceivedMessage standardizes the format of the received message +type ReceivedMessage struct { + MessageId string + Body *string +} + +// NewNotificationReceiver returns a new NotificationReceiverContext +func NewNotificationReceiver(v ViperType, snsService SnsClient, sqsService SqsClient, awsRegion string, awsAccountId string) NotificationReceiverContext { + return NotificationReceiverContext{ + viper: v, + snsService: snsService, + sqsService: sqsService, + awsRegion: awsRegion, + awsAccountId: awsAccountId, + queueSubscriptionMap: make(map[string]string), + receiverCancelMap: make(map[string]context.CancelFunc), + } +} + +// CreateQueueWithSubscription first creates a new queue, then subscribes an AWS topic to it +func (n NotificationReceiverContext) CreateQueueWithSubscription(appCtx appcontext.AppContext, params NotificationQueueParams) (string, error) { + + queueUUID := uuid.Must(uuid.NewV4()) + + queueName := fmt.Sprintf("%s_%s", params.NamePrefix, queueUUID) + queueArn := n.constructArn("sqs", queueName) + topicArn := n.constructArn("sns", params.SubscriptionTopicName) + + accessPolicy := fmt.Sprintf(`{ + "Version": "2012-10-17", + "Statement": [{ + "Sid": "AllowSNSPublish", + "Effect": "Allow", + "Principal": { + "Service": "sns.amazonaws.com" + }, + "Action": ["sqs:SendMessage"], + "Resource": "%s", + "Condition": { + "ArnEquals": { + "aws:SourceArn": "%s" + } + } + }, { + "Sid": "DenyNonSSLAccess", + "Effect": "Deny", + "Principal": "*", + "Action": "sqs:*", + "Resource": "%s", + "Condition": { + "Bool": { + "aws:SecureTransport": "false" + } + } + }] + }`, queueArn, topicArn, queueArn) + + input := &sqs.CreateQueueInput{ + 
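+ // This queue is short-lived and per-subscription: messages expire after 120 seconds, and the access policy above only lets the subscribed SNS topic publish to it, over SSL only.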
QueueName: &queueName, + Attributes: map[string]string{ + "MessageRetentionPeriod": "120", + "Policy": accessPolicy, + }, + } + + result, err := n.sqsService.CreateQueue(context.Background(), input) + if err != nil { + appCtx.Logger().Error("Failed to create SQS queue", zap.Error(err)) + return "", err + } + + subscribeInput := &sns.SubscribeInput{ + TopicArn: &topicArn, + Protocol: aws.String("sqs"), + Endpoint: &queueArn, + Attributes: map[string]string{ + "FilterPolicy": params.FilterPolicy, + "FilterPolicyScope": "MessageBody", + }, + } + subscribeOutput, err := n.snsService.Subscribe(context.Background(), subscribeInput) + if err != nil { + appCtx.Logger().Error("Failed to create subscription", zap.Error(err)) + return "", err + } + + n.queueSubscriptionMap[*result.QueueUrl] = *subscribeOutput.SubscriptionArn + + return *result.QueueUrl, nil +} + +// ReceiveMessages polls given queue continuously for messages for up to 20 seconds +func (n NotificationReceiverContext) ReceiveMessages(appCtx appcontext.AppContext, queueUrl string, timerContext context.Context) ([]ReceivedMessage, error) { + recCtx, cancelRecCtx := context.WithCancel(timerContext) + defer cancelRecCtx() + n.receiverCancelMap[queueUrl] = cancelRecCtx + + result, err := n.sqsService.ReceiveMessage(recCtx, &sqs.ReceiveMessageInput{ + QueueUrl: &queueUrl, + MaxNumberOfMessages: 1, + WaitTimeSeconds: 20, + }) + if errors.Is(recCtx.Err(), context.Canceled) || errors.Is(recCtx.Err(), context.DeadlineExceeded) { + return nil, recCtx.Err() + } + + if err != nil { + appCtx.Logger().Info("Couldn't get messages from queue", zap.Error(err)) + return nil, err + } + + receivedMessages := make([]ReceivedMessage, len(result.Messages)) + for index, value := range result.Messages { + receivedMessages[index] = ReceivedMessage{ + MessageId: *value.MessageId, + Body: value.Body, + } + + appCtx.Logger().Info("Message received.", zap.String("messageId", *value.MessageId)) + + _, err := n.sqsService.DeleteMessage(recCtx, &sqs.DeleteMessageInput{ + QueueUrl: &queueUrl, + ReceiptHandle: value.ReceiptHandle, + }) + if err != nil { + appCtx.Logger().Info("Couldn't delete message from queue", zap.Error(err)) + } + } + + return receivedMessages, recCtx.Err() +}
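Since ReceiveMessages long-polls for at most 20 seconds per call, a consumer would drive it in a loop until the timer context expires or the queue is closed out. A hypothetical caller-side sketch (not part of this change; the helper name, timeout, and log message are assumptions, and the usual context/errors/time/zap imports are implied):

```go
// pollUntilDone drives the 20-second long-poll in a loop. It assumes the
// notifications package types defined above; the loop exits cleanly when the
// timer context expires or CloseoutQueue cancels the in-flight receive.
func pollUntilDone(appCtx appcontext.AppContext, receiver NotificationReceiver, queueUrl string) error {
	timerContext, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
	defer cancel()

	for {
		messages, err := receiver.ReceiveMessages(appCtx, queueUrl, timerContext)
		if errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded) {
			return nil // timer ran out or the queue was closed out
		}
		if err != nil {
			return err
		}
		for _, message := range messages {
			appCtx.Logger().Info("handling notification", zap.String("messageId", message.MessageId))
		}
	}
}
```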
+// CloseoutQueue stops receiving messages and cleans up the queue and its subscriptions +func (n NotificationReceiverContext) CloseoutQueue(appCtx appcontext.AppContext, queueUrl string) error { + appCtx.Logger().Info("Closing out queue: ", zap.String("queueUrl", queueUrl)) + + if cancelFunc, exists := n.receiverCancelMap[queueUrl]; exists { + cancelFunc() + delete(n.receiverCancelMap, queueUrl) + } + + if subscriptionArn, exists := n.queueSubscriptionMap[queueUrl]; exists { + _, err := n.snsService.Unsubscribe(context.Background(), &sns.UnsubscribeInput{ + SubscriptionArn: &subscriptionArn, + }) + if err != nil { + return err + } + delete(n.queueSubscriptionMap, queueUrl) + } + + _, err := n.sqsService.DeleteQueue(context.Background(), &sqs.DeleteQueueInput{ + QueueUrl: &queueUrl, + }) + + return err +} + +// GetDefaultTopic returns the topic value set within the environment +func (n NotificationReceiverContext) GetDefaultTopic() (string, error) { + topicName := n.viper.GetString(cli.SNSTagsUpdatedTopicFlag) + receiverBackend := n.viper.GetString(cli.ReceiverBackendFlag) + if topicName == "" && receiverBackend == "sns_sqs" { + return "", errors.New("sns_tags_updated_topic key not available") + } + return topicName, nil +} + +// InitReceiver initializes the receiver backend; only call this once +func InitReceiver(v ViperType, logger *zap.Logger, wipeAllNotificationQueues bool) (NotificationReceiver, error) { + + if v.GetString(cli.ReceiverBackendFlag) == "sns_sqs" { + // Setup notification receiver service with SNS & SQS backend dependencies + awsSNSRegion := v.GetString(cli.SNSRegionFlag) + awsAccountId := v.GetString(cli.SNSAccountId) + + logger.Info("Using aws sns_sqs receiver backend", zap.String("region", awsSNSRegion)) + + cfg, err := config.LoadDefaultConfig(context.Background(), + config.WithRegion(awsSNSRegion), + ) + if err != nil { + logger.Fatal("error loading sns aws config", zap.Error(err)) + return nil, err + } + + snsService := sns.NewFromConfig(cfg) + sqsService := sqs.NewFromConfig(cfg) + + notificationReceiver := NewNotificationReceiver(v, snsService, sqsService, awsSNSRegion, awsAccountId) + + // Remove any remaining previous notification queues on server start + if wipeAllNotificationQueues { + err = notificationReceiver.wipeAllNotificationQueues(logger) + if err != nil { + return nil, err + } + } + + return notificationReceiver, nil + } + + logger.Info("Using local notification receiver backend", zap.String("receiver_backend", v.GetString(cli.ReceiverBackendFlag))) + + return NewStubNotificationReceiver(), nil +} + +func (n NotificationReceiverContext) constructArn(awsService string, endpointName string) string { + return fmt.Sprintf("arn:aws-us-gov:%s:%s:%s:%s", awsService, n.awsRegion, n.awsAccountId, endpointName) +} + +// Removes ALL previously created notification queues +func (n *NotificationReceiverContext) wipeAllNotificationQueues(logger *zap.Logger) error { + defaultTopic, err := n.GetDefaultTopic() + if err != nil { + return err + } + + logger.Info("Receiver cleanup - Removing previous subscriptions...") + paginator := sns.NewListSubscriptionsByTopicPaginator(n.snsService, &sns.ListSubscriptionsByTopicInput{ + TopicArn: aws.String(n.constructArn("sns", defaultTopic)), + }) + + for paginator.HasMorePages() { + output, err := paginator.NextPage(context.Background()) + if err != nil { + return err + } + for _, subscription := range output.Subscriptions { + if
strings.Contains(*subscription.Endpoint, string(QueuePrefixObjectTagsAdded)) { + logger.Info("Subscription ARN: ", zap.String("subscription arn", *subscription.SubscriptionArn)) + logger.Info("Endpoint ARN: ", zap.String("endpoint arn", *subscription.Endpoint)) + _, err = n.snsService.Unsubscribe(context.Background(), &sns.UnsubscribeInput{ + SubscriptionArn: subscription.SubscriptionArn, + }) + if err != nil { + return err + } + } + } + } + + logger.Info("Receiver cleanup - Removing previous queues...") + result, err := n.sqsService.ListQueues(context.Background(), &sqs.ListQueuesInput{ + QueueNamePrefix: aws.String(string(QueuePrefixObjectTagsAdded)), + }) + if err != nil { + return err + } + + for _, url := range result.QueueUrls { + _, err = n.sqsService.DeleteQueue(context.Background(), &sqs.DeleteQueueInput{ + QueueUrl: &url, + }) + if err != nil { + return err + } + } + return nil +} diff --git a/pkg/notifications/notification_receiver_stub.go b/pkg/notifications/notification_receiver_stub.go new file mode 100644 index 00000000000..e98f0c8aa1e --- /dev/null +++ b/pkg/notifications/notification_receiver_stub.go @@ -0,0 +1,51 @@ +package notifications + +import ( + "context" + "time" + + "go.uber.org/zap" + + "github.com/transcom/mymove/pkg/appcontext" +) + +// StubNotificationReceiver mocks an SNS & SQS client for local usage +type StubNotificationReceiver NotificationReceiverContext + +// NewStubNotificationReceiver returns a new StubNotificationReceiver +func NewStubNotificationReceiver() StubNotificationReceiver { + return StubNotificationReceiver{ + snsService: nil, + sqsService: nil, + awsRegion: "", + awsAccountId: "", + queueSubscriptionMap: make(map[string]string), + receiverCancelMap: make(map[string]context.CancelFunc), + } +} + +func (n StubNotificationReceiver) CreateQueueWithSubscription(appCtx appcontext.AppContext, params NotificationQueueParams) (string, error) { + return "stubQueueName", nil +} + +func (n StubNotificationReceiver) ReceiveMessages(appCtx appcontext.AppContext, queueUrl string, timerContext context.Context) ([]ReceivedMessage, error) { + time.Sleep(3 * time.Second) + messageId := "stubMessageId" + body := queueUrl + ":stubMessageBody" + mockMessages := make([]ReceivedMessage, 1) + mockMessages[0] = ReceivedMessage{ + MessageId: messageId, + Body: &body, + } + appCtx.Logger().Debug("Receiving a stubbed message for queue", zap.String("queueUrl", queueUrl)) + return mockMessages, nil +} + +func (n StubNotificationReceiver) CloseoutQueue(appCtx appcontext.AppContext, queueUrl string) error { + appCtx.Logger().Debug("Closing out the stubbed queue.") + return nil +} + +func (n StubNotificationReceiver) GetDefaultTopic() (string, error) { + return "stubDefaultTopic", nil +} diff --git a/pkg/notifications/notification_receiver_test.go b/pkg/notifications/notification_receiver_test.go new file mode 100644 index 00000000000..f7dab5a91b7 --- /dev/null +++ b/pkg/notifications/notification_receiver_test.go @@ -0,0 +1,146 @@ +package notifications + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/service/sns" + "github.com/aws/aws-sdk-go-v2/service/sqs" + "github.com/aws/aws-sdk-go-v2/service/sqs/types" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/suite" + + "github.com/transcom/mymove/pkg/cli" + mocks "github.com/transcom/mymove/pkg/notifications/receiverMocks" + "github.com/transcom/mymove/pkg/testingsuite" +) + +type notificationReceiverSuite struct {
*testingsuite.PopTestSuite } + +func TestNotificationReceiverSuite(t *testing.T) { + + hs := &notificationReceiverSuite{ + PopTestSuite: testingsuite.NewPopTestSuite(testingsuite.CurrentPackage(), + testingsuite.WithPerTestTransaction()), + } + suite.Run(t, hs) + hs.PopTestSuite.TearDown() +} + +func (suite *notificationReceiverSuite) TestSuccessPath() { + + suite.Run("local backend - notification receiver stub", func() { + // Setup mocks + mockedViper := mocks.ViperType{} + mockedViper.On("GetString", cli.ReceiverBackendFlag).Return("local") + mockedViper.On("GetString", cli.SNSRegionFlag).Return("us-gov-west-1") + mockedViper.On("GetString", cli.SNSAccountId).Return("12345") + mockedViper.On("GetString", cli.SNSTagsUpdatedTopicFlag).Return("fake_sns_topic") + localReceiver, err := InitReceiver(&mockedViper, suite.Logger(), true) + + suite.NoError(err) + suite.IsType(StubNotificationReceiver{}, localReceiver) + + defaultTopic, err := localReceiver.GetDefaultTopic() + suite.Equal("stubDefaultTopic", defaultTopic) + suite.NoError(err) + + queueParams := NotificationQueueParams{ + NamePrefix: "testPrefix", + } + createdQueueUrl, err := localReceiver.CreateQueueWithSubscription(suite.AppContextForTest(), queueParams) + suite.NoError(err) + suite.NotContains(createdQueueUrl, queueParams.NamePrefix) + suite.Equal(createdQueueUrl, "stubQueueName") + + timerContext, cancelTimerContext := context.WithTimeout(context.Background(), 2*time.Second) + defer cancelTimerContext() + + receivedMessages, err := localReceiver.ReceiveMessages(suite.AppContextForTest(), createdQueueUrl, timerContext) + suite.NoError(err) + suite.Len(receivedMessages, 1) + suite.Equal(receivedMessages[0].MessageId, "stubMessageId") + suite.Equal(*receivedMessages[0].Body, fmt.Sprintf("%s:stubMessageBody", createdQueueUrl)) + }) + + suite.Run("aws backend - notification receiver InitReceiver", func() { + // Setup mocks + mockedViper := mocks.ViperType{} + mockedViper.On("GetString", cli.ReceiverBackendFlag).Return("sns_sqs") + mockedViper.On("GetString", cli.SNSRegionFlag).Return("us-gov-west-1") + mockedViper.On("GetString", cli.SNSAccountId).Return("12345") + mockedViper.On("GetString", cli.SNSTagsUpdatedTopicFlag).Return("fake_sns_topic") + + receiver, err := InitReceiver(&mockedViper, suite.Logger(), false) + + suite.NoError(err) + suite.IsType(NotificationReceiverContext{}, receiver) + defaultTopic, err := receiver.GetDefaultTopic() + suite.Equal("fake_sns_topic", defaultTopic) + suite.NoError(err) + }) + + suite.Run("aws backend - notification receiver with mock services", func() { + // Setup mocks + mockedViper := mocks.ViperType{} + mockedViper.On("GetString", cli.ReceiverBackendFlag).Return("sns_sqs") + mockedViper.On("GetString", cli.SNSRegionFlag).Return("us-gov-west-1") + mockedViper.On("GetString", cli.SNSAccountId).Return("12345") + mockedViper.On("GetString", cli.SNSTagsUpdatedTopicFlag).Return("fake_sns_topic") + + mockedSns := mocks.SnsClient{} + mockedSns.On("Subscribe", mock.Anything, mock.AnythingOfType("*sns.SubscribeInput")).Return(&sns.SubscribeOutput{ + SubscriptionArn: aws.String("FakeSubscriptionArn"), + }, nil) + mockedSns.On("Unsubscribe", mock.Anything, mock.AnythingOfType("*sns.UnsubscribeInput")).Return(&sns.UnsubscribeOutput{}, nil) + mockedSns.On("ListSubscriptionsByTopic", mock.Anything, mock.AnythingOfType("*sns.ListSubscriptionsByTopicInput")).Return(&sns.ListSubscriptionsByTopicOutput{}, nil) + + mockedSqs := mocks.SqsClient{} + mockedSqs.On("CreateQueue", mock.Anything,
mock.AnythingOfType("*sqs.CreateQueueInput")).Return(&sqs.CreateQueueOutput{ + QueueUrl: aws.String("fakeQueueUrl"), + }, nil) + mockedSqs.On("ReceiveMessage", mock.Anything, mock.AnythingOfType("*sqs.ReceiveMessageInput")).Return(&sqs.ReceiveMessageOutput{ + Messages: []types.Message{ + { + MessageId: aws.String("fakeMessageId"), + Body: aws.String("fakeQueueUrl:fakeMessageBody"), + }, + }, + }, nil) + mockedSqs.On("DeleteMessage", mock.Anything, mock.AnythingOfType("*sqs.DeleteMessageInput")).Return(&sqs.DeleteMessageOutput{}, nil) + mockedSqs.On("DeleteQueue", mock.Anything, mock.AnythingOfType("*sqs.DeleteQueueInput")).Return(&sqs.DeleteQueueOutput{}, nil) + mockedSqs.On("ListQueues", mock.Anything, mock.AnythingOfType("*sqs.ListQueuesInput")).Return(&sqs.ListQueuesOutput{}, nil) + + // Run test + receiver := NewNotificationReceiver(&mockedViper, &mockedSns, &mockedSqs, "", "") + suite.IsType(NotificationReceiverContext{}, receiver) + + defaultTopic, err := receiver.GetDefaultTopic() + suite.Equal("fake_sns_topic", defaultTopic) + suite.NoError(err) + + queueParams := NotificationQueueParams{ + NamePrefix: "testPrefix", + } + createdQueueUrl, err := receiver.CreateQueueWithSubscription(suite.AppContextForTest(), queueParams) + suite.NoError(err) + suite.Equal("fakeQueueUrl", createdQueueUrl) + + timerContext, cancelTimerContext := context.WithTimeout(context.Background(), 2*time.Second) + defer cancelTimerContext() + + receivedMessages, err := receiver.ReceiveMessages(suite.AppContextForTest(), createdQueueUrl, timerContext) + suite.NoError(err) + suite.Len(receivedMessages, 1) + suite.Equal(receivedMessages[0].MessageId, "fakeMessageId") + suite.Equal(*receivedMessages[0].Body, fmt.Sprintf("%s:fakeMessageBody", createdQueueUrl)) + + err = receiver.CloseoutQueue(suite.AppContextForTest(), createdQueueUrl) + suite.NoError(err) + }) +} diff --git a/pkg/notifications/notification_stub.go b/pkg/notifications/notification_sender_stub.go similarity index 100% rename from pkg/notifications/notification_stub.go rename to pkg/notifications/notification_sender_stub.go diff --git a/pkg/notifications/notification_test.go b/pkg/notifications/notification_sender_test.go similarity index 100% rename from pkg/notifications/notification_test.go rename to pkg/notifications/notification_sender_test.go diff --git a/pkg/notifications/receiverMocks/SnsClient.go b/pkg/notifications/receiverMocks/SnsClient.go new file mode 100644 index 00000000000..0c562896a0d --- /dev/null +++ b/pkg/notifications/receiverMocks/SnsClient.go @@ -0,0 +1,141 @@ +// Code generated by mockery. DO NOT EDIT. + +package mocks + +import ( + context "context" + + mock "github.com/stretchr/testify/mock" + + sns "github.com/aws/aws-sdk-go-v2/service/sns" +) + +// SnsClient is an autogenerated mock type for the SnsClient type +type SnsClient struct { + mock.Mock +} + +// ListSubscriptionsByTopic provides a mock function with given fields: _a0, _a1, _a2 +func (_m *SnsClient) ListSubscriptionsByTopic(_a0 context.Context, _a1 *sns.ListSubscriptionsByTopicInput, _a2 ...func(*sns.Options)) (*sns.ListSubscriptionsByTopicOutput, error) { + _va := make([]interface{}, len(_a2)) + for _i := range _a2 { + _va[_i] = _a2[_i] + } + var _ca []interface{} + _ca = append(_ca, _a0, _a1) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) 
+ + if len(ret) == 0 { + panic("no return value specified for ListSubscriptionsByTopic") + } + + var r0 *sns.ListSubscriptionsByTopicOutput + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *sns.ListSubscriptionsByTopicInput, ...func(*sns.Options)) (*sns.ListSubscriptionsByTopicOutput, error)); ok { + return rf(_a0, _a1, _a2...) + } + if rf, ok := ret.Get(0).(func(context.Context, *sns.ListSubscriptionsByTopicInput, ...func(*sns.Options)) *sns.ListSubscriptionsByTopicOutput); ok { + r0 = rf(_a0, _a1, _a2...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*sns.ListSubscriptionsByTopicOutput) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *sns.ListSubscriptionsByTopicInput, ...func(*sns.Options)) error); ok { + r1 = rf(_a0, _a1, _a2...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Subscribe provides a mock function with given fields: ctx, params, optFns +func (_m *SnsClient) Subscribe(ctx context.Context, params *sns.SubscribeInput, optFns ...func(*sns.Options)) (*sns.SubscribeOutput, error) { + _va := make([]interface{}, len(optFns)) + for _i := range optFns { + _va[_i] = optFns[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, params) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for Subscribe") + } + + var r0 *sns.SubscribeOutput + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *sns.SubscribeInput, ...func(*sns.Options)) (*sns.SubscribeOutput, error)); ok { + return rf(ctx, params, optFns...) + } + if rf, ok := ret.Get(0).(func(context.Context, *sns.SubscribeInput, ...func(*sns.Options)) *sns.SubscribeOutput); ok { + r0 = rf(ctx, params, optFns...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*sns.SubscribeOutput) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *sns.SubscribeInput, ...func(*sns.Options)) error); ok { + r1 = rf(ctx, params, optFns...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Unsubscribe provides a mock function with given fields: ctx, params, optFns +func (_m *SnsClient) Unsubscribe(ctx context.Context, params *sns.UnsubscribeInput, optFns ...func(*sns.Options)) (*sns.UnsubscribeOutput, error) { + _va := make([]interface{}, len(optFns)) + for _i := range optFns { + _va[_i] = optFns[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, params) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for Unsubscribe") + } + + var r0 *sns.UnsubscribeOutput + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *sns.UnsubscribeInput, ...func(*sns.Options)) (*sns.UnsubscribeOutput, error)); ok { + return rf(ctx, params, optFns...) + } + if rf, ok := ret.Get(0).(func(context.Context, *sns.UnsubscribeInput, ...func(*sns.Options)) *sns.UnsubscribeOutput); ok { + r0 = rf(ctx, params, optFns...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*sns.UnsubscribeOutput) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *sns.UnsubscribeInput, ...func(*sns.Options)) error); ok { + r1 = rf(ctx, params, optFns...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NewSnsClient creates a new instance of SnsClient. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewSnsClient(t interface { + mock.TestingT + Cleanup(func()) +}) *SnsClient { + mock := &SnsClient{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/pkg/notifications/receiverMocks/SqsClient.go b/pkg/notifications/receiverMocks/SqsClient.go new file mode 100644 index 00000000000..c8e6e6aa284 --- /dev/null +++ b/pkg/notifications/receiverMocks/SqsClient.go @@ -0,0 +1,215 @@ +// Code generated by mockery. DO NOT EDIT. + +package mocks + +import ( + context "context" + + mock "github.com/stretchr/testify/mock" + + sqs "github.com/aws/aws-sdk-go-v2/service/sqs" +) + +// SqsClient is an autogenerated mock type for the SqsClient type +type SqsClient struct { + mock.Mock +} + +// CreateQueue provides a mock function with given fields: ctx, params, optFns +func (_m *SqsClient) CreateQueue(ctx context.Context, params *sqs.CreateQueueInput, optFns ...func(*sqs.Options)) (*sqs.CreateQueueOutput, error) { + _va := make([]interface{}, len(optFns)) + for _i := range optFns { + _va[_i] = optFns[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, params) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for CreateQueue") + } + + var r0 *sqs.CreateQueueOutput + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *sqs.CreateQueueInput, ...func(*sqs.Options)) (*sqs.CreateQueueOutput, error)); ok { + return rf(ctx, params, optFns...) + } + if rf, ok := ret.Get(0).(func(context.Context, *sqs.CreateQueueInput, ...func(*sqs.Options)) *sqs.CreateQueueOutput); ok { + r0 = rf(ctx, params, optFns...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*sqs.CreateQueueOutput) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *sqs.CreateQueueInput, ...func(*sqs.Options)) error); ok { + r1 = rf(ctx, params, optFns...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteMessage provides a mock function with given fields: ctx, params, optFns +func (_m *SqsClient) DeleteMessage(ctx context.Context, params *sqs.DeleteMessageInput, optFns ...func(*sqs.Options)) (*sqs.DeleteMessageOutput, error) { + _va := make([]interface{}, len(optFns)) + for _i := range optFns { + _va[_i] = optFns[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, params) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for DeleteMessage") + } + + var r0 *sqs.DeleteMessageOutput + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *sqs.DeleteMessageInput, ...func(*sqs.Options)) (*sqs.DeleteMessageOutput, error)); ok { + return rf(ctx, params, optFns...) + } + if rf, ok := ret.Get(0).(func(context.Context, *sqs.DeleteMessageInput, ...func(*sqs.Options)) *sqs.DeleteMessageOutput); ok { + r0 = rf(ctx, params, optFns...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*sqs.DeleteMessageOutput) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *sqs.DeleteMessageInput, ...func(*sqs.Options)) error); ok { + r1 = rf(ctx, params, optFns...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DeleteQueue provides a mock function with given fields: ctx, params, optFns +func (_m *SqsClient) DeleteQueue(ctx context.Context, params *sqs.DeleteQueueInput, optFns ...func(*sqs.Options)) (*sqs.DeleteQueueOutput, error) { + _va := make([]interface{}, len(optFns)) + for _i := range optFns { + _va[_i] = optFns[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, params) + _ca = append(_ca, _va...) 
+ ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for DeleteQueue") + } + + var r0 *sqs.DeleteQueueOutput + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *sqs.DeleteQueueInput, ...func(*sqs.Options)) (*sqs.DeleteQueueOutput, error)); ok { + return rf(ctx, params, optFns...) + } + if rf, ok := ret.Get(0).(func(context.Context, *sqs.DeleteQueueInput, ...func(*sqs.Options)) *sqs.DeleteQueueOutput); ok { + r0 = rf(ctx, params, optFns...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*sqs.DeleteQueueOutput) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *sqs.DeleteQueueInput, ...func(*sqs.Options)) error); ok { + r1 = rf(ctx, params, optFns...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ListQueues provides a mock function with given fields: ctx, params, optFns +func (_m *SqsClient) ListQueues(ctx context.Context, params *sqs.ListQueuesInput, optFns ...func(*sqs.Options)) (*sqs.ListQueuesOutput, error) { + _va := make([]interface{}, len(optFns)) + for _i := range optFns { + _va[_i] = optFns[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, params) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for ListQueues") + } + + var r0 *sqs.ListQueuesOutput + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *sqs.ListQueuesInput, ...func(*sqs.Options)) (*sqs.ListQueuesOutput, error)); ok { + return rf(ctx, params, optFns...) + } + if rf, ok := ret.Get(0).(func(context.Context, *sqs.ListQueuesInput, ...func(*sqs.Options)) *sqs.ListQueuesOutput); ok { + r0 = rf(ctx, params, optFns...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*sqs.ListQueuesOutput) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *sqs.ListQueuesInput, ...func(*sqs.Options)) error); ok { + r1 = rf(ctx, params, optFns...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// ReceiveMessage provides a mock function with given fields: ctx, params, optFns +func (_m *SqsClient) ReceiveMessage(ctx context.Context, params *sqs.ReceiveMessageInput, optFns ...func(*sqs.Options)) (*sqs.ReceiveMessageOutput, error) { + _va := make([]interface{}, len(optFns)) + for _i := range optFns { + _va[_i] = optFns[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, params) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + if len(ret) == 0 { + panic("no return value specified for ReceiveMessage") + } + + var r0 *sqs.ReceiveMessageOutput + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, *sqs.ReceiveMessageInput, ...func(*sqs.Options)) (*sqs.ReceiveMessageOutput, error)); ok { + return rf(ctx, params, optFns...) + } + if rf, ok := ret.Get(0).(func(context.Context, *sqs.ReceiveMessageInput, ...func(*sqs.Options)) *sqs.ReceiveMessageOutput); ok { + r0 = rf(ctx, params, optFns...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*sqs.ReceiveMessageOutput) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, *sqs.ReceiveMessageInput, ...func(*sqs.Options)) error); ok { + r1 = rf(ctx, params, optFns...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// NewSqsClient creates a new instance of SqsClient. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewSqsClient(t interface { + mock.TestingT + Cleanup(func()) +}) *SqsClient { + mock := &SqsClient{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/pkg/notifications/receiverMocks/ViperType.go b/pkg/notifications/receiverMocks/ViperType.go new file mode 100644 index 00000000000..bf5e6f84090 --- /dev/null +++ b/pkg/notifications/receiverMocks/ViperType.go @@ -0,0 +1,51 @@ +// Code generated by mockery. DO NOT EDIT. + +package mocks + +import ( + mock "github.com/stretchr/testify/mock" + + strings "strings" +) + +// ViperType is an autogenerated mock type for the ViperType type +type ViperType struct { + mock.Mock +} + +// GetString provides a mock function with given fields: _a0 +func (_m *ViperType) GetString(_a0 string) string { + ret := _m.Called(_a0) + + if len(ret) == 0 { + panic("no return value specified for GetString") + } + + var r0 string + if rf, ok := ret.Get(0).(func(string) string); ok { + r0 = rf(_a0) + } else { + r0 = ret.Get(0).(string) + } + + return r0 +} + +// SetEnvKeyReplacer provides a mock function with given fields: _a0 +func (_m *ViperType) SetEnvKeyReplacer(_a0 *strings.Replacer) { + _m.Called(_a0) +} + +// NewViperType creates a new instance of ViperType. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewViperType(t interface { + mock.TestingT + Cleanup(func()) +}) *ViperType { + mock := &ViperType{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} diff --git a/pkg/services/mto_service_item/mto_service_item_creator.go b/pkg/services/mto_service_item/mto_service_item_creator.go index 3b46e33ea3a..6e5415a5916 100644 --- a/pkg/services/mto_service_item/mto_service_item_creator.go +++ b/pkg/services/mto_service_item/mto_service_item_creator.go @@ -910,6 +910,12 @@ func (o *mtoServiceItemCreator) validateFirstDaySITServiceItem(appCtx appcontext return nil, err } + //SIT Entry Date must be before SIT Departure Date + err = o.checkSITEntryDateBeforeDepartureDate(serviceItem) + if err != nil { + return nil, err + } + verrs := validate.NewErrors() // check if the address IDs are nil diff --git a/pkg/services/mto_service_item/mto_service_item_creator_test.go b/pkg/services/mto_service_item/mto_service_item_creator_test.go index 9a9146f50d8..c785f78ec1b 100644 --- a/pkg/services/mto_service_item/mto_service_item_creator_test.go +++ b/pkg/services/mto_service_item/mto_service_item_creator_test.go @@ -1261,6 +1261,97 @@ func (suite *MTOServiceItemServiceSuite) TestCreateOriginSITServiceItem() { suite.IsType(apperror.ConflictError{}, err) }) + suite.Run("Do not create DOFSIT if departure date is before entry date", func() { + shipment := setupTestData() + originAddress := factory.BuildAddress(suite.DB(), nil, nil) + reServiceDOFSIT := factory.FetchReServiceByCode(suite.DB(), models.ReServiceCodeDOFSIT) + serviceItemDOFSIT := factory.BuildMTOServiceItem(nil, []factory.Customization{ + { + Model: models.MTOServiceItem{ + SITEntryDate: models.TimePointer(time.Now().AddDate(0, 0, 1)), + SITDepartureDate: models.TimePointer(time.Now()), + }, + }, + { + Model: reServiceDOFSIT, + LinkOnly: true, + }, + { + Model: shipment, + LinkOnly: true, + }, + { + Model: originAddress, + LinkOnly: true, + Type: &factory.Addresses.SITOriginHHGOriginalAddress, + }, + }, nil) + builder := query.NewQueryBuilder() + moveRouter := moverouter.NewMoveRouter() + planner :=
&mocks.Planner{} + planner.On("ZipTransitDistance", + mock.AnythingOfType("*appcontext.appContext"), + mock.Anything, + mock.Anything, + false, + ).Return(400, nil) + creator := NewMTOServiceItemCreator(planner, builder, moveRouter, ghcrateengine.NewDomesticUnpackPricer(), ghcrateengine.NewDomesticPackPricer(), ghcrateengine.NewDomesticLinehaulPricer(), ghcrateengine.NewDomesticShorthaulPricer(), ghcrateengine.NewDomesticOriginPricer(), ghcrateengine.NewDomesticDestinationPricer(), ghcrateengine.NewFuelSurchargePricer()) + _, _, err := creator.CreateMTOServiceItem(suite.AppContextForTest(), &serviceItemDOFSIT) + suite.Error(err) + expectedError := fmt.Sprintf( + "the SIT Departure Date (%s) must be after the SIT Entry Date (%s)", + serviceItemDOFSIT.SITDepartureDate.Format("2006-01-02"), + serviceItemDOFSIT.SITEntryDate.Format("2006-01-02"), + ) + suite.Contains(err.Error(), expectedError) + }) + + suite.Run("Do not create DOFSIT if departure date is the same as entry date", func() { + today := models.TimePointer(time.Now()) + shipment := setupTestData() + originAddress := factory.BuildAddress(suite.DB(), nil, nil) + reServiceDOFSIT := factory.FetchReServiceByCode(suite.DB(), models.ReServiceCodeDOFSIT) + serviceItemDOFSIT := factory.BuildMTOServiceItem(nil, []factory.Customization{ + { + Model: models.MTOServiceItem{ + SITEntryDate: today, + SITDepartureDate: today, + }, + }, + { + Model: reServiceDOFSIT, + LinkOnly: true, + }, + { + Model: shipment, + LinkOnly: true, + }, + { + Model: originAddress, + LinkOnly: true, + Type: &factory.Addresses.SITOriginHHGOriginalAddress, + }, + }, nil) + builder := query.NewQueryBuilder() + moveRouter := moverouter.NewMoveRouter() + planner := &mocks.Planner{} + planner.On("ZipTransitDistance", + mock.AnythingOfType("*appcontext.appContext"), + mock.Anything, + mock.Anything, + false, + ).Return(400, nil) + creator := NewMTOServiceItemCreator(planner, builder, moveRouter, ghcrateengine.NewDomesticUnpackPricer(), ghcrateengine.NewDomesticPackPricer(), ghcrateengine.NewDomesticLinehaulPricer(), ghcrateengine.NewDomesticShorthaulPricer(), ghcrateengine.NewDomesticOriginPricer(), ghcrateengine.NewDomesticDestinationPricer(), ghcrateengine.NewFuelSurchargePricer()) + _, _, err := creator.CreateMTOServiceItem(suite.AppContextForTest(), &serviceItemDOFSIT) + suite.Error(err) + expectedError := fmt.Sprintf( + "the SIT Departure Date (%s) must be after the SIT Entry Date (%s)", + serviceItemDOFSIT.SITDepartureDate.Format("2006-01-02"), + serviceItemDOFSIT.SITEntryDate.Format("2006-01-02"), + ) + suite.Contains(err.Error(), expectedError) + }) + suite.Run("Do not create standalone DOPSIT service item", func() { // TESTCASE SCENARIO // Under test: CreateMTOServiceItem function @@ -1686,6 +1777,63 @@ func (suite *MTOServiceItemServiceSuite) TestCreateDestSITServiceItem() { suite.Contains(err.Error(), expectedError) }) + + suite.Run("Do not create DDFSIT if departure date is before entry date", func() { + shipment, creator, reServiceDDFSIT := setupTestData() + serviceItemDDFSIT := factory.BuildMTOServiceItem(nil, []factory.Customization{ + { + Model: models.MTOServiceItem{ + SITEntryDate: models.TimePointer(time.Now().AddDate(0, 0, 1)), + SITDepartureDate: models.TimePointer(time.Now()), + }, + }, + { + Model: reServiceDDFSIT, + LinkOnly: true, + }, + { + Model: shipment, + LinkOnly: true, + }, + }, nil) + _, _, err := creator.CreateMTOServiceItem(suite.AppContextForTest(), &serviceItemDDFSIT) + suite.Error(err) + expectedError := fmt.Sprintf( + "the SIT Departure Date
(%s) must be after the SIT Entry Date (%s)", + serviceItemDDFSIT.SITDepartureDate.Format("2006-01-02"), + serviceItemDDFSIT.SITEntryDate.Format("2006-01-02"), + ) + suite.Contains(err.Error(), expectedError) + }) + + suite.Run("Do not create DDFSIT if departure date is the same as entry date", func() { + today := models.TimePointer(time.Now()) + shipment, creator, reServiceDDFSIT := setupTestData() + serviceItemDDFSIT := factory.BuildMTOServiceItem(nil, []factory.Customization{ + { + Model: models.MTOServiceItem{ + SITEntryDate: today, + SITDepartureDate: today, + }, + }, + { + Model: reServiceDDFSIT, + LinkOnly: true, + }, + { + Model: shipment, + LinkOnly: true, + }, + }, nil) + _, _, err := creator.CreateMTOServiceItem(suite.AppContextForTest(), &serviceItemDDFSIT) + suite.Error(err) + expectedError := fmt.Sprintf( + "the SIT Departure Date (%s) must be after the SIT Entry Date (%s)", + serviceItemDDFSIT.SITDepartureDate.Format("2006-01-02"), + serviceItemDDFSIT.SITEntryDate.Format("2006-01-02"), + ) + suite.Contains(err.Error(), expectedError) + }) + // Successful creation of DDASIT service item suite.Run("Success - DDASIT creation approved", func() { shipment, creator, reServiceDDFSIT := setupTestData() @@ -2103,7 +2251,6 @@ func (suite *MTOServiceItemServiceSuite) TestPriceEstimator() { mock.Anything, mock.Anything, false, - false, ).Return(400, nil) creator := NewMTOServiceItemCreator(planner, builder, moveRouter, ghcrateengine.NewDomesticUnpackPricer(), ghcrateengine.NewDomesticPackPricer(), ghcrateengine.NewDomesticLinehaulPricer(), ghcrateengine.NewDomesticShorthaulPricer(), ghcrateengine.NewDomesticOriginPricer(), ghcrateengine.NewDomesticDestinationPricer(), ghcrateengine.NewFuelSurchargePricer()) @@ -2403,7 +2550,6 @@ func (suite *MTOServiceItemServiceSuite) TestPriceEstimator() { mock.Anything, mock.Anything, false, - false, ).Return(800, nil) creator := NewMTOServiceItemCreator(planner, builder, moveRouter, ghcrateengine.NewDomesticUnpackPricer(), ghcrateengine.NewDomesticPackPricer(), ghcrateengine.NewDomesticLinehaulPricer(), ghcrateengine.NewDomesticShorthaulPricer(), ghcrateengine.NewDomesticOriginPricer(), ghcrateengine.NewDomesticDestinationPricer(), ghcrateengine.NewFuelSurchargePricer()) diff --git a/pkg/services/mto_service_item/mto_service_item_validators.go b/pkg/services/mto_service_item/mto_service_item_validators.go index 3b7d4cc7fc1..a16ee07fc10 100644 --- a/pkg/services/mto_service_item/mto_service_item_validators.go +++ b/pkg/services/mto_service_item/mto_service_item_validators.go @@ -830,3 +830,16 @@ func (o *mtoServiceItemCreator) checkSITEntryDateAndFADD(serviceItem *models.MTO return nil } + +func (o *mtoServiceItemCreator) checkSITEntryDateBeforeDepartureDate(serviceItem *models.MTOServiceItem) error { + if serviceItem.SITEntryDate == nil || serviceItem.SITDepartureDate == nil { + return nil + } + + //Departure Date has to be after the Entry Date + if !serviceItem.SITDepartureDate.After(*serviceItem.SITEntryDate) { + return apperror.NewUnprocessableEntityError(fmt.Sprintf("the SIT Departure Date (%s) must be after the SIT Entry Date (%s)", + serviceItem.SITDepartureDate.Format("2006-01-02"), serviceItem.SITEntryDate.Format("2006-01-02"))) + } + return nil +} diff --git a/pkg/services/mto_service_item/mto_service_item_validators_test.go b/pkg/services/mto_service_item/mto_service_item_validators_test.go index de41dc6bc9d..947758dec43 100644 --- a/pkg/services/mto_service_item/mto_service_item_validators_test.go +++ 
b/pkg/services/mto_service_item/mto_service_item_validators_test.go @@ -833,7 +833,8 @@ func (suite *MTOServiceItemServiceSuite) TestUpdateMTOServiceItemData() { }, }, nil) newSITServiceItem := oldSITServiceItem - newSITServiceItem.SITDepartureDate = &later + newSITDepartureDate := later.AddDate(0, 0, 1) + newSITServiceItem.SITDepartureDate = &newSITDepartureDate serviceItemData := updateMTOServiceItemData{ updatedServiceItem: newSITServiceItem, oldServiceItem: oldSITServiceItem, @@ -1444,4 +1445,49 @@ func (suite *MTOServiceItemServiceSuite) TestCreateMTOServiceItemValidators() { ) suite.Contains(err.Error(), expectedError) }) + + suite.Run("checkSITEntryDateBeforeDepartureDate - success when the SIT entry date is before the SIT departure date", func() { + s := mtoServiceItemCreator{} + serviceItem := setupTestData() + //Set SIT entry date = today, SIT departure date = tomorrow + serviceItem.SITEntryDate = models.TimePointer(time.Now()) + serviceItem.SITDepartureDate = models.TimePointer(time.Now().AddDate(0, 0, 1)) + err := s.checkSITEntryDateBeforeDepartureDate(&serviceItem) + suite.NoError(err) + }) + + suite.Run("checkSITEntryDateBeforeDepartureDate - error when the SIT entry date is after the SIT departure date", func() { + s := mtoServiceItemCreator{} + serviceItem := setupTestData() + //Set SIT entry date = tomorrow, SIT departure date = today + serviceItem.SITEntryDate = models.TimePointer(time.Now().AddDate(0, 0, 1)) + serviceItem.SITDepartureDate = models.TimePointer(time.Now()) + err := s.checkSITEntryDateBeforeDepartureDate(&serviceItem) + suite.Error(err) + suite.IsType(apperror.UnprocessableEntityError{}, err) + expectedError := fmt.Sprintf( + "the SIT Departure Date (%s) must be after the SIT Entry Date (%s)", + serviceItem.SITDepartureDate.Format("2006-01-02"), + serviceItem.SITEntryDate.Format("2006-01-02"), + ) + suite.Contains(err.Error(), expectedError) + }) + + suite.Run("checkSITEntryDateBeforeDepartureDate - error when the SIT entry date is the same as the SIT departure date", func() { + s := mtoServiceItemCreator{} + serviceItem := setupTestData() + //Set SIT entry date = today, SIT departure date = today + today := models.TimePointer(time.Now()) + serviceItem.SITEntryDate = today + serviceItem.SITDepartureDate = today + err := s.checkSITEntryDateBeforeDepartureDate(&serviceItem) + suite.Error(err) + suite.IsType(apperror.UnprocessableEntityError{}, err) + expectedError := fmt.Sprintf( + "the SIT Departure Date (%s) must be after the SIT Entry Date (%s)", + serviceItem.SITDepartureDate.Format("2006-01-02"), + serviceItem.SITEntryDate.Format("2006-01-02"), + ) + suite.Contains(err.Error(), expectedError) + }) } diff --git a/pkg/services/mto_shipment/mto_shipment_updater.go b/pkg/services/mto_shipment/mto_shipment_updater.go index fb75c795a77..7788c05d731 100644 --- a/pkg/services/mto_shipment/mto_shipment_updater.go +++ b/pkg/services/mto_shipment/mto_shipment_updater.go @@ -1075,7 +1075,7 @@ func (o *mtoShipmentStatusUpdater) setRequiredDeliveryDate(appCtx appcontext.App pickupLocation = shipment.PickupAddress deliveryLocation = shipment.DestinationAddress } - requiredDeliveryDate, calcErr := CalculateRequiredDeliveryDate(appCtx, o.planner, *pickupLocation, *deliveryLocation, *shipment.ScheduledPickupDate, weight.Int(), shipment.MarketCode) + requiredDeliveryDate, calcErr := CalculateRequiredDeliveryDate(appCtx, o.planner, *pickupLocation, *deliveryLocation, *shipment.ScheduledPickupDate, weight.Int(), shipment.MarketCode, shipment.MoveTaskOrderID) if calcErr != 
nil { return calcErr } @@ -1192,18 +1192,7 @@ func reServiceCodesForShipment(shipment models.MTOShipment) []models.ReServiceCo // CalculateRequiredDeliveryDate function is used to get a distance calculation using the pickup and destination addresses. It then uses // the value returned to make a fetch on the ghc_domestic_transit_times table and returns a required delivery date // based on the max_days_transit_time. -func CalculateRequiredDeliveryDate(appCtx appcontext.AppContext, planner route.Planner, pickupAddress models.Address, destinationAddress models.Address, pickupDate time.Time, weight int, marketCode models.MarketCode) (*time.Time, error) { - // Okay, so this is something to get us able to take care of the 20 day condition over in the gdoc linked in this - // story: https://dp3.atlassian.net/browse/MB-1141 - // We unfortunately didn't get a lot of guidance regarding vicinity. So for now we're taking zip codes that are the - // explicitly mentioned 20 day cities and those in the same county (that I've manually compiled together here). - // If a move is in that group it adds 20 days, if it's not in that group, but is in Alaska it adds 10 days. - // Else it will not do either of those things. - // The cities for 20 days are: Adak, Kodiak, Juneau, Ketchikan, and Sitka. As well as others in their 'vicinity.' - twentyDayAKZips := [28]string{"99546", "99547", "99591", "99638", "99660", "99685", "99692", "99550", "99608", - "99615", "99619", "99624", "99643", "99644", "99697", "99650", "99801", "99802", "99803", "99811", "99812", - "99950", "99824", "99850", "99901", "99928", "99950", "99835"} - +func CalculateRequiredDeliveryDate(appCtx appcontext.AppContext, planner route.Planner, pickupAddress models.Address, destinationAddress models.Address, pickupDate time.Time, weight int, marketCode models.MarketCode, moveID uuid.UUID) (*time.Time, error) { internationalShipment := marketCode == models.MarketCodeInternational distance, err := planner.ZipTransitDistance(appCtx, pickupAddress.PostalCode, destinationAddress.PostalCode, internationalShipment) @@ -1225,17 +1214,59 @@ func CalculateRequiredDeliveryDate(appCtx appcontext.AppContext, planner route.P // Add the max transit time to the pickup date to get the new required delivery date requiredDeliveryDate := pickupDate.AddDate(0, 0, ghcDomesticTransitTime.MaxDaysTransitTime) - // Let's add some days if we're dealing with an alaska shipment. 
- if destinationAddress.State == "AK" { - for _, zip := range twentyDayAKZips { - if destinationAddress.PostalCode == zip { - // Add an extra 10 days here, so that after we add the 10 for being in AK we wind up with a total of 20 - requiredDeliveryDate = requiredDeliveryDate.AddDate(0, 0, 10) - break + destinationIsAlaska, err := destinationAddress.IsAddressAlaska() + if err != nil { + return nil, fmt.Errorf("destination address is nil for move ID: %s", moveID) + } + pickupIsAlaska, err := pickupAddress.IsAddressAlaska() + if err != nil { + return nil, fmt.Errorf("pickup address is nil for move ID: %s", moveID) + } + // Let's add some days if we're dealing with a shipment between CONUS/Alaska + if (destinationIsAlaska || pickupIsAlaska) && !(destinationIsAlaska && pickupIsAlaska) { + var rateAreaID uuid.UUID + var intlTransTime models.InternationalTransitTime + + contract, err := models.FetchContractForMove(appCtx, moveID) + if err != nil { + return nil, fmt.Errorf("error fetching contract for move ID: %s", moveID) + } + + if destinationIsAlaska { + rateAreaID, err = models.FetchRateAreaID(appCtx.DB(), destinationAddress.ID, &uuid.Nil, contract.ID) + if err != nil { + return nil, fmt.Errorf("error fetching destination rate area id for address ID: %s", destinationAddress.ID) + } + err = appCtx.DB().Where("destination_rate_area_id = $1", rateAreaID).First(&intlTransTime) + if err != nil { + switch err { + case sql.ErrNoRows: + return nil, fmt.Errorf("no international transit time found for destination rate area ID: %s", rateAreaID) + default: + return nil, err + } + } + } + + if pickupIsAlaska { + rateAreaID, err = models.FetchRateAreaID(appCtx.DB(), pickupAddress.ID, &uuid.Nil, contract.ID) + if err != nil { + return nil, fmt.Errorf("error fetching pickup rate area id for address ID: %s", pickupAddress.ID) + } + err = appCtx.DB().Where("origin_rate_area_id = $1", rateAreaID).First(&intlTransTime) + if err != nil { + switch err { + case sql.ErrNoRows: + return nil, fmt.Errorf("no international transit time found for pickup rate area ID: %s", rateAreaID) + default: + return nil, err + } } } - // Add an extra 10 days for being in AK - requiredDeliveryDate = requiredDeliveryDate.AddDate(0, 0, 10) + + if intlTransTime.HhgTransitTime != nil { + requiredDeliveryDate = requiredDeliveryDate.AddDate(0, 0, *intlTransTime.HhgTransitTime) + } } // return the value diff --git a/pkg/services/mto_shipment/mto_shipment_updater_test.go b/pkg/services/mto_shipment/mto_shipment_updater_test.go index 5cdd5100b13..e408c246cd0 100644 --- a/pkg/services/mto_shipment/mto_shipment_updater_test.go +++ b/pkg/services/mto_shipment/mto_shipment_updater_test.go @@ -2462,6 +2462,137 @@ func (suite *MTOShipmentServiceSuite) TestUpdateMTOShipmentStatus() { } }) + suite.Run("Test that we are properly adding days to Alaska shipments", func() { + reContract := testdatagen.FetchOrMakeReContract(suite.DB(), testdatagen.Assertions{}) + testdatagen.FetchOrMakeReContractYear(suite.DB(), testdatagen.Assertions{ + ReContractYear: models.ReContractYear{ + Contract: reContract, + ContractID: reContract.ID, + StartDate: time.Now(), + EndDate: time.Now().Add(time.Hour * 12), + Escalation: 1.0, + EscalationCompounded: 1.0, + }, + }) + move := factory.BuildAvailableToPrimeMove(suite.DB(), nil, nil) + appCtx := suite.AppContextForTest() + + ghcDomesticTransitTime0LbsUpper := models.GHCDomesticTransitTime{ + MaxDaysTransitTime: 12, + WeightLbsLower: 10001, + WeightLbsUpper: 0, + DistanceMilesLower: 0, + DistanceMilesUpper: 10000, + } + 
verrs, err := suite.DB().ValidateAndCreate(&ghcDomesticTransitTime0LbsUpper) + suite.Assert().False(verrs.HasAny()) + suite.NoError(err) + + conusAddress := factory.BuildAddress(suite.DB(), nil, []factory.Trait{factory.GetTraitAddress2}) + zone1Address := factory.BuildAddress(suite.DB(), nil, []factory.Trait{factory.GetTraitAddressAKZone1}) + zone2Address := factory.BuildAddress(suite.DB(), nil, []factory.Trait{factory.GetTraitAddressAKZone2}) + zone3Address := factory.BuildAddress(suite.DB(), nil, []factory.Trait{factory.GetTraitAddressAKZone3}) + zone4Address := factory.BuildAddress(suite.DB(), nil, []factory.Trait{factory.GetTraitAddressAKZone4}) + + estimatedWeight := unit.Pound(11000) + + testCases10Days := []struct { + pickupLocation models.Address + destinationLocation models.Address + }{ + {conusAddress, zone1Address}, + {conusAddress, zone2Address}, + {zone1Address, conusAddress}, + {zone2Address, conusAddress}, + } + // adding 22 days; ghcDomesticTransitTime0LbsUpper.MaxDaysTransitTime is 12, plus 10 for Zones 1 and 2 + rdd10DaysDate := testdatagen.DateInsidePeakRateCycle.AddDate(0, 0, 22) + for _, testCase := range testCases10Days { + shipment := factory.BuildMTOShipmentMinimal(suite.DB(), []factory.Customization{ + { + Model: move, + LinkOnly: true, + }, + { + Model: models.MTOShipment{ + ShipmentType: models.MTOShipmentTypeHHG, + ScheduledPickupDate: &testdatagen.DateInsidePeakRateCycle, + PrimeEstimatedWeight: &estimatedWeight, + Status: models.MTOShipmentStatusSubmitted, + }, + }, + { + Model: testCase.pickupLocation, + Type: &factory.Addresses.PickupAddress, + LinkOnly: true, + }, + { + Model: testCase.destinationLocation, + Type: &factory.Addresses.DeliveryAddress, + LinkOnly: true, + }, + }, nil) + shipmentEtag := etag.GenerateEtag(shipment.UpdatedAt) + _, err = updater.UpdateMTOShipmentStatus(appCtx, shipment.ID, status, nil, nil, shipmentEtag) + suite.NoError(err) + + fetchedShipment := models.MTOShipment{} + err = suite.DB().Find(&fetchedShipment, shipment.ID) + suite.NoError(err) + suite.NotNil(fetchedShipment.RequiredDeliveryDate) + suite.Equal(rdd10DaysDate.Format(time.RFC3339), fetchedShipment.RequiredDeliveryDate.Format(time.RFC3339)) + } + + testCases20Days := []struct { + pickupLocation models.Address + destinationLocation models.Address + }{ + {conusAddress, zone3Address}, + {conusAddress, zone4Address}, + {zone3Address, conusAddress}, + {zone4Address, conusAddress}, + } + // adding 32 days; ghcDomesticTransitTime0LbsUpper.MaxDaysTransitTime is 12, plus 20 for Zones 3 and 4 + rdd20DaysDate := testdatagen.DateInsidePeakRateCycle.AddDate(0, 0, 32) + for _, testCase := range testCases20Days { + shipment := factory.BuildMTOShipmentMinimal(suite.DB(), []factory.Customization{ + { + Model: move, + LinkOnly: true, + }, + { + Model: models.MTOShipment{ + ShipmentType: models.MTOShipmentTypeHHG, + ScheduledPickupDate: &testdatagen.DateInsidePeakRateCycle, + PrimeEstimatedWeight: &estimatedWeight, + Status: models.MTOShipmentStatusSubmitted, + }, + }, + { + Model: testCase.pickupLocation, + Type: &factory.Addresses.PickupAddress, + LinkOnly: true, + }, + { + Model: testCase.destinationLocation, + Type: &factory.Addresses.DeliveryAddress, + LinkOnly: true, + }, + }, nil) + shipmentEtag := etag.GenerateEtag(shipment.UpdatedAt) + _, err = updater.UpdateMTOShipmentStatus(appCtx, shipment.ID, status, nil, nil, shipmentEtag) + suite.NoError(err) + + fetchedShipment := models.MTOShipment{} + err = suite.DB().Find(&fetchedShipment, shipment.ID) + suite.NoError(err) + 
suite.NotNil(fetchedShipment.RequiredDeliveryDate) + suite.Equal(rdd20DaysDate.Format(time.RFC3339), fetchedShipment.RequiredDeliveryDate.Format(time.RFC3339)) + } + }) + suite.Run("Cannot set SUBMITTED status on shipment via UpdateMTOShipmentStatus", func() { setupTestData() diff --git a/pkg/services/mto_shipment/rules.go b/pkg/services/mto_shipment/rules.go index 0fe7e481ebc..604da6a12f0 100644 --- a/pkg/services/mto_shipment/rules.go +++ b/pkg/services/mto_shipment/rules.go @@ -343,7 +343,7 @@ func checkPrimeValidationsOnModel(planner route.Planner) validator { weight = older.NTSRecordedWeight } requiredDeliveryDate, err := CalculateRequiredDeliveryDate(appCtx, planner, *latestPickupAddress, - *latestDestinationAddress, *latestSchedPickupDate, weight.Int(), older.MarketCode) + *latestDestinationAddress, *latestSchedPickupDate, weight.Int(), older.MarketCode, older.MoveTaskOrderID) if err != nil { verrs.Add("requiredDeliveryDate", err.Error()) } diff --git a/pkg/services/mto_shipment/shipment_approver.go b/pkg/services/mto_shipment/shipment_approver.go index 9191657787c..fcce3db616b 100644 --- a/pkg/services/mto_shipment/shipment_approver.go +++ b/pkg/services/mto_shipment/shipment_approver.go @@ -247,7 +247,7 @@ func (f *shipmentApprover) setRequiredDeliveryDate(appCtx appcontext.AppContext, deliveryLocation = shipment.DestinationAddress weight = shipment.PrimeEstimatedWeight.Int() } - requiredDeliveryDate, calcErr := CalculateRequiredDeliveryDate(appCtx, f.planner, *pickupLocation, *deliveryLocation, *shipment.ScheduledPickupDate, weight, shipment.MarketCode) + requiredDeliveryDate, calcErr := CalculateRequiredDeliveryDate(appCtx, f.planner, *pickupLocation, *deliveryLocation, *shipment.ScheduledPickupDate, weight, shipment.MarketCode, shipment.MoveTaskOrderID) if calcErr != nil { return calcErr } diff --git a/pkg/services/paperwork/prime_download_user_upload_to_pdf_converter.go b/pkg/services/paperwork/prime_download_user_upload_to_pdf_converter.go index 0a61b8ebe26..504e8af3a00 100644 --- a/pkg/services/paperwork/prime_download_user_upload_to_pdf_converter.go +++ b/pkg/services/paperwork/prime_download_user_upload_to_pdf_converter.go @@ -117,14 +117,11 @@ func (g *moveUserUploadToPDFDownloader) GenerateDownloadMoveUserUploadPDF(appCtx // Build orderUploadDocType for document func (g *moveUserUploadToPDFDownloader) buildPdfBatchInfo(appCtx appcontext.AppContext, uploadDocType services.MoveOrderUploadType, documentID uuid.UUID) (*pdfBatchInfo, error) { - document, err := models.FetchDocumentWithNoRestrictions(appCtx.DB(), appCtx.Session(), documentID, true) + document, err := models.FetchDocumentWithNoRestrictions(appCtx.DB(), appCtx.Session(), documentID) if err != nil { return nil, errors.Wrap(err, fmt.Sprintf("error fetching document domain by id: %s", documentID)) } - - // filter out deleted uploads from userUploads - document.UserUploads = document.UserUploads.FilterDeleted() - var pdfFileNames []string var pageCounts []int // Document has one or more uploads. Create PDF file for each.
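The ppm_closeout and shipment summary worksheet hunks below repeatedly call FilterDeleted on weight ticket uploads, moving expenses, and progear weight tickets, because ExcludeDeletedScope() cannot be applied to the whole query. As a reader aid, here is a minimal sketch of the soft-delete filtering pattern those helpers follow (illustrative stand-in types only; the real implementations live in pkg/models and may differ in detail):

```go
package models

import "time"

// Minimal stand-ins for illustration; the real structs carry many more fields.
type UserUpload struct {
	DeletedAt *time.Time
}

type UserUploads []UserUpload

// FilterDeleted keeps only uploads that were never soft-deleted. This mirrors
// the shape of the FilterDeleted helpers used in the hunks below.
func (uploads UserUploads) FilterDeleted() UserUploads {
	filtered := UserUploads{}
	for _, upload := range uploads {
		if upload.DeletedAt == nil {
			filtered = append(filtered, upload)
		}
	}
	return filtered
}
```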
diff --git a/pkg/services/ppm_closeout/ppm_closeout.go b/pkg/services/ppm_closeout/ppm_closeout.go index 7ced6a8c257..5c807372da7 100644 --- a/pkg/services/ppm_closeout/ppm_closeout.go +++ b/pkg/services/ppm_closeout/ppm_closeout.go @@ -212,7 +212,33 @@ func (p *ppmCloseoutFetcher) GetPPMShipment(appCtx appcontext.AppContext, ppmShi return nil, apperror.NewQueryError("PPMShipment", err, "while looking for PPMShipment") } } + + // the following checks are needed since we can't use "ExcludeDeletedScope()" in the big query above + // this is because not all of the tables being queried have "deleted_at" columns and this returns an error + if ppmShipment.WeightTickets != nil { + var filteredWeightTickets []models.WeightTicket + // We do not need to consider deleted weight tickets or uploads within them + for _, wt := range ppmShipment.WeightTickets { + if wt.DeletedAt == nil { + wt.EmptyDocument.UserUploads = wt.EmptyDocument.UserUploads.FilterDeleted() + wt.FullDocument.UserUploads = wt.FullDocument.UserUploads.FilterDeleted() + wt.ProofOfTrailerOwnershipDocument.UserUploads = wt.ProofOfTrailerOwnershipDocument.UserUploads.FilterDeleted() + filteredWeightTickets = append(filteredWeightTickets, wt) + } + } + ppmShipment.WeightTickets = filteredWeightTickets + } + // We do not need to consider deleted moving expenses + if len(ppmShipment.MovingExpenses) > 0 { + ppmShipment.MovingExpenses = ppmShipment.MovingExpenses.FilterDeleted() + } + // We do not need to consider deleted progear weight tickets + if len(ppmShipment.ProgearWeightTickets) > 0 { + ppmShipment.ProgearWeightTickets = ppmShipment.ProgearWeightTickets.FilterDeleted() + } + var weightTicket models.WeightTicket + if len(ppmShipment.WeightTickets) >= 1 { weightTicket = ppmShipment.WeightTickets[0] } diff --git a/pkg/services/shipment_summary_worksheet/shipment_summary_worksheet.go b/pkg/services/shipment_summary_worksheet/shipment_summary_worksheet.go index 90b187f0be0..f07bb40bd97 100644 --- a/pkg/services/shipment_summary_worksheet/shipment_summary_worksheet.go +++ b/pkg/services/shipment_summary_worksheet/shipment_summary_worksheet.go @@ -968,7 +968,11 @@ func formatDisbursement(expensesMap map[string]float64, ppmRemainingEntitlement disbursementGTCC = 0 } else { // Disbursement Member is remaining entitlement plus member SIT minus GTCC Disbursement, not less than 0. 
- disbursementMember = ppmRemainingEntitlement + expensesMap["StorageMemberPaid"] + totalGTCCPaid := expensesMap["TotalGTCCPaid"] + expensesMap["StorageGTCCPaid"] + disbursementMember = ppmRemainingEntitlement - totalGTCCPaid + expensesMap["StorageMemberPaid"] + if disbursementMember < 0 { + disbursementMember = 0 + } } // Return formatted values in string @@ -1083,6 +1087,32 @@ func (SSWPPMComputer *SSWPPMComputer) FetchDataShipmentSummaryWorksheetFormData( return nil, dbQErr } + // the following checks are needed since we can't use "ExcludeDeletedScope()" in the big query above + // this is because not all of the tables being queried have "deleted_at" columns and this returns an error + if ppmShipment.WeightTickets != nil { + var filteredWeightTickets []models.WeightTicket + // We do not need to consider deleted weight tickets or uploads within them + for _, wt := range ppmShipment.WeightTickets { + if wt.DeletedAt == nil { + wt.EmptyDocument.UserUploads = wt.EmptyDocument.UserUploads.FilterDeleted() + wt.FullDocument.UserUploads = wt.FullDocument.UserUploads.FilterDeleted() + wt.ProofOfTrailerOwnershipDocument.UserUploads = wt.ProofOfTrailerOwnershipDocument.UserUploads.FilterDeleted() + filteredWeightTickets = append(filteredWeightTickets, wt) + } + } + ppmShipment.WeightTickets = filteredWeightTickets + } + // We do not need to consider deleted moving expenses + if len(ppmShipment.MovingExpenses) > 0 { + nonDeletedMovingExpenses := ppmShipment.MovingExpenses.FilterDeleted() + ppmShipment.MovingExpenses = nonDeletedMovingExpenses + } + // We do not need to consider deleted progear weight tickets + if len(ppmShipment.ProgearWeightTickets) > 0 { + nonDeletedProgearTickets := ppmShipment.ProgearWeightTickets.FilterDeleted() + ppmShipment.ProgearWeightTickets = nonDeletedProgearTickets + } + // Final actual weight is a calculated value we don't store. 
This needs to be fetched independently // Requires WeightTickets eager preload ppmShipmentFinalWeight := models.GetPPMNetWeight(ppmShipment) diff --git a/pkg/services/shipment_summary_worksheet/shipment_summary_worksheet_test.go b/pkg/services/shipment_summary_worksheet/shipment_summary_worksheet_test.go index 7eca297f3a6..4e608703d55 100644 --- a/pkg/services/shipment_summary_worksheet/shipment_summary_worksheet_test.go +++ b/pkg/services/shipment_summary_worksheet/shipment_summary_worksheet_test.go @@ -801,7 +801,7 @@ func (suite *ShipmentSummaryWorksheetServiceSuite) TestGTCCPaidRemainingPPMEntit MovingExpenseType: &storageExpense, Amount: &amount, PaidWithGTCC: models.BoolPointer(true), - SITReimburseableAmount: models.CentPointer(unit.Cents(200)), + SITReimburseableAmount: models.CentPointer(unit.Cents(20000)), }, } @@ -809,8 +809,8 @@ func (suite *ShipmentSummaryWorksheetServiceSuite) TestGTCCPaidRemainingPPMEntit id := uuid.Must(uuid.NewV4()) PPMShipments := []models.PPMShipment{ { - FinalIncentive: models.CentPointer(unit.Cents(600)), - AdvanceAmountReceived: models.CentPointer(unit.Cents(100)), + FinalIncentive: models.CentPointer(unit.Cents(60000)), + AdvanceAmountReceived: models.CentPointer(unit.Cents(10000)), ID: id, Shipment: models.MTOShipment{ ShipmentLocator: &locator, @@ -840,8 +840,8 @@ func (suite *ShipmentSummaryWorksheetServiceSuite) TestGTCCPaidRemainingPPMEntit mockPPMCloseoutFetcher := &mocks.PPMCloseoutFetcher{} sswPPMComputer := NewSSWPPMComputer(mockPPMCloseoutFetcher) sswPage2, _ := sswPPMComputer.FormatValuesShipmentSummaryWorksheetFormPage2(ssd, true, expensesMap) - suite.Equal("$5.00", sswPage2.PPMRemainingEntitlement) - suite.Equal(expectedDisbursementString(500, 500), sswPage2.Disbursement) + suite.Equal("$500.00", sswPage2.PPMRemainingEntitlement) + suite.Equal(expectedDisbursementString(10000, 40000), sswPage2.Disbursement) } func (suite *ShipmentSummaryWorksheetServiceSuite) TestGroupExpenses() { paidWithGTCC := false diff --git a/pkg/services/sit_entry_date_update/sit_entry_date_updater.go b/pkg/services/sit_entry_date_update/sit_entry_date_updater.go index 61bc78bb988..2e32dc8172c 100644 --- a/pkg/services/sit_entry_date_update/sit_entry_date_updater.go +++ b/pkg/services/sit_entry_date_update/sit_entry_date_updater.go @@ -2,6 +2,7 @@ package sitentrydateupdate import ( "database/sql" + "fmt" "time" "github.com/transcom/mymove/pkg/appcontext" @@ -85,12 +86,18 @@ func (p sitEntryDateUpdater) UpdateSitEntryDate(appCtx appcontext.AppContext, s // updating sister service item to have the next day for SIT entry date if s.SITEntryDate == nil { return nil, apperror.NewUnprocessableEntityError("You must provide the SIT entry date in the request") - } else if s.SITEntryDate != nil { - serviceItem.SITEntryDate = s.SITEntryDate - dayAfter := s.SITEntryDate.Add(24 * time.Hour) - serviceItemAdditionalDays.SITEntryDate = &dayAfter } + // The new SIT entry date must be before SIT departure date + if serviceItem.SITDepartureDate != nil && !s.SITEntryDate.Before(*serviceItem.SITDepartureDate) { + return nil, apperror.NewUnprocessableEntityError(fmt.Sprintf("the SIT Entry Date (%s) must be before the SIT Departure Date (%s)", + s.SITEntryDate.Format("2006-01-02"), serviceItem.SITDepartureDate.Format("2006-01-02"))) + } + + serviceItem.SITEntryDate = s.SITEntryDate + dayAfter := s.SITEntryDate.Add(24 * time.Hour) + serviceItemAdditionalDays.SITEntryDate = &dayAfter + // Make the update to both service items and create an InvalidInputError if there were validation issues
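The guard added to UpdateSitEntryDate is self-contained enough to sketch outside the service. A minimal runnable sketch, assuming simplified stand-ins for the model types (the real code operates on MTOServiceItem and returns an apperror rather than a plain error):

```go
package main

import (
	"fmt"
	"time"
)

// validateAndShift mirrors the guard above: the new entry date must fall
// strictly before any existing departure date, and the sister
// (additional-days) service item starts the following day. The parameters are
// simplified stand-ins for the real MTOServiceItem fields.
func validateAndShift(entry time.Time, departure *time.Time) (first, additional time.Time, err error) {
	if departure != nil && !entry.Before(*departure) {
		return time.Time{}, time.Time{}, fmt.Errorf(
			"the SIT Entry Date (%s) must be before the SIT Departure Date (%s)",
			entry.Format("2006-01-02"), departure.Format("2006-01-02"))
	}
	return entry, entry.Add(24 * time.Hour), nil
}

func main() {
	depart := time.Date(2025, 2, 27, 0, 0, 0, 0, time.UTC)

	// An entry date on or after the departure date is rejected.
	_, _, err := validateAndShift(time.Date(2025, 3, 5, 0, 0, 0, 0, time.UTC), &depart)
	fmt.Println(err)

	// A valid entry date also produces the sister item's next-day entry date.
	first, additional, _ := validateAndShift(time.Date(2025, 2, 20, 0, 0, 0, 0, time.UTC), &depart)
	fmt.Println(first.Format("2006-01-02"), additional.Format("2006-01-02"))
}
```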
transactionError := appCtx.NewTransaction(func(txnCtx appcontext.AppContext) error { diff --git a/pkg/services/sit_entry_date_update/sit_entry_date_updater_test.go b/pkg/services/sit_entry_date_update/sit_entry_date_updater_test.go index a6f45b1dcdc..d3546d7a5f7 100644 --- a/pkg/services/sit_entry_date_update/sit_entry_date_updater_test.go +++ b/pkg/services/sit_entry_date_update/sit_entry_date_updater_test.go @@ -1,6 +1,7 @@ package sitentrydateupdate import ( + "fmt" "time" "github.com/gofrs/uuid" @@ -88,4 +89,167 @@ func (suite *UpdateSitEntryDateServiceSuite) TestUpdateSitEntryDate() { suite.Equal(ddaServiceItem.SITEntryDate.Local(), newSitEntryDateNextDay.Local()) }) + suite.Run("Fails to update when DOFSIT entry date is after DOFSIT departure date", func() { + today := models.TimePointer(time.Now()) + tomorrow := models.TimePointer(time.Now()) + move := factory.BuildMove(suite.DB(), nil, nil) + shipment := factory.BuildMTOShipment(suite.DB(), []factory.Customization{ + { + Model: move, + LinkOnly: true, + }, + }, nil) + dofsitServiceItem := factory.BuildMTOServiceItem(suite.DB(), []factory.Customization{ + { + Model: models.MTOServiceItem{ + SITEntryDate: today, + SITDepartureDate: tomorrow, + }, + }, + { + Model: shipment, + LinkOnly: true, + }, + { + Model: models.ReService{ + Code: models.ReServiceCodeDOFSIT, + }, + }, + }, nil) + updatedServiceItem := models.SITEntryDateUpdate{ + ID: dofsitServiceItem.ID, + SITEntryDate: models.TimePointer(tomorrow.AddDate(0, 0, 1)), + } + _, err := updater.UpdateSitEntryDate(suite.AppContextForTest(), &updatedServiceItem) + suite.Error(err) + expectedError := fmt.Sprintf( + "the SIT Entry Date (%s) must be before the SIT Departure Date (%s)", + updatedServiceItem.SITEntryDate.Format("2006-01-02"), + dofsitServiceItem.SITDepartureDate.Format("2006-01-02"), + ) + suite.Contains(err.Error(), expectedError) + }) + + suite.Run("Fails to update when DOFSIT entry date is the same as DOFSIT departure date", func() { + today := models.TimePointer(time.Now()) + tomorrow := models.TimePointer(time.Now()) + move := factory.BuildMove(suite.DB(), nil, nil) + shipment := factory.BuildMTOShipment(suite.DB(), []factory.Customization{ + { + Model: move, + LinkOnly: true, + }, + }, nil) + dofsitServiceItem := factory.BuildMTOServiceItem(suite.DB(), []factory.Customization{ + { + Model: models.MTOServiceItem{ + SITEntryDate: today, + SITDepartureDate: tomorrow, + }, + }, + { + Model: shipment, + LinkOnly: true, + }, + { + Model: models.ReService{ + Code: models.ReServiceCodeDOFSIT, + }, + }, + }, nil) + updatedServiceItem := models.SITEntryDateUpdate{ + ID: dofsitServiceItem.ID, + SITEntryDate: tomorrow, + } + _, err := updater.UpdateSitEntryDate(suite.AppContextForTest(), &updatedServiceItem) + suite.Error(err) + expectedError := fmt.Sprintf( + "the SIT Entry Date (%s) must be before the SIT Departure Date (%s)", + updatedServiceItem.SITEntryDate.Format("2006-01-02"), + dofsitServiceItem.SITDepartureDate.Format("2006-01-02"), + ) + suite.Contains(err.Error(), expectedError) + }) + + suite.Run("Fails to update when DDFSIT entry date is after DDFSIT departure date", func() { + today := models.TimePointer(time.Now()) + tomorrow := models.TimePointer(time.Now()) + move := factory.BuildMove(suite.DB(), nil, nil) + shipment := factory.BuildMTOShipment(suite.DB(), []factory.Customization{ + { + Model: move, + LinkOnly: true, + }, + }, nil) + ddfsitServiceItem := factory.BuildMTOServiceItem(suite.DB(), []factory.Customization{ + { + Model: models.MTOServiceItem{ + 
SITEntryDate: today, + SITDepartureDate: tomorrow, + }, + }, + { + Model: shipment, + LinkOnly: true, + }, + { + Model: models.ReService{ + Code: models.ReServiceCodeDDFSIT, + }, + }, + }, nil) + updatedServiceItem := models.SITEntryDateUpdate{ + ID: ddfsitServiceItem.ID, + SITEntryDate: models.TimePointer(tomorrow.AddDate(0, 0, 1)), + } + _, err := updater.UpdateSitEntryDate(suite.AppContextForTest(), &updatedServiceItem) + suite.Error(err) + expectedError := fmt.Sprintf( + "the SIT Entry Date (%s) must be before the SIT Departure Date (%s)", + updatedServiceItem.SITEntryDate.Format("2006-01-02"), + ddfsitServiceItem.SITDepartureDate.Format("2006-01-02"), + ) + suite.Contains(err.Error(), expectedError) + }) + + suite.Run("Fails to update when DDFSIT entry date is the same as DDFSIT departure date", func() { + today := models.TimePointer(time.Now()) + tomorrow := models.TimePointer(time.Now()) + move := factory.BuildMove(suite.DB(), nil, nil) + shipment := factory.BuildMTOShipment(suite.DB(), []factory.Customization{ + { + Model: move, + LinkOnly: true, + }, + }, nil) + ddfsitServiceItem := factory.BuildMTOServiceItem(suite.DB(), []factory.Customization{ + { + Model: models.MTOServiceItem{ + SITEntryDate: today, + SITDepartureDate: tomorrow, + }, + }, + { + Model: shipment, + LinkOnly: true, + }, + { + Model: models.ReService{ + Code: models.ReServiceCodeDDFSIT, + }, + }, + }, nil) + updatedServiceItem := models.SITEntryDateUpdate{ + ID: ddfsitServiceItem.ID, + SITEntryDate: tomorrow, + } + _, err := updater.UpdateSitEntryDate(suite.AppContextForTest(), &updatedServiceItem) + suite.Error(err) + expectedError := fmt.Sprintf( + "the SIT Entry Date (%s) must be before the SIT Departure Date (%s)", + updatedServiceItem.SITEntryDate.Format("2006-01-02"), + ddfsitServiceItem.SITDepartureDate.Format("2006-01-02"), + ) + suite.Contains(err.Error(), expectedError) + }) } diff --git a/pkg/storage/filesystem.go b/pkg/storage/filesystem.go index 259fd4ee8ab..f6e43583420 100644 --- a/pkg/storage/filesystem.go +++ b/pkg/storage/filesystem.go @@ -116,6 +116,8 @@ func (fs *Filesystem) Fetch(key string) (io.ReadCloser, error) { // Tags returns the tags for a specified key func (fs *Filesystem) Tags(_ string) (map[string]string, error) { tags := make(map[string]string) + // Assume anti-virus complete + tags["av-status"] = "CLEAN" return tags, nil } diff --git a/pkg/storage/filesystem_test.go b/pkg/storage/filesystem_test.go index 27ecc5e951c..9c37b9204c8 100644 --- a/pkg/storage/filesystem_test.go +++ b/pkg/storage/filesystem_test.go @@ -1,6 +1,8 @@ package storage import ( + "io" + "strings" "testing" ) @@ -21,3 +23,62 @@ func TestFilesystemPresignedURL(t *testing.T) { t.Errorf("wrong presigned url: expected %s, got %s", expected, url) } } + +func TestFilesystemReturnsSuccessful(t *testing.T) { + fsParams := FilesystemParams{ + root: "./", + webRoot: "https://example.text/files", + } + filesystem := NewFilesystem(fsParams) + if filesystem == nil { + t.Fatal("could not create new filesystem") + } + + storeValue := strings.NewReader("anyValue") + _, err := filesystem.Store("anyKey", storeValue, "", nil) + if err != nil { + t.Fatalf("could not store in filesystem: %s", err) + } + + retReader, err := filesystem.Fetch("anyKey") + if err != nil { + t.Fatalf("could not fetch from filesystem: %s", err) + } + + err = filesystem.Delete("anyKey") + if err != nil { + t.Fatalf("could not delete on filesystem: %s", err) + } + + retValue, err := io.ReadAll(retReader) + if strings.Compare(string(retValue[:]), "anyValue") 
!= 0 { + t.Fatalf("unexpected value fetched from filesystem: %s", string(retValue)) + } + + fileSystem := filesystem.FileSystem() + if fileSystem == nil { + t.Fatal("could not retrieve filesystem from filesystem") + } + + tempFileSystem := filesystem.TempFileSystem() + if tempFileSystem == nil { + t.Fatal("could not retrieve filesystem from filesystem") + } +} + +func TestFilesystemTags(t *testing.T) { + fsParams := FilesystemParams{ + root: "/home/username", + webRoot: "https://example.text/files", + } + fs := NewFilesystem(fsParams) + + tags, err := fs.Tags("anyKey") + if err != nil { + t.Fatalf("could not get tags: %s", err) + } + + if tag, exists := tags["av-status"]; !exists || strings.Compare(tag, "CLEAN") != 0 { + t.Fatal("tag 'av-status' should return CLEAN") + } +} diff --git a/pkg/storage/memory.go b/pkg/storage/memory.go index 2f06ed6b96e..4e171e40e9d 100644 --- a/pkg/storage/memory.go +++ b/pkg/storage/memory.go @@ -116,6 +116,8 @@ func (fs *Memory) Fetch(key string) (io.ReadCloser, error) { // Tags returns the tags for a specified key func (fs *Memory) Tags(_ string) (map[string]string, error) { tags := make(map[string]string) + // Assume anti-virus complete + tags["av-status"] = "CLEAN" return tags, nil } diff --git a/pkg/storage/memory_test.go b/pkg/storage/memory_test.go index 59384c5acee..bdf3133e9c8 100644 --- a/pkg/storage/memory_test.go +++ b/pkg/storage/memory_test.go @@ -1,6 +1,8 @@ package storage import ( + "io" + "strings" "testing" ) @@ -21,3 +23,62 @@ func TestMemoryPresignedURL(t *testing.T) { t.Errorf("wrong presigned url: expected %s, got %s", expected, url) } } + +func TestMemoryReturnsSuccessful(t *testing.T) { + fsParams := MemoryParams{ + root: "/home/username", + webRoot: "https://example.text/files", + } + memory := NewMemory(fsParams) + if memory == nil { + t.Fatal("could not create new memory") + } + + storeValue := strings.NewReader("anyValue") + _, err := memory.Store("anyKey", storeValue, "", nil) + if err != nil { + t.Fatalf("could not store in memory: %s", err) + } + + retReader, err := memory.Fetch("anyKey") + if err != nil { + t.Fatalf("could not fetch from memory: %s", err) + } + + err = memory.Delete("anyKey") + if err != nil { + t.Fatalf("could not delete on memory: %s", err) + } + + retValue, err := io.ReadAll(retReader) + if strings.Compare(string(retValue[:]), "anyValue") != 0 { + t.Fatalf("unexpected value fetched from memory: %s", string(retValue)) + } + + fileSystem := memory.FileSystem() + if fileSystem == nil { + t.Fatal("could not retrieve filesystem from memory") + } + + tempFileSystem := memory.TempFileSystem() + if tempFileSystem == nil { + t.Fatal("could not retrieve filesystem from memory") + } +} + +func TestMemoryTags(t *testing.T) { + fsParams := MemoryParams{ + root: "/home/username", + webRoot: "https://example.text/files", + } + fs := NewMemory(fsParams) + + tags, err := fs.Tags("anyKey") + if err != nil { + t.Fatalf("could not get tags: %s", err) + } + + if tag, exists := tags["av-status"]; !exists || strings.Compare(tag, "CLEAN") != 0 { + t.Fatal("tag 'av-status' should return CLEAN") + } +} diff --git a/pkg/storage/test/s3.go b/pkg/storage/test/s3.go index 97d06e7733d..56fbac83564 100644 --- a/pkg/storage/test/s3.go +++ b/pkg/storage/test/s3.go @@ -18,6 +18,7 @@ type FakeS3Storage struct { willSucceed bool fs *afero.Afero tempFs *afero.Afero + EmptyTags bool // Used for testing only } // Delete removes a file.
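With every backend stub now emitting the same av-status tag, callers can gate on it uniformly. A minimal sketch of a consumer treating anything other than an explicit CLEAN tag as not-yet-scanned, assuming a trimmed stand-in for the storage interface (Storager here is reduced to Tags, and isInfected and cleanStub are illustrative, not part of pkg/storage):

```go
package main

import "fmt"

// Storager is a trimmed stand-in for the storage interface; only Tags is
// shown. The av-status key and CLEAN value follow the convention the stubs
// above emit, so local and in-memory backends look like a post-scan S3 object.
type Storager interface {
	Tags(key string) (map[string]string, error)
}

type cleanStub struct{}

func (cleanStub) Tags(string) (map[string]string, error) {
	return map[string]string{"av-status": "CLEAN"}, nil
}

// isInfected reports whether a stored object should be withheld: a missing
// tag is treated the same as a non-CLEAN one.
func isInfected(s Storager, key string) (bool, error) {
	tags, err := s.Tags(key)
	if err != nil {
		return true, err
	}
	return tags["av-status"] != "CLEAN", nil
}

func main() {
	infected, _ := isInfected(cleanStub{}, "anyKey")
	fmt.Println("withhold download:", infected) // false for the stubbed backends
}
```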
@@ -95,7 +96,11 @@ func (fake *FakeS3Storage) TempFileSystem() *afero.Afero { // Tags returns the tags for a specified key func (fake *FakeS3Storage) Tags(_ string) (map[string]string, error) { tags := map[string]string{ - "tagName": "tagValue", + "av-status": "CLEAN", // Assume anti-virus run + } + if fake.EmptyTags { + tags = map[string]string{} + fake.EmptyTags = false // Reset after initial return, so future calls (tests) have filled tags } return tags, nil } diff --git a/pkg/storage/test/s3_test.go b/pkg/storage/test/s3_test.go new file mode 100644 index 00000000000..3c2f63bbeff --- /dev/null +++ b/pkg/storage/test/s3_test.go @@ -0,0 +1,100 @@ +package test + +import ( + "io" + "strings" + "testing" +) + +// Tests all functions of FakeS3Storage +func TestFakeS3ReturnsSuccessful(t *testing.T) { + fakeS3 := NewFakeS3Storage(true) + if fakeS3 == nil { + t.Fatal("could not create new fakeS3") + } + + storeValue := strings.NewReader("anyValue") + _, err := fakeS3.Store("anyKey", storeValue, "", nil) + if err != nil { + t.Fatalf("could not store in fakeS3: %s", err) + } + + retReader, err := fakeS3.Fetch("anyKey") + if err != nil { + t.Fatalf("could not fetch from fakeS3: %s", err) + } + + err = fakeS3.Delete("anyKey") + if err != nil { + t.Fatalf("could not delete on fakeS3: %s", err) + } + + retValue, err := io.ReadAll(retReader) + if strings.Compare(string(retValue[:]), "anyValue") != 0 { + t.Fatalf("unexpected value fetched from fakeS3: %s", string(retValue)) + } + + fileSystem := fakeS3.FileSystem() + if fileSystem == nil { + t.Fatal("could not retrieve filesystem from fakeS3") + } + + tempFileSystem := fakeS3.TempFileSystem() + if tempFileSystem == nil { + t.Fatal("could not retrieve filesystem from fakeS3") + } + + tags, err := fakeS3.Tags("anyKey") + if err != nil { + t.Fatalf("could not fetch from fakeS3: %s", err) + } + if len(tags) != 1 { + t.Fatal("return tags must have av-status key assigned for fakeS3") + } + + presignedUrl, err := fakeS3.PresignedURL("anyKey", "anyContentType", "anyFileName") + if err != nil { + t.Fatal("could not retrieve presignedUrl from fakeS3") + } + + if strings.Compare(presignedUrl, "https://example.com/dir/anyKey?response-content-disposition=attachment%3B+filename%3D%22anyFileName%22&response-content-type=anyContentType&signed=test") != 0 { + t.Fatalf("could not retrieve proper presignedUrl from fakeS3 %s", presignedUrl) + } +} + +// Test for willSucceed false +func TestFakeS3WillNotSucceed(t *testing.T) { + fakeS3 := NewFakeS3Storage(false) + if fakeS3 == nil { + t.Fatalf("could not create new fakeS3") + } + + storeValue := strings.NewReader("anyValue") + _, err := fakeS3.Store("anyKey", storeValue, "", nil) + if err == nil { + t.Fatal("should not be able to store when willSucceed false") + } + + _, err = fakeS3.Fetch("anyKey") + if err == nil { + t.Fatal("should not find file on Fetch when willSucceed false") + } +} + +// Tests empty tag returns empty tags on FakeS3Storage +func TestFakeS3ReturnsEmptyTags(t *testing.T) { + fakeS3 := NewFakeS3Storage(true) + if fakeS3 == nil { + t.Fatal("could not create new fakeS3") + } + + fakeS3.EmptyTags = true + + tags, err := fakeS3.Tags("anyKey") + if err != nil { + t.Fatalf("could not fetch from fakeS3: %s", err) + } + if len(tags) != 0 { + t.Fatal("return tags must be empty for FakeS3 when EmptyTags set to true") + } +} diff --git a/pkg/testdatagen/scenario/shared.go
b/pkg/testdatagen/scenario/shared.go index 69522d1d02f..52a325f3c8d 100644 --- a/pkg/testdatagen/scenario/shared.go +++ b/pkg/testdatagen/scenario/shared.go @@ -5011,7 +5011,7 @@ func createHHGWithPaymentServiceItems( } destEntryDate := actualPickupDate - destDepDate := actualPickupDate + destDepDate := actualPickupDate.AddDate(0, 0, 1) destSITAddress := factory.BuildAddress(db, nil, nil) destSIT := factory.BuildMTOServiceItem(nil, []factory.Customization{ { diff --git a/pkg/testdatagen/testharness/make_move.go b/pkg/testdatagen/testharness/make_move.go index 0039c984493..5c8909b983b 100644 --- a/pkg/testdatagen/testharness/make_move.go +++ b/pkg/testdatagen/testharness/make_move.go @@ -9107,6 +9107,7 @@ func MakeBasicInternationalHHGMoveWithServiceItemsandPaymentRequestsForTIO(appCt ihpkCost := unit.Cents(298800) ihupkCost := unit.Cents(33280) poefscCost := unit.Cents(25000) + idshutCost := unit.Cents(623) // Create Customer userInfo := newUserInfo("customer") @@ -9441,6 +9442,47 @@ func MakeBasicInternationalHHGMoveWithServiceItemsandPaymentRequestsForTIO(appCt }, }, nil) + // Shuttling service item + approvedAtTime := time.Now() + idshut := factory.BuildMTOServiceItem(appCtx.DB(), []factory.Customization{ + { + Model: models.MTOServiceItem{ + Status: models.MTOServiceItemStatusApproved, + ApprovedAt: &approvedAtTime, + EstimatedWeight: &estimatedWeight, + ActualWeight: &actualWeight, + }, + }, + { + Model: mto, + LinkOnly: true, + }, + { + Model: mtoShipmentHHG, + LinkOnly: true, + }, + { + Model: models.ReService{ + ID: uuid.FromStringOrNil("22fc07ed-be15-4f50-b941-cbd38153b378"), // IDSHUT - International Destination Shuttle + }, + }, + }, nil) + + factory.BuildPaymentServiceItemWithParams(appCtx.DB(), models.ReServiceCodeIDSHUT, + basicPaymentServiceItemParams, []factory.Customization{ + { + Model: models.PaymentServiceItem{ + PriceCents: &idshutCost, + }, + }, { + Model: paymentRequestHHG, + LinkOnly: true, + }, { + Model: idshut, + LinkOnly: true, + }, + }, nil) + basicPortFuelSurchargePaymentServiceItemParams := []factory.CreatePaymentServiceItemParams{ { Key: models.ServiceItemParamNameContractCode, diff --git a/playwright/tests/office/txo/tioFlowsInternational.spec.js b/playwright/tests/office/txo/tioFlowsInternational.spec.js index 30f4c0b0dac..cf8ed39c541 100644 --- a/playwright/tests/office/txo/tioFlowsInternational.spec.js +++ b/playwright/tests/office/txo/tioFlowsInternational.spec.js @@ -143,6 +143,17 @@ test.describe('TIO user', () => { await page.getByText('Next').click(); await tioFlowPage.slowDown(); + await expect(page.getByText('International destination shuttle service')).toBeVisible(); + await page.getByText('Show calculations').click(); + await expect(page.locator('[data-testid="ServiceItemCalculations"]')).toContainText('Calculations'); + await expect(page.locator('[data-testid="ServiceItemCalculations"]')).toContainText('Billable weight (cwt)'); + await expect(page.locator('[data-testid="ServiceItemCalculations"]')).toContainText('Destination price'); + await expect(page.locator('[data-testid="ServiceItemCalculations"]')).toContainText('Price escalation factor'); + // approve + await tioFlowPage.approveServiceItem(); + await page.getByText('Next').click(); + await tioFlowPage.slowDown(); + await expect(page.getByText('International POE Fuel Surcharge')).toBeVisible(); await page.getByText('Show calculations').click(); await expect(page.locator('[data-testid="ServiceItemCalculations"]')).toContainText('Calculations'); @@ -159,8 +170,8 @@ test.describe('TIO user', 
() => { await expect(page.getByText('needs your review')).toHaveCount(0, { timeout: 10000 }); await page.getByText('Complete request').click(); - await expect(page.locator('[data-testid="requested"]')).toContainText('$4,281.48'); - await expect(page.locator('[data-testid="accepted"]')).toContainText('$4,281.48'); + await expect(page.locator('[data-testid="requested"]')).toContainText('$4,287.71'); + await expect(page.locator('[data-testid="accepted"]')).toContainText('$4,287.71'); await expect(page.locator('[data-testid="rejected"]')).toContainText('$0.00'); await page.getByText('Authorize payment').click(); diff --git a/scripts/README.md b/scripts/README.md index 46aa61a74c1..d010b67b7fe 100644 --- a/scripts/README.md +++ b/scripts/README.md @@ -170,6 +170,7 @@ migrations. | `download-secure-migration` | A script to download secure migrations from all environments | | `generate-secure-migration` | A script to help manage the creation of secure migrations | | `upload-secure-migration` | A script to upload secure migrations to all environments in both commercial and GovCloud AWS | +| `generate-ddl-migration` | A script to help manage the creation of DDL migrations | ### Database Scripts diff --git a/scripts/generate-ddl-migration b/scripts/generate-ddl-migration new file mode 100755 index 00000000000..345d712c2cc --- /dev/null +++ b/scripts/generate-ddl-migration @@ -0,0 +1,22 @@ +#!/bin/bash + +dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +version=$(date +"%Y%m%d%H%M%S") +filename=$1 +type=$2 + +if [ "$type" == "functions" ]; then + echo "${version}_fn_${filename}.up.sql" >> "${dir}/../migrations/app/ddl_functions_manifest.txt" + touch "${dir}/../migrations/app/ddl_migrations/ddl_functions/${version}_fn_${filename}.up.sql" +elif [ "$type" == "tables" ]; then + echo "${version}_tbl_${filename}.up.sql" >> "${dir}/../migrations/app/ddl_tables_manifest.txt" + touch "${dir}/../migrations/app/ddl_migrations/ddl_tables/${version}_tbl_${filename}.up.sql" +elif [ "$type" == "types" ]; then + echo "${version}_ty_${filename}.up.sql" >> "${dir}/../migrations/app/ddl_types_manifest.txt" + touch "${dir}/../migrations/app/ddl_migrations/ddl_types/${version}_ty_${filename}.up.sql" + elif [ "$type" == "views" ]; then + echo "${version}_vw_${filename}.up.sql" >> "${dir}/../migrations/app/ddl_views_manifest.txt" + touch "${dir}/../migrations/app/ddl_migrations/ddl_views/${version}_vw_${filename}.up.sql" +else + echo "Invalid type" +fi diff --git a/scripts/generate-secure-migration b/scripts/generate-secure-migration index f8053049d66..82a3adba869 100755 --- a/scripts/generate-secure-migration +++ b/scripts/generate-secure-migration @@ -83,5 +83,9 @@ EOM # # Update the migrations manifest # +# Add migration to DML manifest +readonly manifest_file="${dir}/../migrations/app/dml_migrations_manifest.txt" +echo "${secure_migration_name}" >> "$manifest_file" -./scripts/update-migrations-manifest + +##./scripts/update-migrations-manifest diff --git a/src/components/Customer/MtoShipmentForm/MtoShipmentForm.jsx b/src/components/Customer/MtoShipmentForm/MtoShipmentForm.jsx index d8e04da7494..dea9ef9184f 100644 --- a/src/components/Customer/MtoShipmentForm/MtoShipmentForm.jsx +++ b/src/components/Customer/MtoShipmentForm/MtoShipmentForm.jsx @@ -51,6 +51,7 @@ import withRouter from 'utils/routing'; import { ORDERS_TYPE } from 'constants/orders'; import { isBooleanFlagEnabled } from 'utils/featureFlags'; import { dateSelectionWeekendHolidayCheck } from 'utils/calendar'; +import { isPreceedingAddressComplete } 
from 'shared/utils'; const blankAddress = { address: { @@ -105,7 +106,7 @@ class MtoShipmentForm extends Component { const { moveId } = params; const isNTSR = shipmentType === SHIPMENT_OPTIONS.NTSR; - const saveDeliveryAddress = hasDeliveryAddress === 'yes' || isNTSR; + const saveDeliveryAddress = hasDeliveryAddress === 'true' || isNTSR; const preformattedMtoShipment = { shipmentType, @@ -116,14 +117,14 @@ class MtoShipmentForm extends Component { ...delivery, address: saveDeliveryAddress ? delivery.address : undefined, }, - hasSecondaryPickup: hasSecondaryPickup === 'yes', - secondaryPickup: hasSecondaryPickup === 'yes' ? secondaryPickup : {}, - hasSecondaryDelivery: hasSecondaryDelivery === 'yes', - secondaryDelivery: hasSecondaryDelivery === 'yes' ? secondaryDelivery : {}, - hasTertiaryPickup: hasTertiaryPickup === 'yes', - tertiaryPickup: hasTertiaryPickup === 'yes' ? tertiaryPickup : {}, - hasTertiaryDelivery: hasTertiaryDelivery === 'yes', - tertiaryDelivery: hasTertiaryDelivery === 'yes' ? tertiaryDelivery : {}, + hasSecondaryPickup: hasSecondaryPickup === 'true', + secondaryPickup: hasSecondaryPickup === 'true' ? secondaryPickup : {}, + hasSecondaryDelivery: hasSecondaryDelivery === 'true', + secondaryDelivery: hasSecondaryDelivery === 'true' ? secondaryDelivery : {}, + hasTertiaryPickup: hasTertiaryPickup === 'true', + tertiaryPickup: hasTertiaryPickup === 'true' ? tertiaryPickup : {}, + hasTertiaryDelivery: hasTertiaryDelivery === 'true', + tertiaryDelivery: hasTertiaryDelivery === 'true' ? tertiaryDelivery : {}, }; const pendingMtoShipment = formatMtoShipmentForAPI(preformattedMtoShipment); @@ -379,9 +380,10 @@ class MtoShipmentForm extends Component { data-testid="has-secondary-pickup" label="Yes" name="hasSecondaryPickup" - value="yes" + value="true" title="Yes, I have a second pickup address" - checked={hasSecondaryPickup === 'yes'} + checked={hasSecondaryPickup === 'true'} + disabled={!isPreceedingAddressComplete('true', values.pickup.address)} /> - {hasSecondaryPickup === 'yes' && ( + {hasSecondaryPickup === 'true' && ( )} - {isTertiaryAddressEnabled && hasSecondaryPickup === 'yes' && ( + {isTertiaryAddressEnabled && hasSecondaryPickup === 'true' && (

Do you want movers to pick up any belongings from a third address?

@@ -414,9 +417,15 @@ class MtoShipmentForm extends Component { data-testid="has-tertiary-pickup" label="Yes" name="hasTertiaryPickup" - value="yes" + value="true" title="Yes, I have a third pickup address" - checked={hasTertiaryPickup === 'yes'} + checked={hasTertiaryPickup === 'true'} + disabled={ + !isPreceedingAddressComplete( + hasSecondaryPickup, + values.secondaryPickup.address, + ) + } />
)} {isTertiaryAddressEnabled && - hasTertiaryPickup === 'yes' && - hasSecondaryPickup === 'yes' && ( + hasTertiaryPickup === 'true' && + hasSecondaryPickup === 'true' && ( <>

Third Pickup Address

)} - {(hasDeliveryAddress === 'yes' || isNTSR) && ( + {(hasDeliveryAddress === 'true' || isNTSR) && ( - {hasSecondaryDelivery === 'yes' && ( + {hasSecondaryDelivery === 'true' && ( )} - {isTertiaryAddressEnabled && hasSecondaryDelivery === 'yes' && ( + {isTertiaryAddressEnabled && hasSecondaryDelivery === 'true' && (

Do you want movers to deliver any belongings to a third address?

@@ -569,9 +586,15 @@ class MtoShipmentForm extends Component { data-testid="has-tertiary-delivery" label="Yes" name="hasTertiaryDelivery" - value="yes" + value="true" title="Yes, I have a third delivery address" - checked={hasTertiaryDelivery === 'yes'} + checked={hasTertiaryDelivery === 'true'} + disabled={ + !isPreceedingAddressComplete( + hasSecondaryDelivery, + values.secondaryDelivery.address, + ) + } />
)} {isTertiaryAddressEnabled && - hasTertiaryDelivery === 'yes' && - hasSecondaryDelivery === 'yes' && ( + hasTertiaryDelivery === 'true' && + hasSecondaryDelivery === 'true' && ( <>

Third Delivery Address

)} - {hasDeliveryAddress === 'no' && !isRetireeSeparatee && !isNTSR && ( + {hasDeliveryAddress === 'false' && !isRetireeSeparatee && !isNTSR && (

We can use the zip of your new duty location.
@@ -616,7 +645,7 @@ class MtoShipmentForm extends Component { You can add the specific delivery address later, once you know it.

)} - {hasDeliveryAddress === 'no' && isRetireeSeparatee && !isNTSR && ( + {hasDeliveryAddress === 'false' && isRetireeSeparatee && !isNTSR && (

We can use the zip of the HOR, PLEAD or HOS you entered with your orders.
diff --git a/src/components/Customer/MtoShipmentForm/MtoShipmentForm.test.jsx b/src/components/Customer/MtoShipmentForm/MtoShipmentForm.test.jsx index 424bbe04d55..2f28ca91088 100644 --- a/src/components/Customer/MtoShipmentForm/MtoShipmentForm.test.jsx +++ b/src/components/Customer/MtoShipmentForm/MtoShipmentForm.test.jsx @@ -326,19 +326,46 @@ describe('MtoShipmentForm component', () => { await userEvent.click(screen.getByTitle('Yes, I have a second pickup address')); const streetAddress1 = await screen.findAllByLabelText(/Address 1/); - expect(streetAddress1[1]).toHaveAttribute('name', 'secondaryPickup.address.streetAddress1'); + expect(streetAddress1.length).toBe(1); + expect(streetAddress1[0]).toHaveAttribute('name', 'pickup.address.streetAddress1'); const streetAddress2 = await screen.findAllByLabelText(/Address 2/); - expect(streetAddress2[1]).toHaveAttribute('name', 'secondaryPickup.address.streetAddress2'); + expect(streetAddress2[0]).toHaveAttribute('name', 'pickup.address.streetAddress2'); const city = screen.getAllByTestId('City'); - expect(city[1]).toHaveAttribute('aria-label', 'secondaryPickup.address.city'); + expect(city[0]).toHaveAttribute('aria-label', 'pickup.address.city'); const state = screen.getAllByTestId(/State/); - expect(state[1]).toHaveAttribute('aria-label', 'secondaryPickup.address.state'); + expect(state[0]).toHaveAttribute('aria-label', 'pickup.address.state'); const zip = screen.getAllByTestId(/ZIP/); - expect(zip[1]).toHaveAttribute('aria-label', 'secondaryPickup.address.postalCode'); + expect(zip[0]).toHaveAttribute('aria-label', 'pickup.address.postalCode'); + }); + + it('renders a second address fieldset when the user has a pickup address', async () => { + renderMtoShipmentForm(); + + await userEvent.click(screen.getByTitle('Yes, I know my delivery address')); + + const streetAddress1 = await screen.findAllByLabelText(/Address 1/); + expect(streetAddress1[0]).toHaveAttribute('name', 'pickup.address.streetAddress1'); + expect(streetAddress1[1]).toHaveAttribute('name', 'delivery.address.streetAddress1'); + + const streetAddress2 = await screen.findAllByLabelText(/Address 2/); + expect(streetAddress2[0]).toHaveAttribute('name', 'pickup.address.streetAddress2'); + expect(streetAddress2[1]).toHaveAttribute('name', 'delivery.address.streetAddress2'); + + const city = screen.getAllByTestId('City'); + expect(city[0]).toHaveAttribute('aria-label', 'pickup.address.city'); + expect(city[1]).toHaveAttribute('aria-label', 'delivery.address.city'); + + const state = screen.getAllByTestId('State'); + expect(state[0]).toHaveAttribute('aria-label', 'pickup.address.state'); + expect(state[1]).toHaveAttribute('aria-label', 'delivery.address.state'); + + const zip = screen.getAllByTestId('ZIP'); + expect(zip[0]).toHaveAttribute('aria-label', 'pickup.address.postalCode'); + expect(zip[1]).toHaveAttribute('aria-label', 'delivery.address.postalCode'); }); it('renders a second address fieldset when the user has a delivery address', async () => { @@ -388,24 +415,24 @@ describe('MtoShipmentForm component', () => { await userEvent.click(screen.getByTitle('Yes, I have a second delivery address')); const streetAddress1 = await screen.findAllByLabelText(/Address 1/); - expect(streetAddress1.length).toBe(3); - expect(streetAddress1[2]).toHaveAttribute('name', 'secondaryDelivery.address.streetAddress1'); + expect(streetAddress1[0]).toHaveAttribute('name', 'pickup.address.streetAddress1'); + expect(streetAddress1[1]).toHaveAttribute('name', 'delivery.address.streetAddress1'); const 
streetAddress2 = await screen.findAllByLabelText(/Address 2/); - expect(streetAddress2.length).toBe(3); - expect(streetAddress2[2]).toHaveAttribute('name', 'secondaryDelivery.address.streetAddress2'); + expect(streetAddress2[0]).toHaveAttribute('name', 'pickup.address.streetAddress2'); + expect(streetAddress2[1]).toHaveAttribute('name', 'delivery.address.streetAddress2'); const city = screen.getAllByTestId('City'); - expect(city.length).toBe(3); - expect(city[2]).toHaveAttribute('aria-label', 'secondaryDelivery.address.city'); + expect(city[0]).toHaveAttribute('aria-label', 'pickup.address.city'); + expect(city[1]).toHaveAttribute('aria-label', 'delivery.address.city'); const state = await screen.getAllByTestId(/State/); - expect(state.length).toBe(3); - expect(state[2]).toHaveAttribute('aria-label', 'secondaryDelivery.address.state'); + expect(state[0]).toHaveAttribute('aria-label', 'pickup.address.state'); + expect(state[1]).toHaveAttribute('aria-label', 'delivery.address.state'); const zip = await screen.getAllByTestId(/ZIP/); - expect(zip.length).toBe(3); - expect(zip[2]).toHaveAttribute('aria-label', 'secondaryDelivery.address.postalCode'); + expect(zip[0]).toHaveAttribute('aria-label', 'pickup.address.postalCode'); + expect(zip[1]).toHaveAttribute('aria-label', 'delivery.address.postalCode'); }); it('goes back when the back button is clicked', async () => { @@ -1134,25 +1161,25 @@ describe('MtoShipmentForm component', () => { }); }); - it('renders a second address fieldset when the user has a second pickup address', async () => { + it('renders a second address fieldset when the user has a pickup address', async () => { renderUBShipmentForm(); await userEvent.click(screen.getByTitle('Yes, I have a second pickup address')); const streetAddress1 = await screen.findAllByLabelText(/Address 1/); - expect(streetAddress1[1]).toHaveAttribute('name', 'secondaryPickup.address.streetAddress1'); + expect(streetAddress1[0]).toHaveAttribute('name', 'pickup.address.streetAddress1'); const streetAddress2 = await screen.findAllByLabelText(/Address 2/); - expect(streetAddress2[1]).toHaveAttribute('name', 'secondaryPickup.address.streetAddress2'); + expect(streetAddress2[0]).toHaveAttribute('name', 'pickup.address.streetAddress2'); const city = screen.getAllByTestId('City'); - expect(city[1]).toHaveAttribute('aria-label', 'secondaryPickup.address.city'); + expect(city[0]).toHaveAttribute('aria-label', 'pickup.address.city'); const state = screen.getAllByTestId('State'); - expect(state[1]).toHaveAttribute('aria-label', 'secondaryPickup.address.state'); + expect(state[0]).toHaveAttribute('aria-label', 'pickup.address.state'); const zip = screen.getAllByTestId('ZIP'); - expect(zip[1]).toHaveAttribute('aria-label', 'secondaryPickup.address.postalCode'); + expect(zip[0]).toHaveAttribute('aria-label', 'pickup.address.postalCode'); }); it('renders a second address fieldset when the user has a delivery address', async () => { diff --git a/src/components/Customer/PPM/Booking/DateAndLocationForm/DateAndLocationForm.jsx b/src/components/Customer/PPM/Booking/DateAndLocationForm/DateAndLocationForm.jsx index ca158d027cd..f9f86fed5b2 100644 --- a/src/components/Customer/PPM/Booking/DateAndLocationForm/DateAndLocationForm.jsx +++ b/src/components/Customer/PPM/Booking/DateAndLocationForm/DateAndLocationForm.jsx @@ -21,6 +21,7 @@ import { OptionalAddressSchema } from 'components/Customer/MtoShipmentForm/valid import { requiredAddressSchema, partialRequiredAddressSchema } from 'utils/validation'; import { 
isBooleanFlagEnabled } from 'utils/featureFlags'; import RequiredTag from 'components/form/RequiredTag'; +import { isPreceedingAddressComplete } from 'shared/utils'; let meta = ''; @@ -45,6 +46,12 @@ let validationShape = { secondaryDestinationAddress: Yup.object().shape({ address: OptionalAddressSchema, }), + tertiaryPickupAddress: Yup.object().shape({ + address: OptionalAddressSchema, + }), + tertiaryDestinationAddress: Yup.object().shape({ + address: OptionalAddressSchema, + }), }; const DateAndLocationForm = ({ mtoShipment, destinationDutyLocation, serviceMember, move, onBack, onSubmit }) => { @@ -52,6 +59,7 @@ const DateAndLocationForm = ({ mtoShipment, destinationDutyLocation, serviceMemb useCurrentResidence: false, pickupAddress: {}, secondaryPickupAddress: {}, + tertiaryPickupAddress: {}, hasSecondaryPickupAddress: mtoShipment?.ppmShipment?.secondaryPickupAddress ? 'true' : 'false', hasTertiaryPickupAddress: mtoShipment?.ppmShipment?.tertiaryPickupAddress ? 'true' : 'false', useCurrentDestinationAddress: false, @@ -62,7 +70,6 @@ const DateAndLocationForm = ({ mtoShipment, destinationDutyLocation, serviceMemb sitExpected: mtoShipment?.ppmShipment?.sitExpected ? 'true' : 'false', expectedDepartureDate: mtoShipment?.ppmShipment?.expectedDepartureDate || '', closeoutOffice: move?.closeoutOffice || {}, - tertiaryPickupAddress: {}, tertiaryDestinationAddress: {}, }; @@ -228,6 +235,7 @@ const DateAndLocationForm = ({ mtoShipment, destinationDutyLocation, serviceMemb name="hasSecondaryPickupAddress" value="true" checked={values.hasSecondaryPickupAddress === 'true'} + disabled={!isPreceedingAddressComplete('true', values.pickupAddress.address)} /> @@ -276,6 +285,12 @@ const DateAndLocationForm = ({ mtoShipment, destinationDutyLocation, serviceMemb value="true" title="Yes, I have a third delivery address" checked={values.hasTertiaryPickupAddress === 'true'} + disabled={ + !isPreceedingAddressComplete( + values.hasSecondaryPickupAddress, + values.secondaryPickupAddress.address, + ) + } /> @@ -341,6 +362,7 @@ const DateAndLocationForm = ({ mtoShipment, destinationDutyLocation, serviceMemb name="hasSecondaryDestinationAddress" value="true" checked={values.hasSecondaryDestinationAddress === 'true'} + disabled={!isPreceedingAddressComplete('true', values.destinationAddress.address)} /> @@ -390,6 +413,12 @@ const DateAndLocationForm = ({ mtoShipment, destinationDutyLocation, serviceMemb value="true" title="Yes, I have a third delivery address" checked={values.hasTertiaryDestinationAddress === 'true'} + disabled={ + !isPreceedingAddressComplete( + values.hasSecondaryDestinationAddress, + values.secondaryDestinationAddress.address, + ) + } /> diff --git a/src/components/Customer/PPM/Booking/DateAndLocationForm/DateAndLocationForm.test.jsx b/src/components/Customer/PPM/Booking/DateAndLocationForm/DateAndLocationForm.test.jsx index fa35741a231..5f7fd941cbf 100644 --- a/src/components/Customer/PPM/Booking/DateAndLocationForm/DateAndLocationForm.test.jsx +++ b/src/components/Customer/PPM/Booking/DateAndLocationForm/DateAndLocationForm.test.jsx @@ -184,23 +184,37 @@ describe('DateAndLocationForm component', () => { , ); - const hasSecondaryDestinationAddress = await screen.getAllByLabelText('Yes')[1]; - await userEvent.click(hasSecondaryDestinationAddress); + await userEvent.click(screen.getByLabelText('Use my current delivery address')); + const postalCodes = screen.getAllByTestId(/ZIP/); const address1 = screen.getAllByLabelText(/Address 1/, { exact: false }); const address2 = 
screen.getAllByLabelText('Address 2', { exact: false }); - const address3 = screen.getAllByLabelText('Address 3', { exact: false }); const state = screen.getAllByTestId(/State/); const city = screen.getAllByTestId(/City/); + expect(address1[1]).toHaveValue(defaultProps.destinationDutyLocation.address.streetAddress1); + expect(address2[1]).toHaveValue(''); + expect(city[1]).toHaveTextContent(defaultProps.destinationDutyLocation.address.city); + expect(state[1]).toHaveTextContent(defaultProps.destinationDutyLocation.address.state); + expect(postalCodes[1]).toHaveTextContent(defaultProps.destinationDutyLocation.address.postalCode); + + const hasSecondaryDestinationAddress = await screen.getAllByLabelText('Yes')[1]; + + await userEvent.click(hasSecondaryDestinationAddress); + const secondaryPostalCodes = screen.getAllByTestId(/ZIP/); + const secondaryAddress1 = screen.getAllByLabelText(/Address 1/, { exact: false }); + const secondaryAddress2 = screen.getAllByLabelText('Address 2', { exact: false }); + const secondaryAddress3 = screen.getAllByLabelText('Address 3', { exact: false }); + const secondaryState = screen.getAllByTestId(/State/); + const secondaryCity = screen.getAllByTestId(/City/); await waitFor(() => { - expect(address1[2]).toBeInstanceOf(HTMLInputElement); - expect(address2[2]).toBeInstanceOf(HTMLInputElement); - expect(address3[2]).toBeInstanceOf(HTMLInputElement); - expect(state[2]).toBeInstanceOf(HTMLLabelElement); - expect(city[2]).toBeInstanceOf(HTMLLabelElement); - expect(postalCodes[2]).toBeInstanceOf(HTMLLabelElement); + expect(secondaryAddress1[2]).toBeInstanceOf(HTMLInputElement); + expect(secondaryAddress2[2]).toBeInstanceOf(HTMLInputElement); + expect(secondaryAddress3[2]).toBeInstanceOf(HTMLInputElement); + expect(secondaryState[2]).toBeInstanceOf(HTMLLabelElement); + expect(secondaryCity[2]).toBeInstanceOf(HTMLLabelElement); + expect(secondaryPostalCodes[2]).toBeInstanceOf(HTMLLabelElement); }); }); }); diff --git a/src/components/Office/ServiceItemCalculations/helpers.js b/src/components/Office/ServiceItemCalculations/helpers.js index d555fe17733..ca185d5c87f 100644 --- a/src/components/Office/ServiceItemCalculations/helpers.js +++ b/src/components/Office/ServiceItemCalculations/helpers.js @@ -380,6 +380,20 @@ const shuttleOriginPriceDomestic = (params) => { ); }; +const shuttleOriginPriceInternational = (params) => { + const value = getPriceRateOrFactor(params); + const label = SERVICE_ITEM_CALCULATION_LABELS.OriginPrice; + + const pickupDate = `${SERVICE_ITEM_CALCULATION_LABELS.PickupDate}: ${formatDateWithUTC( + getParamValue(SERVICE_ITEM_PARAM_KEYS.ReferenceDate, params), + 'DD MMM YYYY', + )}`; + + const market = getParamValue(SERVICE_ITEM_PARAM_KEYS.MarketDest, params) === 'O' ? 
'Oconus' : 'Conus'; + + return calculation(value, label, formatDetail(pickupDate), formatDetail(market)); +}; + // There is no param representing the destination price as available in the re_domestic_service_area_prices table // A param to return the service schedule is also not being created const destinationPrice = (params, shipmentType) => { @@ -418,6 +432,20 @@ const shuttleDestinationPriceDomestic = (params) => { ); }; +const shuttleDestinationPriceInternational = (params) => { + const value = getPriceRateOrFactor(params); + const label = SERVICE_ITEM_CALCULATION_LABELS.DestinationPrice; + + const deliveryDate = `${SERVICE_ITEM_CALCULATION_LABELS.DeliveryDate}: ${formatDateWithUTC( + getParamValue(SERVICE_ITEM_PARAM_KEYS.ReferenceDate, params), + 'DD MMM YYYY', + )}`; + + const market = getParamValue(SERVICE_ITEM_PARAM_KEYS.MarketDest, params) === 'O' ? 'OCONUS' : 'CONUS'; + + return calculation(value, label, formatDetail(deliveryDate), formatDetail(market)); +}; + const priceEscalationFactor = (params) => { const value = getParamValue(SERVICE_ITEM_PARAM_KEYS.EscalationCompounded, params) ? getParamValue(SERVICE_ITEM_PARAM_KEYS.EscalationCompounded, params) @@ -922,6 +950,15 @@ export default function makeCalculations(itemCode, totalAmount, params, mtoParam totalAmountRequested(totalAmount), ]; break; + // International origin shuttle service + case SERVICE_ITEM_CODES.IOSHUT: + result = [ + shuttleBillableWeight(params), + shuttleOriginPriceInternational(params), + priceEscalationFactorWithoutContractYear(params), + totalAmountRequested(totalAmount), + ]; + break; // Domestic Destination Additional Days SIT case SERVICE_ITEM_CODES.DDASIT: result = [ @@ -950,6 +987,15 @@ export default function makeCalculations(itemCode, totalAmount, params, mtoParam totalAmountRequested(totalAmount), ]; break; + // International destination shuttle service + case SERVICE_ITEM_CODES.IDSHUT: + result = [ + shuttleBillableWeight(params), + shuttleDestinationPriceInternational(params), + priceEscalationFactorWithoutContractYear(params), + totalAmountRequested(totalAmount), + ]; + break; // Domestic crating case SERVICE_ITEM_CODES.DCRT: result = [ diff --git a/src/components/Office/ShipmentForm/ShipmentForm.jsx b/src/components/Office/ShipmentForm/ShipmentForm.jsx index 076212d6953..03e7d006b65 100644 --- a/src/components/Office/ShipmentForm/ShipmentForm.jsx +++ b/src/components/Office/ShipmentForm/ShipmentForm.jsx @@ -70,6 +70,7 @@ import { validateDate } from 'utils/validation'; import { isBooleanFlagEnabled } from 'utils/featureFlags'; import { dateSelectionWeekendHolidayCheck } from 'utils/calendar'; import { datePickerFormat, formatDate } from 'shared/dates'; +import { isPreceedingAddressComplete } from 'shared/utils'; const ShipmentForm = (props) => { const { @@ -560,14 +561,14 @@ const ShipmentForm = (props) => { storageFacility, usesExternalVendor, destinationType, - hasSecondaryPickup: hasSecondaryPickup === 'yes', - secondaryPickup: hasSecondaryPickup === 'yes' ? secondaryPickup : {}, - hasSecondaryDelivery: hasSecondaryDelivery === 'yes', - secondaryDelivery: hasSecondaryDelivery === 'yes' ? secondaryDelivery : {}, - hasTertiaryPickup: hasTertiaryPickup === 'yes', - tertiaryPickup: hasTertiaryPickup === 'yes' ? tertiaryPickup : {}, - hasTertiaryDelivery: hasTertiaryDelivery === 'yes', - tertiaryDelivery: hasTertiaryDelivery === 'yes' ? tertiaryDelivery : {}, + hasSecondaryPickup: hasSecondaryPickup === 'true', + secondaryPickup: hasSecondaryPickup === 'true' ? 
secondaryPickup : {}, + hasSecondaryDelivery: hasSecondaryDelivery === 'true', + secondaryDelivery: hasSecondaryDelivery === 'true' ? secondaryDelivery : {}, + hasTertiaryPickup: hasTertiaryPickup === 'true', + tertiaryPickup: hasTertiaryPickup === 'true' ? tertiaryPickup : {}, + hasTertiaryDelivery: hasTertiaryDelivery === 'true', + tertiaryDelivery: hasTertiaryDelivery === 'true' ? tertiaryDelivery : {}, }); // Mobile Home Shipment @@ -657,7 +658,6 @@ const ShipmentForm = (props) => { hasTertiaryDelivery, isActualExpenseReimbursement, } = values; - const lengthHasError = !!( (formikProps.touched.lengthFeet && formikProps.errors.lengthFeet === 'Required') || (formikProps.touched.lengthInches && formikProps.errors.lengthFeet === 'Required') @@ -788,7 +788,7 @@ const ShipmentForm = (props) => { if (status === ADDRESS_UPDATE_STATUS.APPROVED) { setValues({ ...values, - hasDeliveryAddress: 'yes', + hasDeliveryAddress: 'true', delivery: { ...values.delivery, address: mtoShipment.deliveryAddressUpdate.newAddress, @@ -962,9 +962,10 @@ const ShipmentForm = (props) => { data-testid="has-secondary-pickup" label="Yes" name="hasSecondaryPickup" - value="yes" + value="true" title="Yes, I have a second pickup address" - checked={hasSecondaryPickup === 'yes'} + checked={hasSecondaryPickup === 'true'} + disabled={!isPreceedingAddressComplete('true', values.pickup.address)} /> { data-testid="no-secondary-pickup" label="No" name="hasSecondaryPickup" - value="no" + value="false" title="No, I do not have a second pickup address" - checked={hasSecondaryPickup !== 'yes'} + checked={hasSecondaryPickup !== 'true'} + disabled={!isPreceedingAddressComplete('true', values.pickup.address)} /> - {hasSecondaryPickup === 'yes' && ( + {hasSecondaryPickup === 'true' && ( <> { data-testid="has-tertiary-pickup" label="Yes" name="hasTertiaryPickup" - value="yes" + value="true" title="Yes, I have a third pickup address" - checked={hasTertiaryPickup === 'yes'} + checked={hasTertiaryPickup === 'true'} + disabled={ + !isPreceedingAddressComplete( + hasSecondaryPickup, + values.secondaryPickup.address, + ) + } /> { data-testid="no-tertiary-pickup" label="No" name="hasTertiaryPickup" - value="no" + value="false" title="No, I do not have a third pickup address" - checked={hasTertiaryPickup !== 'yes'} + checked={hasTertiaryPickup !== 'true'} + disabled={ + !isPreceedingAddressComplete( + hasSecondaryPickup, + values.secondaryPickup.address, + ) + } /> - {hasTertiaryPickup === 'yes' && ( + {hasTertiaryPickup === 'true' && ( { id="has-secondary-delivery" label="Yes" name="hasSecondaryDelivery" - value="yes" + value="true" title="Yes, I have a second destination location" - checked={hasSecondaryDelivery === 'yes'} + checked={hasSecondaryDelivery === 'true'} + disabled={!isPreceedingAddressComplete('true', values.delivery.address)} /> { id="no-secondary-delivery" label="No" name="hasSecondaryDelivery" - value="no" + value="false" title="No, I do not have a second destination location" - checked={hasSecondaryDelivery !== 'yes'} + checked={hasSecondaryDelivery !== 'true'} + disabled={!isPreceedingAddressComplete('true', values.delivery.address)} /> - {hasSecondaryDelivery === 'yes' && ( + {hasSecondaryDelivery === 'true' && ( <> { data-testid="has-tertiary-delivery" label="Yes" name="hasTertiaryDelivery" - value="yes" + value="true" title="Yes, I have a third delivery address" - checked={hasTertiaryDelivery === 'yes'} + checked={hasTertiaryDelivery === 'true'} + disabled={ + !isPreceedingAddressComplete( + hasSecondaryDelivery, + 
values.secondaryDelivery.address, + ) + } /> { data-testid="no-tertiary-delivery" label="No" name="hasTertiaryDelivery" - value="no" + value="false" title="No, I do not have a third delivery address" - checked={hasTertiaryDelivery !== 'yes'} + checked={hasTertiaryDelivery !== 'true'} + disabled={ + !isPreceedingAddressComplete( + hasSecondaryDelivery, + values.secondaryDelivery.address, + ) + } /> - {hasTertiaryDelivery === 'yes' && ( + {hasTertiaryDelivery === 'true' && ( { id="has-delivery-address" label="Yes" name="hasDeliveryAddress" - value="yes" + value="true" title="Yes, I know my delivery address" - checked={hasDeliveryAddress === 'yes'} + checked={hasDeliveryAddress === 'true'} /> - {hasDeliveryAddress === 'yes' ? ( + {hasDeliveryAddress === 'true' ? ( { id="has-secondary-delivery" label="Yes" name="hasSecondaryDelivery" - value="yes" + value="true" title="Yes, I have a second destination location" - checked={hasSecondaryDelivery === 'yes'} + checked={hasSecondaryDelivery === 'true'} + disabled={ + !isPreceedingAddressComplete(hasDeliveryAddress, values.delivery.address) + } /> { id="no-secondary-delivery" label="No" name="hasSecondaryDelivery" - value="no" + value="false" title="No, I do not have a second destination location" - checked={hasSecondaryDelivery !== 'yes'} + checked={hasSecondaryDelivery !== 'true'} + disabled={ + !isPreceedingAddressComplete(hasDeliveryAddress, values.delivery.address) + } /> - {hasSecondaryDelivery === 'yes' && ( + {hasSecondaryDelivery === 'true' && ( <> { data-testid="has-tertiary-delivery" label="Yes" name="hasTertiaryDelivery" - value="yes" + value="true" title="Yes, I have a third delivery address" - checked={hasTertiaryDelivery === 'yes'} + checked={hasTertiaryDelivery === 'true'} + disabled={ + !isPreceedingAddressComplete( + hasSecondaryDelivery, + values.secondaryDelivery.address, + ) + } /> { data-testid="no-tertiary-delivery" label="No" name="hasTertiaryDelivery" - value="no" + value="false" title="No, I do not have a third delivery address" - checked={hasTertiaryDelivery !== 'yes'} + checked={hasTertiaryDelivery !== 'true'} + disabled={ + !isPreceedingAddressComplete( + hasSecondaryDelivery, + values.secondaryDelivery.address, + ) + } /> - {hasTertiaryDelivery === 'yes' && ( + {hasTertiaryDelivery === 'true' && ( { value="true" title="Yes, there is a second pickup address" checked={hasSecondaryPickup === 'true'} + disabled={!isPreceedingAddressComplete('true', values.pickup.address)} /> { value="false" title="No, there is not a second pickup address" checked={hasSecondaryPickup !== 'true'} + disabled={!isPreceedingAddressComplete('true', values.pickup.address)} /> @@ -1487,6 +1535,12 @@ const ShipmentForm = (props) => { value="true" title="Yes, there is a third pickup address" checked={hasTertiaryPickup === 'true'} + disabled={ + !isPreceedingAddressComplete( + hasSecondaryPickup, + values.secondaryPickup.address, + ) + } /> { value="false" title="No, there is not a third pickup address" checked={hasTertiaryPickup !== 'true'} + disabled={ + !isPreceedingAddressComplete( + hasSecondaryPickup, + values.secondaryPickup.address, + ) + } /> @@ -1539,6 +1599,7 @@ const ShipmentForm = (props) => { value="true" title="Yes, there is a second destination location" checked={hasSecondaryDestination === 'true'} + disabled={!isPreceedingAddressComplete('true', values.destination.address)} /> { value="false" title="No, there is not a second destination location" checked={hasSecondaryDestination !== 'true'} + 
disabled={!isPreceedingAddressComplete('true', values.destination.address)} /> @@ -1577,6 +1639,12 @@ const ShipmentForm = (props) => { value="true" title="Yes, I have a third delivery address" checked={hasTertiaryDestination === 'true'} + disabled={ + !isPreceedingAddressComplete( + hasSecondaryDestination, + values.secondaryDestination.address, + ) + } /> { value="false" title="No, I do not have a third delivery address" checked={hasTertiaryDestination !== 'true'} + disabled={ + !isPreceedingAddressComplete( + hasSecondaryDestination, + values.secondaryDestination.address, + ) + } /> diff --git a/src/constants/serviceItems.js b/src/constants/serviceItems.js index 16f2dd60b62..a5b7e850b3c 100644 --- a/src/constants/serviceItems.js +++ b/src/constants/serviceItems.js @@ -228,12 +228,14 @@ const allowedServiceItemCalculations = [ SERVICE_ITEM_CODES.DOP, SERVICE_ITEM_CODES.DOPSIT, SERVICE_ITEM_CODES.DOSHUT, + SERVICE_ITEM_CODES.IOSHUT, SERVICE_ITEM_CODES.DPK, SERVICE_ITEM_CODES.DNPK, SERVICE_ITEM_CODES.DSH, SERVICE_ITEM_CODES.DUPK, SERVICE_ITEM_CODES.FSC, SERVICE_ITEM_CODES.DDSHUT, + SERVICE_ITEM_CODES.IDSHUT, SERVICE_ITEM_CODES.DCRT, SERVICE_ITEM_CODES.DUCRT, SERVICE_ITEM_CODES.DOSFSC, diff --git a/src/pages/Office/MoveTaskOrder/MoveTaskOrder.jsx b/src/pages/Office/MoveTaskOrder/MoveTaskOrder.jsx index a1fe5abec1c..d2b71885c0a 100644 --- a/src/pages/Office/MoveTaskOrder/MoveTaskOrder.jsx +++ b/src/pages/Office/MoveTaskOrder/MoveTaskOrder.jsx @@ -798,6 +798,16 @@ export const MoveTaskOrder = (props) => { setAlertMessage('SIT entry date updated'); setAlertType('success'); }, + onError: (error) => { + let errorMessage = 'There was a problem updating the SIT entry date'; + if (error.response.status === 422) { + const responseData = JSON.parse(error?.response?.data); + errorMessage = responseData?.detail; + setAlertMessage(errorMessage); + setAlertType('error'); + } + setIsEditSitEntryDateModalVisible(false); + }, }, ); }; diff --git a/src/pages/Office/MoveTaskOrder/MoveTaskOrder.test.jsx b/src/pages/Office/MoveTaskOrder/MoveTaskOrder.test.jsx index 81a65bd6098..d07cd167678 100644 --- a/src/pages/Office/MoveTaskOrder/MoveTaskOrder.test.jsx +++ b/src/pages/Office/MoveTaskOrder/MoveTaskOrder.test.jsx @@ -1,6 +1,8 @@ import React from 'react'; import { mount } from 'enzyme'; -import { render, screen } from '@testing-library/react'; +import { render, screen, within, cleanup } from '@testing-library/react'; +import * as reactQuery from '@tanstack/react-query'; +import userEvent from '@testing-library/user-event'; import { unapprovedMTOQuery, @@ -22,6 +24,7 @@ import { multiplePaymentRequests, moveHistoryTestData, actualPPMWeightQuery, + approvedMTOWithApprovedSitItemsQuery, } from './moveTaskOrderUnitTestData'; import { MoveTaskOrder } from 'pages/Office/MoveTaskOrder/MoveTaskOrder'; @@ -543,6 +546,153 @@ describe('MoveTaskOrder', () => { }); }); + describe('SIT entry date update', () => { + const mockMutateServiceItemSitEntryDate = jest.fn(); + jest.spyOn(reactQuery, 'useMutation').mockImplementation(() => ({ + mutate: mockMutateServiceItemSitEntryDate, + })); + beforeEach(() => { + // Reset the mock before each test + mockMutateServiceItemSitEntryDate.mockReset(); + }); + afterEach(() => { + cleanup(); // This will unmount the component after each test + }); + + const renderComponent = () => { + useMoveTaskOrderQueries.mockReturnValue(approvedMTOWithApprovedSitItemsQuery); + useMovePaymentRequestsQueries.mockReturnValue({ paymentRequests: [] }); + 
useGHCGetMoveHistory.mockReturnValue(moveHistoryTestData); + const isMoveLocked = false; + render( + + + , + ); + }; + it('shows error message when SIT entry date is invalid', async () => { + renderComponent(); + // Set up the mock to simulate an error + mockMutateServiceItemSitEntryDate.mockImplementation((data, options) => { + options.onError({ + response: { + status: 422, + data: JSON.stringify({ + detail: + 'UpdateSitEntryDate failed for service item: the SIT Entry Date (2025-03-05) must be before the SIT Departure Date (2025-02-27)', + }), + }, + }); + }); + const approvedServiceItems = await screen.findByTestId('ApprovedServiceItemsTable'); + expect(approvedServiceItems).toBeInTheDocument(); + const spanElement = within(approvedServiceItems).getByText(/Domestic origin 1st day SIT/i); + expect(spanElement).toBeInTheDocument(); + // Search for the edit button within the approvedServiceItems div + const editButton = within(approvedServiceItems).getByRole('button', { name: /edit/i }); + expect(editButton).toBeInTheDocument(); + await userEvent.click(editButton); + const modal = await screen.findByTestId('modal'); + expect(modal).toBeInTheDocument(); + const heading = within(modal).getByRole('heading', { name: /Edit SIT Entry Date/i, level: 2 }); + expect(heading).toBeInTheDocument(); + const formGroups = screen.getAllByTestId('formGroup'); + const sitEntryDateFormGroup = Array.from(formGroups).find( + (group) => + within(group).queryByPlaceholderText('DD MMM YYYY') && + within(group).queryByPlaceholderText('DD MMM YYYY').getAttribute('name') === 'sitEntryDate', + ); + const dateInput = within(sitEntryDateFormGroup).getByPlaceholderText('DD MMM YYYY'); + expect(dateInput).toBeInTheDocument(); + const remarksTextarea = within(modal).getByTestId('officeRemarks'); + expect(remarksTextarea).toBeInTheDocument(); + const saveButton = within(modal).getByRole('button', { name: /Save/ }); + + await userEvent.clear(dateInput); + await userEvent.type(dateInput, '05 Mar 2025'); + await userEvent.type(remarksTextarea, 'Need to update the sit entry date.'); + expect(saveButton).toBeEnabled(); + await userEvent.click(saveButton); + + // Verify that the mutation was called + expect(mockMutateServiceItemSitEntryDate).toHaveBeenCalled(); + + // The modal should close + expect(screen.queryByTestId('modal')).not.toBeInTheDocument(); + + // Verify that the error message is displayed + const alert = screen.getByTestId('alert'); + expect(alert).toBeInTheDocument(); + expect(alert).toHaveClass('usa-alert--error'); + expect(alert).toHaveTextContent( + 'UpdateSitEntryDate failed for service item: the SIT Entry Date (2025-03-05) must be before the SIT Departure Date (2025-02-27)', + ); + }); + + it('shows success message when SIT entry date is valid', async () => { + renderComponent(); + // Set up the mock to simulate an error + mockMutateServiceItemSitEntryDate.mockImplementation((data, options) => { + options.onSuccess({ + response: { + status: 200, + data: JSON.stringify({ + detail: 'SIT entry date updated', + }), + }, + }); + }); + const approvedServiceItems = await screen.findByTestId('ApprovedServiceItemsTable'); + expect(approvedServiceItems).toBeInTheDocument(); + const spanElement = within(approvedServiceItems).getByText(/Domestic origin 1st day SIT/i); + expect(spanElement).toBeInTheDocument(); + // Search for the edit button within the approvedServiceItems div + const editButton = within(approvedServiceItems).getByRole('button', { name: /edit/i }); + expect(editButton).toBeInTheDocument(); + await 
+    it('shows success message when SIT entry date is valid', async () => {
+      renderComponent();
+      // Set up the mock to simulate a successful update
+      mockMutateServiceItemSitEntryDate.mockImplementation((data, options) => {
+        options.onSuccess({
+          response: {
+            status: 200,
+            data: JSON.stringify({
+              detail: 'SIT entry date updated',
+            }),
+          },
+        });
+      });
+      const approvedServiceItems = await screen.findByTestId('ApprovedServiceItemsTable');
+      expect(approvedServiceItems).toBeInTheDocument();
+      const spanElement = within(approvedServiceItems).getByText(/Domestic origin 1st day SIT/i);
+      expect(spanElement).toBeInTheDocument();
+      // Search for the edit button within the approvedServiceItems div
+      const editButton = within(approvedServiceItems).getByRole('button', { name: /edit/i });
+      expect(editButton).toBeInTheDocument();
+      await userEvent.click(editButton);
+      const modal = await screen.findByTestId('modal');
+      expect(modal).toBeInTheDocument();
+      const heading = within(modal).getByRole('heading', { name: /Edit SIT Entry Date/i, level: 2 });
+      expect(heading).toBeInTheDocument();
+      const formGroups = screen.getAllByTestId('formGroup');
+      const sitEntryDateFormGroup = Array.from(formGroups).find(
+        (group) =>
+          within(group).queryByPlaceholderText('DD MMM YYYY') &&
+          within(group).queryByPlaceholderText('DD MMM YYYY').getAttribute('name') === 'sitEntryDate',
+      );
+      const dateInput = within(sitEntryDateFormGroup).getByPlaceholderText('DD MMM YYYY');
+      expect(dateInput).toBeInTheDocument();
+      const remarksTextarea = within(modal).getByTestId('officeRemarks');
+      expect(remarksTextarea).toBeInTheDocument();
+      const saveButton = within(modal).getByRole('button', { name: /Save/ });
+
+      await userEvent.clear(dateInput);
+      await userEvent.type(dateInput, '03 Mar 2024');
+      await userEvent.type(remarksTextarea, 'Need to update the sit entry date.');
+      expect(saveButton).toBeEnabled();
+      await userEvent.click(saveButton);
+
+      // Verify that the mutation was called
+      expect(mockMutateServiceItemSitEntryDate).toHaveBeenCalled();
+
+      // The modal should close
+      expect(screen.queryByTestId('modal')).not.toBeInTheDocument();
+
+      // Verify that the success message is displayed
+      const alert = screen.getByTestId('alert');
+      expect(alert).toBeInTheDocument();
+      expect(alert).toHaveClass('usa-alert--success');
+      expect(alert).toHaveTextContent('SIT entry date updated');
+    });
+  });
+
   describe('approved mto with both submitted and approved shipments', () => {
     useMoveTaskOrderQueries.mockReturnValue(someShipmentsApprovedMTOQuery);
     useMovePaymentRequestsQueries.mockReturnValue(multiplePaymentRequests);
diff --git a/src/pages/Office/MoveTaskOrder/moveTaskOrderUnitTestData.js b/src/pages/Office/MoveTaskOrder/moveTaskOrderUnitTestData.js
index 614867fe84b..a1cc6a708ff 100644
--- a/src/pages/Office/MoveTaskOrder/moveTaskOrderUnitTestData.js
+++ b/src/pages/Office/MoveTaskOrder/moveTaskOrderUnitTestData.js
@@ -3004,3 +3004,76 @@ export const moveHistoryTestData = {
     ],
   },
 };
+
+export const approvedMTOWithApprovedSitItemsQuery = {
+  orders: {
+    1: {
+      id: '1',
+      originDutyLocation: {
+        address: {
+          streetAddress1: '',
+          city: 'Fort Knox',
+          state: 'KY',
+          postalCode: '40121',
+        },
+      },
+      destinationDutyLocation: {
+        address: {
+          streetAddress1: '',
+          city: 'Fort Irwin',
+          state: 'CA',
+          postalCode: '92310',
+        },
+      },
+      entitlement: {
+        authorizedWeight: 8000,
+        totalWeight: 8500,
+      },
+    },
+  },
+  move: {
+    id: '2',
+    status: MOVE_STATUSES.APPROVALS_REQUESTED,
+  },
+  mtoShipments: [
+    {
+      id: '3',
+      moveTaskOrderID: '2',
+      shipmentType: SHIPMENT_OPTIONS.HHG,
+      scheduledPickupDate: '2020-03-16',
+      requestedPickupDate: '2020-03-15',
+      pickupAddress: {
+        streetAddress1: '932 Baltic Avenue',
+        city: 'Chicago',
+        state: 'IL',
+        postalCode: '60601',
+        eTag: '1234',
+      },
+      destinationAddress: {
+        streetAddress1: '10 Park Place',
+        city: 'Atlantic City',
+        state: 'NJ',
+        postalCode: '08401',
+      },
+      status: shipmentStatuses.APPROVED,
+      eTag: '1234',
+      reweigh: {
+        id: '00000000-0000-0000-0000-000000000000',
+      },
+      sitExtensions: [],
+      sitStatus: SITStatusOrigin,
+    },
+  ],
+  mtoServiceItems: [
+    {
+      id: '5',
+      mtoShipmentID: '3',
+      reServiceName: 'Domestic origin 1st day SIT',
+      status: SERVICE_ITEM_STATUS.APPROVED,
+      reServiceCode: 'DOFSIT',
+    },
+  ],
+  isLoading: false,
+  isError: false,
+  isSuccess: true,
+};
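The fixture above feeds the useMoveTaskOrderQueries mock; the mutation itself is intercepted one level higher. Distilled to its essentials, the pattern the new tests rely on looks like the following sketch (the mock names and detail string here are illustrative):

  import * as reactQuery from '@tanstack/react-query';

  const mutateMock = jest.fn();
  jest.spyOn(reactQuery, 'useMutation').mockImplementation(() => ({ mutate: mutateMock }));

  // The component calls mutate(payload, { onSuccess, onError }); the test grabs
  // that callbacks object and fires whichever branch it wants to exercise.
  mutateMock.mockImplementation((payload, callbacks) => {
    callbacks.onError({ response: { status: 422, data: JSON.stringify({ detail: 'validation failed' }) } });
  });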
diff --git a/src/shared/utils.js b/src/shared/utils.js
index 12ccf91c7a8..bae96deadaf 100644
--- a/src/shared/utils.js
+++ b/src/shared/utils.js
@@ -209,3 +209,20 @@ export function checkAddressTogglesToClearAddresses(body) {
 
   return values;
 }
+
+export function isPreceedingAddressComplete(hasAddress, addressValues) {
+  if (addressValues === undefined || addressValues.postalCode === undefined) {
+    return false;
+  }
+
+  if (
+    hasAddress === 'true' &&
+    addressValues.streetAddress1 !== '' &&
+    addressValues.state !== '' &&
+    addressValues.city !== '' &&
+    addressValues.postalCode !== ''
+  ) {
+    return true;
+  }
+  return false;
+}
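For reviewers, a quick sketch of how this guard behaves; the import path and address values are illustrative, not taken from the PR:

  import { isPreceedingAddressComplete } from 'shared/utils';

  // Toggle set to the string 'true' and every field populated: returns true,
  // so the dependent radio in ShipmentForm stays enabled.
  isPreceedingAddressComplete('true', {
    streetAddress1: '123 Any St',
    city: 'Beverly Hills',
    state: 'CA',
    postalCode: '90210',
  }); // true

  // Anything else disables the dependent control: a missing address object,
  // a missing postalCode, blank fields, or a toggle value other than 'true'.
  isPreceedingAddressComplete('true', undefined); // false
  isPreceedingAddressComplete('true', { streetAddress1: '', city: '', state: '', postalCode: '' }); // false
  isPreceedingAddressComplete('false', { streetAddress1: '123 Any St', city: 'Beverly Hills', state: 'CA', postalCode: '90210' }); // false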
diff --git a/src/utils/formatMtoShipment.js b/src/utils/formatMtoShipment.js
index 6a0c6a7e7dc..9543a4f4bab 100644
--- a/src/utils/formatMtoShipment.js
+++ b/src/utils/formatMtoShipment.js
@@ -205,11 +205,11 @@ export function formatMtoShipmentForDisplay({
     tertiaryDelivery: {
       address: { ...emptyAddressShape },
     },
-    hasDeliveryAddress: 'no',
-    hasSecondaryPickup: 'no',
-    hasSecondaryDelivery: 'no',
-    hasTertiaryPickup: 'no',
-    hasTertiaryDelivery: 'no',
+    hasDeliveryAddress: 'false',
+    hasSecondaryPickup: 'false',
+    hasSecondaryDelivery: 'false',
+    hasTertiaryPickup: 'false',
+    hasTertiaryDelivery: 'false',
     ntsRecordedWeight,
     tacType,
     sacType,
@@ -245,16 +245,16 @@ export function formatMtoShipmentForDisplay({
 
   if (secondaryPickupAddress) {
     displayValues.secondaryPickup.address = { ...emptyAddressShape, ...secondaryPickupAddress };
-    displayValues.hasSecondaryPickup = 'yes';
+    displayValues.hasSecondaryPickup = 'true';
   }
   if (tertiaryPickupAddress) {
     displayValues.tertiaryPickup.address = { ...emptyAddressShape, ...tertiaryPickupAddress };
-    displayValues.hasTertiaryPickup = 'yes';
+    displayValues.hasTertiaryPickup = 'true';
   }
 
   if (destinationAddress) {
     displayValues.delivery.address = { ...emptyAddressShape, ...destinationAddress };
-    if (destinationAddress.streetAddress1 !== 'N/A') displayValues.hasDeliveryAddress = 'yes';
+    if (destinationAddress.streetAddress1 !== 'N/A') displayValues.hasDeliveryAddress = 'true';
   }
 
   if (destinationType) {
@@ -263,11 +263,11 @@ export function formatMtoShipmentForDisplay({
 
   if (secondaryDeliveryAddress) {
     displayValues.secondaryDelivery.address = { ...emptyAddressShape, ...secondaryDeliveryAddress };
-    displayValues.hasSecondaryDelivery = 'yes';
+    displayValues.hasSecondaryDelivery = 'true';
   }
   if (tertiaryDeliveryAddress) {
     displayValues.tertiaryDelivery.address = { ...emptyAddressShape, ...tertiaryDeliveryAddress };
-    displayValues.hasTertiaryDelivery = 'yes';
+    displayValues.hasTertiaryDelivery = 'true';
   }
 
   if (requestedDeliveryDate) {
diff --git a/src/utils/formatMtoShipment.test.js b/src/utils/formatMtoShipment.test.js
index 172204a3d8e..67291b5f196 100644
--- a/src/utils/formatMtoShipment.test.js
+++ b/src/utils/formatMtoShipment.test.js
@@ -131,19 +131,19 @@ describe('formatMtoShipmentForDisplay', () => {
       expect(displayValues.delivery.requestedDate.toDateString()).toBe('Tue Jan 27 2026');
       checkAddressesAreEqual(displayValues.delivery.address, emptyAddressShape);
       checkAgentsAreEqual(displayValues.delivery.agent, emptyAgentShape);
-      expect(displayValues.hasDeliveryAddress).toBe('no');
+      expect(displayValues.hasDeliveryAddress).toBe('false');
 
       checkAddressesAreEqual(displayValues.secondaryPickup.address, emptyAddressShape);
-      expect(displayValues.hasSecondaryPickup).toBe('no');
+      expect(displayValues.hasSecondaryPickup).toBe('false');
 
       checkAddressesAreEqual(displayValues.secondaryDelivery.address, emptyAddressShape);
-      expect(displayValues.hasSecondaryDelivery).toBe('no');
+      expect(displayValues.hasSecondaryDelivery).toBe('false');
 
       checkAddressesAreEqual(displayValues.tertiaryPickup.address, emptyAddressShape);
-      expect(displayValues.hasTertiaryPickup).toBe('no');
+      expect(displayValues.hasTertiaryPickup).toBe('false');
 
       checkAddressesAreEqual(displayValues.tertiaryDelivery.address, emptyAddressShape);
-      expect(displayValues.hasTertiaryDelivery).toBe('no');
+      expect(displayValues.hasTertiaryDelivery).toBe('false');
 
       expect(displayValues.agents).toBeUndefined();
     },
@@ -192,15 +192,15 @@ describe('formatMtoShipmentForDisplay', () => {
     const expectedDeliveryAddress = { ...emptyAddressShape, ...destinationAddress };
     checkAddressesAreEqual(displayValues.delivery.address, expectedDeliveryAddress);
-    expect(displayValues.hasDeliveryAddress).toBe('yes');
+    expect(displayValues.hasDeliveryAddress).toBe('true');
 
     const expectedSecondaryPickupAddress = { ...emptyAddressShape, ...secondaryPickupAddress };
     checkAddressesAreEqual(displayValues.secondaryPickup.address, expectedSecondaryPickupAddress);
-    expect(displayValues.hasSecondaryPickup).toBe('yes');
+    expect(displayValues.hasSecondaryPickup).toBe('true');
 
     const expectedSecondaryDeliveryAddress = { ...emptyAddressShape, ...secondaryDeliveryAddress };
     checkAddressesAreEqual(displayValues.secondaryDelivery.address, expectedSecondaryDeliveryAddress);
-    expect(displayValues.hasSecondaryDelivery).toBe('yes');
+    expect(displayValues.hasSecondaryDelivery).toBe('true');
   });
 
   it('can format a shipment with a primary, secondary, and tertiary pickup and destination', () => {
@@ -218,23 +218,23 @@ describe('formatMtoShipmentForDisplay', () => {
     const expectedDeliveryAddress = { ...emptyAddressShape, ...destinationAddress };
     checkAddressesAreEqual(displayValues.delivery.address, expectedDeliveryAddress);
-    expect(displayValues.hasDeliveryAddress).toBe('yes');
+    expect(displayValues.hasDeliveryAddress).toBe('true');
 
     const expectedSecondaryPickupAddress = { ...emptyAddressShape, ...secondaryPickupAddress };
     checkAddressesAreEqual(displayValues.secondaryPickup.address, expectedSecondaryPickupAddress);
-    expect(displayValues.hasSecondaryPickup).toBe('yes');
+    expect(displayValues.hasSecondaryPickup).toBe('true');
 
     const expectedSecondaryDeliveryAddress = { ...emptyAddressShape, ...secondaryDeliveryAddress };
     checkAddressesAreEqual(displayValues.secondaryDelivery.address, expectedSecondaryDeliveryAddress);
-    expect(displayValues.hasSecondaryDelivery).toBe('yes');
+    expect(displayValues.hasSecondaryDelivery).toBe('true');
 
     const expectedTertiaryPickupAddress = { ...emptyAddressShape, ...tertiaryPickupAddress };
     checkAddressesAreEqual(displayValues.tertiaryPickup.address, expectedTertiaryPickupAddress);
-    expect(displayValues.hasTertiaryPickup).toBe('yes');
+    expect(displayValues.hasTertiaryPickup).toBe('true');
 
     const expectedTertiaryDeliveryAddress = { ...emptyAddressShape, ...tertiaryDeliveryAddress };
     checkAddressesAreEqual(displayValues.tertiaryDelivery.address, expectedTertiaryDeliveryAddress);
-    expect(displayValues.hasTertiaryDelivery).toBe('yes');
+    expect(displayValues.hasTertiaryDelivery).toBe('true');
   });
 
   it('can format a shipment with lines of accounting', () => {
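The 'yes'/'no' to 'true'/'false' switch above is what makes the new ShipmentForm guards work: isPreceedingAddressComplete (see src/shared/utils.js above) compares its first argument against the string 'true', and the radio inputs already use 'true'/'false' values. A two-line illustration, where completeAddress stands for a hypothetical fully populated address object:

  isPreceedingAddressComplete('yes', completeAddress);  // false: the old display value never matches 'true'
  isPreceedingAddressComplete('true', completeAddress); // true: the new display value does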
diff --git a/swagger-def/ghc.yaml b/swagger-def/ghc.yaml
index faf1e7b44af..217fbd4f2d4 100644
--- a/swagger-def/ghc.yaml
+++ b/swagger-def/ghc.yaml
@@ -4495,6 +4495,42 @@ paths:
         description: payload is too large
       "500":
         description: server error
+  /uploads/{uploadID}/status:
+    get:
+      summary: Returns status of an upload
+      description: Returns status of an upload based on antivirus run
+      operationId: getUploadStatus
+      produces:
+        - text/event-stream
+      tags:
+        - uploads
+      parameters:
+        - in: path
+          name: uploadID
+          type: string
+          format: uuid
+          required: true
+          description: UUID of the upload to return status of
+      responses:
+        '200':
+          description: the requested upload status
+          schema:
+            type: string
+            enum:
+              - INFECTED
+              - CLEAN
+              - PROCESSING
+            readOnly: true
+        '400':
+          description: invalid request
+          schema:
+            $ref: '#/definitions/InvalidRequestResponsePayload'
+        '403':
+          description: not authorized
+        '404':
+          description: not found
+        '500':
+          description: server error
   /application_parameters/{parameterName}:
     get:
       summary: Searches for an application parameter by name, returns nil if not found
diff --git a/swagger/ghc.yaml b/swagger/ghc.yaml
index e6b1d27b36e..e51d85a9794 100644
--- a/swagger/ghc.yaml
+++ b/swagger/ghc.yaml
@@ -4701,6 +4701,42 @@ paths:
         description: payload is too large
       '500':
         description: server error
+  /uploads/{uploadID}/status:
+    get:
+      summary: Returns status of an upload
+      description: Returns status of an upload based on antivirus run
+      operationId: getUploadStatus
+      produces:
+        - text/event-stream
+      tags:
+        - uploads
+      parameters:
+        - in: path
+          name: uploadID
+          type: string
+          format: uuid
+          required: true
+          description: UUID of the upload to return status of
+      responses:
+        '200':
+          description: the requested upload status
+          schema:
+            type: string
+            enum:
+              - INFECTED
+              - CLEAN
+              - PROCESSING
+            readOnly: true
+        '400':
+          description: invalid request
+          schema:
+            $ref: '#/definitions/InvalidRequestResponsePayload'
+        '403':
+          description: not authorized
+        '404':
+          description: not found
+        '500':
+          description: server error
   /application_parameters/{parameterName}:
     get:
       summary: Searches for an application parameter by name, returns nil if not found
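Both copies of the spec declare produces: text/event-stream, so getUploadStatus is a long-lived server-sent-events stream rather than a one-shot JSON response. A minimal browser-side consumer might look like the sketch below; the /ghc/v1 prefix, the in-scope uploadID variable, and cookie-based auth are assumptions, since the spec above only fixes the path template and the three status strings:

  const source = new EventSource(`/ghc/v1/uploads/${uploadID}/status`, { withCredentials: true });

  source.onmessage = (event) => {
    // Per the enum above, event.data should be PROCESSING, CLEAN, or INFECTED
    if (event.data === 'CLEAN' || event.data === 'INFECTED') {
      source.close(); // terminal states: stop listening
    }
  };

  source.onerror = () => source.close();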