Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,10 @@
import io.vertx.core.json.JsonObject;
import io.vertx.sqlclient.SqlResult;
import io.vertx.sqlclient.templates.RowMapper;
import io.vertx.sqlclient.templates.SqlTemplate;
import io.vertx.sqlclient.templates.TupleMapper;
import java.time.LocalDateTime;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
Expand Down Expand Up @@ -327,8 +329,9 @@ public Future<Void> createDatabase(TenantPgPool pool) {
"CREATE TABLE IF NOT EXISTS " + pool.getSchema() + "." + table()
+ "("
+ dbColumnNameAndType(ID) + " PRIMARY KEY, "
+ dbColumnNameAndType(CHANNEL_ID) + " NOT NULL "
+ " REFERENCES " + pool.getSchema() + "." + Tables.CHANNEL + " (" + new Channel().dbColumnName(ID) + "), "
+ dbColumnNameAndType(CHANNEL_ID) + " NOT NULL CONSTRAINT import_job_channel_id_fkey "
+ "REFERENCES " + pool.getSchema() + "." + Tables.CHANNEL + " (" + new Channel().dbColumnName(ID) + ") "
+ " ON DELETE CASCADE, "
+ dbColumnNameAndType(CHANNEL_NAME) + ", "
+ dbColumnNameAndType(IMPORT_TYPE) + ", "
+ dbColumnNameAndType(TRANSFORMATION) + ", "
Expand All @@ -343,6 +346,14 @@ public Future<Void> createDatabase(TenantPgPool pool) {
).mapEmpty();
}

/**
 * Counts the rows of the import job table that reference the given channel.
 *
 * @param pool tenant pool providing the schema name and the connection pool
 * @param channelId value matched against the channel id column
 *                  (presumably a UUID string — confirm with callers)
 * @return future completing with the number of matching import job rows
 */
public Future<Integer> countImportJobsByChannelId(TenantPgPool pool, String channelId) {
  String countQuery = "SELECT COUNT(*) AS import_jobs_count FROM " + pool.getSchema() + "." + table()
      + " WHERE " + dbColumnName(CHANNEL_ID) + " = #{channelId}";
  return SqlTemplate.forQuery(pool.getPool(), countQuery)
      .execute(Collections.singletonMap("channelId", channelId))
      .map(result -> result.iterator().next().getInteger("import_jobs_count"));
}

public record ImportJobRecord(UUID id,
UUID channelId,
String channelName,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -182,14 +182,14 @@ public Future<Void> createDatabase(TenantPgPool pool) {
"CREATE TABLE IF NOT EXISTS " + pool.getSchema() + "." + table()
+ "("
+ dbColumnName(ID) + " UUID PRIMARY KEY, "
+ dbColumnName(IMPORT_JOB_ID) + " UUID NOT NULL REFERENCES "
+ pool.getSchema() + "." + Tables.IMPORT_JOB + " (" + new ImportJob().dbColumnName(ID) + "), "
+ dbColumnName(IMPORT_JOB_ID) + " UUID NOT NULL CONSTRAINT log_statement_import_job_id_fkey REFERENCES "
+ pool.getSchema() + "." + Tables.IMPORT_JOB + " (" + new ImportJob().dbColumnName(ID) + ") "
+ "ON DELETE CASCADE, "
+ dbColumnName(TIME_STAMP) + " TIMESTAMP NOT NULL, "
+ dbColumnName(JOB_LABEL) + " TEXT NOT NULL, "
+ dbColumnName(LOG_STATEMENT) + " TEXT NOT NULL, "
+ metadata.columnsDdl()
+ ")",

"CREATE INDEX IF NOT EXISTS log_statement_import_job_id_idx "
+ " ON " + pool.getSchema() + "." + table() + "(" + dbColumnName(IMPORT_JOB_ID) + ")"
).mapEmpty();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -254,8 +254,9 @@ public Future<Void> createDatabase(TenantPgPool pool) {
"CREATE TABLE IF NOT EXISTS " + pool.getSchema() + "." + table()
+ "("
+ dbColumnNameAndType(ID) + " PRIMARY KEY, "
+ dbColumnNameAndType(IMPORT_JOB_ID) + " NOT NULL REFERENCES "
+ pool.getSchema() + "." + Tables.IMPORT_JOB + "(" + new ImportJob().dbColumnName(ID) + "), "
+ dbColumnNameAndType(IMPORT_JOB_ID) + " NOT NULL CONSTRAINT record_failure_import_job_id_fkey "
+ "REFERENCES " + pool.getSchema() + "." + Tables.IMPORT_JOB
+ "(" + new ImportJob().dbColumnName(ID) + ") ON DELETE CASCADE, "
+ dbColumnNameAndType(RECORD_NUMBER) + ", "
+ dbColumnNameAndType(TIME_STAMP) + ", "
+ dbColumnNameAndType(SOURCE_FILE_NAME) + ", "
Expand All @@ -264,7 +265,6 @@ public Future<Void> createDatabase(TenantPgPool pool) {
+ dbColumnNameAndType(TRANSFORMED_RECORD) + " NOT NULL, "
+ metadata.columnsDdl()
+ ")",

"CREATE INDEX IF NOT EXISTS record_failure_import_job_id_idx "
+ " ON " + pool.getSchema() + "." + table() + "(" + dbColumnName(IMPORT_JOB_ID) + ")"
).mapEmpty();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,9 @@ public JsonObject asJson() {
json.put(jsonPropertyName(ID), theRecord.id);
json.put(jsonPropertyName(NAME), theRecord.name);
json.put(jsonPropertyName(DESCRIPTION), theRecord.description);
json.put("steps", stepsArray);
if (containsListOfSteps()) {
json.put("steps", stepsArray);
}
putMetadata(json);
return json;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import io.vertx.core.Future;
import io.vertx.core.json.JsonObject;
import io.vertx.sqlclient.SqlResult;
import io.vertx.sqlclient.templates.RowMapper;
import io.vertx.sqlclient.templates.SqlTemplate;
import io.vertx.sqlclient.templates.TupleMapper;
Expand Down Expand Up @@ -209,6 +210,14 @@ public Future<List<JsonObject>> fetchTransformationStepsByTransformationId(Tenan
});
}

/**
 * Deletes every transformation-step association row belonging to one transformation.
 *
 * @param pool tenant pool providing the schema name and the connection pool
 * @param transformationId id of the transformation whose step rows are removed
 * @return future completing with the count of deleted rows
 */
public Future<Integer> deleteTransformationStepsByTransformationId(TenantPgPool pool, UUID transformationId) {
  String deleteStatement = "DELETE FROM " + this.schemaTable(pool.getSchema()) + " "
      + "WHERE transformation_id = #{transformationId}";
  return SqlTemplate.forUpdate(pool.getPool(), deleteStatement)
      .execute(Collections.singletonMap("transformationId", transformationId))
      .map(result -> result.rowCount());
}

public Future<Void> executeUpdateAndAdjustPositions(ServiceRequest request) {

return request.entityStorage().updateEntity(this.theRecord.id, this.withUpdatingUser(request.currentUser()))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
import java.util.UUID;
import java.util.concurrent.atomic.AtomicBoolean;
import org.folio.inventoryupdate.importing.moduledata.Channel;
import org.folio.inventoryupdate.importing.moduledata.ImportJob;
import org.folio.inventoryupdate.importing.moduledata.database.EntityStorage;
import org.folio.inventoryupdate.importing.moduledata.database.SqlQuery;
import org.folio.inventoryupdate.importing.moduledata.database.Tables;
Expand Down Expand Up @@ -91,12 +92,25 @@ public static Future<Void> putChannel(ServiceRequest request) {

/**
 * Deletes a channel looked up by tag or UUID from the {@code id} path parameter.
 * <p>Responds 404 when no such channel exists. Unless {@code force=true} is passed
 * as a query parameter, a channel that still has logged import jobs is protected:
 * the method responds 400 with a hint to retry with {@code ?force=true}. On a
 * successful delete the channel is also decommissioned.</p>
 *
 * <p>Fix: the pasted merge left the pre-change unconditional
 * {@code return deleteEntityAndRespond(...)} in front of the new force/count logic,
 * making everything after it unreachable (a compile error in Java). The stale
 * statement is removed and the branch structure flattened to guard clauses.</p>
 *
 * @param request service request carrying path/query parameters and storage access
 * @return future completing when the HTTP response has been sent
 */
public static Future<Void> deleteChannel(ServiceRequest request) {
  String channelId = request.requestParam("id");
  boolean force = "true".equalsIgnoreCase(request.requestParam("force"));
  return getChannelByTagOrUuid(request, channelId).compose(channel -> {
    if (channel == null) {
      return responseText(request.routingContext(), 404)
          .end("Found no channel with tag or id " + channelId + " to delete.").mapEmpty();
    }
    if (force) {
      // Forced delete: remove the channel regardless of logged jobs
      // (the channel FK is declared ON DELETE CASCADE in the import job DDL).
      return deleteEntityAndRespond(request, new Channel()).compose(na -> decommission(request)).mapEmpty();
    }
    // Un-forced delete: refuse while import jobs still reference this channel.
    return new ImportJob().countImportJobsByChannelId(request.entityStorage().getTenantPool(), channelId)
        .compose(jobsCount -> {
          if (jobsCount > 0) {
            return responseText(request.routingContext(), 400).end("Channel not deleted because it has " + jobsCount
                + " logged import jobs. To delete all logs together with the channel, use parameter ?force=true");
          }
          return deleteEntityAndRespond(request, new Channel()).compose(na -> decommission(request)).mapEmpty();
        });
  });
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,6 @@
import org.apache.logging.log4j.Logger;
import org.folio.inventoryupdate.importing.foliodata.SettingsClient;
import org.folio.inventoryupdate.importing.moduledata.ImportJob;
import org.folio.inventoryupdate.importing.moduledata.LogLine;
import org.folio.inventoryupdate.importing.moduledata.RecordFailure;
import org.folio.inventoryupdate.importing.moduledata.database.Tables;
import org.folio.inventoryupdate.importing.service.ServiceRequest;
import org.folio.inventoryupdate.importing.utils.Miscellaneous;
Expand Down Expand Up @@ -49,38 +47,6 @@ private Future<Void> purgePastJobsBySetting(ServiceRequest request, String purge
}

private Future<Void> purgePreviousJobsByAge(LocalDateTime untilDate) {
return deleteLogs(untilDate)
.compose(deletedLogs -> deleteFailedRecords(untilDate))
.compose(deletedFailedRecords -> deleteImportJobs(untilDate));
}

private Future<Void> deleteLogs(LocalDateTime untilDate) {
return SqlTemplate.forUpdate(pool.getPool(),
"DELETE FROM " + pool.getSchema() + "." + Tables.LOG_STATEMENT
+ " WHERE " + new LogLine().field(LogLine.IMPORT_JOB_ID).columnName()
+ " IN (SELECT " + new ImportJob().field(ImportJob.ID).columnName()
+ " FROM " + pool.getSchema() + "." + Tables.IMPORT_JOB
+ " WHERE " + new ImportJob().field(ImportJob.STARTED).columnName() + " < #{untilDate} )")
.execute(Collections.singletonMap("untilDate", untilDate))
.onSuccess(result -> logger.info("{} log lines deleted", result.rowCount()))
.onFailure(error -> logger.error("{} (occurred when attempting to delete logs)", error.getMessage()))
.mapEmpty();
}

private Future<Void> deleteFailedRecords(LocalDateTime untilDate) {
return SqlTemplate.forUpdate(pool.getPool(),
"DELETE FROM " + pool.getSchema() + "." + Tables.RECORD_FAILURE
+ " WHERE " + new RecordFailure().field(LogLine.IMPORT_JOB_ID).columnName()
+ " IN (SELECT " + new ImportJob().field(ImportJob.ID).columnName()
+ " FROM " + pool.getSchema() + "." + Tables.IMPORT_JOB
+ " WHERE " + new ImportJob().field(ImportJob.STARTED).columnName() + " < #{untilDate} )")
.execute(Collections.singletonMap("untilDate", untilDate))
.onSuccess(result -> logger.info("{} failed records deleted", result.rowCount()))
.onFailure(error -> logger.error("{} (occurred when attempting to delete failed records)", error.getMessage()))
.mapEmpty();
}

private Future<Void> deleteImportJobs(LocalDateTime untilDate) {
return SqlTemplate.forUpdate(pool.getPool(),
"DELETE FROM " + pool.getSchema() + "." + Tables.IMPORT_JOB
+ " WHERE " + new ImportJob().field(ImportJob.STARTED).columnName() + " <#{untilDate} ")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -143,7 +143,10 @@ public static Future<Void> updateTransformation(ServiceRequest request) {
}

/**
 * Deletes a transformation: first removes its dependent transformation-step
 * association rows, then deletes the transformation record itself and responds.
 *
 * <p>Fix: the pasted merge left the pre-change
 * {@code return deleteEntityAndRespond(request, new Transformation());} ahead of
 * the new body, making the following statements unreachable (a compile error in
 * Java). Only the new behavior is kept.</p>
 *
 * @param request service request; its {@code id} path parameter must be a UUID,
 *                otherwise {@link UUID#fromString} throws IllegalArgumentException
 * @return future completing when the response has been sent
 */
public static Future<Void> deleteTransformation(ServiceRequest request) {
  UUID transformationId = UUID.fromString(request.requestParam("id"));
  return new TransformationStep()
      .deleteTransformationStepsByTransformationId(request.entityStorage().getTenantPool(), transformationId)
      .compose(na -> deleteEntityAndRespond(request, new Transformation())).mapEmpty();
}

public static Future<Void> postTransformationStep(ServiceRequest request) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -206,6 +206,16 @@ public void canPostGetPutDeleteTransformation() {
putJsonObject(Service.PATH_TRANSFORMATIONS + "/" + Files.JSON_TRANSFORMATION_CONFIG.getString("id"), update, 204);
putJsonObject(Service.PATH_TRANSFORMATIONS + "/" + UUID.randomUUID(), update, 404);
getRecords(Service.PATH_TRANSFORMATIONS).body("totalRecords", is(1));
JsonObject step = new JsonObject();
step.put("id", STEP_ID)
.put("name", "test step")
.put("script", Files.XSLT_MARC_TO_INSTANCE);
postJsonObject(Service.PATH_STEPS, step);
JsonObject tsa = new JsonObject();
tsa.put("stepId", STEP_ID)
.put("transformationId", Files.JSON_TRANSFORMATION_CONFIG.getString("id"))
.put("position", "1");
postJsonObject(Service.PATH_TSAS, tsa);
deleteRecord(Service.PATH_TRANSFORMATIONS, Files.JSON_TRANSFORMATION_CONFIG.getString("id"), 200);
getRecords(Service.PATH_TRANSFORMATIONS).body("totalRecords", is(0));
deleteRecord(Service.PATH_TRANSFORMATIONS, Files.JSON_TRANSFORMATION_CONFIG.getString("id"), 404);
Expand Down Expand Up @@ -673,11 +683,21 @@ public void canPostGetPutDeleteChannel() {
putJsonObject(Service.PATH_CHANNELS + "/" + Files.JSON_CHANNEL.getString("id"), update, 200);
putJsonObject(Service.PATH_CHANNELS + "/" + UUID.randomUUID(), update, 404);
getRecords(Service.PATH_CHANNELS).body("totalRecords", is(1));
// Can delete channel with no logged jobs
deleteRecord(Service.PATH_CHANNELS, Files.JSON_CHANNEL.getString("id"), 200);
getRecords(Service.PATH_CHANNELS).body("totalRecords", is(0));
deleteRecord(Service.PATH_CHANNELS, Files.JSON_CHANNEL.getString("id"), 404);

// Can create disabled channel
postJsonObject(PATH_CHANNELS, Files.JSON_CHANNEL.copy().put("enabled", false));
// Can only delete channel with logged jobs if `force` set to `true`
postJsonObject(Service.PATH_IMPORT_JOBS, Files.JSON_IMPORT_JOB);
deleteRecord(Service.PATH_CHANNELS, Files.JSON_CHANNEL.getString("id"), 400);
getRecords(Service.PATH_CHANNELS).body("totalRecords", is(1));
deleteRecord(Service.PATH_CHANNELS, Files.JSON_CHANNEL.getString("id"), "force=true", 200);
getRecords(Service.PATH_CHANNELS).body("totalRecords", is(0));
deleteRecord(Service.PATH_CHANNELS, Files.JSON_CHANNEL.getString("id"), 404);


}

@Test
Expand Down Expand Up @@ -1535,6 +1555,16 @@ ValidatableResponse deleteRecord(String api, String id, int statusCode) {
.statusCode(statusCode);
}

/**
 * Issues a DELETE for {@code api/id} with an extra query string and asserts
 * the response status code.
 *
 * @param api base path of the resource collection
 * @param id id of the record to delete
 * @param argument raw query string appended after {@code ?} (e.g. {@code force=true})
 * @param statusCode expected HTTP status of the response
 * @return the validatable response for further assertions
 */
ValidatableResponse deleteRecord(String api, String id, String argument, int statusCode) {
  String uri = api + "/" + id + "?" + argument;
  return given()
      .baseUri(BASE_URI_INVENTORY_UPDATE)
      .header(Service.OKAPI_TENANT)
      .header(Service.OKAPI_URL)
      .delete(uri)
      .then()
      .statusCode(statusCode);
}

ValidatableResponse getRecords(String api) {
return given()
.baseUri(BASE_URI_INVENTORY_UPDATE)
Expand Down