@@ -0,0 +1,8 @@
---
type: perf
issue: 5748
title: "In the JPA server, several database columns related to Batch2 jobs and searching
have been reworked so that they no will longer use LOB datatypes going forward. This
is a significant advantage on Postgresql databases as it removes a significant use
of the inefficient `pg_largeobject` table, and should yield performance boosts for
MSSQL as well."
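The entity changes in this diff all follow the same pattern: the legacy `@Lob` column stays readable, but all new writes go to a new `VC`/`BIN` column declared with `Length.LONG32`, and getters prefer the new column with a fallback to the legacy one. A minimal sketch of that pattern is below; the entity, table, column, and field names are hypothetical and only illustrate the approach, they do not appear in this change.

```java
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;
import jakarta.persistence.Lob;
import jakarta.persistence.Table;
import org.hibernate.Length;

@Entity
@Table(name = "EXAMPLE_ENTITY")
public class ExampleEntity {

    @Id
    @Column(name = "PID")
    private Long myId;

    // Legacy LOB column: no longer written, but kept so rows stored before the upgrade stay readable.
    @Lob
    @Column(name = "DATA_LOB", nullable = true)
    private String myDataLob;

    // Replacement column: an ordinary long text column (Length.LONG32), which avoids
    // pg_largeobject on PostgreSQL and LOB handling on MSSQL.
    @Column(name = "DATA_VC", nullable = true, length = Length.LONG32)
    private String myDataVc;

    public String getData() {
        // Prefer the new column; fall back to the legacy LOB for pre-upgrade rows.
        return myDataVc != null ? myDataVc : myDataLob;
    }

    public void setData(String theData) {
        // All new writes go to the new column only.
        myDataVc = theData;
        myDataLob = null;
    }
}
```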
Original file line number Diff line number Diff line change
@@ -55,14 +55,15 @@ int updateInstanceStatusIfIn(
int updateWorkChunksPurgedTrue(@Param("id") String theInstanceId);

@Query(
"SELECT b from Batch2JobInstanceEntity b WHERE b.myDefinitionId = :defId AND b.myParamsJson = :params AND b.myStatus IN( :stats )")
"SELECT b from Batch2JobInstanceEntity b WHERE b.myDefinitionId = :defId AND (b.myParamsJson = :params OR b.myParamsJsonVc = :params) AND b.myStatus IN( :stats )")
List<Batch2JobInstanceEntity> findInstancesByJobIdParamsAndStatus(
@Param("defId") String theDefinitionId,
@Param("params") String theParams,
@Param("stats") Set<StatusEnum> theStatus,
Pageable thePageable);

@Query("SELECT b from Batch2JobInstanceEntity b WHERE b.myDefinitionId = :defId AND b.myParamsJson = :params")
@Query(
"SELECT b from Batch2JobInstanceEntity b WHERE b.myDefinitionId = :defId AND (b.myParamsJson = :params OR b.myParamsJsonVc = :params)")
List<Batch2JobInstanceEntity> findInstancesByJobIdAndParams(
@Param("defId") String theDefinitionId, @Param("params") String theParams, Pageable thePageable);

@@ -65,7 +65,7 @@ Stream<Batch2WorkChunkEntity> fetchChunksForStep(

@Modifying
@Query("UPDATE Batch2WorkChunkEntity e SET e.myStatus = :status, e.myEndTime = :et, "
+ "e.myRecordsProcessed = :rp, e.myErrorCount = e.myErrorCount + :errorRetries, e.mySerializedData = null, "
+ "e.myRecordsProcessed = :rp, e.myErrorCount = e.myErrorCount + :errorRetries, e.mySerializedData = null, e.mySerializedDataVc = null, "
+ "e.myWarningMessage = :warningMessage WHERE e.myId = :id")
void updateChunkStatusAndClearDataForEndSuccess(
@Param("id") String theChunkId,
@@ -77,7 +77,7 @@ void updateChunkStatusAndClearDataForEndSuccess(

@Modifying
@Query(
"UPDATE Batch2WorkChunkEntity e SET e.myStatus = :status, e.myEndTime = :et, e.mySerializedData = null, e.myErrorMessage = :em WHERE e.myId IN(:ids)")
"UPDATE Batch2WorkChunkEntity e SET e.myStatus = :status, e.myEndTime = :et, e.mySerializedData = null, e.mySerializedDataVc = null, e.myErrorMessage = :em WHERE e.myId IN(:ids)")
void updateAllChunksForInstanceStatusClearDataAndSetError(
@Param("ids") List<String> theChunkIds,
@Param("et") Date theEndTime,
@@ -36,6 +36,7 @@
import jakarta.persistence.Version;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.Length;

import java.io.Serializable;
import java.util.Date;
@@ -95,13 +96,17 @@ public class Batch2JobInstanceEntity implements Serializable {
@Column(name = "FAST_TRACKING", nullable = true)
private Boolean myFastTracking;

// TODO: VC column added in 7.2.0 - Remove non-VC column later
@Column(name = "PARAMS_JSON", length = PARAMS_JSON_MAX_LENGTH, nullable = true)
private String myParamsJson;

@Lob
@Lob // TODO: VC column added in 7.2.0 - Remove non-VC column later
@Column(name = "PARAMS_JSON_LOB", nullable = true)
private String myParamsJsonLob;

@Column(name = "PARAMS_JSON_VC", nullable = true, length = Length.LONG32)
private String myParamsJsonVc;

@Column(name = "CMB_RECS_PROCESSED", nullable = true)
private Integer myCombinedRecordsProcessed;

@@ -142,11 +147,14 @@ public class Batch2JobInstanceEntity implements Serializable {
* Any output from the job can be held in this column
* Even serialized json
*/
@Lob
@Lob // TODO: VC column added in 7.2.0 - Remove non-VC column later
@Basic(fetch = FetchType.LAZY)
@Column(name = "REPORT", nullable = true, length = Integer.MAX_VALUE - 1)
private String myReport;

@Column(name = "REPORT_VC", nullable = true, length = Length.LONG32)
private String myReportVc;

public String getCurrentGatedStepId() {
return myCurrentGatedStepId;
}
@@ -260,20 +268,19 @@ public void setStatus(StatusEnum theStatus) {
}

public String getParams() {
if (myParamsJsonVc != null) {
return myParamsJsonVc;
}
if (myParamsJsonLob != null) {
return myParamsJsonLob;
}
return myParamsJson;
}

public void setParams(String theParams) {
myParamsJsonVc = theParams;
myParamsJsonLob = null;
myParamsJson = null;
if (theParams != null && theParams.length() > PARAMS_JSON_MAX_LENGTH) {
myParamsJsonLob = theParams;
} else {
myParamsJson = theParams;
}
}

public boolean getWorkChunksPurged() {
@@ -309,11 +316,12 @@ public void setEstimatedTimeRemaining(String theEstimatedTimeRemaining) {
}

public String getReport() {
return myReport;
return myReportVc != null ? myReportVc : myReport;
}

public void setReport(String theReport) {
myReport = theReport;
myReportVc = theReport;
myReport = null;
}

public String getWarningMessages() {
@@ -362,7 +370,7 @@ public String toString() {
.append("progress", myProgress)
.append("errorMessage", myErrorMessage)
.append("estimatedTimeRemaining", myEstimatedTimeRemaining)
.append("report", myReport)
.append("report", getReport())
.append("warningMessages", myWarningMessages)
.append("initiatingUsername", myTriggeringUsername)
.append("initiatingclientId", myTriggeringClientId)
@@ -38,6 +38,7 @@
import jakarta.persistence.Version;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.Length;

import java.io.Serializable;
import java.util.Date;
@@ -92,11 +93,14 @@ public class Batch2WorkChunkEntity implements Serializable {
@Column(name = "TGT_STEP_ID", length = ID_MAX_LENGTH, nullable = false)
private String myTargetStepId;

@Lob
@Lob // TODO: VC column added in 7.2.0 - Remove non-VC column later
@Basic(fetch = FetchType.LAZY)
@Column(name = "CHUNK_DATA", nullable = true, length = Integer.MAX_VALUE - 1)
private String mySerializedData;

@Column(name = "CHUNK_DATA_VC", nullable = true, length = Length.LONG32)
private String mySerializedDataVc;

@Column(name = "STAT", length = STATUS_MAX_LENGTH, nullable = false)
@Enumerated(EnumType.STRING)
private WorkChunkStatusEnum myStatus;
@@ -263,11 +267,12 @@ public void setTargetStepId(String theTargetStepId) {
}

public String getSerializedData() {
return mySerializedData;
return mySerializedDataVc != null ? mySerializedDataVc : mySerializedData;
}

public void setSerializedData(String theSerializedData) {
mySerializedData = theSerializedData;
mySerializedData = null;
mySerializedDataVc = theSerializedData;
}

public WorkChunkStatusEnum getStatus() {
@@ -309,7 +314,7 @@ public String toString() {
.append("updateTime", myUpdateTime)
.append("recordsProcessed", myRecordsProcessed)
.append("targetStepId", myTargetStepId)
.append("serializedData", mySerializedData)
.append("serializedData", getSerializedData())
.append("status", myStatus)
.append("errorMessage", myErrorMessage)
.append("warningMessage", myWarningMessage)
@@ -32,6 +32,7 @@
import jakarta.persistence.ManyToOne;
import jakarta.persistence.SequenceGenerator;
import jakarta.persistence.Table;
import org.hibernate.Length;

import java.io.Serializable;
import java.nio.charset.StandardCharsets;
@@ -66,10 +67,13 @@ public class BulkImportJobFileEntity implements Serializable {
@Column(name = "FILE_DESCRIPTION", nullable = true, length = MAX_DESCRIPTION_LENGTH)
private String myFileDescription;

@Lob
@Column(name = "JOB_CONTENTS", nullable = false)
@Lob // TODO: VC column added in 7.2.0 - Remove non-VC column later
@Column(name = "JOB_CONTENTS", nullable = true)
private byte[] myContents;

@Column(name = "JOB_CONTENTS_VC", nullable = true, length = Length.LONG32)
private String myContentsVc;

@Column(name = "TENANT_NAME", nullable = true, length = PartitionEntity.MAX_NAME_LENGTH)
private String myTenantName;

@@ -98,11 +102,16 @@ public void setFileSequence(int theFileSequence) {
}

public String getContents() {
return new String(myContents, StandardCharsets.UTF_8);
if (myContentsVc != null) {
return myContentsVc;
} else {
return new String(myContents, StandardCharsets.UTF_8);
}
}

public void setContents(String theContents) {
myContents = theContents.getBytes(StandardCharsets.UTF_8);
myContentsVc = theContents;
myContents = null;
}

public BulkImportJobFileJson toJson() {
@@ -50,6 +50,7 @@
import jakarta.persistence.Version;
import org.apache.commons.lang3.SerializationUtils;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.hibernate.Length;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.annotations.OptimisticLock;
import org.hibernate.type.SqlTypes;
@@ -141,14 +142,21 @@ public class Search implements ICachedSearchDetails, Serializable {

@Column(name = "RESOURCE_TYPE", length = 200, nullable = true)
private String myResourceType;

/**
* Note that this field may have the request partition IDs prepended to it
*/
@Lob()
@Lob // TODO: VC column added in 7.2.0 - Remove non-VC column later
@Basic(fetch = FetchType.LAZY)
@Column(name = "SEARCH_QUERY_STRING", nullable = true, updatable = false, length = MAX_SEARCH_QUERY_STRING)
private String mySearchQueryString;

/**
* Note that this field may have the request partition IDs prepended to it
*/
@Column(name = "SEARCH_QUERY_STRING_VC", nullable = true, length = Length.LONG32)
private String mySearchQueryStringVc;

@Column(name = "SEARCH_QUERY_STRING_HASH", nullable = true, updatable = false)
private Integer mySearchQueryStringHash;

@@ -172,10 +180,13 @@ public class Search implements ICachedSearchDetails, Serializable {
@Column(name = "OPTLOCK_VERSION", nullable = true)
private Integer myVersion;

@Lob
@Lob // TODO: VC column added in 7.2.0 - Remove non-VC column later
@Column(name = "SEARCH_PARAM_MAP", nullable = true)
private byte[] mySearchParameterMap;

@Column(name = "SEARCH_PARAM_MAP_BIN", nullable = true, length = Length.LONG32)
private byte[] mySearchParameterMapBin;

@Transient
private transient SearchParameterMap mySearchParameterMapTransient;

@@ -350,7 +361,7 @@ public void setResourceType(String theResourceType) {
* Note that this field may have the request partition IDs prepended to it
*/
public String getSearchQueryString() {
return mySearchQueryString;
return mySearchQueryStringVc != null ? mySearchQueryStringVc : mySearchQueryString;
}

public void setSearchQueryString(String theSearchQueryString, RequestPartitionId theRequestPartitionId) {
@@ -362,12 +373,13 @@ public void setSearchQueryString(String theSearchQueryString, RequestPartitionId
// We want this field to always have a wide distribution of values in order
// to avoid optimizers avoiding using it if it has lots of nulls, so in the
// case of null, just put a value that will never be hit
mySearchQueryString = UUID.randomUUID().toString();
mySearchQueryStringVc = UUID.randomUUID().toString();
} else {
mySearchQueryString = searchQueryString;
mySearchQueryStringVc = searchQueryString;
}

mySearchQueryStringHash = mySearchQueryString.hashCode();
mySearchQueryString = null;
mySearchQueryStringHash = mySearchQueryStringVc.hashCode();
}

public SearchTypeEnum getSearchType() {
@@ -466,16 +478,21 @@ public Optional<SearchParameterMap> getSearchParameterMap() {
return Optional.of(mySearchParameterMapTransient);
}
SearchParameterMap searchParameterMap = null;
if (mySearchParameterMap != null) {
searchParameterMap = SerializationUtils.deserialize(mySearchParameterMap);
byte[] searchParameterMapSerialized = mySearchParameterMapBin;
if (searchParameterMapSerialized == null) {
searchParameterMapSerialized = mySearchParameterMap;
}
if (searchParameterMapSerialized != null) {
searchParameterMap = SerializationUtils.deserialize(searchParameterMapSerialized);
mySearchParameterMapTransient = searchParameterMap;
}
return Optional.ofNullable(searchParameterMap);
}

public void setSearchParameterMap(SearchParameterMap theSearchParameterMap) {
mySearchParameterMapTransient = theSearchParameterMap;
mySearchParameterMap = SerializationUtils.serialize(theSearchParameterMap);
mySearchParameterMapBin = SerializationUtils.serialize(theSearchParameterMap);
mySearchParameterMap = null;
}

@Override
@@ -117,6 +117,7 @@ public HapiFhirJpaMigrationTasks(Set<String> theFlags) {
init640_after_20230126();
init660();
init680();
init680_Part2();
init700();
}

@@ -218,6 +219,44 @@ protected void init700() {
version.addTask(new ForceIdMigrationFixTask(version.getRelease(), "20231222.1"));
}

private void init680_Part2() {
Builder version = forVersion(VersionEnum.V6_8_0);

// Add additional LOB migration columns
version.onTable("BT2_JOB_INSTANCE")
.addColumn("20240227.1", "REPORT_VC")
.nullable()
.type(ColumnTypeEnum.TEXT);
version.onTable("BT2_JOB_INSTANCE")
.addColumn("20240227.2", "PARAMS_JSON_VC")
.nullable()
.type(ColumnTypeEnum.TEXT);

version.onTable("BT2_WORK_CHUNK")
.addColumn("20240227.3", "CHUNK_DATA_VC")
.nullable()
.type(ColumnTypeEnum.TEXT);

version.onTable("HFJ_SEARCH")
.addColumn("20240227.4", "SEARCH_QUERY_STRING_VC")
.nullable()
.type(ColumnTypeEnum.TEXT);
version.onTable("HFJ_SEARCH")
.addColumn("20240227.5", "SEARCH_PARAM_MAP_BIN")
.nullable()
.type(ColumnTypeEnum.BINARY);

version.onTable("HFJ_BLK_IMPORT_JOBFILE")
.addColumn("20240227.6", "JOB_CONTENTS_VC")
.nullable()
.type(ColumnTypeEnum.TEXT);

version.onTable("HFJ_BLK_IMPORT_JOBFILE")
.modifyColumn("20240227.7", "JOB_CONTENTS")
.nullable()
.withType(ColumnTypeEnum.BLOB);
}

protected void init680() {
Builder version = forVersion(VersionEnum.V6_8_0);
// HAPI-FHIR #4801 - Add New Index On HFJ_RESOURCE
@@ -207,14 +207,7 @@ public static ColumnType getColumnType(
case Types.BLOB:
return new ColumnType(ColumnTypeEnum.BLOB, length);
case Types.LONGVARBINARY:
if (DriverTypeEnum.MYSQL_5_7.equals(theConnectionProperties.getDriverType())) {
// See git
return new ColumnType(ColumnTypeEnum.BLOB, length);
} else {
throw new IllegalArgumentException(
Msg.code(32) + "Don't know how to handle datatype " + dataType
+ " for column " + theColumnName + " on table " + theTableName);
}
return new ColumnType(ColumnTypeEnum.BINARY, length);
case Types.VARBINARY:
if (DriverTypeEnum.MSSQL_2012.equals(theConnectionProperties.getDriverType())) {
// MS SQLServer seems to be mapping BLOB to VARBINARY under the covers, so we need