diff --git a/src/Hangfire.PostgreSql/Hangfire.PostgreSql.csproj b/src/Hangfire.PostgreSql/Hangfire.PostgreSql.csproj
index 102d2e1..f0e7460 100644
--- a/src/Hangfire.PostgreSql/Hangfire.PostgreSql.csproj
+++ b/src/Hangfire.PostgreSql/Hangfire.PostgreSql.csproj
@@ -25,6 +25,7 @@
<LangVersion>default</LangVersion>
<GenerateDocumentationFile>true</GenerateDocumentationFile>
<NoWarn>$(NoWarn);1591</NoWarn>
+ <InterceptorsNamespaces>$(InterceptorsNamespaces);Dapper.AOT</InterceptorsNamespaces>
@@ -33,6 +34,7 @@
+ <PackageReference Include="Dapper.AOT" Version="..." />
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
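
Aside (illustrative, not part of the patch): Dapper.AOT replaces Dapper's runtime reflection with compile-time C# interceptors, which is why the project adds the package and lists Dapper.AOT in InterceptorsNamespaces, and why the storage classes further down gain a [DapperAot] attribute. A minimal sketch of the opt-in pattern; the class name, method, and table are invented for illustration:

    using Dapper;
    using Npgsql;

    // Rough sketch only: the attribute opts this type's Dapper calls into the source-generated mappers.
    [DapperAot]
    public class ExampleJobCounter
    {
      public long CountJobs(NpgsqlConnection connection)
      {
        // With the Dapper.AOT package referenced and Dapper.AOT listed in InterceptorsNamespaces,
        // the compiler rewrites this call to a generated interceptor instead of using runtime reflection.
        return connection.QuerySingle<long>(@"SELECT COUNT(*) FROM ""hangfire"".""job""");
      }
    }
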
diff --git a/src/Hangfire.PostgreSql/JsonParameter.cs b/src/Hangfire.PostgreSql/JsonParameter.cs
index e3639e2..dcad4f3 100644
--- a/src/Hangfire.PostgreSql/JsonParameter.cs
+++ b/src/Hangfire.PostgreSql/JsonParameter.cs
@@ -1,42 +1,28 @@
using System;
-using System.Data;
using System.Text.Json;
-using Dapper;
using Hangfire.Annotations;
-using Npgsql;
-using NpgsqlTypes;
namespace Hangfire.PostgreSql;
-internal class JsonParameter : SqlMapper.ICustomQueryParameter
+internal static class JsonParameter
{
- [CanBeNull] private readonly object _value;
- private readonly ValueType _type;
-
- public JsonParameter([CanBeNull] object value) : this(value, ValueType.Object)
- {
- }
-
- public JsonParameter([CanBeNull] object value, ValueType type)
+ public static string GetParameterValue([CanBeNull] object value)
{
- _value = value;
- _type = type;
+ return GetParameterValue(value, ValueType.Object);
}
- public void AddParameter(IDbCommand command, string name)
+ public static string GetParameterValue([CanBeNull] object value, ValueType type)
{
- string value = _value switch {
+ return value switch {
string { Length: > 0 } stringValue => stringValue,
- string { Length: 0 } or null => GetDefaultValue(),
- var _ => JsonSerializer.Serialize(_value),
+ string { Length: 0 } or null => GetDefaultValue(type),
+ var _ => JsonSerializer.Serialize(value),
};
- command.Parameters.Add(new NpgsqlParameter(name, NpgsqlDbType.Jsonb) { Value = value });
}
- private string GetDefaultValue()
+ private static string GetDefaultValue(ValueType type)
{
- return _type switch
- {
+ return type switch {
ValueType.Object => "{}",
ValueType.Array => "[]",
var _ => throw new ArgumentOutOfRangeException(),
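
Aside (illustrative, not part of the patch): with the Dapper-specific SqlMapper.ICustomQueryParameter implementation removed, JsonParameter now just returns JSON text, and the SQL carries the ::jsonb cast that the old NpgsqlParameter(NpgsqlDbType.Jsonb) binding used to provide. A rough usage sketch under that assumption; the helper method and query below are invented for illustration:

    using Dapper;
    using Npgsql;

    namespace Hangfire.PostgreSql;

    // Rough sketch only: shows GetParameterValue paired with a @Param::jsonb placeholder.
    internal static class JsonParameterUsageSketch
    {
      internal static void InsertJob(NpgsqlConnection connection, object invocationData)
      {
        // Non-string values are serialized with System.Text.Json; null/empty input falls back
        // to "{}" (or "[]" when ValueType.Array is requested).
        string json = JsonParameter.GetParameterValue(invocationData);

        connection.Execute(
          @"INSERT INTO ""hangfire"".""job"" (""invocationdata"", ""arguments"", ""createdat"")
            VALUES (@InvocationData::jsonb, '[]'::jsonb, NOW())",
          new { InvocationData = json });
      }
    }
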
diff --git a/src/Hangfire.PostgreSql/PostgreSqlConnection.cs b/src/Hangfire.PostgreSql/PostgreSqlConnection.cs
index 3c0114d..ebd48d0 100644
--- a/src/Hangfire.PostgreSql/PostgreSqlConnection.cs
+++ b/src/Hangfire.PostgreSql/PostgreSqlConnection.cs
@@ -37,6 +37,7 @@
namespace Hangfire.PostgreSql
{
+ [DapperAot]
public class PostgreSqlConnection : JobStorageConnection
{
private readonly Dictionary<string, HashSet<Guid>> _lockedResources;
@@ -115,7 +116,7 @@ public override string CreateExpiredJob(
string createJobSql = $@"
INSERT INTO ""{_options.SchemaName}"".""job"" (""invocationdata"", ""arguments"", ""createdat"", ""expireat"")
- VALUES (@InvocationData, @Arguments, @CreatedAt, @ExpireAt)
+ VALUES (@InvocationData::jsonb, @Arguments::jsonb, @CreatedAt, @ExpireAt)
RETURNING ""id"";
";
@@ -124,30 +125,28 @@ public override string CreateExpiredJob(
return _storage.UseTransaction(_dedicatedConnection, (connection, transaction) => {
string jobId = connection.QuerySingle<long>(createJobSql,
new {
- InvocationData = new JsonParameter(SerializationHelper.Serialize(invocationData)),
- Arguments = new JsonParameter(invocationData.Arguments, JsonParameter.ValueType.Array),
+ InvocationData = JsonParameter.GetParameterValue(SerializationHelper.Serialize(invocationData)),
+ Arguments = JsonParameter.GetParameterValue(invocationData.Arguments, JsonParameter.ValueType.Array),
CreatedAt = createdAt,
ExpireAt = createdAt.Add(expireIn),
}).ToString(CultureInfo.InvariantCulture);
if (parameters.Count > 0)
{
- object[] parameterArray = new object[parameters.Count];
- int parameterIndex = 0;
- foreach (KeyValuePair<string, string> parameter in parameters)
- {
- parameterArray[parameterIndex++] = new {
- JobId = Convert.ToInt64(jobId, CultureInfo.InvariantCulture),
- Name = parameter.Key,
- parameter.Value,
- };
- }
+ var parameterArray = parameters
+ .Select(parameter =>
+ new {
+ JobId = Convert.ToInt64(jobId, CultureInfo.InvariantCulture),
+ Name = parameter.Key,
+ parameter.Value,
+ }
+ )
+ .ToArray();
string insertParameterSql = $@"
INSERT INTO ""{_options.SchemaName}"".""jobparameter"" (""jobid"", ""name"", ""value"")
VALUES (@JobId, @Name, @Value);
";
-
connection.Execute(insertParameterSql, parameterArray, transaction);
}
@@ -170,8 +169,7 @@ public override JobData GetJobData(string id)
SqlJob jobData = _storage.UseConnection(_dedicatedConnection,
connection => connection
- .Query<SqlJob>(sql, new { Id = Convert.ToInt64(id, CultureInfo.InvariantCulture) })
- .SingleOrDefault());
+ .QuerySingleOrDefault<SqlJob>(sql, new { Id = Convert.ToInt64(id, CultureInfo.InvariantCulture) }));
if (jobData == null)
{
@@ -218,8 +216,7 @@ public override StateData GetStateData(string jobId)
SqlState sqlState = _storage.UseConnection(_dedicatedConnection,
connection => connection
- .Query<SqlState>(sql, new { JobId = Convert.ToInt64(jobId, CultureInfo.InvariantCulture) })
- .SingleOrDefault());
+ .QuerySingleOrDefault<SqlState>(sql, new { JobId = Convert.ToInt64(jobId, CultureInfo.InvariantCulture) }));
return sqlState == null
? null
: new StateData {
@@ -350,7 +347,7 @@ public override List<string> GetFirstByLowestScoreFromSet(string key, double fro
ORDER BY ""score"" LIMIT @Limit;
",
new { Key = key, FromScore = fromScore, ToScore = toScore, Limit = count }))
- .ToList();
+ .AsList();
}
public override void SetRangeInHash(string key, IEnumerable<KeyValuePair<string, string>> keyValuePairs)
@@ -453,7 +450,7 @@ public override void AnnounceServer(string serverId, ServerContext context)
string sql = $@"
WITH ""inputvalues"" AS (
- SELECT @Id ""id"", @Data ""data"", NOW() ""lastheartbeat""
+ SELECT @Id ""id"", @Data::jsonb ""data"", NOW() ""lastheartbeat""
), ""updatedrows"" AS (
UPDATE ""{_options.SchemaName}"".""server"" ""updatetarget""
SET ""data"" = ""inputvalues"".""data"", ""lastheartbeat"" = ""inputvalues"".""lastheartbeat""
@@ -472,7 +469,7 @@ SELECT 1
";
_storage.UseConnection(_dedicatedConnection, connection => connection
- .Execute(sql, new { Id = serverId, Data = new JsonParameter(SerializationHelper.Serialize(data)) }));
+ .Execute(sql, new { Id = serverId, Data = JsonParameter.GetParameterValue(SerializationHelper.Serialize(data)) }));
}
public override void RemoveServer(string serverId)
@@ -546,7 +543,7 @@ public override List<string> GetAllItemsFromList(string key)
return _storage.UseConnection(_dedicatedConnection, connection => connection
.Query<string>(query, new { Key = key })
- .ToList());
+ .AsList());
}
public override long GetCounter(string key)
@@ -612,7 +609,7 @@ LIMIT @Limit OFFSET @Offset
return _storage.UseConnection(_dedicatedConnection, connection =>
connection
.Query<string>(query, new { Key = key, Limit = endingAt - startingFrom + 1, Offset = startingFrom })
- .ToList());
+ .AsList());
}
public override long GetHashCount(string key)
@@ -660,7 +657,7 @@ LIMIT @Limit OFFSET @Offset
return _storage.UseConnection(_dedicatedConnection, connection => connection
.Query<string>(query, new { Key = key, Limit = endingAt - startingFrom + 1, Offset = startingFrom })
- .ToList());
+ .AsList());
}
public override TimeSpan GetSetTtl(string key)
@@ -693,8 +690,7 @@ public override string GetValueFromHash(string key, string name)
string query = $@"SELECT ""value"" FROM ""{_options.SchemaName}"".""hash"" WHERE ""key"" = @Key AND ""field"" = @Field";
return _storage.UseConnection(_dedicatedConnection, connection => connection
- .Query<string>(query, new { Key = key, Field = name })
- .SingleOrDefault());
+ .QuerySingleOrDefault<string>(query, new { Key = key, Field = name }));
}
private IDisposable AcquireLock(string resource, TimeSpan timeout)
diff --git a/src/Hangfire.PostgreSql/PostgreSqlJobQueue.cs b/src/Hangfire.PostgreSql/PostgreSqlJobQueue.cs
index 89aa724..290b827 100644
--- a/src/Hangfire.PostgreSql/PostgreSqlJobQueue.cs
+++ b/src/Hangfire.PostgreSql/PostgreSqlJobQueue.cs
@@ -33,6 +33,7 @@
namespace Hangfire.PostgreSql
{
+ [DapperAot]
public class PostgreSqlJobQueue : IPersistentJobQueue
{
private const string JobNotificationChannel = "new_job";
@@ -163,9 +164,8 @@ LIMIT 1
try
{
using NpgsqlTransaction trx = connection.BeginTransaction(IsolationLevel.ReadCommitted);
- FetchedJob jobToFetch = connection.Query<FetchedJob>(fetchJobSql,
- new { Queues = queues.ToList() }, trx)
- .SingleOrDefault();
+ FetchedJob jobToFetch = connection.QuerySingleOrDefault<FetchedJob>(fetchJobSql,
+ new { Queues = queues.ToList() }, trx);
trx.Commit();
@@ -173,7 +173,7 @@ LIMIT 1
}
catch (InvalidOperationException)
{
- // thrown by .SingleOrDefault(): stop the exception propagation if the fetched job was concurrently fetched by another worker
+ // thrown by .QuerySingleOrDefault(): stop the exception propagation if the fetched job was concurrently fetched by another worker
}
finally
{
@@ -237,9 +237,9 @@ internal IFetchedJob Dequeue_UpdateCount(string[] queues, CancellationToken canc
{
cancellationToken.ThrowIfCancellationRequested();
- FetchedJob jobToFetch = _storage.UseConnection(null, connection => connection.Query<FetchedJob>(jobToFetchSql,
+ FetchedJob jobToFetch = _storage.UseConnection(null, connection => connection.QuerySingleOrDefault<FetchedJob>(jobToFetchSql,
new { Queues = queues.ToList() })
- .SingleOrDefault());
+ );
if (jobToFetch == null)
{
@@ -254,9 +254,9 @@ internal IFetchedJob Dequeue_UpdateCount(string[] queues, CancellationToken canc
}
else
{
- markJobAsFetched = _storage.UseConnection(null, connection => connection.Query<FetchedJob>(markJobAsFetchedSql,
+ markJobAsFetched = _storage.UseConnection(null, connection => connection.QuerySingleOrDefault<FetchedJob>(markJobAsFetchedSql,
jobToFetch)
- .SingleOrDefault());
+ );
}
}
while (markJobAsFetched == null);
@@ -318,7 +318,7 @@ private Task ListenForNotificationsAsync(CancellationToken cancellationToken)
}
[UsedImplicitly(ImplicitUseTargetFlags.WithMembers)]
- private class FetchedJob
+ internal class FetchedJob
{
public long Id { get; set; }
public long JobId { get; set; }
diff --git a/src/Hangfire.PostgreSql/PostgreSqlJobQueueMonitoringApi.cs b/src/Hangfire.PostgreSql/PostgreSqlJobQueueMonitoringApi.cs
index 4ec1be6..f62b905 100644
--- a/src/Hangfire.PostgreSql/PostgreSqlJobQueueMonitoringApi.cs
+++ b/src/Hangfire.PostgreSql/PostgreSqlJobQueueMonitoringApi.cs
@@ -26,6 +26,7 @@
namespace Hangfire.PostgreSql
{
+ [DapperAot]
internal class PostgreSqlJobQueueMonitoringApi : IPersistentJobQueueMonitoringApi
{
private readonly PostgreSqlStorage _storage;
@@ -38,7 +39,7 @@ public PostgreSqlJobQueueMonitoringApi(PostgreSqlStorage storage)
public IEnumerable<string> GetQueues()
{
string sqlQuery = $@"SELECT DISTINCT ""queue"" FROM ""{_storage.Options.SchemaName}"".""jobqueue""";
- return _storage.UseConnection(null, connection => connection.Query<string>(sqlQuery).ToList());
+ return _storage.UseConnection(null, connection => connection.Query<string>(sqlQuery).AsList());
}
public IEnumerable<long> GetEnqueuedJobIds(string queue, int from, int perPage)
@@ -69,7 +70,7 @@ SELECT COUNT(*)
";
(long enqueuedCount, long fetchedCount) = _storage.UseConnection(null, connection =>
- connection.QuerySingle<(long EnqueuedCount, long FetchedCount)>(sqlQuery, new { Queue = queue }));
+ connection.Query<EnqueuedAndFetchedCount>(sqlQuery, new { Queue = queue })).Single();
return new EnqueuedAndFetchedCountDto {
EnqueuedCount = enqueuedCount,
@@ -92,7 +93,9 @@ AND j.""id"" IS NOT NULL
return _storage.UseConnection(null, connection => connection.Query<long>(sqlQuery,
new { Queue = queue, Offset = from, Limit = perPage })
- .ToList());
+ .AsList());
}
+
+ internal record struct EnqueuedAndFetchedCount(long EnqueuedCount, long FetchedCount);
}
}
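
Aside (illustrative, not part of the patch): the named record struct replaces the earlier value-tuple projection, presumably so the Dapper.AOT generator can bind result columns to a concrete DTO. A sketch of that pattern; the query text and helper method are illustrative, not taken from the monitoring API:

    using Dapper;
    using Npgsql;

    // Rough sketch only: a named record struct as the Dapper result type instead of a value tuple.
    internal static class QueueCountsSketch
    {
      internal record struct EnqueuedAndFetchedCount(long EnqueuedCount, long FetchedCount);

      internal static EnqueuedAndFetchedCount GetCounts(NpgsqlConnection connection, string queue)
      {
        // Column aliases match the record's constructor parameters (matching is case-insensitive).
        return connection.QuerySingle<EnqueuedAndFetchedCount>(
          @"SELECT COUNT(*) FILTER (WHERE ""fetchedat"" IS NULL)     AS enqueuedcount,
                   COUNT(*) FILTER (WHERE ""fetchedat"" IS NOT NULL) AS fetchedcount
            FROM ""hangfire"".""jobqueue""
            WHERE ""queue"" = @Queue",
          new { Queue = queue });
      }
    }
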
diff --git a/src/Hangfire.PostgreSql/PostgreSqlMonitoringApi.cs b/src/Hangfire.PostgreSql/PostgreSqlMonitoringApi.cs
index 63a5e4c..178bf69 100644
--- a/src/Hangfire.PostgreSql/PostgreSqlMonitoringApi.cs
+++ b/src/Hangfire.PostgreSql/PostgreSqlMonitoringApi.cs
@@ -33,6 +33,7 @@
namespace Hangfire.PostgreSql
{
+ [DapperAot]
public class PostgreSqlMonitoringApi : IMonitoringApi
{
private readonly PersistentJobQueueProviderCollection _queueProviders;
@@ -399,9 +400,9 @@ UNION ALL
GROUP BY "key"
""";
- Dictionary<string, long> valuesMap = UseConnection(connection => connection.Query<(string Key, long Count)>(query,
+ Dictionary<string, long> valuesMap = UseConnection(connection => connection.Query<KeyCount>(query,
new { Keys = keyMaps.Keys.ToList() })
- .ToList()
+ .AsList()
.ToDictionary(x => x.Key, x => x.Count));
foreach (string key in keyMaps.Keys)
@@ -422,6 +423,8 @@ GROUP BY "key"
return result;
}
+ internal record struct KeyCount(string Key, long Count);
+
private IPersistentJobQueueMonitoringApi GetQueueApi(string queueName)
{
IPersistentJobQueueProvider provider = _queueProviders.GetProvider(queueName);
@@ -444,7 +447,7 @@ private JobList<EnqueuedJobDto> EnqueuedJobs(IEnumerable<long> jobIds)
List<SqlJob> jobs = UseConnection(connection => connection.Query<SqlJob>(enqueuedJobsSql,
new { JobIds = jobIds.ToList() })
- .ToList());
+ .AsList());
return DeserializeJobs(jobs,
(sqlJob, job, stateData) => new EnqueuedJobDto {
@@ -493,7 +496,7 @@ ORDER BY ""j"".""id"" DESC
List<SqlJob> jobs = UseConnection(connection => connection.Query<SqlJob>(jobsSql,
new { StateName = stateName, Limit = count, Offset = from })
- .ToList());
+ .AsList());
return DeserializeJobs(jobs, selector);
}
@@ -540,7 +543,7 @@ private JobList<FetchedJobDto> FetchedJobs(
List<SqlJob> jobs = UseConnection(connection => connection.Query<SqlJob>(fetchedJobsSql,
new { JobIds = jobIds.ToList() })
- .ToList());
+ .AsList());
Dictionary<string, FetchedJobDto> result = jobs.ToDictionary(job => job.Id.ToString(), job => new FetchedJobDto {
Job = DeserializeJob(job.InvocationData, job.Arguments),
diff --git a/src/Hangfire.PostgreSql/PostgreSqlWriteOnlyTransaction.cs b/src/Hangfire.PostgreSql/PostgreSqlWriteOnlyTransaction.cs
index eb8ecba..10dece7 100644
--- a/src/Hangfire.PostgreSql/PostgreSqlWriteOnlyTransaction.cs
+++ b/src/Hangfire.PostgreSql/PostgreSqlWriteOnlyTransaction.cs
@@ -108,7 +108,7 @@ public override void SetJobState(string jobId, IState state)
string addAndSetStateSql = $@"
WITH ""s"" AS (
INSERT INTO ""{_storage.Options.SchemaName}"".""state"" (""jobid"", ""name"", ""reason"", ""createdat"", ""data"")
- VALUES (@JobId, @Name, @Reason, @CreatedAt, @Data) RETURNING ""id""
+ VALUES (@JobId, @Name, @Reason, @CreatedAt, @Data::jsonb) RETURNING ""id""
)
UPDATE ""{_storage.Options.SchemaName}"".""job"" ""j""
SET ""stateid"" = s.""id"", ""statename"" = @Name
@@ -122,7 +122,7 @@ public override void SetJobState(string jobId, IState state)
state.Name,
state.Reason,
CreatedAt = DateTime.UtcNow,
- Data = new JsonParameter(SerializationHelper.Serialize(state.SerializeData())),
+ Data = JsonParameter.GetParameterValue(SerializationHelper.Serialize(state.SerializeData())),
Id = Convert.ToInt64(jobId, CultureInfo.InvariantCulture),
}));
}
@@ -131,7 +131,7 @@ public override void AddJobState(string jobId, IState state)
{
string addStateSql = $@"
INSERT INTO ""{_storage.Options.SchemaName}"".""state"" (""jobid"", ""name"", ""reason"", ""createdat"", ""data"")
- VALUES (@JobId, @Name, @Reason, @CreatedAt, @Data);
+ VALUES (@JobId, @Name, @Reason, @CreatedAt, @Data::jsonb);
";
QueueCommand(con => con.Execute(addStateSql,
@@ -140,7 +140,7 @@ public override void AddJobState(string jobId, IState state)
state.Name,
state.Reason,
CreatedAt = DateTime.UtcNow,
- Data = new JsonParameter(SerializationHelper.Serialize(state.SerializeData())),
+ Data = JsonParameter.GetParameterValue(SerializationHelper.Serialize(state.SerializeData())),
}));
}
diff --git a/tests/Hangfire.PostgreSql.Tests/ExpirationManagerFacts.cs b/tests/Hangfire.PostgreSql.Tests/ExpirationManagerFacts.cs
index ed46f38..757b97b 100644
--- a/tests/Hangfire.PostgreSql.Tests/ExpirationManagerFacts.cs
+++ b/tests/Hangfire.PostgreSql.Tests/ExpirationManagerFacts.cs
@@ -167,7 +167,7 @@ public void Execute_Processes_SetTable()
manager.Execute(_token);
// Assert
- Assert.Equal(0, connection.Query<long>($@"SELECT COUNT(*) FROM ""{GetSchemaName()}"".""set""").Single());
+ Assert.Equal(0, connection.QuerySingle<long>($@"SELECT COUNT(*) FROM ""{GetSchemaName()}"".""set"""));
});
}
diff --git a/tests/Hangfire.PostgreSql.Tests/PostgreSqlConnectionFacts.cs b/tests/Hangfire.PostgreSql.Tests/PostgreSqlConnectionFacts.cs
index 8787802..38ccace 100644
--- a/tests/Hangfire.PostgreSql.Tests/PostgreSqlConnectionFacts.cs
+++ b/tests/Hangfire.PostgreSql.Tests/PostgreSqlConnectionFacts.cs
@@ -194,7 +194,7 @@ public void GetJobData_ReturnsResult_WhenJobExists()
{
string arrangeSql = $@"
INSERT INTO ""{GetSchemaName()}"".""job"" (""invocationdata"", ""arguments"", ""statename"", ""createdat"")
- VALUES (@InvocationData, @Arguments, @StateName, NOW()) RETURNING ""id""
+ VALUES (@InvocationData::jsonb, @Arguments::jsonb, @StateName, NOW()) RETURNING ""id""
";
UseConnections((connection, jobStorageConnection) => {
@@ -202,9 +202,9 @@ public void GetJobData_ReturnsResult_WhenJobExists()
long jobId = connection.QuerySingle<long>(arrangeSql,
new {
- InvocationData = new JsonParameter(SerializationHelper.Serialize(InvocationData.SerializeJob(job))),
+ InvocationData = JsonParameter.GetParameterValue(SerializationHelper.Serialize(InvocationData.SerializeJob(job))),
StateName = "Succeeded",
- Arguments = new JsonParameter("[\"\\\"Arguments\\\"\"]", JsonParameter.ValueType.Array),
+ Arguments = JsonParameter.GetParameterValue("[\"\\\"Arguments\\\"\"]", JsonParameter.ValueType.Array),
});
JobData result = jobStorageConnection.GetJobData(jobId.ToString(CultureInfo.InvariantCulture));
@@ -250,7 +250,7 @@ public void GetStateData_ReturnsCorrectData()
VALUES(@JobId, 'old-state', NOW());
INSERT INTO ""{GetSchemaName()}"".""state"" (""jobid"", ""name"", ""reason"", ""data"", ""createdat"")
- VALUES(@JobId, @Name, @Reason, @Data, NOW())
+ VALUES(@JobId, @Name, @Reason, @Data::jsonb, NOW())
RETURNING ""id"";
";
@@ -268,7 +268,7 @@ public void GetStateData_ReturnsCorrectData()
long jobId = connection.QuerySingle<long>(createJobSql);
long stateId = connection.QuerySingle<long>(createStateSql,
- new { JobId = jobId, Name = "Name", Reason = "Reason", Data = new JsonParameter(SerializationHelper.Serialize(data)) });
+ new { JobId = jobId, Name = "Name", Reason = "Reason", Data = JsonParameter.GetParameterValue(SerializationHelper.Serialize(data)) });
connection.Execute(updateJobStateSql, new { JobId = jobId, StateId = stateId });
@@ -627,7 +627,7 @@ public void AnnounceServer_CreatesOrUpdatesARecord()
};
jobStorageConnection.AnnounceServer("server", context1);
- dynamic server = connection.Query($@"SELECT * FROM ""{GetSchemaName()}"".""server""").Single();
+ dynamic server = connection.QuerySingle($@"SELECT * FROM ""{GetSchemaName()}"".""server""");
Assert.Equal("server", server.id);
ServerContext serverData = JsonSerializer.Deserialize<ServerContext>(server.data);
@@ -640,7 +640,7 @@ public void AnnounceServer_CreatesOrUpdatesARecord()
WorkerCount = 1000,
};
jobStorageConnection.AnnounceServer("server", context2);
- dynamic sameServer = connection.Query($@"SELECT * FROM ""{GetSchemaName()}"".""server""").Single();
+ dynamic sameServer = connection.QuerySingle($@"SELECT * FROM ""{GetSchemaName()}"".""server""");
Assert.Equal("server", sameServer.id);
Assert.Contains("1000", sameServer.data);
});
@@ -668,7 +668,7 @@ public void RemoveServer_RemovesAServerRecord()
jobStorageConnection.RemoveServer("Server1");
- dynamic server = connection.Query($@"SELECT * FROM ""{GetSchemaName()}"".""server""").Single();
+ dynamic server = connection.QuerySingle($@"SELECT * FROM ""{GetSchemaName()}"".""server""");
Assert.NotEqual("Server1", server.Id, StringComparer.OrdinalIgnoreCase);
});
}
@@ -736,7 +736,7 @@ public void RemoveTimedOutServers_DoItsWorkPerfectly()
jobStorageConnection.RemoveTimedOutServers(TimeSpan.FromHours(15));
- dynamic liveServer = connection.Query($@"SELECT * FROM ""{GetSchemaName()}"".""server""").Single();
+ dynamic liveServer = connection.QuerySingle($@"SELECT * FROM ""{GetSchemaName()}"".""server""");
Assert.Equal("server2", liveServer.id);
});
}
@@ -1365,7 +1365,7 @@ TransactionScope CreateTransactionScope()
UseConnections((connection, _) => {
if (completeTransactionScope)
{
- dynamic sqlJob = connection.Query($@"SELECT * FROM ""{GetSchemaName()}"".""job""").Single();
+ dynamic sqlJob = connection.QuerySingle($@"SELECT * FROM ""{GetSchemaName()}"".""job""");
Assert.Equal(jobId, sqlJob.id.ToString());
Assert.Equal(createdAt, sqlJob.createdat);
Assert.Null((long?)sqlJob.stateid);
@@ -1373,7 +1373,7 @@ TransactionScope CreateTransactionScope()
}
else
{
- TestJob job = connection.Query<TestJob>($@"SELECT * FROM ""{GetSchemaName()}"".""job""").SingleOrDefault();
+ TestJob job = connection.QuerySingleOrDefault<TestJob>($@"SELECT * FROM ""{GetSchemaName()}"".""job""");
Assert.Null(job);
}
});
diff --git a/tests/Hangfire.PostgreSql.Tests/PostgreSqlDistributedLockFacts.cs b/tests/Hangfire.PostgreSql.Tests/PostgreSqlDistributedLockFacts.cs
index 2147a54..17bc105 100644
--- a/tests/Hangfire.PostgreSql.Tests/PostgreSqlDistributedLockFacts.cs
+++ b/tests/Hangfire.PostgreSql.Tests/PostgreSqlDistributedLockFacts.cs
@@ -1,6 +1,5 @@
using System;
using System.Data;
-using System.Linq;
using System.Threading;
using Dapper;
using Hangfire.PostgreSql.Tests.Utils;
@@ -234,8 +233,8 @@ public void Dispose_ReleasesExclusiveApplicationLock_WithoutUseNativeDatabaseTra
PostgreSqlDistributedLock.Acquire(connection, "hello", _timeout, options);
PostgreSqlDistributedLock.Release(connection, "hello", options);
- long lockCount = connection.Query<long>($@"SELECT COUNT(*) FROM ""{GetSchemaName()}"".""lock"" WHERE ""resource"" = @Resource",
- new { Resource = "hello" }).Single();
+ long lockCount = connection.QuerySingle<long>($@"SELECT COUNT(*) FROM ""{GetSchemaName()}"".""lock"" WHERE ""resource"" = @Resource",
+ new { Resource = "hello" });
Assert.Equal(0, lockCount);
});
diff --git a/tests/Hangfire.PostgreSql.Tests/PostgreSqlFetchedJobFacts.cs b/tests/Hangfire.PostgreSql.Tests/PostgreSqlFetchedJobFacts.cs
index 2bd6402..4e7407b 100644
--- a/tests/Hangfire.PostgreSql.Tests/PostgreSqlFetchedJobFacts.cs
+++ b/tests/Hangfire.PostgreSql.Tests/PostgreSqlFetchedJobFacts.cs
@@ -113,7 +113,7 @@ public void Requeue_SetsFetchedAtValueToNull()
// Assert
dynamic record = _storage.UseConnection(null, connection =>
- connection.Query($@"SELECT * FROM ""{GetSchemaName()}"".""jobqueue""").Single());
+ connection.QuerySingle($@"SELECT * FROM ""{GetSchemaName()}"".""jobqueue"""));
Assert.Null(record.fetchedat);
}
@@ -131,7 +131,7 @@ public void Timer_UpdatesFetchedAtColumn()
Thread.Sleep(TimeSpan.FromSeconds(10));
processingJob.ExecuteKeepAliveQueryIfRequired();
- dynamic record = connection.Query($@"SELECT * FROM ""{GetSchemaName()}"".""jobqueue""").Single();
+ dynamic record = connection.QuerySingle($@"SELECT * FROM ""{GetSchemaName()}"".""jobqueue""");
Assert.NotNull(processingJob.FetchedAt);
Assert.Equal(processingJob.FetchedAt, record.fetchedat);
@@ -157,7 +157,7 @@ public void RemoveFromQueue_AfterTimer_RemovesJobFromTheQueue()
processingJob.RemoveFromQueue();
// Assert
- int count = connection.Query<int>($@"SELECT count(*) FROM ""{GetSchemaName()}"".""jobqueue""").Single();
+ int count = connection.QuerySingle<int>($@"SELECT count(*) FROM ""{GetSchemaName()}"".""jobqueue""");
Assert.Equal(0, count);
}
});
@@ -179,7 +179,7 @@ public void RequeueQueue_AfterTimer_SetsFetchedAtValueToNull()
processingJob.Requeue();
// Assert
- dynamic record = connection.Query($@"SELECT * FROM ""{GetSchemaName()}"".""jobqueue""").Single();
+ dynamic record = connection.QuerySingle($@"SELECT * FROM ""{GetSchemaName()}"".""jobqueue""");
Assert.Null(record.fetchedat);
}
});
@@ -198,7 +198,7 @@ public void Dispose_SetsFetchedAtValueToNull_IfThereWereNoCallsToComplete()
// Assert
dynamic record = _storage.UseConnection(null, connection =>
- connection.Query($@"SELECT * FROM ""{GetSchemaName()}"".""jobqueue""").Single());
+ connection.QuerySingle($@"SELECT * FROM ""{GetSchemaName()}"".""jobqueue"""));
Assert.Null(record.fetchedat);
}
diff --git a/tests/Hangfire.PostgreSql.Tests/PostgreSqlInstallerFacts.cs b/tests/Hangfire.PostgreSql.Tests/PostgreSqlInstallerFacts.cs
index 80d9002..44e9a0d 100644
--- a/tests/Hangfire.PostgreSql.Tests/PostgreSqlInstallerFacts.cs
+++ b/tests/Hangfire.PostgreSql.Tests/PostgreSqlInstallerFacts.cs
@@ -1,6 +1,5 @@
using System;
using System.Globalization;
-using System.Linq;
using Dapper;
using Hangfire.PostgreSql.Tests.Utils;
using Npgsql;
@@ -19,7 +18,7 @@ public void InstallingSchemaUpdatesVersionAndShouldNotThrowAnException()
PostgreSqlObjectsInstaller.Install(connection, schemaName);
- int lastVersion = connection.Query<int>($@"SELECT version FROM ""{schemaName}"".""schema""").Single();
+ int lastVersion = connection.QuerySingle<int>($@"SELECT version FROM ""{schemaName}"".""schema""");
Assert.Equal(23, lastVersion);
connection.Execute($@"DROP SCHEMA ""{schemaName}"" CASCADE;");
@@ -38,7 +37,7 @@ public void InstallingSchemaWithCapitalsUpdatesVersionAndShouldNotThrowAnExcepti
PostgreSqlObjectsInstaller.Install(connection, schemaName);
- int lastVersion = connection.Query<int>($@"SELECT version FROM ""{schemaName}"".""schema""").Single();
+ int lastVersion = connection.QuerySingle<int>($@"SELECT version FROM ""{schemaName}"".""schema""");
Assert.Equal(23, lastVersion);
connection.Execute($@"DROP SCHEMA ""{schemaName}"" CASCADE;");
diff --git a/tests/Hangfire.PostgreSql.Tests/PostgreSqlJobQueueFacts.cs b/tests/Hangfire.PostgreSql.Tests/PostgreSqlJobQueueFacts.cs
index f25cea1..f2a3cdd 100644
--- a/tests/Hangfire.PostgreSql.Tests/PostgreSqlJobQueueFacts.cs
+++ b/tests/Hangfire.PostgreSql.Tests/PostgreSqlJobQueueFacts.cs
@@ -195,7 +195,7 @@ private void Dequeue_ShouldLeaveJobInTheQueue_ButSetItsFetchedAtValue(bool useNa
string arrangeSql = $@"
WITH i AS (
INSERT INTO ""{GetSchemaName()}"".""job"" (""invocationdata"", ""arguments"", ""createdat"")
- VALUES (@InvocationData, @Arguments, NOW())
+ VALUES (@InvocationData::jsonb, @Arguments::jsonb, NOW())
RETURNING ""id""
)
INSERT INTO ""{GetSchemaName()}"".""jobqueue"" (""jobid"", ""queue"")
@@ -205,7 +205,7 @@ WITH i AS (
// Arrange
UseConnection((connection, storage) => {
connection.Execute(arrangeSql,
- new { InvocationData = new JsonParameter(""), Arguments = new JsonParameter("", JsonParameter.ValueType.Array), Queue = "default" });
+ new { InvocationData = JsonParameter.GetParameterValue(""), Arguments = JsonParameter.GetParameterValue("", JsonParameter.ValueType.Array), Queue = "default" });
PostgreSqlJobQueue queue = CreateJobQueue(storage, useNativeDatabaseTransactions);
// Act
@@ -256,7 +256,7 @@ private void Dequeue_ShouldFetchATimedOutJobs_FromTheSpecifiedQueue(bool useNati
string arrangeSql = $@"
WITH i AS (
INSERT INTO ""{GetSchemaName()}"".""job"" (""invocationdata"", ""arguments"", ""createdat"")
- VALUES (@InvocationData, @Arguments, NOW())
+ VALUES (@InvocationData::jsonb, @Arguments::jsonb, NOW())
RETURNING ""id""
)
INSERT INTO ""{GetSchemaName()}"".""jobqueue"" (""jobid"", ""queue"", ""fetchedat"")
@@ -270,8 +270,8 @@ WITH i AS (
new {
Queue = "default",
FetchedAt = DateTime.UtcNow.AddDays(-1),
- InvocationData = new JsonParameter(""),
- Arguments = new JsonParameter("", JsonParameter.ValueType.Array),
+ InvocationData = JsonParameter.GetParameterValue(""),
+ Arguments = JsonParameter.GetParameterValue("", JsonParameter.ValueType.Array),
});
PostgreSqlJobQueue queue = CreateJobQueue(storage, useNativeDatabaseTransactions, useSlidingInvisibilityTimeout: useSlidingInvisibilityTimeout);
@@ -303,7 +303,7 @@ private void Dequeue_ShouldSetFetchedAt_OnlyForTheFetchedJob(bool useNativeDatab
string arrangeSql = $@"
WITH i AS (
INSERT INTO ""{GetSchemaName()}"".""job"" (""invocationdata"", ""arguments"", ""createdat"")
- VALUES (@InvocationData, @Arguments, NOW())
+ VALUES (@InvocationData::jsonb, @Arguments::jsonb, NOW())
RETURNING ""id""
)
INSERT INTO ""{GetSchemaName()}"".""jobqueue"" (""jobid"", ""queue"")
@@ -313,8 +313,8 @@ WITH i AS (
UseConnection((connection, storage) => {
connection.Execute(arrangeSql,
new[] {
- new { Queue = "default", InvocationData = new JsonParameter(""), Arguments = new JsonParameter("", JsonParameter.ValueType.Array) },
- new { Queue = "default", InvocationData = new JsonParameter(""), Arguments = new JsonParameter("", JsonParameter.ValueType.Array) },
+ new { Queue = "default", InvocationData = JsonParameter.GetParameterValue(""), Arguments = JsonParameter.GetParameterValue("", JsonParameter.ValueType.Array) },
+ new { Queue = "default", InvocationData = JsonParameter.GetParameterValue(""), Arguments = JsonParameter.GetParameterValue("", JsonParameter.ValueType.Array) },
});
PostgreSqlJobQueue queue = CreateJobQueue(storage, useNativeDatabaseTransactions);
@@ -350,7 +350,7 @@ private void Dequeue_ShouldFetchJobs_OnlyFromSpecifiedQueues(bool useNativeDatab
string arrangeSql = $@"
WITH i AS (
INSERT INTO ""{GetSchemaName()}"".""job"" (""invocationdata"", ""arguments"", ""createdat"")
- VALUES (@InvocationData, @Arguments, NOW())
+ VALUES (@InvocationData::jsonb, @Arguments::jsonb, NOW())
RETURNING ""id""
)
INSERT INTO ""{GetSchemaName()}"".""jobqueue"" (""jobid"", ""queue"")
@@ -360,7 +360,7 @@ WITH i AS (
PostgreSqlJobQueue queue = CreateJobQueue(storage, useNativeDatabaseTransactions);
connection.Execute(arrangeSql,
- new { Queue = "critical", InvocationData = new JsonParameter(""), Arguments = new JsonParameter("", JsonParameter.ValueType.Array) });
+ new { Queue = "critical", InvocationData = JsonParameter.GetParameterValue(""), Arguments = JsonParameter.GetParameterValue("", JsonParameter.ValueType.Array) });
Assert.Throws<OperationCanceledException>(() => queue.Dequeue(_defaultQueues,
CreateTimingOutCancellationToken()));
@@ -386,7 +386,7 @@ private void Dequeue_ShouldFetchJobs_FromMultipleQueues(bool useNativeDatabaseTr
string arrangeSql = $@"
WITH i AS (
INSERT INTO ""{GetSchemaName()}"".""job"" (""invocationdata"", ""arguments"", ""createdat"")
- VALUES (@InvocationData, @Arguments, NOW())
+ VALUES (@InvocationData::jsonb, @Arguments::jsonb, NOW())
RETURNING ""id""
)
INSERT INTO ""{GetSchemaName()}"".""jobqueue"" (""jobid"", ""queue"")
@@ -398,8 +398,8 @@ WITH i AS (
UseConnection((connection, storage) => {
connection.Execute(arrangeSql,
new[] {
- new { Queue = queueNames.First(), InvocationData = new JsonParameter("") , Arguments = new JsonParameter("", JsonParameter.ValueType.Array) },
- new { Queue = queueNames.Last(), InvocationData = new JsonParameter(""), Arguments = new JsonParameter("", JsonParameter.ValueType.Array) },
+ new { Queue = queueNames.First(), InvocationData = JsonParameter.GetParameterValue("") , Arguments = JsonParameter.GetParameterValue("", JsonParameter.ValueType.Array) },
+ new { Queue = queueNames.Last(), InvocationData = JsonParameter.GetParameterValue(""), Arguments = JsonParameter.GetParameterValue("", JsonParameter.ValueType.Array) },
});
PostgreSqlJobQueue queue = CreateJobQueue(storage, useNativeDatabaseTransactions);
@@ -531,7 +531,7 @@ private void Enqueue_AddsAJobToTheQueue(bool useNativeDatabaseTransactions)
queue.Enqueue(connection, "default", "1");
- dynamic record = connection.Query($@"SELECT * FROM ""{GetSchemaName()}"".""jobqueue""").Single();
+ dynamic record = connection.QuerySingle($@"SELECT * FROM ""{GetSchemaName()}"".""jobqueue""");
Assert.Equal("1", record.jobid.ToString());
Assert.Equal("default", record.queue);
Assert.Null(record.FetchedAt);
diff --git a/tests/Hangfire.PostgreSql.Tests/PostgreSqlMonitoringApiFacts.cs b/tests/Hangfire.PostgreSql.Tests/PostgreSqlMonitoringApiFacts.cs
index 4f6f217..2f8d3d4 100644
--- a/tests/Hangfire.PostgreSql.Tests/PostgreSqlMonitoringApiFacts.cs
+++ b/tests/Hangfire.PostgreSql.Tests/PostgreSqlMonitoringApiFacts.cs
@@ -29,7 +29,7 @@ public void GetJobs_MixedCasing_ReturnsJob()
{
string arrangeSql = $@"
INSERT INTO ""{ConnectionUtils.GetSchemaName()}"".""job""(""invocationdata"", ""arguments"", ""createdat"")
- VALUES (@InvocationData, @Arguments, NOW()) RETURNING ""id""";
+ VALUES (@InvocationData::jsonb, @Arguments::jsonb, NOW()) RETURNING ""id""";
Job job = Job.FromExpression(() => SampleMethod("Hello"));
InvocationData invocationData = InvocationData.SerializeJob(job);
@@ -37,8 +37,8 @@ public void GetJobs_MixedCasing_ReturnsJob()
UseConnection(connection => {
long jobId = connection.QuerySingle<long>(arrangeSql,
new {
- InvocationData = new JsonParameter(SerializationHelper.Serialize(invocationData)),
- Arguments = new JsonParameter(invocationData.Arguments, JsonParameter.ValueType.Array),
+ InvocationData = JsonParameter.GetParameterValue(SerializationHelper.Serialize(invocationData)),
+ Arguments = JsonParameter.GetParameterValue(invocationData.Arguments, JsonParameter.ValueType.Array),
});
Mock<IState> state = new();
diff --git a/tests/Hangfire.PostgreSql.Tests/PostgreSqlWriteOnlyTransactionFacts.cs b/tests/Hangfire.PostgreSql.Tests/PostgreSqlWriteOnlyTransactionFacts.cs
index 399c6c0..98a5a5a 100644
--- a/tests/Hangfire.PostgreSql.Tests/PostgreSqlWriteOnlyTransactionFacts.cs
+++ b/tests/Hangfire.PostgreSql.Tests/PostgreSqlWriteOnlyTransactionFacts.cs
@@ -103,8 +103,8 @@ public void SetJobState_AppendsAStateAndSetItToTheJob()
VALUES ('{{}}', '[]', NOW()) RETURNING ""id""";
UseConnection(connection => {
- dynamic jobId = connection.Query(arrangeSql).Single().id.ToString();
- dynamic anotherJobId = connection.Query(arrangeSql).Single().id.ToString();
+ dynamic jobId = connection.QuerySingle(arrangeSql).id.ToString();
+ dynamic anotherJobId = connection.QuerySingle(arrangeSql).id.ToString();
Mock<IState> state = new();
state.Setup(x => x.Name).Returns("State");
@@ -123,7 +123,7 @@ public void SetJobState_AppendsAStateAndSetItToTheJob()
Assert.Null(anotherJob.StateName);
Assert.Null(anotherJob.StateId);
- dynamic jobState = connection.Query($@"SELECT * FROM ""{GetSchemaName()}"".""state""").Single();
+ dynamic jobState = connection.QuerySingle($@"SELECT * FROM ""{GetSchemaName()}"".""state""");
Assert.Equal((string)jobId, jobState.jobid.ToString());
Assert.Equal("State", jobState.name);
Assert.Equal("Reason", jobState.reason);
@@ -160,8 +160,8 @@ TransactionScope CreateTransactionScope(IsolationLevel isolationLevel = Isolatio
string jobId = null;
string anotherJobId = null;
UseConnection(connection => {
- jobId = connection.Query(arrangeSql).Single().id.ToString();
- anotherJobId = connection.Query(arrangeSql).Single().id.ToString();
+ jobId = connection.QuerySingle(arrangeSql).id.ToString();
+ anotherJobId = connection.QuerySingle(arrangeSql).id.ToString();
});
using (TransactionScope scope = CreateTransactionScope())
@@ -188,7 +188,7 @@ TransactionScope CreateTransactionScope(IsolationLevel isolationLevel = Isolatio
Assert.Equal("State", job.StateName);
Assert.NotNull(job.StateId);
- dynamic jobState = connection.Query($@"SELECT * FROM ""{GetSchemaName()}"".""state""").Single();
+ dynamic jobState = connection.QuerySingle($@"SELECT * FROM ""{GetSchemaName()}"".""state""");
Assert.Equal(jobId, jobState.jobid.ToString());
Assert.Equal("State", jobState.name);
Assert.Equal("Reason", jobState.reason);
@@ -204,7 +204,7 @@ TransactionScope CreateTransactionScope(IsolationLevel isolationLevel = Isolatio
Assert.Null(job.StateName);
Assert.Null(job.StateId);
- Assert.Null(connection.Query($@"SELECT * FROM ""{GetSchemaName()}"".""state""").SingleOrDefault());
+ Assert.Null(connection.QuerySingleOrDefault($@"SELECT * FROM ""{GetSchemaName()}"".""state"""));
}
TestJob anotherJob = Helper.GetTestJob(connection, GetSchemaName(), anotherJobId);
@@ -226,7 +226,7 @@ public void AddJobState_JustAddsANewRecordInATable()
Dictionary<string, string> expectedData = new() { { "Name", "Value" } };
UseConnection(connection => {
- dynamic jobId = connection.Query(arrangeSql).Single().id.ToString(CultureInfo.InvariantCulture);
+ dynamic jobId = connection.QuerySingle(arrangeSql).id.ToString(CultureInfo.InvariantCulture);
Mock<IState> state = new();
state.Setup(x => x.Name).Returns("State");
@@ -239,7 +239,7 @@ public void AddJobState_JustAddsANewRecordInATable()
Assert.Null(job.StateName);
Assert.Null(job.StateId);
- dynamic jobState = connection.Query($@"SELECT * FROM ""{GetSchemaName()}"".""state""").Single();
+ dynamic jobState = connection.QuerySingle($@"SELECT * FROM ""{GetSchemaName()}"".""state""");
Assert.Equal((string)jobId, jobState.jobid.ToString(CultureInfo.InvariantCulture));
Assert.Equal("State", jobState.name);
Assert.Equal("Reason", jobState.reason);
@@ -285,7 +285,7 @@ public void IncrementCounter_AddsRecordToCounterTable_WithPositiveValue()
UseConnection(connection => {
Commit(connection, x => x.IncrementCounter("my-key"));
- dynamic record = connection.Query($@"SELECT * FROM ""{GetSchemaName()}"".""counter""").Single();
+ dynamic record = connection.QuerySingle($@"SELECT * FROM ""{GetSchemaName()}"".""counter""");
Assert.Equal("my-key", record.key);
Assert.Equal(1, record.value);
@@ -300,7 +300,7 @@ public void IncrementCounter_WithExpiry_AddsARecord_WithExpirationTimeSet()
UseConnection(connection => {
Commit(connection, x => x.IncrementCounter("my-key", TimeSpan.FromDays(1)));
- dynamic record = connection.Query($@"SELECT * FROM ""{GetSchemaName()}"".""counter""").Single();
+ dynamic record = connection.QuerySingle($@"SELECT * FROM ""{GetSchemaName()}"".""counter""");
Assert.Equal("my-key", record.key);
Assert.Equal(1, record.value);
@@ -336,7 +336,7 @@ public void DecrementCounter_AddsRecordToCounterTable_WithNegativeValue()
UseConnection(connection => {
Commit(connection, x => x.DecrementCounter("my-key"));
- dynamic record = connection.Query($@"SELECT * FROM ""{GetSchemaName()}"".""counter""").Single();
+ dynamic record = connection.QuerySingle($@"SELECT * FROM ""{GetSchemaName()}"".""counter""");
Assert.Equal("my-key", record.key);
Assert.Equal(-1, record.value);
@@ -351,7 +351,7 @@ public void DecrementCounter_WithExpiry_AddsARecord_WithExpirationTimeSet()
UseConnection(connection => {
Commit(connection, x => x.DecrementCounter("my-key", TimeSpan.FromDays(1)));
- dynamic record = connection.Query($@"SELECT * FROM ""{GetSchemaName()}"".""counter""").Single();
+ dynamic record = connection.QuerySingle($@"SELECT * FROM ""{GetSchemaName()}"".""counter""");
Assert.Equal("my-key", record.key);
Assert.Equal(-1, record.value);
@@ -387,7 +387,7 @@ public void AddToSet_AddsARecord_IfThereIsNo_SuchKeyAndValue()
UseConnection(connection => {
Commit(connection, x => x.AddToSet("my-key", "my-value"));
- dynamic record = connection.Query($@"SELECT * FROM ""{GetSchemaName()}"".""set""").Single();
+ dynamic record = connection.QuerySingle($@"SELECT * FROM ""{GetSchemaName()}"".""set""");
Assert.Equal("my-key", record.key);
Assert.Equal("my-value", record.value);
@@ -434,7 +434,7 @@ public void AddToSet_WithScore_AddsARecordWithScore_WhenBothKeyAndValueAreNotExi
UseConnection(connection => {
Commit(connection, x => x.AddToSet("my-key", "my-value", 3.2));
- dynamic record = connection.Query($@"SELECT * FROM ""{GetSchemaName()}"".""set""").Single();
+ dynamic record = connection.QuerySingle($@"SELECT * FROM ""{GetSchemaName()}"".""set""");
Assert.Equal("my-key", record.key);
Assert.Equal("my-value", record.value);
@@ -452,7 +452,7 @@ public void AddToSet_WithScore_UpdatesAScore_WhenBothKeyAndValueAreExist()
x.AddToSet("my-key", "my-value", 3.2);
});
- dynamic record = connection.Query($@"SELECT * FROM ""{GetSchemaName()}"".""set""").Single();
+ dynamic record = connection.QuerySingle($@"SELECT * FROM ""{GetSchemaName()}"".""set""");
Assert.Equal(3.2, record.score, 3);
});
@@ -496,10 +496,10 @@ void CommitTags(PostgreSqlWriteOnlyTransaction transaction, IEnumerable
});
UseConnection(connection => {
- int jobsCountUnderMySharedTag = connection.Query<int>($@"
+ int jobsCountUnderMySharedTag = connection.QuerySingle<int>($@"
SELECT COUNT(*)
FROM ""{GetSchemaName()}"".set
- WHERE key LIKE 'tags:my-shared-tag'").Single();
+ WHERE key LIKE 'tags:my-shared-tag'");
Assert.Equal(loopIterations, jobsCountUnderMySharedTag);
@@ -511,10 +511,10 @@ SELECT COUNT(*)
Assert.All(jobsCountsUnderJobTypeTags, count => Assert.Equal(loopIterations / jobGroups, count));
- int jobLinkTagsCount = connection.Query<int>($@"
+ int jobLinkTagsCount = connection.QuerySingle<int>($@"
SELECT COUNT(*) FROM ""{GetSchemaName()}"".set
where value ~ '^\d+$'
- ").Single();
+ ");
Assert.Equal(loopIterations * totalTagsCount, jobLinkTagsCount);
});
@@ -575,7 +575,7 @@ public void InsertToList_AddsARecord_WithGivenValues()
UseConnection(connection => {
Commit(connection, x => x.InsertToList("my-key", "my-value"));
- dynamic record = connection.Query($@"SELECT * FROM ""{GetSchemaName()}"".""list""").Single();
+ dynamic record = connection.QuerySingle($@"SELECT * FROM ""{GetSchemaName()}"".""list""");
Assert.Equal("my-key", record.key);
Assert.Equal("my-value", record.value);
@@ -859,7 +859,7 @@ public void RemoveSet_RemovesASet_WithAGivenKey()
Commit(connection, x => x.RemoveSet("set-1"));
- dynamic record = connection.Query($@"SELECT * FROM ""{GetSchemaName()}"".""set""").Single();
+ dynamic record = connection.QuerySingle($@"SELECT * FROM ""{GetSchemaName()}"".""set""");
Assert.Equal("set-2", record.key);
});
}
@@ -1097,7 +1097,7 @@ public void AddToQueue_AddsAJobToTheQueue_UsingStorageConnection_WithTransaction
string connectionString = ConnectionUtils.GetConnectionString();
using (NpgsqlConnection connection = new NpgsqlConnection(connectionString))
{
- dynamic _ = connection.Query($@"SELECT * FROM ""{GetSchemaName()}"".""jobqueue""").FirstOrDefault();
+ dynamic _ = connection.QueryFirstOrDefault($@"SELECT * FROM ""{GetSchemaName()}"".""jobqueue""");
}
PostgreSqlStorageOptions options = new() { EnableTransactionScopeEnlistment = true };
@@ -1122,7 +1122,7 @@ public void AddToQueue_AddsAJobToTheQueue_UsingStorageConnection_WithTransaction
}
UseConnection(connection => {
- dynamic record = connection.Query($@"SELECT * FROM ""{GetSchemaName()}"".""jobqueue""").Single();
+ dynamic record = connection.QuerySingle($@"SELECT * FROM ""{GetSchemaName()}"".""jobqueue""");
Assert.Equal(jobId, record.jobid.ToString());
Assert.Equal("default", record.queue);
Assert.Null(record.FetchedAt);
diff --git a/tests/Hangfire.PostgreSql.Tests/Utils/ConnectionUtils.cs b/tests/Hangfire.PostgreSql.Tests/Utils/ConnectionUtils.cs
index e01cba1..dc6e119 100644
--- a/tests/Hangfire.PostgreSql.Tests/Utils/ConnectionUtils.cs
+++ b/tests/Hangfire.PostgreSql.Tests/Utils/ConnectionUtils.cs
@@ -17,7 +17,7 @@ public static class ConnectionUtils
private const string DefaultDatabaseName = @"hangfire_tests";
private const string DefaultSchemaName = @"hangfire";
- private const string DefaultConnectionStringTemplate = @"Server=127.0.0.1;Port=5432;Database=postgres;User Id=postgres;Password=password;";
+ private const string DefaultConnectionStringTemplate = @"Server=127.0.0.1;Port=5432;Database=postgres;User Id=postgres;Password=password;Include Error Detail=true";
public static string GetDatabaseName()
{