From 6a2ad0ff282091c54ad12fe416d7746b4143f064 Mon Sep 17 00:00:00 2001 From: Adam <103067949+AdamL-Microsoft@users.noreply.github.com> Date: Tue, 29 Aug 2023 12:57:19 -0700 Subject: [PATCH 01/32] Release 8.7.1 (hotfix) (#3459) * Remove the retention policy setting (#3452) --------- Co-authored-by: Cheick Keita --- .devcontainer/devcontainer.json | 3 +- .github/workflows/ci.yml | 2 - CURRENT_VERSION | 2 +- .../ApiService/Functions/QueueJobResult.cs | 60 ------- .../ApiService/OneFuzzTypes/Model.cs | 45 ----- src/ApiService/ApiService/Program.cs | 1 - .../ApiService/onefuzzlib/Config.cs | 1 - .../ApiService/onefuzzlib/Extension.cs | 44 +++-- .../onefuzzlib/JobResultOperations.cs | 121 ------------- .../ApiService/onefuzzlib/OnefuzzContext.cs | 2 - .../IntegrationTests/Fakes/TestContext.cs | 3 - src/agent/Cargo.lock | 16 -- src/agent/Cargo.toml | 1 - src/agent/onefuzz-agent/src/config.rs | 12 -- src/agent/onefuzz-agent/src/log_uploader.rs | 29 ++++ src/agent/onefuzz-agent/src/work.rs | 5 +- src/agent/onefuzz-result/Cargo.toml | 18 -- src/agent/onefuzz-result/src/lib.rs | 4 - src/agent/onefuzz-task/Cargo.toml | 1 - src/agent/onefuzz-task/src/local/cmd.rs | 42 ++++- src/agent/onefuzz-task/src/local/common.rs | 26 ++- .../example_templates/libfuzzer_basic.yml | 34 ++-- .../src/local/generic_analysis.rs | 137 ++++++++++++++- .../src/local/generic_crash_report.rs | 138 ++++++++++++++- .../src/local/generic_generator.rs | 142 ++++++++++++++- src/agent/onefuzz-task/src/local/libfuzzer.rs | 161 +++++++++++++++++- .../src/local/libfuzzer_crash_report.rs | 128 +++++++++++++- .../onefuzz-task/src/local/libfuzzer_merge.rs | 84 ++++++++- .../src/local/libfuzzer_regression.rs | 134 ++++++++++++++- .../src/local/libfuzzer_test_input.rs | 83 +++++++++ src/agent/onefuzz-task/src/local/mod.rs | 1 + src/agent/onefuzz-task/src/local/radamsa.rs | 78 +++++++++ src/agent/onefuzz-task/src/local/schema.json | 8 +- src/agent/onefuzz-task/src/local/template.rs | 13 +- 
.../onefuzz-task/src/local/test_input.rs | 86 ++++++++++ .../src/tasks/analysis/generic.rs | 5 +- src/agent/onefuzz-task/src/tasks/config.rs | 20 --- .../src/tasks/coverage/generic.rs | 8 +- .../onefuzz-task/src/tasks/fuzz/generator.rs | 7 +- .../src/tasks/fuzz/libfuzzer/common.rs | 49 ++---- .../onefuzz-task/src/tasks/fuzz/supervisor.rs | 15 +- src/agent/onefuzz-task/src/tasks/heartbeat.rs | 2 +- .../onefuzz-task/src/tasks/merge/generic.rs | 2 +- .../src/tasks/merge/libfuzzer_merge.rs | 2 +- .../src/tasks/regression/common.rs | 15 +- .../src/tasks/regression/generic.rs | 3 +- .../src/tasks/regression/libfuzzer.rs | 3 +- .../src/tasks/report/crash_report.rs | 45 +---- .../src/tasks/report/dotnet/generic.rs | 22 +-- .../onefuzz-task/src/tasks/report/generic.rs | 14 +- .../src/tasks/report/libfuzzer_report.rs | 5 - src/agent/onefuzz/Cargo.toml | 1 - src/agent/onefuzz/src/blob/url.rs | 23 +-- src/agent/onefuzz/src/syncdir.rs | 15 +- .../bicep-templates/storageAccounts.bicep | 2 +- src/integration-tests/integration-test.py | 50 ++---- src/runtime-tools/linux/setup.sh | 64 ++----- 57 files changed, 1388 insertions(+), 649 deletions(-) delete mode 100644 src/ApiService/ApiService/Functions/QueueJobResult.cs delete mode 100644 src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs delete mode 100644 src/agent/onefuzz-result/Cargo.toml delete mode 100644 src/agent/onefuzz-result/src/lib.rs create mode 100644 src/agent/onefuzz-task/src/local/radamsa.rs mode change 100644 => 100755 src/runtime-tools/linux/setup.sh diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index d3fcf050ed..4059b3d7c1 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -13,7 +13,6 @@ "**/target/**": true }, "lldb.executable": "/usr/bin/lldb", - "dotnet.server.useOmnisharp": true, "omnisharp.enableEditorConfigSupport": true, "omnisharp.enableRoslynAnalyzers": true, "python.defaultInterpreterPath": 
"/workspaces/onefuzz/src/venv/bin/python", @@ -49,4 +48,4 @@ "features": { "ghcr.io/devcontainers/features/azure-cli:1": {} } -} \ No newline at end of file +} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5f07124dd7..456491b6c7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -550,11 +550,9 @@ jobs: mkdir -p artifacts/linux-libfuzzer mkdir -p artifacts/linux-libfuzzer-with-options - mkdir -p artifacts/mariner-libfuzzer (cd libfuzzer ; make ) cp -r libfuzzer/fuzz.exe libfuzzer/seeds artifacts/linux-libfuzzer cp -r libfuzzer/fuzz.exe libfuzzer/seeds artifacts/linux-libfuzzer-with-options - cp -r libfuzzer/fuzz.exe libfuzzer/seeds artifacts/mariner-libfuzzer mkdir -p artifacts/linux-libfuzzer-regression (cd libfuzzer-regression ; make ) diff --git a/CURRENT_VERSION b/CURRENT_VERSION index 3ecb398163..e5c15102d9 100644 --- a/CURRENT_VERSION +++ b/CURRENT_VERSION @@ -1 +1 @@ -8.9.0 \ No newline at end of file +8.9.0 diff --git a/src/ApiService/ApiService/Functions/QueueJobResult.cs b/src/ApiService/ApiService/Functions/QueueJobResult.cs deleted file mode 100644 index d781a4d1e1..0000000000 --- a/src/ApiService/ApiService/Functions/QueueJobResult.cs +++ /dev/null @@ -1,60 +0,0 @@ -using System.Text.Json; -using Microsoft.Azure.Functions.Worker; -using Microsoft.Extensions.Logging; -using Microsoft.OneFuzz.Service.OneFuzzLib.Orm; -namespace Microsoft.OneFuzz.Service.Functions; - - -public class QueueJobResult { - private readonly ILogger _log; - private readonly IOnefuzzContext _context; - - public QueueJobResult(ILogger logTracer, IOnefuzzContext context) { - _log = logTracer; - _context = context; - } - - [Function("QueueJobResult")] - public async Async.Task Run([QueueTrigger("job-result", Connection = "AzureWebJobsStorage")] string msg) { - - var _tasks = _context.TaskOperations; - var _jobs = _context.JobOperations; - - _log.LogInformation("job result: {msg}", msg); - var jr = JsonSerializer.Deserialize(msg, 
EntityConverter.GetJsonSerializerOptions()).EnsureNotNull($"wrong data {msg}"); - - var task = await _tasks.GetByTaskId(jr.TaskId); - if (task == null) { - _log.LogWarning("invalid {TaskId}", jr.TaskId); - return; - } - - var job = await _jobs.Get(task.JobId); - if (job == null) { - _log.LogWarning("invalid {JobId}", task.JobId); - return; - } - - JobResultData? data = jr.Data; - if (data == null) { - _log.LogWarning($"job result data is empty, throwing out: {jr}"); - return; - } - - var jobResultType = data.Type; - _log.LogInformation($"job result data type: {jobResultType}"); - - Dictionary value; - if (jr.Value.Count > 0) { - value = jr.Value; - } else { - _log.LogWarning($"job result data is empty, throwing out: {jr}"); - return; - } - - var jobResult = await _context.JobResultOperations.CreateOrUpdate(job.JobId, jobResultType, value); - if (!jobResult.IsOk) { - _log.LogError("failed to create or update with job result {JobId}", job.JobId); - } - } -} diff --git a/src/ApiService/ApiService/OneFuzzTypes/Model.cs b/src/ApiService/ApiService/OneFuzzTypes/Model.cs index 23811e9fe0..67cbea39b6 100644 --- a/src/ApiService/ApiService/OneFuzzTypes/Model.cs +++ b/src/ApiService/ApiService/OneFuzzTypes/Model.cs @@ -33,19 +33,6 @@ public enum HeartbeatType { TaskAlive, } -[SkipRename] -public enum JobResultType { - NewCrashingInput, - NoReproCrashingInput, - NewReport, - NewUniqueReport, - NewRegressionReport, - NewCoverage, - NewCrashDump, - CoverageData, - RuntimeStats, -} - public record HeartbeatData(HeartbeatType Type); public record TaskHeartbeatEntry( @@ -54,16 +41,6 @@ public record TaskHeartbeatEntry( Guid MachineId, HeartbeatData[] Data); -public record JobResultData(JobResultType Type); - -public record TaskJobResultEntry( - Guid TaskId, - Guid? 
JobId, - Guid MachineId, - JobResultData Data, - Dictionary Value - ); - public record NodeHeartbeatEntry(Guid NodeId, HeartbeatData[] Data); public record NodeCommandStopIfFree(); @@ -918,27 +895,6 @@ public record SecretAddress(Uri Url) : ISecret { public record SecretData(ISecret Secret) { } -public record JobResult( - [PartitionKey][RowKey] Guid JobId, - string Project, - string Name, - double NewCrashingInput = 0, - double NoReproCrashingInput = 0, - double NewReport = 0, - double NewUniqueReport = 0, - double NewRegressionReport = 0, - double NewCrashDump = 0, - double InstructionsCovered = 0, - double TotalInstructions = 0, - double CoverageRate = 0, - double IterationCount = 0 -) : EntityBase() { - public JobResult(Guid JobId, string Project, string Name) : this( - JobId: JobId, - Project: Project, - Name: Name, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) { } -} - public record JobConfig( string Project, string Name, @@ -1105,7 +1061,6 @@ public record TaskUnitConfig( string? InstanceTelemetryKey, string? MicrosoftTelemetryKey, Uri HeartbeatQueue, - Uri JobResultQueue, Dictionary Tags ) { public Uri? 
inputQueue { get; set; } diff --git a/src/ApiService/ApiService/Program.cs b/src/ApiService/ApiService/Program.cs index f26463883b..7db11a8569 100644 --- a/src/ApiService/ApiService/Program.cs +++ b/src/ApiService/ApiService/Program.cs @@ -180,7 +180,6 @@ public static async Async.Task Main() { .AddScoped() .AddScoped() .AddScoped() - .AddScoped() .AddScoped() .AddScoped() .AddScoped() diff --git a/src/ApiService/ApiService/onefuzzlib/Config.cs b/src/ApiService/ApiService/onefuzzlib/Config.cs index 872cedbc01..71af317348 100644 --- a/src/ApiService/ApiService/onefuzzlib/Config.cs +++ b/src/ApiService/ApiService/onefuzzlib/Config.cs @@ -71,7 +71,6 @@ private static BlobContainerSasPermissions ConvertPermissions(ContainerPermissio InstanceTelemetryKey: _serviceConfig.ApplicationInsightsInstrumentationKey, MicrosoftTelemetryKey: _serviceConfig.OneFuzzTelemetry, HeartbeatQueue: await _queue.GetQueueSas("task-heartbeat", StorageType.Config, QueueSasPermissions.Add) ?? throw new Exception("unable to get heartbeat queue sas"), - JobResultQueue: await _queue.GetQueueSas("job-result", StorageType.Config, QueueSasPermissions.Add) ?? throw new Exception("unable to get heartbeat queue sas"), Tags: task.Config.Tags ?? 
new Dictionary() ); diff --git a/src/ApiService/ApiService/onefuzzlib/Extension.cs b/src/ApiService/ApiService/onefuzzlib/Extension.cs index fbf62dd343..7995026eca 100644 --- a/src/ApiService/ApiService/onefuzzlib/Extension.cs +++ b/src/ApiService/ApiService/onefuzzlib/Extension.cs @@ -36,9 +36,7 @@ public async Async.Task> GenericExtensions(AzureLocati var extensions = new List(); var instanceConfig = await _context.ConfigOperations.Fetch(); - if (vmOs == Os.Windows) { - extensions.Add(await MonitorExtension(region)); - } + extensions.Add(await MonitorExtension(region, vmOs)); var depenency = DependencyExtension(region, vmOs); if (depenency is not null) { @@ -331,21 +329,37 @@ public async Async.Task AgentConfig(AzureLocation region, Os throw new NotSupportedException($"unsupported OS: {vmOs}"); } - public async Async.Task MonitorExtension(AzureLocation region) { + public async Async.Task MonitorExtension(AzureLocation region, Os vmOs) { var settings = await _context.LogAnalytics.GetMonitorSettings(); var extensionSettings = JsonSerializer.Serialize(new { WorkspaceId = settings.Id }, _extensionSerializerOptions); var protectedExtensionSettings = JsonSerializer.Serialize(new { WorkspaceKey = settings.Key }, _extensionSerializerOptions); - return new VMExtensionWrapper { - Location = region, - Name = "OMSExtension", - TypePropertiesType = "MicrosoftMonitoringAgent", - Publisher = "Microsoft.EnterpriseCloud.Monitoring", - TypeHandlerVersion = "1.0", - AutoUpgradeMinorVersion = true, - Settings = new BinaryData(extensionSettings), - ProtectedSettings = new BinaryData(protectedExtensionSettings), - EnableAutomaticUpgrade = false - }; + if (vmOs == Os.Windows) { + return new VMExtensionWrapper { + Location = region, + Name = "OMSExtension", + TypePropertiesType = "MicrosoftMonitoringAgent", + Publisher = "Microsoft.EnterpriseCloud.Monitoring", + TypeHandlerVersion = "1.0", + AutoUpgradeMinorVersion = true, + Settings = new BinaryData(extensionSettings), + 
ProtectedSettings = new BinaryData(protectedExtensionSettings), + EnableAutomaticUpgrade = false + }; + } else if (vmOs == Os.Linux) { + return new VMExtensionWrapper { + Location = region, + Name = "OmsAgentForLinux", + TypePropertiesType = "OmsAgentForLinux", + Publisher = "Microsoft.EnterpriseCloud.Monitoring", + TypeHandlerVersion = "1.0", + AutoUpgradeMinorVersion = true, + Settings = new BinaryData(extensionSettings), + ProtectedSettings = new BinaryData(protectedExtensionSettings), + EnableAutomaticUpgrade = false + }; + } else { + throw new NotSupportedException($"unsupported os: {vmOs}"); + } } diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs deleted file mode 100644 index 1166cf91d4..0000000000 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ /dev/null @@ -1,121 +0,0 @@ -using ApiService.OneFuzzLib.Orm; -using Microsoft.Extensions.Logging; -using Polly; -namespace Microsoft.OneFuzz.Service; - -public interface IJobResultOperations : IOrm { - - Async.Task GetJobResult(Guid jobId); - Async.Task CreateOrUpdate(Guid jobId, JobResultType resultType, Dictionary resultValue); - -} -public class JobResultOperations : Orm, IJobResultOperations { - - public JobResultOperations(ILogger log, IOnefuzzContext context) - : base(log, context) { - } - - public async Async.Task GetJobResult(Guid jobId) { - return await SearchByPartitionKeys(new[] { jobId.ToString() }).SingleOrDefaultAsync(); - } - - private JobResult UpdateResult(JobResult result, JobResultType type, Dictionary resultValue) { - - var newResult = result; - double newValue; - switch (type) { - case JobResultType.NewCrashingInput: - newValue = result.NewCrashingInput + resultValue["count"]; - newResult = result with { NewCrashingInput = newValue }; - break; - case JobResultType.NewReport: - newValue = result.NewReport + resultValue["count"]; - newResult = result with { NewReport = newValue }; - break; - 
case JobResultType.NewUniqueReport: - newValue = result.NewUniqueReport + resultValue["count"]; - newResult = result with { NewUniqueReport = newValue }; - break; - case JobResultType.NewRegressionReport: - newValue = result.NewRegressionReport + resultValue["count"]; - newResult = result with { NewRegressionReport = newValue }; - break; - case JobResultType.NewCrashDump: - newValue = result.NewCrashDump + resultValue["count"]; - newResult = result with { NewCrashDump = newValue }; - break; - case JobResultType.CoverageData: - double newCovered = resultValue["covered"]; - double newTotalCovered = resultValue["features"]; - double newCoverageRate = resultValue["rate"]; - newResult = result with { InstructionsCovered = newCovered, TotalInstructions = newTotalCovered, CoverageRate = newCoverageRate }; - break; - case JobResultType.RuntimeStats: - double newTotalIterations = resultValue["total_count"]; - newResult = result with { IterationCount = newTotalIterations }; - break; - default: - _logTracer.LogWarning($"Invalid Field {type}."); - break; - } - _logTracer.LogInformation($"Attempting to log new result: {newResult}"); - return newResult; - } - - private async Async.Task TryUpdate(Job job, JobResultType resultType, Dictionary resultValue) { - var jobId = job.JobId; - - var jobResult = await GetJobResult(jobId); - - if (jobResult == null) { - _logTracer.LogInformation("Creating new JobResult for Job {JobId}", jobId); - - var entry = new JobResult(JobId: jobId, Project: job.Config.Project, Name: job.Config.Name); - - jobResult = UpdateResult(entry, resultType, resultValue); - - var r = await Insert(jobResult); - if (!r.IsOk) { - throw new InvalidOperationException($"failed to insert job result {jobResult.JobId}"); - } - _logTracer.LogInformation("created job result {JobId}", jobResult.JobId); - } else { - _logTracer.LogInformation("Updating existing JobResult entry for Job {JobId}", jobId); - - jobResult = UpdateResult(jobResult, resultType, resultValue); - - var r 
= await Update(jobResult); - if (!r.IsOk) { - throw new InvalidOperationException($"failed to insert job result {jobResult.JobId}"); - } - _logTracer.LogInformation("updated job result {JobId}", jobResult.JobId); - } - - return true; - } - - public async Async.Task CreateOrUpdate(Guid jobId, JobResultType resultType, Dictionary resultValue) { - - var job = await _context.JobOperations.Get(jobId); - if (job == null) { - return OneFuzzResultVoid.Error(ErrorCode.INVALID_REQUEST, "invalid job"); - } - - var success = false; - try { - _logTracer.LogInformation("attempt to update job result {JobId}", job.JobId); - var policy = Policy.Handle().WaitAndRetryAsync(50, _ => new TimeSpan(0, 0, 5)); - await policy.ExecuteAsync(async () => { - success = await TryUpdate(job, resultType, resultValue); - _logTracer.LogInformation("attempt {success}", success); - }); - return OneFuzzResultVoid.Ok; - } catch (Exception e) { - return OneFuzzResultVoid.Error(ErrorCode.UNABLE_TO_UPDATE, new string[] { - $"Unexpected failure when attempting to update job result for {job.JobId}", - $"Exception: {e}" - }); - } - } -} - diff --git a/src/ApiService/ApiService/onefuzzlib/OnefuzzContext.cs b/src/ApiService/ApiService/onefuzzlib/OnefuzzContext.cs index 03c6322663..d877bfddbb 100644 --- a/src/ApiService/ApiService/onefuzzlib/OnefuzzContext.cs +++ b/src/ApiService/ApiService/onefuzzlib/OnefuzzContext.cs @@ -19,7 +19,6 @@ public interface IOnefuzzContext { IExtensions Extensions { get; } IIpOperations IpOperations { get; } IJobOperations JobOperations { get; } - IJobResultOperations JobResultOperations { get; } ILogAnalytics LogAnalytics { get; } INodeMessageOperations NodeMessageOperations { get; } INodeOperations NodeOperations { get; } @@ -84,7 +83,6 @@ public OnefuzzContext(IServiceProvider serviceProvider) { public IVmOperations VmOperations => _serviceProvider.GetRequiredService(); public ISecretsOperations SecretsOperations => _serviceProvider.GetRequiredService(); public IJobOperations 
JobOperations => _serviceProvider.GetRequiredService(); - public IJobResultOperations JobResultOperations => _serviceProvider.GetRequiredService(); public IScheduler Scheduler => _serviceProvider.GetRequiredService(); public IConfig Config => _serviceProvider.GetRequiredService(); public ILogAnalytics LogAnalytics => _serviceProvider.GetRequiredService(); diff --git a/src/ApiService/IntegrationTests/Fakes/TestContext.cs b/src/ApiService/IntegrationTests/Fakes/TestContext.cs index 66d121e746..c46ff5fce7 100644 --- a/src/ApiService/IntegrationTests/Fakes/TestContext.cs +++ b/src/ApiService/IntegrationTests/Fakes/TestContext.cs @@ -32,7 +32,6 @@ public TestContext(IHttpClientFactory httpClientFactory, OneFuzzLoggerProvider p TaskOperations = new TaskOperations(provider.CreateLogger(), Cache, this); NodeOperations = new NodeOperations(provider.CreateLogger(), this); JobOperations = new JobOperations(provider.CreateLogger(), this); - JobResultOperations = new JobResultOperations(provider.CreateLogger(), this); NodeTasksOperations = new NodeTasksOperations(provider.CreateLogger(), this); TaskEventOperations = new TaskEventOperations(provider.CreateLogger(), this); NodeMessageOperations = new NodeMessageOperations(provider.CreateLogger(), this); @@ -58,7 +57,6 @@ public Async.Task InsertAll(params EntityBase[] objs) Node n => NodeOperations.Insert(n), Pool p => PoolOperations.Insert(p), Job j => JobOperations.Insert(j), - JobResult jr => JobResultOperations.Insert(jr), Repro r => ReproOperations.Insert(r), Scaleset ss => ScalesetOperations.Insert(ss), NodeTasks nt => NodeTasksOperations.Insert(nt), @@ -86,7 +84,6 @@ public Async.Task InsertAll(params EntityBase[] objs) public ITaskOperations TaskOperations { get; } public IJobOperations JobOperations { get; } - public IJobResultOperations JobResultOperations { get; } public INodeOperations NodeOperations { get; } public INodeTasksOperations NodeTasksOperations { get; } public ITaskEventOperations TaskEventOperations { 
get; } diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock index eb35241201..6136357d65 100644 --- a/src/agent/Cargo.lock +++ b/src/agent/Cargo.lock @@ -2138,7 +2138,6 @@ dependencies = [ "log", "nix", "notify", - "onefuzz-result", "onefuzz-telemetry", "pete", "pretty_assertions", @@ -2213,20 +2212,6 @@ dependencies = [ "serde_json", ] -[[package]] -name = "onefuzz-result" -version = "0.2.0" -dependencies = [ - "anyhow", - "async-trait", - "log", - "onefuzz-telemetry", - "reqwest", - "serde", - "storage-queue", - "uuid", -] - [[package]] name = "onefuzz-task" version = "0.2.0" @@ -2256,7 +2241,6 @@ dependencies = [ "num_cpus", "onefuzz", "onefuzz-file-format", - "onefuzz-result", "onefuzz-telemetry", "path-absolutize", "pretty_assertions", diff --git a/src/agent/Cargo.toml b/src/agent/Cargo.toml index ce01ae880c..2f4cea41a4 100644 --- a/src/agent/Cargo.toml +++ b/src/agent/Cargo.toml @@ -10,7 +10,6 @@ members = [ "onefuzz", "onefuzz-task", "onefuzz-agent", - "onefuzz-result", "onefuzz-file-format", "onefuzz-telemetry", "reqwest-retry", diff --git a/src/agent/onefuzz-agent/src/config.rs b/src/agent/onefuzz-agent/src/config.rs index fc623e72af..87edfb2c1b 100644 --- a/src/agent/onefuzz-agent/src/config.rs +++ b/src/agent/onefuzz-agent/src/config.rs @@ -34,8 +34,6 @@ pub struct StaticConfig { pub heartbeat_queue: Option, - pub job_result_queue: Option, - pub instance_id: Uuid, #[serde(default = "default_as_true")] @@ -73,8 +71,6 @@ struct RawStaticConfig { pub heartbeat_queue: Option, - pub job_result_queue: Option, - pub instance_id: Uuid, #[serde(default = "default_as_true")] @@ -121,7 +117,6 @@ impl StaticConfig { microsoft_telemetry_key: config.microsoft_telemetry_key, instance_telemetry_key: config.instance_telemetry_key, heartbeat_queue: config.heartbeat_queue, - job_result_queue: config.job_result_queue, instance_id: config.instance_id, managed: config.managed, machine_identity, @@ -157,12 +152,6 @@ impl StaticConfig { None }; - let job_result_queue = if 
let Ok(key) = std::env::var("ONEFUZZ_JOB_RESULT") { - Some(Url::parse(&key)?) - } else { - None - }; - let instance_telemetry_key = if let Ok(key) = std::env::var("ONEFUZZ_INSTANCE_TELEMETRY_KEY") { Some(InstanceTelemetryKey::new(Uuid::parse_str(&key)?)) @@ -194,7 +183,6 @@ impl StaticConfig { instance_telemetry_key, microsoft_telemetry_key, heartbeat_queue, - job_result_queue, instance_id, managed: !is_unmanaged, machine_identity, diff --git a/src/agent/onefuzz-agent/src/log_uploader.rs b/src/agent/onefuzz-agent/src/log_uploader.rs index d424013421..6bccc0bef2 100644 --- a/src/agent/onefuzz-agent/src/log_uploader.rs +++ b/src/agent/onefuzz-agent/src/log_uploader.rs @@ -210,3 +210,32 @@ async fn sync_file( blob_client.append_block(Body::from(f)).await?; Ok(len) } + +#[cfg(test)] +mod tests { + use std::io::Seek; + + use anyhow::Result; + use tokio::io::{AsyncReadExt, AsyncSeekExt}; + + #[allow(clippy::unused_io_amount)] + #[tokio::test] + #[ignore] + + async fn test_seek_behavior() -> Result<()> { + let path = "C:\\temp\\test.ps1"; + let mut std_file = std::fs::File::open(path)?; + std_file.seek(std::io::SeekFrom::Start(3))?; + + let mut tokio_file = tokio::fs::File::from_std(std_file); + + let buf = &mut [0u8; 5]; + tokio_file.read(buf).await?; + println!("******** buf {:?}", buf); + tokio_file.seek(std::io::SeekFrom::Start(0)).await?; + tokio_file.read(buf).await?; + println!("******** buf {:?}", buf); + + Ok(()) + } +} diff --git a/src/agent/onefuzz-agent/src/work.rs b/src/agent/onefuzz-agent/src/work.rs index d0222744a7..b55d1d86a1 100644 --- a/src/agent/onefuzz-agent/src/work.rs +++ b/src/agent/onefuzz-agent/src/work.rs @@ -91,10 +91,7 @@ impl WorkSet { pub fn setup_dir(&self) -> Result { let root = self.get_root_folder()?; - // Putting the setup container at the root for backward compatibility. 
- // The path of setup folder can be used as part of the deduplication logic in the bug filing service - let setup_root = root.parent().ok_or_else(|| anyhow!("Invalid root"))?; - self.setup_url.as_path(setup_root) + self.setup_url.as_path(root) } pub fn extra_setup_dir(&self) -> Result> { diff --git a/src/agent/onefuzz-result/Cargo.toml b/src/agent/onefuzz-result/Cargo.toml deleted file mode 100644 index 7c7de6615c..0000000000 --- a/src/agent/onefuzz-result/Cargo.toml +++ /dev/null @@ -1,18 +0,0 @@ -[package] -name = "onefuzz-result" -version = "0.2.0" -authors = ["fuzzing@microsoft.com"] -edition = "2021" -publish = false -license = "MIT" - -[dependencies] -anyhow = { version = "1.0", features = ["backtrace"] } -async-trait = "0.1" -reqwest = "0.11" -serde = "1.0" -storage-queue = { path = "../storage-queue" } -uuid = { version = "1.4", features = ["serde", "v4"] } -onefuzz-telemetry = { path = "../onefuzz-telemetry" } -log = "0.4" - diff --git a/src/agent/onefuzz-result/src/lib.rs b/src/agent/onefuzz-result/src/lib.rs deleted file mode 100644 index dae666ca9a..0000000000 --- a/src/agent/onefuzz-result/src/lib.rs +++ /dev/null @@ -1,4 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. 
- -pub mod job_result; diff --git a/src/agent/onefuzz-task/Cargo.toml b/src/agent/onefuzz-task/Cargo.toml index 4b3e8e8c43..4c6d48d8a7 100644 --- a/src/agent/onefuzz-task/Cargo.toml +++ b/src/agent/onefuzz-task/Cargo.toml @@ -47,7 +47,6 @@ serde_json = "1.0" serde_yaml = "0.9.21" onefuzz = { path = "../onefuzz" } onefuzz-telemetry = { path = "../onefuzz-telemetry" } -onefuzz-result = { path = "../onefuzz-result" } path-absolutize = "3.1" reqwest-retry = { path = "../reqwest-retry" } strum = "0.25" diff --git a/src/agent/onefuzz-task/src/local/cmd.rs b/src/agent/onefuzz-task/src/local/cmd.rs index cb800d445e..f1df4002c0 100644 --- a/src/agent/onefuzz-task/src/local/cmd.rs +++ b/src/agent/onefuzz-task/src/local/cmd.rs @@ -4,7 +4,11 @@ use super::{create_template, template}; #[cfg(any(target_os = "linux", target_os = "windows"))] use crate::local::coverage; -use crate::local::{common::add_common_config, libfuzzer_fuzz, tui::TerminalUi}; +use crate::local::{ + common::add_common_config, generic_analysis, generic_crash_report, generic_generator, + libfuzzer, libfuzzer_crash_report, libfuzzer_fuzz, libfuzzer_merge, libfuzzer_regression, + libfuzzer_test_input, radamsa, test_input, tui::TerminalUi, +}; use anyhow::{Context, Result}; use clap::{Arg, ArgAction, Command}; @@ -16,9 +20,19 @@ use tokio::{select, time::timeout}; #[derive(Debug, PartialEq, Eq, EnumString, IntoStaticStr, EnumIter)] #[strum(serialize_all = "kebab-case")] enum Commands { + Radamsa, #[cfg(any(target_os = "linux", target_os = "windows"))] Coverage, LibfuzzerFuzz, + LibfuzzerMerge, + LibfuzzerCrashReport, + LibfuzzerTestInput, + LibfuzzerRegression, + Libfuzzer, + CrashReport, + Generator, + Analysis, + TestInput, Template, CreateTemplate, } @@ -54,7 +68,23 @@ pub async fn run(args: clap::ArgMatches) -> Result<()> { match command { #[cfg(any(target_os = "linux", target_os = "windows"))] Commands::Coverage => coverage::run(&sub_args, event_sender).await, + Commands::Radamsa => radamsa::run(&sub_args, 
event_sender).await, + Commands::LibfuzzerCrashReport => { + libfuzzer_crash_report::run(&sub_args, event_sender).await + } Commands::LibfuzzerFuzz => libfuzzer_fuzz::run(&sub_args, event_sender).await, + Commands::LibfuzzerMerge => libfuzzer_merge::run(&sub_args, event_sender).await, + Commands::LibfuzzerTestInput => { + libfuzzer_test_input::run(&sub_args, event_sender).await + } + Commands::LibfuzzerRegression => { + libfuzzer_regression::run(&sub_args, event_sender).await + } + Commands::Libfuzzer => libfuzzer::run(&sub_args, event_sender).await, + Commands::CrashReport => generic_crash_report::run(&sub_args, event_sender).await, + Commands::Generator => generic_generator::run(&sub_args, event_sender).await, + Commands::Analysis => generic_analysis::run(&sub_args, event_sender).await, + Commands::TestInput => test_input::run(&sub_args, event_sender).await, Commands::Template => { let config = sub_args .get_one::("config") @@ -111,7 +141,17 @@ pub fn args(name: &'static str) -> Command { let app = match subcommand { #[cfg(any(target_os = "linux", target_os = "windows"))] Commands::Coverage => coverage::args(subcommand.into()), + Commands::Radamsa => radamsa::args(subcommand.into()), + Commands::LibfuzzerCrashReport => libfuzzer_crash_report::args(subcommand.into()), Commands::LibfuzzerFuzz => libfuzzer_fuzz::args(subcommand.into()), + Commands::LibfuzzerMerge => libfuzzer_merge::args(subcommand.into()), + Commands::LibfuzzerTestInput => libfuzzer_test_input::args(subcommand.into()), + Commands::LibfuzzerRegression => libfuzzer_regression::args(subcommand.into()), + Commands::Libfuzzer => libfuzzer::args(subcommand.into()), + Commands::CrashReport => generic_crash_report::args(subcommand.into()), + Commands::Generator => generic_generator::args(subcommand.into()), + Commands::Analysis => generic_analysis::args(subcommand.into()), + Commands::TestInput => test_input::args(subcommand.into()), Commands::Template => Command::new("template") .about("uses the template 
to generate a run") .args(vec![Arg::new("config") diff --git a/src/agent/onefuzz-task/src/local/common.rs b/src/agent/onefuzz-task/src/local/common.rs index 17940d799f..f8d7949e80 100644 --- a/src/agent/onefuzz-task/src/local/common.rs +++ b/src/agent/onefuzz-task/src/local/common.rs @@ -26,10 +26,20 @@ pub const INPUTS_DIR: &str = "inputs_dir"; pub const CRASHES_DIR: &str = "crashes_dir"; pub const CRASHDUMPS_DIR: &str = "crashdumps_dir"; pub const TARGET_WORKERS: &str = "target_workers"; +pub const REPORTS_DIR: &str = "reports_dir"; +pub const NO_REPRO_DIR: &str = "no_repro_dir"; pub const TARGET_TIMEOUT: &str = "target_timeout"; +pub const CHECK_RETRY_COUNT: &str = "check_retry_count"; +pub const DISABLE_CHECK_QUEUE: &str = "disable_check_queue"; +pub const UNIQUE_REPORTS_DIR: &str = "unique_reports_dir"; pub const COVERAGE_DIR: &str = "coverage_dir"; pub const READONLY_INPUTS: &str = "readonly_inputs_dir"; +pub const CHECK_ASAN_LOG: &str = "check_asan_log"; +pub const TOOLS_DIR: &str = "tools_dir"; +pub const RENAME_OUTPUT: &str = "rename_output"; pub const CHECK_FUZZER_HELP: &str = "check_fuzzer_help"; +pub const DISABLE_CHECK_DEBUGGER: &str = "disable_check_debugger"; +pub const REGRESSION_REPORTS_DIR: &str = "regression_reports_dir"; pub const TARGET_EXE: &str = "target_exe"; pub const TARGET_ENV: &str = "target_env"; @@ -37,6 +47,17 @@ pub const TARGET_OPTIONS: &str = "target_options"; // pub const SUPERVISOR_EXE: &str = "supervisor_exe"; // pub const SUPERVISOR_ENV: &str = "supervisor_env"; // pub const SUPERVISOR_OPTIONS: &str = "supervisor_options"; +pub const GENERATOR_EXE: &str = "generator_exe"; +pub const GENERATOR_ENV: &str = "generator_env"; +pub const GENERATOR_OPTIONS: &str = "generator_options"; + +pub const ANALYZER_EXE: &str = "analyzer_exe"; +pub const ANALYZER_OPTIONS: &str = "analyzer_options"; +pub const ANALYZER_ENV: &str = "analyzer_env"; +pub const ANALYSIS_DIR: &str = "analysis_dir"; +pub const ANALYSIS_INPUTS: &str = 
"analysis_inputs"; +pub const ANALYSIS_UNIQUE_INPUTS: &str = "analysis_unique_inputs"; +pub const PRESERVE_EXISTING_OUTPUTS: &str = "preserve_existing_outputs"; pub const CREATE_JOB_DIR: &str = "create_job_dir"; @@ -45,6 +66,7 @@ const WAIT_FOR_DIR_DELAY: Duration = Duration::from_secs(1); pub enum CmdType { Target, + Generator, // Supervisor, } @@ -68,6 +90,7 @@ pub fn get_cmd_exe(cmd_type: CmdType, args: &clap::ArgMatches) -> Result let name = match cmd_type { CmdType::Target => TARGET_EXE, // CmdType::Supervisor => SUPERVISOR_EXE, + CmdType::Generator => GENERATOR_EXE, }; args.get_one::(name) @@ -79,6 +102,7 @@ pub fn get_cmd_arg(cmd_type: CmdType, args: &clap::ArgMatches) -> Vec { let name = match cmd_type { CmdType::Target => TARGET_OPTIONS, // CmdType::Supervisor => SUPERVISOR_OPTIONS, + CmdType::Generator => GENERATOR_OPTIONS, }; args.get_many::(name) @@ -91,6 +115,7 @@ pub fn get_cmd_env(cmd_type: CmdType, args: &clap::ArgMatches) -> Result TARGET_ENV, // CmdType::Supervisor => SUPERVISOR_ENV, + CmdType::Generator => GENERATOR_ENV, }; get_hash_map(args, env_name) } @@ -240,7 +265,6 @@ pub async fn build_local_context( }, instance_telemetry_key: None, heartbeat_queue: None, - job_result_queue: None, microsoft_telemetry_key: None, logs: None, min_available_memory_mb: 0, diff --git a/src/agent/onefuzz-task/src/local/example_templates/libfuzzer_basic.yml b/src/agent/onefuzz-task/src/local/example_templates/libfuzzer_basic.yml index aba02c7991..7210893809 100644 --- a/src/agent/onefuzz-task/src/local/example_templates/libfuzzer_basic.yml +++ b/src/agent/onefuzz-task/src/local/example_templates/libfuzzer_basic.yml @@ -5,31 +5,28 @@ # 2. 
Install llvm and export LLVM_SYMBOLIZER_PATH like we do in setup.sh -required_args: &required_args - target_exe: "REPLACE_ME" # The path to your target - inputs: &inputs "REPLACE_ME" # A folder containining your inputs - crashes: &crashes "REPLACE_ME" # The folder where you want the crashing inputs to be output - crashdumps: "REPLACE_ME" # The folder where you want the crash dumps to be output - coverage: "REPLACE_ME" # The folder where you want the code coverage to be output - regression_reports: "REPLACE_ME" # The folder where you want the regression reports to be output - target_args: &target_args - <<: *required_args target_env: {} + target_exe: "C:\\temp\\onefuzz\\integration\\windows-libfuzzer\\fuzz.exe" target_options: [] +inputs: &inputs "C:\\temp\\onefuzz\\integration\\windows-libfuzzer\\seeds" + tasks: - type: LibFuzzer <<: *target_args + inputs: *inputs + crashes: &crash "./crashes" readonly_inputs: [] check_fuzzer_help: true - - type: LibfuzzerRegression + - type: "Report" <<: *target_args - - - type: "LibfuzzerCrashReport" - <<: *target_args - input_queue: *crashes + input_queue: *crash + crashes: *crash + reports: "./reports" + unique_reports: "./unique_reports" + no_repro: "./no_repro" check_fuzzer_help: true - type: "Coverage" @@ -38,11 +35,4 @@ tasks: - "{input}" input_queue: *inputs readonly_inputs: [*inputs] - - # The analysis task is optional in the libfuzzer_basic template - # - type: Analysis - # <<: *target_args - # analysis: "REPLACE_ME" # The folder where you want the analysis results to be output - # analyzer_exe: "REPLACE_ME" - # analyzer_options: [] - # analyzer_env: {} + coverage: "./coverage" diff --git a/src/agent/onefuzz-task/src/local/generic_analysis.rs b/src/agent/onefuzz-task/src/local/generic_analysis.rs index cbb31a1ff9..a1bc714d1a 100644 --- a/src/agent/onefuzz-task/src/local/generic_analysis.rs +++ b/src/agent/onefuzz-task/src/local/generic_analysis.rs @@ -3,13 +3,139 @@ use std::{collections::HashMap, path::PathBuf}; -use 
crate::tasks::config::CommonConfig; +use crate::{ + local::common::{ + build_local_context, get_cmd_arg, get_cmd_exe, get_hash_map, get_synced_dir, CmdType, + SyncCountDirMonitor, UiEvent, ANALYSIS_DIR, ANALYZER_ENV, ANALYZER_EXE, ANALYZER_OPTIONS, + CRASHES_DIR, NO_REPRO_DIR, REPORTS_DIR, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, TOOLS_DIR, + UNIQUE_REPORTS_DIR, + }, + tasks::{ + analysis::generic::{run as run_analysis, Config}, + config::CommonConfig, + }, +}; use anyhow::Result; use async_trait::async_trait; +use clap::{Arg, Command}; +use flume::Sender; use schemars::JsonSchema; +use storage_queue::QueueClient; use super::template::{RunContext, Template}; +pub fn build_analysis_config( + args: &clap::ArgMatches, + input_queue: Option, + common: CommonConfig, + event_sender: Option>, +) -> Result { + let target_exe = get_cmd_exe(CmdType::Target, args)?.into(); + let target_options = get_cmd_arg(CmdType::Target, args); + + let analyzer_exe = args + .get_one::(ANALYZER_EXE) + .cloned() + .ok_or_else(|| format_err!("expected {ANALYZER_EXE}"))?; + + let analyzer_options = args + .get_many::(ANALYZER_OPTIONS) + .unwrap_or_default() + .map(|x| x.to_string()) + .collect(); + + let analyzer_env = get_hash_map(args, ANALYZER_ENV)?; + let analysis = get_synced_dir(ANALYSIS_DIR, common.job_id, common.task_id, args)? + .monitor_count(&event_sender)?; + let tools = get_synced_dir(TOOLS_DIR, common.job_id, common.task_id, args)?; + let crashes = if input_queue.is_none() { + get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args) + .ok() + .monitor_count(&event_sender)? 
+ } else { + None + }; + let reports = get_synced_dir(REPORTS_DIR, common.job_id, common.task_id, args) + .ok() + .monitor_count(&event_sender)?; + let no_repro = get_synced_dir(NO_REPRO_DIR, common.job_id, common.task_id, args) + .ok() + .monitor_count(&event_sender)?; + let unique_reports = get_synced_dir(UNIQUE_REPORTS_DIR, common.job_id, common.task_id, args) + .ok() + .monitor_count(&event_sender)?; + + let config = Config { + analyzer_exe, + analyzer_options, + analyzer_env, + target_exe, + target_options, + input_queue, + crashes, + analysis, + tools: Some(tools), + reports, + unique_reports, + no_repro, + common, + }; + + Ok(config) +} + +pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { + let context = build_local_context(args, true, event_sender.clone()).await?; + let config = build_analysis_config(args, None, context.common_config.clone(), event_sender)?; + run_analysis(config).await +} + +pub fn build_shared_args(required_task: bool) -> Vec { + vec![ + Arg::new(TARGET_EXE).long(TARGET_EXE).required(true), + Arg::new(TARGET_ENV) + .long(TARGET_ENV) + .requires(TARGET_EXE) + .num_args(0..), + Arg::new(TARGET_OPTIONS) + .long(TARGET_OPTIONS) + .default_value("{input}") + .value_delimiter(' ') + .help("Use a quoted string with space separation to denote multiple arguments"), + Arg::new(CRASHES_DIR) + .long(CRASHES_DIR) + .value_parser(value_parser!(PathBuf)), + Arg::new(ANALYZER_OPTIONS) + .long(ANALYZER_OPTIONS) + .requires(ANALYZER_EXE) + .value_delimiter(' ') + .help("Use a quoted string with space separation to denote multiple arguments"), + Arg::new(ANALYZER_ENV) + .long(ANALYZER_ENV) + .requires(ANALYZER_EXE) + .num_args(0..), + Arg::new(TOOLS_DIR) + .long(TOOLS_DIR) + .value_parser(value_parser!(PathBuf)), + Arg::new(ANALYZER_EXE) + .long(ANALYZER_EXE) + .requires(ANALYSIS_DIR) + .requires(CRASHES_DIR) + .required(required_task), + Arg::new(ANALYSIS_DIR) + .long(ANALYSIS_DIR) + .requires(ANALYZER_EXE) + 
.requires(CRASHES_DIR) + .required(required_task), + ] +} + +pub fn args(name: &'static str) -> Command { + Command::new(name) + .about("execute a local-only generic analysis") + .args(&build_shared_args(true)) +} + #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct Analysis { analyzer_exe: String, @@ -20,7 +146,7 @@ pub struct Analysis { input_queue: Option, crashes: Option, analysis: PathBuf, - tools: Option, + tools: PathBuf, reports: Option, unique_reports: Option, no_repro: Option, @@ -65,10 +191,9 @@ impl Template for Analysis { .and_then(|path| context.to_monitored_sync_dir("crashes", path).ok()), analysis: context.to_monitored_sync_dir("analysis", self.analysis.clone())?, - tools: self - .tools - .as_ref() - .and_then(|path| context.to_monitored_sync_dir("tools", path).ok()), + tools: context + .to_monitored_sync_dir("tools", self.tools.clone()) + .ok(), reports: self .reports diff --git a/src/agent/onefuzz-task/src/local/generic_crash_report.rs b/src/agent/onefuzz-task/src/local/generic_crash_report.rs index 91dec1ae44..dc2773b341 100644 --- a/src/agent/onefuzz-task/src/local/generic_crash_report.rs +++ b/src/agent/onefuzz-task/src/local/generic_crash_report.rs @@ -3,14 +3,150 @@ use std::{collections::HashMap, path::PathBuf}; -use crate::tasks::{config::CommonConfig, utils::default_bool_true}; +use crate::{ + local::common::{ + build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir, CmdType, + SyncCountDirMonitor, UiEvent, CHECK_ASAN_LOG, CHECK_RETRY_COUNT, CRASHES_DIR, + DISABLE_CHECK_DEBUGGER, DISABLE_CHECK_QUEUE, NO_REPRO_DIR, REPORTS_DIR, TARGET_ENV, + TARGET_EXE, TARGET_OPTIONS, TARGET_TIMEOUT, UNIQUE_REPORTS_DIR, + }, + tasks::{ + config::CommonConfig, + report::generic::{Config, ReportTask}, + utils::default_bool_true, + }, +}; use anyhow::Result; use async_trait::async_trait; +use clap::{Arg, ArgAction, Command}; +use flume::Sender; use futures::future::OptionFuture; use schemars::JsonSchema; +use 
storage_queue::QueueClient; use super::template::{RunContext, Template}; +pub fn build_report_config( + args: &clap::ArgMatches, + input_queue: Option, + common: CommonConfig, + event_sender: Option>, +) -> Result { + let target_exe = get_cmd_exe(CmdType::Target, args)?.into(); + let target_env = get_cmd_env(CmdType::Target, args)?; + let target_options = get_cmd_arg(CmdType::Target, args); + + let crashes = Some(get_synced_dir( + CRASHES_DIR, + common.job_id, + common.task_id, + args, + )?) + .monitor_count(&event_sender)?; + let reports = get_synced_dir(REPORTS_DIR, common.job_id, common.task_id, args) + .ok() + .monitor_count(&event_sender)?; + let no_repro = get_synced_dir(NO_REPRO_DIR, common.job_id, common.task_id, args) + .ok() + .monitor_count(&event_sender)?; + + let unique_reports = Some(get_synced_dir( + UNIQUE_REPORTS_DIR, + common.job_id, + common.task_id, + args, + )?) + .monitor_count(&event_sender)?; + + let target_timeout = args.get_one::(TARGET_TIMEOUT).copied(); + + let check_retry_count = args + .get_one::(CHECK_RETRY_COUNT) + .copied() + .expect("has a default"); + + let check_queue = !args.get_flag(DISABLE_CHECK_QUEUE); + let check_asan_log = args.get_flag(CHECK_ASAN_LOG); + let check_debugger = !args.get_flag(DISABLE_CHECK_DEBUGGER); + + let config = Config { + target_exe, + target_env, + target_options, + target_timeout, + check_asan_log, + check_debugger, + check_retry_count, + check_queue, + crashes, + minimized_stack_depth: None, + input_queue, + no_repro, + reports, + unique_reports, + common, + }; + + Ok(config) +} + +pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { + let context = build_local_context(args, true, event_sender.clone()).await?; + let config = build_report_config(args, None, context.common_config.clone(), event_sender)?; + ReportTask::new(config).managed_run().await +} + +pub fn build_shared_args() -> Vec { + vec![ + Arg::new(TARGET_EXE).long(TARGET_EXE).required(true), + 
Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..), + Arg::new(TARGET_OPTIONS) + .default_value("{input}") + .long(TARGET_OPTIONS) + .value_delimiter(' ') + .help("Use a quoted string with space separation to denote multiple arguments"), + Arg::new(CRASHES_DIR) + .long(CRASHES_DIR) + .required(true) + .value_parser(value_parser!(PathBuf)), + Arg::new(REPORTS_DIR) + .long(REPORTS_DIR) + .required(false) + .value_parser(value_parser!(PathBuf)), + Arg::new(NO_REPRO_DIR) + .long(NO_REPRO_DIR) + .required(false) + .value_parser(value_parser!(PathBuf)), + Arg::new(UNIQUE_REPORTS_DIR) + .long(UNIQUE_REPORTS_DIR) + .value_parser(value_parser!(PathBuf)) + .required(true), + Arg::new(TARGET_TIMEOUT) + .long(TARGET_TIMEOUT) + .value_parser(value_parser!(u64)) + .default_value("30"), + Arg::new(CHECK_RETRY_COUNT) + .long(CHECK_RETRY_COUNT) + .value_parser(value_parser!(u64)) + .default_value("0"), + Arg::new(DISABLE_CHECK_QUEUE) + .action(ArgAction::SetTrue) + .long(DISABLE_CHECK_QUEUE), + Arg::new(CHECK_ASAN_LOG) + .action(ArgAction::SetTrue) + .long(CHECK_ASAN_LOG), + Arg::new(DISABLE_CHECK_DEBUGGER) + .action(ArgAction::SetTrue) + .long(DISABLE_CHECK_DEBUGGER), + ] +} + +pub fn args(name: &'static str) -> Command { + Command::new(name) + .about("execute a local-only generic crash report") + .args(&build_shared_args()) +} + #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct CrashReport { target_exe: PathBuf, diff --git a/src/agent/onefuzz-task/src/local/generic_generator.rs b/src/agent/onefuzz-task/src/local/generic_generator.rs index 3c26af4cf8..68490cd29d 100644 --- a/src/agent/onefuzz-task/src/local/generic_generator.rs +++ b/src/agent/onefuzz-task/src/local/generic_generator.rs @@ -3,14 +3,154 @@ use std::{collections::HashMap, path::PathBuf}; -use crate::tasks::{config::CommonConfig, utils::default_bool_true}; +use crate::{ + local::common::{ + build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir, + get_synced_dirs, CmdType, 
SyncCountDirMonitor, UiEvent, CHECK_ASAN_LOG, CHECK_RETRY_COUNT, + CRASHES_DIR, DISABLE_CHECK_DEBUGGER, GENERATOR_ENV, GENERATOR_EXE, GENERATOR_OPTIONS, + READONLY_INPUTS, RENAME_OUTPUT, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, TARGET_TIMEOUT, + TOOLS_DIR, + }, + tasks::{ + config::CommonConfig, + fuzz::generator::{Config, GeneratorTask}, + utils::default_bool_true, + }, +}; use anyhow::Result; use async_trait::async_trait; +use clap::{Arg, ArgAction, Command}; +use flume::Sender; use onefuzz::syncdir::SyncedDir; use schemars::JsonSchema; use super::template::{RunContext, Template}; +pub fn build_fuzz_config( + args: &clap::ArgMatches, + common: CommonConfig, + event_sender: Option>, +) -> Result { + let crashes = get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args)? + .monitor_count(&event_sender)?; + let target_exe = get_cmd_exe(CmdType::Target, args)?.into(); + let target_options = get_cmd_arg(CmdType::Target, args); + let target_env = get_cmd_env(CmdType::Target, args)?; + + let generator_exe = get_cmd_exe(CmdType::Generator, args)?; + let generator_options = get_cmd_arg(CmdType::Generator, args); + let generator_env = get_cmd_env(CmdType::Generator, args)?; + let readonly_inputs = get_synced_dirs(READONLY_INPUTS, common.job_id, common.task_id, args)? 
+ .into_iter() + .map(|sd| sd.monitor_count(&event_sender)) + .collect::>>()?; + + let rename_output = args.get_flag(RENAME_OUTPUT); + let check_asan_log = args.get_flag(CHECK_ASAN_LOG); + let check_debugger = !args.get_flag(DISABLE_CHECK_DEBUGGER); + + let check_retry_count = args + .get_one::(CHECK_RETRY_COUNT) + .copied() + .expect("has a default"); + + let target_timeout = Some( + args.get_one::(TARGET_TIMEOUT) + .copied() + .expect("has a default"), + ); + + let tools = get_synced_dir(TOOLS_DIR, common.job_id, common.task_id, args) + .ok() + .monitor_count(&event_sender)?; + + let ensemble_sync_delay = None; + + let config = Config { + generator_exe, + generator_env, + generator_options, + readonly_inputs, + crashes, + tools, + target_exe, + target_env, + target_options, + target_timeout, + check_asan_log, + check_debugger, + check_retry_count, + rename_output, + ensemble_sync_delay, + common, + }; + + Ok(config) +} + +pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { + let context = build_local_context(args, true, event_sender.clone()).await?; + let config = build_fuzz_config(args, context.common_config.clone(), event_sender)?; + GeneratorTask::new(config).run().await +} + +pub fn build_shared_args() -> Vec { + vec![ + Arg::new(TARGET_EXE).long(TARGET_EXE).required(true), + Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..), + Arg::new(TARGET_OPTIONS) + .default_value("{input}") + .long(TARGET_OPTIONS) + .value_delimiter(' ') + .help("Use a quoted string with space separation to denote multiple arguments"), + Arg::new(GENERATOR_EXE) + .long(GENERATOR_EXE) + .default_value("radamsa") + .required(true), + Arg::new(GENERATOR_ENV).long(GENERATOR_ENV).num_args(0..), + Arg::new(GENERATOR_OPTIONS) + .long(GENERATOR_OPTIONS) + .value_delimiter(' ') + .default_value("-H sha256 -o {generated_inputs}/input-%h.%s -n 100 -r {input_corpus}") + .help("Use a quoted string with space separation to denote multiple arguments"), + 
Arg::new(CRASHES_DIR) + .required(true) + .long(CRASHES_DIR) + .value_parser(value_parser!(PathBuf)), + Arg::new(READONLY_INPUTS) + .required(true) + .num_args(1..) + .value_parser(value_parser!(PathBuf)) + .long(READONLY_INPUTS), + Arg::new(TOOLS_DIR) + .long(TOOLS_DIR) + .value_parser(value_parser!(PathBuf)), + Arg::new(CHECK_RETRY_COUNT) + .long(CHECK_RETRY_COUNT) + .value_parser(value_parser!(u64)) + .default_value("0"), + Arg::new(CHECK_ASAN_LOG) + .action(ArgAction::SetTrue) + .long(CHECK_ASAN_LOG), + Arg::new(RENAME_OUTPUT) + .action(ArgAction::SetTrue) + .long(RENAME_OUTPUT), + Arg::new(TARGET_TIMEOUT) + .long(TARGET_TIMEOUT) + .value_parser(value_parser!(u64)) + .default_value("30"), + Arg::new(DISABLE_CHECK_DEBUGGER) + .action(ArgAction::SetTrue) + .long(DISABLE_CHECK_DEBUGGER), + ] +} + +pub fn args(name: &'static str) -> Command { + Command::new(name) + .about("execute a local-only generator fuzzing task") + .args(&build_shared_args()) +} + #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct Generator { generator_exe: String, diff --git a/src/agent/onefuzz-task/src/local/libfuzzer.rs b/src/agent/onefuzz-task/src/local/libfuzzer.rs index 472a6ae9e8..12abae88b0 100644 --- a/src/agent/onefuzz-task/src/local/libfuzzer.rs +++ b/src/agent/onefuzz-task/src/local/libfuzzer.rs @@ -1,19 +1,168 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. 
-use crate::tasks::{ - config::CommonConfig, - fuzz::libfuzzer::{common::default_workers, generic::LibFuzzerFuzzTask}, - utils::default_bool_true, +#[cfg(any(target_os = "linux", target_os = "windows"))] +use crate::{ + local::{common::COVERAGE_DIR, coverage, coverage::build_shared_args as build_coverage_args}, + tasks::coverage::generic::CoverageTask, +}; +use crate::{ + local::{ + common::{ + build_local_context, wait_for_dir, DirectoryMonitorQueue, UiEvent, ANALYZER_EXE, + REGRESSION_REPORTS_DIR, UNIQUE_REPORTS_DIR, + }, + generic_analysis::{build_analysis_config, build_shared_args as build_analysis_args}, + libfuzzer_crash_report::{build_report_config, build_shared_args as build_crash_args}, + libfuzzer_fuzz::{build_fuzz_config, build_shared_args as build_fuzz_args}, + libfuzzer_regression::{ + build_regression_config, build_shared_args as build_regression_args, + }, + }, + tasks::{ + analysis::generic::run as run_analysis, + config::CommonConfig, + fuzz::libfuzzer::{common::default_workers, generic::LibFuzzerFuzzTask}, + regression::libfuzzer::LibFuzzerRegressionTask, + report::libfuzzer_report::ReportTask, + utils::default_bool_true, + }, }; use anyhow::Result; use async_trait::async_trait; -use onefuzz::syncdir::SyncedDir; +use clap::Command; +use flume::Sender; +use onefuzz::{syncdir::SyncedDir, utils::try_wait_all_join_handles}; use schemars::JsonSchema; -use std::{collections::HashMap, path::PathBuf}; +use std::{ + collections::{HashMap, HashSet}, + path::PathBuf, +}; +use tokio::task::spawn; +use uuid::Uuid; use super::template::{RunContext, Template}; +pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { + let context = build_local_context(args, true, event_sender.clone()).await?; + let fuzz_config = build_fuzz_config(args, context.common_config.clone(), event_sender.clone())?; + let crash_dir = fuzz_config + .crashes + .remote_url()? 
+ .as_file_path() + .expect("invalid crash dir remote location"); + + let fuzzer = LibFuzzerFuzzTask::new(fuzz_config)?; + let mut task_handles = vec![]; + + let fuzz_task = spawn(async move { fuzzer.run().await }); + + wait_for_dir(&crash_dir).await?; + + task_handles.push(fuzz_task); + + if args.contains_id(UNIQUE_REPORTS_DIR) { + let crash_report_input_monitor = + DirectoryMonitorQueue::start_monitoring(crash_dir.clone()).await?; + + let report_config = build_report_config( + args, + Some(crash_report_input_monitor.queue_client), + CommonConfig { + task_id: Uuid::new_v4(), + ..context.common_config.clone() + }, + event_sender.clone(), + )?; + + let mut report = ReportTask::new(report_config); + let report_task = spawn(async move { report.managed_run().await }); + + task_handles.push(report_task); + task_handles.push(crash_report_input_monitor.handle); + } + + #[cfg(any(target_os = "linux", target_os = "windows"))] + if args.contains_id(COVERAGE_DIR) { + let coverage_input_monitor = + DirectoryMonitorQueue::start_monitoring(crash_dir.clone()).await?; + let coverage_config = coverage::build_coverage_config( + args, + true, + Some(coverage_input_monitor.queue_client), + CommonConfig { + task_id: Uuid::new_v4(), + ..context.common_config.clone() + }, + event_sender.clone(), + )?; + + let mut coverage = CoverageTask::new(coverage_config); + let coverage_task = spawn(async move { coverage.run().await }); + + task_handles.push(coverage_task); + task_handles.push(coverage_input_monitor.handle); + } + + if args.contains_id(ANALYZER_EXE) { + let analysis_input_monitor = DirectoryMonitorQueue::start_monitoring(crash_dir).await?; + let analysis_config = build_analysis_config( + args, + Some(analysis_input_monitor.queue_client), + CommonConfig { + task_id: Uuid::new_v4(), + ..context.common_config.clone() + }, + event_sender.clone(), + )?; + let analysis_task = spawn(async move { run_analysis(analysis_config).await }); + + task_handles.push(analysis_task); + 
task_handles.push(analysis_input_monitor.handle); + } + + if args.contains_id(REGRESSION_REPORTS_DIR) { + let regression_config = build_regression_config( + args, + CommonConfig { + task_id: Uuid::new_v4(), + ..context.common_config.clone() + }, + event_sender, + )?; + let regression = LibFuzzerRegressionTask::new(regression_config); + let regression_task = spawn(async move { regression.run().await }); + task_handles.push(regression_task); + } + + try_wait_all_join_handles(task_handles).await?; + + Ok(()) +} + +pub fn args(name: &'static str) -> Command { + let mut app = Command::new(name).about("run a local libfuzzer & crash reporting task"); + + let mut used = HashSet::new(); + + for args in &[ + build_fuzz_args(), + build_crash_args(), + build_analysis_args(false), + #[cfg(any(target_os = "linux", target_os = "windows"))] + build_coverage_args(true), + build_regression_args(false), + ] { + for arg in args { + if used.insert(arg.get_id()) { + app = app.arg(arg); + } + } + } + + app +} + #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct LibFuzzer { inputs: PathBuf, diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs b/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs index 9de1fc66ce..be3326f749 100644 --- a/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs +++ b/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs @@ -3,13 +3,139 @@ use std::{collections::HashMap, path::PathBuf}; -use crate::tasks::{config::CommonConfig, utils::default_bool_true}; +use crate::{ + local::common::{ + build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir, CmdType, + SyncCountDirMonitor, UiEvent, CHECK_FUZZER_HELP, CHECK_RETRY_COUNT, CRASHES_DIR, + DISABLE_CHECK_QUEUE, NO_REPRO_DIR, REPORTS_DIR, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, + TARGET_TIMEOUT, UNIQUE_REPORTS_DIR, + }, + tasks::{ + config::CommonConfig, + report::libfuzzer_report::{Config, ReportTask}, + utils::default_bool_true, + }, +}; 
use anyhow::Result; use async_trait::async_trait; +use clap::{Arg, ArgAction, Command}; +use flume::Sender; use futures::future::OptionFuture; use schemars::JsonSchema; +use storage_queue::QueueClient; use super::template::{RunContext, Template}; + +pub fn build_report_config( + args: &clap::ArgMatches, + input_queue: Option, + common: CommonConfig, + event_sender: Option>, +) -> Result { + let target_exe = get_cmd_exe(CmdType::Target, args)?.into(); + let target_env = get_cmd_env(CmdType::Target, args)?; + let target_options = get_cmd_arg(CmdType::Target, args); + + let crashes = get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args) + .ok() + .monitor_count(&event_sender)?; + let reports = get_synced_dir(REPORTS_DIR, common.job_id, common.task_id, args) + .ok() + .monitor_count(&event_sender)?; + + let no_repro = get_synced_dir(NO_REPRO_DIR, common.job_id, common.task_id, args) + .ok() + .monitor_count(&event_sender)?; + + let unique_reports = get_synced_dir(UNIQUE_REPORTS_DIR, common.job_id, common.task_id, args) + .ok() + .monitor_count(&event_sender)?; + + let target_timeout = args.get_one::(TARGET_TIMEOUT).copied(); + + let check_retry_count = args + .get_one::(CHECK_RETRY_COUNT) + .copied() + .expect("has a default"); + + let check_queue = !args.get_flag(DISABLE_CHECK_QUEUE); + + let check_fuzzer_help = args.get_flag(CHECK_FUZZER_HELP); + + let crashes = if input_queue.is_none() { crashes } else { None }; + + let config = Config { + target_exe, + target_env, + target_options, + target_timeout, + check_retry_count, + check_fuzzer_help, + minimized_stack_depth: None, + input_queue, + check_queue, + crashes, + reports, + no_repro, + unique_reports, + common, + }; + + Ok(config) +} + +pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { + let context = build_local_context(args, true, event_sender.clone()).await?; + let config = build_report_config(args, None, context.common_config.clone(), event_sender)?; + 
ReportTask::new(config).managed_run().await +} + +pub fn build_shared_args() -> Vec { + vec![ + Arg::new(TARGET_EXE).long(TARGET_EXE).required(true), + Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..), + Arg::new(TARGET_OPTIONS) + .long(TARGET_OPTIONS) + .value_delimiter(' ') + .help("Use a quoted string with space separation to denote multiple arguments"), + Arg::new(CRASHES_DIR) + .long(CRASHES_DIR) + .required(true) + .value_parser(value_parser!(PathBuf)), + Arg::new(REPORTS_DIR) + .long(REPORTS_DIR) + .required(false) + .value_parser(value_parser!(PathBuf)), + Arg::new(NO_REPRO_DIR) + .long(NO_REPRO_DIR) + .required(false) + .value_parser(value_parser!(PathBuf)), + Arg::new(UNIQUE_REPORTS_DIR) + .long(UNIQUE_REPORTS_DIR) + .required(true) + .value_parser(value_parser!(PathBuf)), + Arg::new(TARGET_TIMEOUT) + .value_parser(value_parser!(u64)) + .long(TARGET_TIMEOUT), + Arg::new(CHECK_RETRY_COUNT) + .long(CHECK_RETRY_COUNT) + .value_parser(value_parser!(u64)) + .default_value("0"), + Arg::new(DISABLE_CHECK_QUEUE) + .action(ArgAction::SetTrue) + .long(DISABLE_CHECK_QUEUE), + Arg::new(CHECK_FUZZER_HELP) + .action(ArgAction::SetTrue) + .long(CHECK_FUZZER_HELP), + ] +} + +pub fn args(name: &'static str) -> Command { + Command::new(name) + .about("execute a local-only libfuzzer crash report task") + .args(&build_shared_args()) +} + #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct LibfuzzerCrashReport { target_exe: PathBuf, diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs b/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs index d4915e6b4c..1e128f0dfc 100644 --- a/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs +++ b/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs @@ -3,15 +3,97 @@ use std::{collections::HashMap, path::PathBuf}; -use crate::tasks::{config::CommonConfig, utils::default_bool_true}; +use crate::{ + local::common::{ + build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir, + 
get_synced_dirs, CmdType, SyncCountDirMonitor, UiEvent, ANALYSIS_INPUTS, + ANALYSIS_UNIQUE_INPUTS, CHECK_FUZZER_HELP, INPUTS_DIR, PRESERVE_EXISTING_OUTPUTS, + TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, + }, + tasks::{ + config::CommonConfig, + merge::libfuzzer_merge::{spawn, Config}, + utils::default_bool_true, + }, +}; use anyhow::Result; use async_trait::async_trait; +use clap::{Arg, ArgAction, Command}; +use flume::Sender; use futures::future::OptionFuture; use onefuzz::syncdir::SyncedDir; use schemars::JsonSchema; +use storage_queue::QueueClient; use super::template::{RunContext, Template}; +pub fn build_merge_config( + args: &clap::ArgMatches, + input_queue: Option, + common: CommonConfig, + event_sender: Option>, +) -> Result { + let target_exe = get_cmd_exe(CmdType::Target, args)?.into(); + let target_env = get_cmd_env(CmdType::Target, args)?; + let target_options = get_cmd_arg(CmdType::Target, args); + let check_fuzzer_help = args.get_flag(CHECK_FUZZER_HELP); + let inputs = get_synced_dirs(ANALYSIS_INPUTS, common.job_id, common.task_id, args)? + .into_iter() + .map(|sd| sd.monitor_count(&event_sender)) + .collect::>>()?; + let unique_inputs = + get_synced_dir(ANALYSIS_UNIQUE_INPUTS, common.job_id, common.task_id, args)? 
+ .monitor_count(&event_sender)?; + let preserve_existing_outputs = args + .get_one::(PRESERVE_EXISTING_OUTPUTS) + .copied() + .unwrap_or_default(); + + let config = Config { + target_exe, + target_env, + target_options, + input_queue, + inputs, + unique_inputs, + preserve_existing_outputs, + check_fuzzer_help, + common, + }; + + Ok(config) +} + +pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { + let context = build_local_context(args, true, event_sender.clone()).await?; + let config = build_merge_config(args, None, context.common_config.clone(), event_sender)?; + spawn(config).await +} + +pub fn build_shared_args() -> Vec { + vec![ + Arg::new(TARGET_EXE).long(TARGET_EXE).required(true), + Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..), + Arg::new(TARGET_OPTIONS) + .long(TARGET_OPTIONS) + .value_delimiter(' ') + .help("Use a quoted string with space separation to denote multiple arguments"), + Arg::new(CHECK_FUZZER_HELP) + .action(ArgAction::SetTrue) + .long(CHECK_FUZZER_HELP), + Arg::new(INPUTS_DIR) + .long(INPUTS_DIR) + .value_parser(value_parser!(PathBuf)) + .num_args(0..), + ] +} + +pub fn args(name: &'static str) -> Command { + Command::new(name) + .about("execute a local-only libfuzzer crash report task") + .args(&build_shared_args()) +} + #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct LibfuzzerMerge { target_exe: PathBuf, diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs b/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs index b53fb84c22..b8a5766e10 100644 --- a/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs +++ b/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs @@ -3,13 +3,145 @@ use std::{collections::HashMap, path::PathBuf}; -use crate::tasks::{config::CommonConfig, utils::default_bool_true}; +use crate::{ + local::common::{ + build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir, CmdType, + SyncCountDirMonitor, UiEvent, 
CHECK_FUZZER_HELP, CHECK_RETRY_COUNT, COVERAGE_DIR, + CRASHES_DIR, NO_REPRO_DIR, REGRESSION_REPORTS_DIR, REPORTS_DIR, TARGET_ENV, TARGET_EXE, + TARGET_OPTIONS, TARGET_TIMEOUT, UNIQUE_REPORTS_DIR, + }, + tasks::{ + config::CommonConfig, + regression::libfuzzer::{Config, LibFuzzerRegressionTask}, + utils::default_bool_true, + }, +}; use anyhow::Result; use async_trait::async_trait; +use clap::{Arg, ArgAction, Command}; +use flume::Sender; use schemars::JsonSchema; use super::template::{RunContext, Template}; +const REPORT_NAMES: &str = "report_names"; + +pub fn build_regression_config( + args: &clap::ArgMatches, + common: CommonConfig, + event_sender: Option>, +) -> Result { + let target_exe = get_cmd_exe(CmdType::Target, args)?.into(); + let target_env = get_cmd_env(CmdType::Target, args)?; + let target_options = get_cmd_arg(CmdType::Target, args); + let target_timeout = args.get_one::(TARGET_TIMEOUT).copied(); + let crashes = get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args)? + .monitor_count(&event_sender)?; + let regression_reports = + get_synced_dir(REGRESSION_REPORTS_DIR, common.job_id, common.task_id, args)? 
+ .monitor_count(&event_sender)?; + let check_retry_count = args + .get_one::(CHECK_RETRY_COUNT) + .copied() + .expect("has a default value"); + + let reports = get_synced_dir(REPORTS_DIR, common.job_id, common.task_id, args) + .ok() + .monitor_count(&event_sender)?; + let no_repro = get_synced_dir(NO_REPRO_DIR, common.job_id, common.task_id, args) + .ok() + .monitor_count(&event_sender)?; + let unique_reports = get_synced_dir(UNIQUE_REPORTS_DIR, common.job_id, common.task_id, args) + .ok() + .monitor_count(&event_sender)?; + + let report_list: Option> = args + .get_many::(REPORT_NAMES) + .map(|x| x.cloned().collect()); + + let check_fuzzer_help = args.get_flag(CHECK_FUZZER_HELP); + + let config = Config { + target_exe, + target_env, + target_options, + target_timeout, + check_fuzzer_help, + check_retry_count, + crashes, + regression_reports, + reports, + no_repro, + unique_reports, + readonly_inputs: None, + report_list, + minimized_stack_depth: None, + common, + }; + Ok(config) +} + +pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { + let context = build_local_context(args, true, event_sender.clone()).await?; + let config = build_regression_config(args, context.common_config.clone(), event_sender)?; + LibFuzzerRegressionTask::new(config).run().await +} + +pub fn build_shared_args(local_job: bool) -> Vec { + let mut args = vec![ + Arg::new(TARGET_EXE).long(TARGET_EXE).required(true), + Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..), + Arg::new(TARGET_OPTIONS) + .long(TARGET_OPTIONS) + .value_delimiter(' ') + .help("Use a quoted string with space separation to denote multiple arguments"), + Arg::new(COVERAGE_DIR) + .required(!local_job) + .long(COVERAGE_DIR) + .value_parser(value_parser!(PathBuf)), + Arg::new(CHECK_FUZZER_HELP) + .action(ArgAction::SetTrue) + .long(CHECK_FUZZER_HELP), + Arg::new(TARGET_TIMEOUT) + .long(TARGET_TIMEOUT) + .value_parser(value_parser!(u64)), + Arg::new(CRASHES_DIR) + .long(CRASHES_DIR) + 
.required(true) + .value_parser(value_parser!(PathBuf)), + Arg::new(REGRESSION_REPORTS_DIR) + .long(REGRESSION_REPORTS_DIR) + .required(local_job) + .value_parser(value_parser!(PathBuf)), + Arg::new(REPORTS_DIR) + .long(REPORTS_DIR) + .required(false) + .value_parser(value_parser!(PathBuf)), + Arg::new(NO_REPRO_DIR) + .long(NO_REPRO_DIR) + .required(false) + .value_parser(value_parser!(PathBuf)), + Arg::new(UNIQUE_REPORTS_DIR) + .long(UNIQUE_REPORTS_DIR) + .value_parser(value_parser!(PathBuf)) + .required(true), + Arg::new(CHECK_RETRY_COUNT) + .long(CHECK_RETRY_COUNT) + .value_parser(value_parser!(u64)) + .default_value("0"), + ]; + if local_job { + args.push(Arg::new(REPORT_NAMES).long(REPORT_NAMES).num_args(0..)) + } + args +} + +pub fn args(name: &'static str) -> Command { + Command::new(name) + .about("execute a local-only libfuzzer regression task") + .args(&build_shared_args(true)) +} + #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct LibfuzzerRegression { target_exe: PathBuf, diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs b/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs index 88c3cd1a3d..30f9c446c8 100644 --- a/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs +++ b/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs @@ -1,14 +1,97 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. 
+use crate::{ + local::common::{ + build_local_context, get_cmd_arg, get_cmd_env, CmdType, UiEvent, CHECK_RETRY_COUNT, + TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, TARGET_TIMEOUT, + }, + tasks::report::libfuzzer_report::{test_input, TestInputArgs}, +}; use anyhow::Result; use async_trait::async_trait; +use clap::{Arg, Command}; +use flume::Sender; use onefuzz::machine_id::MachineIdentity; use schemars::JsonSchema; use std::{collections::HashMap, path::PathBuf}; use super::template::{RunContext, Template}; +pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { + let context = build_local_context(args, true, event_sender).await?; + + let target_exe = args + .get_one::(TARGET_EXE) + .expect("marked as required"); + let target_env = get_cmd_env(CmdType::Target, args)?; + let target_options = get_cmd_arg(CmdType::Target, args); + let input = args + .get_one::("input") + .expect("marked as required"); + let target_timeout = args.get_one::(TARGET_TIMEOUT).copied(); + let check_retry_count = args + .get_one::(CHECK_RETRY_COUNT) + .copied() + .expect("has a default value"); + + let extra_setup_dir = context.common_config.extra_setup_dir.as_deref(); + let extra_output_dir = context + .common_config + .extra_output + .as_ref() + .map(|x| x.local_path.as_path()); + + let config = TestInputArgs { + target_exe: target_exe.as_path(), + target_env: &target_env, + target_options: &target_options, + input_url: None, + input: input.as_path(), + job_id: context.common_config.job_id, + task_id: context.common_config.task_id, + target_timeout, + check_retry_count, + setup_dir: &context.common_config.setup_dir, + extra_setup_dir, + extra_output_dir, + minimized_stack_depth: None, + machine_identity: context.common_config.machine_identity, + }; + + let result = test_input(config).await?; + println!("{}", serde_json::to_string_pretty(&result)?); + Ok(()) +} + +pub fn build_shared_args() -> Vec { + vec![ + Arg::new(TARGET_EXE).required(true), + Arg::new("input") + 
.required(true) + .value_parser(value_parser!(PathBuf)), + Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..), + Arg::new(TARGET_OPTIONS) + .default_value("{input}") + .long(TARGET_OPTIONS) + .value_delimiter(' ') + .help("Use a quoted string with space separation to denote multiple arguments"), + Arg::new(TARGET_TIMEOUT) + .long(TARGET_TIMEOUT) + .value_parser(value_parser!(u64)), + Arg::new(CHECK_RETRY_COUNT) + .long(CHECK_RETRY_COUNT) + .value_parser(value_parser!(u64)) + .default_value("0"), + ] +} + +pub fn args(name: &'static str) -> Command { + Command::new(name) + .about("test a libfuzzer application with a specific input") + .args(&build_shared_args()) +} + #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct LibfuzzerTestInput { input: PathBuf, diff --git a/src/agent/onefuzz-task/src/local/mod.rs b/src/agent/onefuzz-task/src/local/mod.rs index 6020cb0fa6..9ea530f575 100644 --- a/src/agent/onefuzz-task/src/local/mod.rs +++ b/src/agent/onefuzz-task/src/local/mod.rs @@ -15,6 +15,7 @@ pub mod libfuzzer_fuzz; pub mod libfuzzer_merge; pub mod libfuzzer_regression; pub mod libfuzzer_test_input; +pub mod radamsa; pub mod template; pub mod test_input; pub mod tui; diff --git a/src/agent/onefuzz-task/src/local/radamsa.rs b/src/agent/onefuzz-task/src/local/radamsa.rs new file mode 100644 index 0000000000..4d84de027a --- /dev/null +++ b/src/agent/onefuzz-task/src/local/radamsa.rs @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +use crate::{ + local::{ + common::{build_local_context, DirectoryMonitorQueue, UiEvent}, + generic_crash_report::{build_report_config, build_shared_args as build_crash_args}, + generic_generator::{build_fuzz_config, build_shared_args as build_fuzz_args}, + }, + tasks::{config::CommonConfig, fuzz::generator::GeneratorTask, report::generic::ReportTask}, +}; +use anyhow::{Context, Result}; +use clap::Command; +use flume::Sender; +use onefuzz::utils::try_wait_all_join_handles; +use std::collections::HashSet; +use tokio::task::spawn; +use uuid::Uuid; + +pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { + let context = build_local_context(args, true, event_sender.clone()).await?; + let fuzz_config = build_fuzz_config(args, context.common_config.clone(), event_sender.clone())?; + let crash_dir = fuzz_config + .crashes + .remote_url()? + .as_file_path() + .ok_or_else(|| format_err!("invalid crash directory"))?; + + tokio::fs::create_dir_all(&crash_dir) + .await + .with_context(|| { + format!( + "unable to create crashes directory: {}", + crash_dir.display() + ) + })?; + + let fuzzer = GeneratorTask::new(fuzz_config); + let fuzz_task = spawn(async move { fuzzer.run().await }); + + let crash_report_input_monitor = DirectoryMonitorQueue::start_monitoring(crash_dir) + .await + .context("directory monitor failed")?; + let report_config = build_report_config( + args, + Some(crash_report_input_monitor.queue_client), + CommonConfig { + task_id: Uuid::new_v4(), + ..context.common_config.clone() + }, + event_sender, + )?; + let report_task = spawn(async move { ReportTask::new(report_config).managed_run().await }); + + try_wait_all_join_handles(vec![ + fuzz_task, + report_task, + crash_report_input_monitor.handle, + ]) + .await?; + + Ok(()) +} + +pub fn args(name: &'static str) -> Command { + let mut app = Command::new(name).about("run a local generator & crash reporting job"); + + let mut used = HashSet::new(); + for args in &[build_fuzz_args(), 
build_crash_args()] { + for arg in args { + if used.insert(arg.get_id()) { + app = app.arg(arg); + } + } + } + + app +} diff --git a/src/agent/onefuzz-task/src/local/schema.json b/src/agent/onefuzz-task/src/local/schema.json index e5b00f6e17..0a1f128e67 100644 --- a/src/agent/onefuzz-task/src/local/schema.json +++ b/src/agent/onefuzz-task/src/local/schema.json @@ -126,6 +126,7 @@ "analyzer_options", "target_exe", "target_options", + "tools", "type" ], "properties": { @@ -181,10 +182,7 @@ } }, "tools": { - "type": [ - "string", - "null" - ] + "type": "string" }, "type": { "type": "string", @@ -895,4 +893,4 @@ ] } } -} \ No newline at end of file +} diff --git a/src/agent/onefuzz-task/src/local/template.rs b/src/agent/onefuzz-task/src/local/template.rs index 3393edd89a..adcca9bfa3 100644 --- a/src/agent/onefuzz-task/src/local/template.rs +++ b/src/agent/onefuzz-task/src/local/template.rs @@ -199,7 +199,6 @@ pub async fn launch( job_id: Uuid::new_v4(), instance_id: Uuid::new_v4(), heartbeat_queue: None, - job_result_queue: None, instance_telemetry_key: None, microsoft_telemetry_key: None, logs: None, @@ -245,10 +244,12 @@ mod test { .expect("Couldn't find checked-in schema.json") .replace("\r\n", "\n"); - if schema_str.replace('\n', "") != checked_in_schema.replace('\n', "") { - std::fs::write("src/local/new.schema.json", schema_str) - .expect("The schemas did not match but failed to write new schema to file."); - panic!("The checked-in local fuzzing schema did not match the generated schema. The generated schema can be found at src/local/new.schema.json"); - } + println!("{}", schema_str); + + assert_eq!( + schema_str.replace('\n', ""), + checked_in_schema.replace('\n', ""), + "The checked-in local fuzzing schema did not match the generated schema." 
+ ); } } diff --git a/src/agent/onefuzz-task/src/local/test_input.rs b/src/agent/onefuzz-task/src/local/test_input.rs index 0018494ec0..6e59fb3ff5 100644 --- a/src/agent/onefuzz-task/src/local/test_input.rs +++ b/src/agent/onefuzz-task/src/local/test_input.rs @@ -1,8 +1,18 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. +use crate::{ + local::common::{ + build_local_context, get_cmd_arg, get_cmd_env, CmdType, UiEvent, CHECK_ASAN_LOG, + CHECK_RETRY_COUNT, DISABLE_CHECK_DEBUGGER, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, + TARGET_TIMEOUT, + }, + tasks::report::generic::{test_input, TestInputArgs}, +}; use anyhow::Result; use async_trait::async_trait; +use clap::{Arg, ArgAction, Command}; +use flume::Sender; use onefuzz::machine_id::MachineIdentity; use schemars::JsonSchema; use std::{collections::HashMap, path::PathBuf}; @@ -10,6 +20,82 @@ use uuid::Uuid; use super::template::{RunContext, Template}; +pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { + let context = build_local_context(args, false, event_sender).await?; + + let target_exe = args + .get_one::(TARGET_EXE) + .expect("is marked required"); + let target_env = get_cmd_env(CmdType::Target, args)?; + let target_options = get_cmd_arg(CmdType::Target, args); + let input = args + .get_one::("input") + .expect("is marked required"); + let target_timeout = args.get_one::(TARGET_TIMEOUT).copied(); + let check_retry_count = args + .get_one::(CHECK_RETRY_COUNT) + .copied() + .expect("has default value"); + let check_asan_log = args.get_flag(CHECK_ASAN_LOG); + let check_debugger = !args.get_flag(DISABLE_CHECK_DEBUGGER); + + let config = TestInputArgs { + target_exe: target_exe.as_path(), + target_env: &target_env, + target_options: &target_options, + input_url: None, + input: input.as_path(), + job_id: context.common_config.job_id, + task_id: context.common_config.task_id, + target_timeout, + check_retry_count, + setup_dir: &context.common_config.setup_dir, + 
extra_setup_dir: context.common_config.extra_setup_dir.as_deref(), + minimized_stack_depth: None, + check_asan_log, + check_debugger, + machine_identity: context.common_config.machine_identity.clone(), + }; + + let result = test_input(config).await?; + println!("{}", serde_json::to_string_pretty(&result)?); + Ok(()) +} + +pub fn build_shared_args() -> Vec { + vec![ + Arg::new(TARGET_EXE).required(true), + Arg::new("input") + .required(true) + .value_parser(value_parser!(PathBuf)), + Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..), + Arg::new(TARGET_OPTIONS) + .default_value("{input}") + .long(TARGET_OPTIONS) + .value_delimiter(' ') + .help("Use a quoted string with space separation to denote multiple arguments"), + Arg::new(TARGET_TIMEOUT) + .long(TARGET_TIMEOUT) + .value_parser(value_parser!(u64)), + Arg::new(CHECK_RETRY_COUNT) + .long(CHECK_RETRY_COUNT) + .value_parser(value_parser!(u64)) + .default_value("0"), + Arg::new(CHECK_ASAN_LOG) + .action(ArgAction::SetTrue) + .long(CHECK_ASAN_LOG), + Arg::new(DISABLE_CHECK_DEBUGGER) + .action(ArgAction::SetTrue) + .long("disable_check_debugger"), + ] +} + +pub fn args(name: &'static str) -> Command { + Command::new(name) + .about("test an application with a specific input") + .args(&build_shared_args()) +} + #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct TestInput { input: PathBuf, diff --git a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs index 05c6c3d169..3ba068a614 100644 --- a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs @@ -65,8 +65,6 @@ pub async fn run(config: Config) -> Result<()> { tools.init_pull().await?; } - let job_result_client = config.common.init_job_result().await?; - // the tempdir is always created, however, the reports_path and // reports_monitor_future are only created if we have one of the three // report SyncedDir. 
The idea is that the option for where to write reports @@ -90,7 +88,6 @@ pub async fn run(config: Config) -> Result<()> { &config.unique_reports, &config.reports, &config.no_repro, - &job_result_client, ); ( Some(reports_dir.path().to_path_buf()), @@ -174,7 +171,7 @@ async fn poll_inputs( } message.delete().await?; } else { - debug!("no new candidate inputs found, sleeping"); + warn!("no new candidate inputs found, sleeping"); delay_with_jitter(EMPTY_QUEUE_DELAY).await; } } diff --git a/src/agent/onefuzz-task/src/tasks/config.rs b/src/agent/onefuzz-task/src/tasks/config.rs index e29e0fd60d..0848379d73 100644 --- a/src/agent/onefuzz-task/src/tasks/config.rs +++ b/src/agent/onefuzz-task/src/tasks/config.rs @@ -14,7 +14,6 @@ use onefuzz::{ machine_id::MachineIdentity, syncdir::{SyncOperation, SyncedDir}, }; -use onefuzz_result::job_result::{init_job_result, TaskJobResultClient}; use onefuzz_telemetry::{ self as telemetry, Event::task_start, EventData, InstanceTelemetryKey, MicrosoftTelemetryKey, Role, @@ -51,8 +50,6 @@ pub struct CommonConfig { pub heartbeat_queue: Option, - pub job_result_queue: Option, - pub instance_telemetry_key: Option, pub microsoft_telemetry_key: Option, @@ -106,23 +103,6 @@ impl CommonConfig { None => Ok(None), } } - - pub async fn init_job_result(&self) -> Result> { - match &self.job_result_queue { - Some(url) => { - let result = init_job_result( - url.clone(), - self.task_id, - self.job_id, - self.machine_identity.machine_id, - self.machine_identity.machine_name.clone(), - ) - .await?; - Ok(Some(result)) - } - None => Ok(None), - } - } } #[derive(Debug, Deserialize)] diff --git a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs index 704188293b..0b19f03122 100644 --- a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs @@ -26,8 +26,6 @@ use onefuzz_file_format::coverage::{ binary::{v1::BinaryCoverageJson as 
BinaryCoverageJsonV1, BinaryCoverageJson}, source::{v1::SourceCoverageJson as SourceCoverageJsonV1, SourceCoverageJson}, }; -use onefuzz_result::job_result::JobResultData; -use onefuzz_result::job_result::{JobResultSender, TaskJobResultClient}; use onefuzz_telemetry::{event, warn, Event::coverage_data, Event::coverage_failed, EventData}; use storage_queue::{Message, QueueClient}; use tokio::fs; @@ -116,7 +114,7 @@ impl CoverageTask { let allowlist = self.load_target_allowlist().await?; let heartbeat = self.config.common.init_heartbeat(None).await?; - let job_result = self.config.common.init_job_result().await?; + let mut seen_inputs = false; let target_exe_path = @@ -131,7 +129,6 @@ impl CoverageTask { coverage, allowlist, heartbeat, - job_result, target_exe.to_string(), )?; @@ -226,7 +223,6 @@ struct TaskContext<'a> { module_allowlist: AllowList, source_allowlist: Arc, heartbeat: Option, - job_result: Option, cache: Arc, } @@ -236,7 +232,6 @@ impl<'a> TaskContext<'a> { coverage: BinaryCoverage, allowlist: TargetAllowList, heartbeat: Option, - job_result: Option, target_exe: String, ) -> Result { let cache = DebugInfoCache::new(allowlist.source_files.clone()); @@ -256,7 +251,6 @@ impl<'a> TaskContext<'a> { module_allowlist: allowlist.modules, source_allowlist: Arc::new(allowlist.source_files), heartbeat, - job_result, cache: Arc::new(cache), }) } diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs index bd7511cac2..d9116a1ed2 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs @@ -73,7 +73,6 @@ impl GeneratorTask { } let hb_client = self.config.common.init_heartbeat(None).await?; - let jr_client = self.config.common.init_job_result().await?; for dir in &self.config.readonly_inputs { dir.init_pull().await?; @@ -85,10 +84,7 @@ impl GeneratorTask { self.config.ensemble_sync_delay, ); - let crash_dir_monitor = self - .config - .crashes - 
.monitor_results(new_result, false, &jr_client); + let crash_dir_monitor = self.config.crashes.monitor_results(new_result, false); let fuzzer = self.fuzzing_loop(hb_client); @@ -302,7 +298,6 @@ mod tests { task_id: Default::default(), instance_id: Default::default(), heartbeat_queue: Default::default(), - job_result_queue: Default::default(), instance_telemetry_key: Default::default(), microsoft_telemetry_key: Default::default(), logs: Default::default(), diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs b/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs index 32f3372958..3336ed4d7a 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs @@ -1,11 +1,7 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. -use crate::tasks::{ - config::CommonConfig, - heartbeat::{HeartbeatSender, TaskHeartbeatClient}, - utils::default_bool_true, -}; +use crate::tasks::{config::CommonConfig, heartbeat::HeartbeatSender, utils::default_bool_true}; use anyhow::{Context, Result}; use arraydeque::{ArrayDeque, Wrapping}; use async_trait::async_trait; @@ -16,7 +12,6 @@ use onefuzz::{ process::ExitStatus, syncdir::{continuous_sync, SyncOperation::Pull, SyncedDir}, }; -use onefuzz_result::job_result::{JobResultData, JobResultSender, TaskJobResultClient}; use onefuzz_telemetry::{ Event::{new_coverage, new_crashdump, new_result, runtime_stats}, EventData, @@ -131,31 +126,21 @@ where self.verify().await?; let hb_client = self.config.common.init_heartbeat(None).await?; - let jr_client = self.config.common.init_job_result().await?; // To be scheduled. 
let resync = self.continuous_sync_inputs(); - - let new_inputs = self - .config - .inputs - .monitor_results(new_coverage, true, &jr_client); - let new_crashes = self - .config - .crashes - .monitor_results(new_result, true, &jr_client); + let new_inputs = self.config.inputs.monitor_results(new_coverage, true); + let new_crashes = self.config.crashes.monitor_results(new_result, true); let new_crashdumps = async { if let Some(crashdumps) = &self.config.crashdumps { - crashdumps - .monitor_results(new_crashdump, true, &jr_client) - .await + crashdumps.monitor_results(new_crashdump, true).await } else { Ok(()) } }; let (stats_sender, stats_receiver) = mpsc::unbounded_channel(); - let report_stats = report_runtime_stats(stats_receiver, &hb_client, &jr_client); + let report_stats = report_runtime_stats(stats_receiver, hb_client); let fuzzers = self.run_fuzzers(Some(&stats_sender)); futures::try_join!( resync, @@ -198,7 +183,7 @@ where .inputs .local_path .parent() - .ok_or_else(|| anyhow!("invalid input path"))?; + .ok_or_else(|| anyhow!("Invalid input path"))?; let temp_path = task_dir.join(".temp"); tokio::fs::create_dir_all(&temp_path).await?; let temp_dir = tempdir_in(temp_path)?; @@ -516,7 +501,7 @@ impl TotalStats { self.execs_sec = self.worker_stats.values().map(|x| x.execs_sec).sum(); } - async fn report(&self, jr_client: &Option) { + fn report(&self) { event!( runtime_stats; EventData::Count = self.count, @@ -528,17 +513,6 @@ impl TotalStats { EventData::Count = self.count, EventData::ExecsSecond = self.execs_sec ); - if let Some(jr_client) = jr_client { - let _ = jr_client - .send_direct( - JobResultData::RuntimeStats, - HashMap::from([ - ("total_count".to_string(), self.count as f64), - ("execs_sec".to_string(), self.execs_sec), - ]), - ) - .await; - } } } @@ -568,8 +542,7 @@ impl Timer { // are approximating nearest-neighbor interpolation on the runtime stats time series. 
async fn report_runtime_stats( mut stats_channel: mpsc::UnboundedReceiver, - heartbeat_client: &Option, - jr_client: &Option, + heartbeat_client: impl HeartbeatSender, ) -> Result<()> { // Cache the last-reported stats for a given worker. // @@ -578,7 +551,7 @@ async fn report_runtime_stats( let mut total = TotalStats::default(); // report all zeros to start - total.report(jr_client).await; + total.report(); let timer = Timer::new(RUNTIME_STATS_PERIOD); @@ -587,10 +560,10 @@ async fn report_runtime_stats( Some(stats) = stats_channel.recv() => { heartbeat_client.alive(); total.update(stats); - total.report(jr_client).await + total.report() } _ = timer.wait() => { - total.report(jr_client).await + total.report() } } } diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs index 3f00e20b8d..de1e1106ba 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs @@ -79,10 +79,7 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> { remote_path: config.crashes.remote_path.clone(), }; crashes.init().await?; - - let jr_client = config.common.init_job_result().await?; - - let monitor_crashes = crashes.monitor_results(new_result, false, &jr_client); + let monitor_crashes = crashes.monitor_results(new_result, false); // setup crashdumps let (crashdump_dir, monitor_crashdumps) = { @@ -98,12 +95,9 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> { }; let monitor_dir = crashdump_dir.clone(); - let monitor_jr_client = config.common.init_job_result().await?; let monitor_crashdumps = async move { if let Some(crashdumps) = monitor_dir { - crashdumps - .monitor_results(new_crashdump, false, &monitor_jr_client) - .await + crashdumps.monitor_results(new_crashdump, false).await } else { Ok(()) } @@ -135,13 +129,11 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> { if let Some(no_repro) = &config.no_repro { 
no_repro.init().await?; } - let monitor_reports_future = monitor_reports( reports_dir.path(), &config.unique_reports, &config.reports, &config.no_repro, - &jr_client, ); let inputs = SyncedDir { @@ -164,7 +156,7 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> { delay_with_jitter(delay).await; } } - let monitor_inputs = inputs.monitor_results(new_coverage, false, &jr_client); + let monitor_inputs = inputs.monitor_results(new_coverage, false); let inputs_sync_cancellation = CancellationToken::new(); // never actually cancelled let inputs_sync_task = inputs.continuous_sync(Pull, config.ensemble_sync_delay, &inputs_sync_cancellation); @@ -452,7 +444,6 @@ mod tests { task_id: Default::default(), instance_id: Default::default(), heartbeat_queue: Default::default(), - job_result_queue: Default::default(), instance_telemetry_key: Default::default(), microsoft_telemetry_key: Default::default(), logs: Default::default(), diff --git a/src/agent/onefuzz-task/src/tasks/heartbeat.rs b/src/agent/onefuzz-task/src/tasks/heartbeat.rs index e13b661909..515fa39d0c 100644 --- a/src/agent/onefuzz-task/src/tasks/heartbeat.rs +++ b/src/agent/onefuzz-task/src/tasks/heartbeat.rs @@ -1,8 +1,8 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. 
+use crate::onefuzz::heartbeat::HeartbeatClient; use anyhow::Result; -use onefuzz::heartbeat::HeartbeatClient; use reqwest::Url; use serde::{self, Deserialize, Serialize}; use std::time::Duration; diff --git a/src/agent/onefuzz-task/src/tasks/merge/generic.rs b/src/agent/onefuzz-task/src/tasks/merge/generic.rs index 3b6a2094d8..4f2e8234a8 100644 --- a/src/agent/onefuzz-task/src/tasks/merge/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/merge/generic.rs @@ -83,7 +83,7 @@ pub async fn spawn(config: &Config) -> Result<()> { } } } else { - debug!("no new candidate inputs found, sleeping"); + warn!("no new candidate inputs found, sleeping"); delay_with_jitter(EMPTY_QUEUE_DELAY).await; }; } diff --git a/src/agent/onefuzz-task/src/tasks/merge/libfuzzer_merge.rs b/src/agent/onefuzz-task/src/tasks/merge/libfuzzer_merge.rs index 2d53bc8c07..1c334b3f18 100644 --- a/src/agent/onefuzz-task/src/tasks/merge/libfuzzer_merge.rs +++ b/src/agent/onefuzz-task/src/tasks/merge/libfuzzer_merge.rs @@ -120,7 +120,7 @@ async fn process_message(config: &Config, input_queue: QueueClient) -> Result<() } Ok(()) } else { - debug!("no new candidate inputs found, sleeping"); + warn!("no new candidate inputs found, sleeping"); delay_with_jitter(EMPTY_QUEUE_DELAY).await; Ok(()) } diff --git a/src/agent/onefuzz-task/src/tasks/regression/common.rs b/src/agent/onefuzz-task/src/tasks/regression/common.rs index b61a97df4c..60023cfa6e 100644 --- a/src/agent/onefuzz-task/src/tasks/regression/common.rs +++ b/src/agent/onefuzz-task/src/tasks/regression/common.rs @@ -2,14 +2,12 @@ // Licensed under the MIT License. 
use crate::tasks::{ - config::CommonConfig, heartbeat::{HeartbeatSender, TaskHeartbeatClient}, report::crash_report::{parse_report_file, CrashTestResult, RegressionReport}, }; use anyhow::{Context, Result}; use async_trait::async_trait; use onefuzz::syncdir::SyncedDir; -use onefuzz_result::job_result::TaskJobResultClient; use reqwest::Url; use std::path::PathBuf; @@ -26,7 +24,7 @@ pub trait RegressionHandler { /// Runs the regression task pub async fn run( - common_config: &CommonConfig, + heartbeat_client: Option, regression_reports: &SyncedDir, crashes: &SyncedDir, report_dirs: &[&SyncedDir], @@ -37,9 +35,6 @@ pub async fn run( info!("starting regression task"); regression_reports.init().await?; - let heartbeat_client = common_config.init_heartbeat(None).await?; - let job_result_client = common_config.init_job_result().await?; - handle_crash_reports( handler, crashes, @@ -47,7 +42,6 @@ pub async fn run( report_list, regression_reports, &heartbeat_client, - &job_result_client, ) .await .context("handling crash reports")?; @@ -58,7 +52,6 @@ pub async fn run( readonly_inputs, regression_reports, &heartbeat_client, - &job_result_client, ) .await .context("handling inputs")?; @@ -78,7 +71,6 @@ pub async fn handle_inputs( readonly_inputs: &SyncedDir, regression_reports: &SyncedDir, heartbeat_client: &Option, - job_result_client: &Option, ) -> Result<()> { readonly_inputs.init_pull().await?; let mut input_files = tokio::fs::read_dir(&readonly_inputs.local_path).await?; @@ -103,7 +95,7 @@ pub async fn handle_inputs( crash_test_result, original_crash_test_result: None, } - .save(None, regression_reports, job_result_client) + .save(None, regression_reports) .await? 
} @@ -117,7 +109,6 @@ pub async fn handle_crash_reports( report_list: &Option>, regression_reports: &SyncedDir, heartbeat_client: &Option, - job_result_client: &Option, ) -> Result<()> { // without crash report containers, skip this method if report_dirs.is_empty() { @@ -167,7 +158,7 @@ pub async fn handle_crash_reports( crash_test_result, original_crash_test_result: Some(original_crash_test_result), } - .save(Some(file_name), regression_reports, job_result_client) + .save(Some(file_name), regression_reports) .await? } } diff --git a/src/agent/onefuzz-task/src/tasks/regression/generic.rs b/src/agent/onefuzz-task/src/tasks/regression/generic.rs index 8570208d59..640e80db9a 100644 --- a/src/agent/onefuzz-task/src/tasks/regression/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/regression/generic.rs @@ -89,6 +89,7 @@ impl GenericRegressionTask { pub async fn run(&self) -> Result<()> { info!("Starting generic regression task"); + let heartbeat_client = self.config.common.init_heartbeat(None).await?; let mut report_dirs = vec![]; for dir in vec![ @@ -102,7 +103,7 @@ impl GenericRegressionTask { report_dirs.push(dir); } common::run( - &self.config.common, + heartbeat_client, &self.config.regression_reports, &self.config.crashes, &report_dirs, diff --git a/src/agent/onefuzz-task/src/tasks/regression/libfuzzer.rs b/src/agent/onefuzz-task/src/tasks/regression/libfuzzer.rs index e65f46bb64..06dd7c00d9 100644 --- a/src/agent/onefuzz-task/src/tasks/regression/libfuzzer.rs +++ b/src/agent/onefuzz-task/src/tasks/regression/libfuzzer.rs @@ -103,8 +103,9 @@ impl LibFuzzerRegressionTask { report_dirs.push(dir); } + let heartbeat_client = self.config.common.init_heartbeat(None).await?; common::run( - &self.config.common, + heartbeat_client, &self.config.regression_reports, &self.config.crashes, &report_dirs, diff --git a/src/agent/onefuzz-task/src/tasks/report/crash_report.rs b/src/agent/onefuzz-task/src/tasks/report/crash_report.rs index 9ae618ce93..6ea6c845e7 100644 --- 
a/src/agent/onefuzz-task/src/tasks/report/crash_report.rs +++ b/src/agent/onefuzz-task/src/tasks/report/crash_report.rs @@ -3,7 +3,6 @@ use anyhow::{Context, Result}; use onefuzz::{blob::BlobUrl, monitor::DirectoryMonitor, syncdir::SyncedDir}; -use onefuzz_result::job_result::{JobResultData, JobResultSender, TaskJobResultClient}; use onefuzz_telemetry::{ Event::{ new_report, new_unable_to_reproduce, new_unique_report, regression_report, @@ -13,7 +12,6 @@ use onefuzz_telemetry::{ }; use serde::{Deserialize, Serialize}; use stacktrace_parser::CrashLog; -use std::collections::HashMap; use std::path::{Path, PathBuf}; use uuid::Uuid; @@ -113,7 +111,6 @@ impl RegressionReport { self, report_name: Option, regression_reports: &SyncedDir, - jr_client: &Option, ) -> Result<()> { let (event, name) = match &self.crash_test_result { CrashTestResult::CrashReport(report) => { @@ -129,15 +126,6 @@ impl RegressionReport { if upload_or_save_local(&self, &name, regression_reports).await? { event!(event; EventData::Path = name.clone()); metric!(event; 1.0; EventData::Path = name.clone()); - - if let Some(jr_client) = jr_client { - let _ = jr_client - .send_direct( - JobResultData::NewRegressionReport, - HashMap::from([("count".to_string(), 1.0)]), - ) - .await; - } } Ok(()) } @@ -161,7 +149,6 @@ impl CrashTestResult { unique_reports: &Option, reports: &Option, no_repro: &Option, - jr_client: &Option, ) -> Result<()> { match self { Self::CrashReport(report) => { @@ -179,15 +166,6 @@ impl CrashTestResult { if upload_or_save_local(&report, &name, unique_reports).await? 
{ event!(new_unique_report; EventData::Path = report.unique_blob_name()); metric!(new_unique_report; 1.0; EventData::Path = report.unique_blob_name()); - - if let Some(jr_client) = jr_client { - let _ = jr_client - .send_direct( - JobResultData::NewUniqueReport, - HashMap::from([("count".to_string(), 1.0)]), - ) - .await; - } } } @@ -196,15 +174,6 @@ impl CrashTestResult { if upload_or_save_local(&report, &name, reports).await? { event!(new_report; EventData::Path = report.blob_name()); metric!(new_report; 1.0; EventData::Path = report.blob_name()); - - if let Some(jr_client) = jr_client { - let _ = jr_client - .send_direct( - JobResultData::NewReport, - HashMap::from([("count".to_string(), 1.0)]), - ) - .await; - } } } } @@ -215,15 +184,6 @@ impl CrashTestResult { if upload_or_save_local(&report, &name, no_repro).await? { event!(new_unable_to_reproduce; EventData::Path = report.blob_name()); metric!(new_unable_to_reproduce; 1.0; EventData::Path = report.blob_name()); - - if let Some(jr_client) = jr_client { - let _ = jr_client - .send_direct( - JobResultData::NoReproCrashingInput, - HashMap::from([("count".to_string(), 1.0)]), - ) - .await; - } } } } @@ -372,7 +332,6 @@ pub async fn monitor_reports( unique_reports: &Option, reports: &Option, no_crash: &Option, - jr_client: &Option, ) -> Result<()> { if unique_reports.is_none() && reports.is_none() && no_crash.is_none() { debug!("no report directories configured"); @@ -383,9 +342,7 @@ pub async fn monitor_reports( while let Some(file) = monitor.next_file().await? 
{ let result = parse_report_file(file).await?; - result - .save(unique_reports, reports, no_crash, jr_client) - .await?; + result.save(unique_reports, reports, no_crash).await?; } Ok(()) diff --git a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs index b8659845de..9b626a7d89 100644 --- a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs @@ -8,25 +8,25 @@ use std::{ sync::Arc, }; -use crate::tasks::report::crash_report::*; -use crate::tasks::report::dotnet::common::collect_exception_info; -use crate::tasks::{ - config::CommonConfig, - generic::input_poller::*, - heartbeat::{HeartbeatSender, TaskHeartbeatClient}, - utils::{default_bool_true, try_resolve_setup_relative_path}, -}; use anyhow::{Context, Result}; use async_trait::async_trait; use onefuzz::expand::Expand; use onefuzz::fs::set_executable; use onefuzz::{blob::BlobUrl, sha256, syncdir::SyncedDir}; -use onefuzz_result::job_result::TaskJobResultClient; use reqwest::Url; use serde::Deserialize; use storage_queue::{Message, QueueClient}; use tokio::fs; +use crate::tasks::report::crash_report::*; +use crate::tasks::report::dotnet::common::collect_exception_info; +use crate::tasks::{ + config::CommonConfig, + generic::input_poller::*, + heartbeat::{HeartbeatSender, TaskHeartbeatClient}, + utils::{default_bool_true, try_resolve_setup_relative_path}, +}; + const DOTNET_DUMP_TOOL_NAME: &str = "dotnet-dump"; #[derive(Debug, Deserialize)] @@ -114,18 +114,15 @@ impl DotnetCrashReportTask { pub struct AsanProcessor { config: Arc, heartbeat_client: Option, - job_result_client: Option, } impl AsanProcessor { pub async fn new(config: Arc) -> Result { let heartbeat_client = config.common.init_heartbeat(None).await?; - let job_result_client = config.common.init_job_result().await?; Ok(Self { config, heartbeat_client, - job_result_client, }) } @@ -263,7 +260,6 @@ impl Processor for 
AsanProcessor { &self.config.unique_reports, &self.config.reports, &self.config.no_repro, - &self.job_result_client, ) .await; diff --git a/src/agent/onefuzz-task/src/tasks/report/generic.rs b/src/agent/onefuzz-task/src/tasks/report/generic.rs index 8ad259f0a5..9088f98acc 100644 --- a/src/agent/onefuzz-task/src/tasks/report/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/report/generic.rs @@ -13,7 +13,6 @@ use async_trait::async_trait; use onefuzz::{ blob::BlobUrl, input_tester::Tester, machine_id::MachineIdentity, sha256, syncdir::SyncedDir, }; -use onefuzz_result::job_result::TaskJobResultClient; use reqwest::Url; use serde::Deserialize; use std::{ @@ -74,9 +73,7 @@ impl ReportTask { pub async fn managed_run(&mut self) -> Result<()> { info!("Starting generic crash report task"); let heartbeat_client = self.config.common.init_heartbeat(None).await?; - let job_result_client = self.config.common.init_job_result().await?; - let mut processor = - GenericReportProcessor::new(&self.config, heartbeat_client, job_result_client); + let mut processor = GenericReportProcessor::new(&self.config, heartbeat_client); #[allow(clippy::manual_flatten)] for entry in [ @@ -186,19 +183,13 @@ pub async fn test_input(args: TestInputArgs<'_>) -> Result { pub struct GenericReportProcessor<'a> { config: &'a Config, heartbeat_client: Option, - job_result_client: Option, } impl<'a> GenericReportProcessor<'a> { - pub fn new( - config: &'a Config, - heartbeat_client: Option, - job_result_client: Option, - ) -> Self { + pub fn new(config: &'a Config, heartbeat_client: Option) -> Self { Self { config, heartbeat_client, - job_result_client, } } @@ -248,7 +239,6 @@ impl<'a> Processor for GenericReportProcessor<'a> { &self.config.unique_reports, &self.config.reports, &self.config.no_repro, - &self.job_result_client, ) .await .context("saving report failed") diff --git a/src/agent/onefuzz-task/src/tasks/report/libfuzzer_report.rs b/src/agent/onefuzz-task/src/tasks/report/libfuzzer_report.rs index 
587ed2e3dc..f18f638fa3 100644 --- a/src/agent/onefuzz-task/src/tasks/report/libfuzzer_report.rs +++ b/src/agent/onefuzz-task/src/tasks/report/libfuzzer_report.rs @@ -13,7 +13,6 @@ use async_trait::async_trait; use onefuzz::{ blob::BlobUrl, libfuzzer::LibFuzzer, machine_id::MachineIdentity, sha256, syncdir::SyncedDir, }; -use onefuzz_result::job_result::TaskJobResultClient; use reqwest::Url; use serde::Deserialize; use std::{ @@ -197,18 +196,15 @@ pub async fn test_input(args: TestInputArgs<'_>) -> Result { pub struct AsanProcessor { config: Arc, heartbeat_client: Option, - job_result_client: Option, } impl AsanProcessor { pub async fn new(config: Arc) -> Result { let heartbeat_client = config.common.init_heartbeat(None).await?; - let job_result_client = config.common.init_job_result().await?; Ok(Self { config, heartbeat_client, - job_result_client, }) } @@ -261,7 +257,6 @@ impl Processor for AsanProcessor { &self.config.unique_reports, &self.config.reports, &self.config.no_repro, - &self.job_result_client, ) .await } diff --git a/src/agent/onefuzz/Cargo.toml b/src/agent/onefuzz/Cargo.toml index 55042607fa..f834c7b6dc 100644 --- a/src/agent/onefuzz/Cargo.toml +++ b/src/agent/onefuzz/Cargo.toml @@ -44,7 +44,6 @@ tempfile = "3.8.0" process_control = "4.0" reqwest-retry = { path = "../reqwest-retry" } onefuzz-telemetry = { path = "../onefuzz-telemetry" } -onefuzz-result = { path = "../onefuzz-result" } stacktrace-parser = { path = "../stacktrace-parser" } backoff = { version = "0.4", features = ["tokio"] } diff --git a/src/agent/onefuzz/src/blob/url.rs b/src/agent/onefuzz/src/blob/url.rs index 134b59dea0..f55ffbb23a 100644 --- a/src/agent/onefuzz/src/blob/url.rs +++ b/src/agent/onefuzz/src/blob/url.rs @@ -192,15 +192,10 @@ impl BlobContainerUrl { } pub fn as_path(&self, prefix: impl AsRef) -> Result { - match (self.account(), self.container()) { - (Some(account), Some(container)) => { - let mut path = PathBuf::new(); - path.push(account); - path.push(container); - 
Ok(prefix.as_ref().join(path)) - } - _ => bail!("Invalid container Url"), - } + let dir = self + .account() + .ok_or_else(|| anyhow!("Invalid container Url"))?; + Ok(prefix.as_ref().join(dir)) } } @@ -531,14 +526,4 @@ mod tests { "id:000000,sig:06,src:000000,op:havoc,rep:128" ); } - - #[test] - fn test_as_path() -> Result<()> { - let root = PathBuf::from(r"/onefuzz"); - let url = BlobContainerUrl::parse("https://myaccount.blob.core.windows.net/mycontainer")?; - let path = url.as_path(root)?; - assert_eq!(PathBuf::from(r"/onefuzz/myaccount/mycontainer"), path); - - Ok(()) - } } diff --git a/src/agent/onefuzz/src/syncdir.rs b/src/agent/onefuzz/src/syncdir.rs index efd8f8e0e3..6ff4d4fc68 100644 --- a/src/agent/onefuzz/src/syncdir.rs +++ b/src/agent/onefuzz/src/syncdir.rs @@ -11,12 +11,10 @@ use crate::{ }; use anyhow::{Context, Result}; use dunce::canonicalize; -use onefuzz_result::job_result::{JobResultData, JobResultSender, TaskJobResultClient}; use onefuzz_telemetry::{Event, EventData}; use reqwest::{StatusCode, Url}; use reqwest_retry::{RetryCheck, SendRetry, DEFAULT_RETRY_PERIOD, MAX_RETRY_ATTEMPTS}; use serde::{Deserialize, Serialize}; -use std::collections::HashMap; use std::{env::current_dir, path::PathBuf, str, time::Duration}; use tokio::{fs, select}; use tokio_util::sync::CancellationToken; @@ -243,7 +241,6 @@ impl SyncedDir { url: BlobContainerUrl, event: Event, ignore_dotfiles: bool, - jr_client: &Option, ) -> Result<()> { debug!("monitoring {}", path.display()); @@ -268,6 +265,7 @@ impl SyncedDir { if ignore_dotfiles && file_name_event_str.starts_with('.') { continue; } + event!(event.clone(); EventData::Path = file_name_event_str); metric!(event.clone(); 1.0; EventData::Path = file_name_str_metric_str); if let Some(jr_client) = jr_client { @@ -338,6 +336,7 @@ impl SyncedDir { event!(event.clone(); EventData::Path = file_name_event_str); metric!(event.clone(); 1.0; EventData::Path = file_name_str_metric_str); +<<<<<<< HEAD if let Some(jr_client) = 
jr_client { match event { Event::new_result => { @@ -361,6 +360,8 @@ impl SyncedDir { } } } +======= +>>>>>>> c69deed5 (Release 8.7.1 (hotfix) (#3459)) if let Err(err) = uploader.upload(item.clone()).await { let error_message = format!( "Couldn't upload file. path:{} dir:{} err:{:?}", @@ -392,12 +393,7 @@ impl SyncedDir { /// The intent of this is to support use cases where we usually want a directory /// to be initialized, but a user-supplied binary, (such as AFL) logically owns /// a directory, and may reset it. - pub async fn monitor_results( - &self, - event: Event, - ignore_dotfiles: bool, - job_result_client: &Option, - ) -> Result<()> { + pub async fn monitor_results(&self, event: Event, ignore_dotfiles: bool) -> Result<()> { if let Some(url) = self.remote_path.clone() { loop { debug!("waiting to monitor {}", self.local_path.display()); @@ -416,7 +412,6 @@ impl SyncedDir { url.clone(), event.clone(), ignore_dotfiles, - job_result_client, ) .await?; } diff --git a/src/deployment/bicep-templates/storageAccounts.bicep b/src/deployment/bicep-templates/storageAccounts.bicep index 27f2da21d8..6a96cea6a0 100644 --- a/src/deployment/bicep-templates/storageAccounts.bicep +++ b/src/deployment/bicep-templates/storageAccounts.bicep @@ -33,7 +33,7 @@ var storageAccountFuncQueuesParams = [ 'update-queue' 'webhooks' 'signalr-events' - 'job-result' + 'custom-metrics' ] var fileChangesQueueIndex = 0 diff --git a/src/integration-tests/integration-test.py b/src/integration-tests/integration-test.py index b71b0b7378..edc43c8edd 100755 --- a/src/integration-tests/integration-test.py +++ b/src/integration-tests/integration-test.py @@ -88,7 +88,6 @@ class Integration(BaseModel): target_method: Optional[str] setup_dir: Optional[str] target_env: Optional[Dict[str, str]] - pool: PoolName TARGETS: Dict[str, Integration] = { @@ -98,7 +97,6 @@ class Integration(BaseModel): target_exe="fuzz.exe", inputs="seeds", wait_for_files={ContainerType.unique_reports: 1}, - pool="linux", ), 
"linux-libfuzzer": Integration( template=TemplateType.libfuzzer, @@ -126,7 +124,6 @@ class Integration(BaseModel): "--only_asan_failures", "--write_test_file={extra_output_dir}/test.txt", ], - pool="linux", ), "linux-libfuzzer-with-options": Integration( template=TemplateType.libfuzzer, @@ -140,7 +137,6 @@ class Integration(BaseModel): }, reboot_after_setup=True, fuzzing_target_options=["-runs=10000000"], - pool="linux", ), "linux-libfuzzer-dlopen": Integration( template=TemplateType.libfuzzer, @@ -154,7 +150,6 @@ class Integration(BaseModel): }, reboot_after_setup=True, use_setup=True, - pool="linux", ), "linux-libfuzzer-linked-library": Integration( template=TemplateType.libfuzzer, @@ -168,7 +163,6 @@ class Integration(BaseModel): }, reboot_after_setup=True, use_setup=True, - pool="linux", ), "linux-libfuzzer-dotnet": Integration( template=TemplateType.libfuzzer_dotnet, @@ -186,7 +180,6 @@ class Integration(BaseModel): ContainerType.unique_reports: 1, }, test_repro=False, - pool="linux", ), "linux-libfuzzer-aarch64-crosscompile": Integration( template=TemplateType.libfuzzer_qemu_user, @@ -196,7 +189,6 @@ class Integration(BaseModel): use_setup=True, wait_for_files={ContainerType.inputs: 2, ContainerType.crashes: 1}, test_repro=False, - pool="linux", ), "linux-libfuzzer-rust": Integration( template=TemplateType.libfuzzer, @@ -204,7 +196,6 @@ class Integration(BaseModel): target_exe="fuzz_target_1", wait_for_files={ContainerType.unique_reports: 1, ContainerType.coverage: 1}, fuzzing_target_options=["--test:{extra_setup_dir}"], - pool="linux", ), "linux-trivial-crash": Integration( template=TemplateType.radamsa, @@ -213,7 +204,6 @@ class Integration(BaseModel): inputs="seeds", wait_for_files={ContainerType.unique_reports: 1}, inject_fake_regression=True, - pool="linux", ), "linux-trivial-crash-asan": Integration( template=TemplateType.radamsa, @@ -266,7 +256,6 @@ class Integration(BaseModel): "--only_asan_failures", "--write_test_file={extra_output_dir}/test.txt", 
], - pool="windows", ), "windows-libfuzzer-linked-library": Integration( template=TemplateType.libfuzzer, @@ -279,7 +268,6 @@ class Integration(BaseModel): ContainerType.coverage: 1, }, use_setup=True, - pool="windows", ), "windows-libfuzzer-load-library": Integration( template=TemplateType.libfuzzer, @@ -292,7 +280,6 @@ class Integration(BaseModel): ContainerType.coverage: 1, }, use_setup=True, - pool="windows", ), "windows-libfuzzer-dotnet": Integration( template=TemplateType.libfuzzer_dotnet, @@ -310,7 +297,6 @@ class Integration(BaseModel): ContainerType.unique_reports: 1, }, test_repro=False, - pool="windows", ), "windows-trivial-crash": Integration( template=TemplateType.radamsa, @@ -319,7 +305,6 @@ class Integration(BaseModel): inputs="seeds", wait_for_files={ContainerType.unique_reports: 1}, inject_fake_regression=True, - pool="windows", ), } @@ -388,7 +373,7 @@ def try_info_get(data: Any) -> None: self.inject_log(self.start_log_marker) for entry in os_list: - name = self.build_pool_name(entry.name) + name = PoolName(f"testpool-{entry.name}-{self.test_id}") self.logger.info("creating pool: %s:%s", entry.name, name) self.of.pools.create(name, entry) self.logger.info("creating scaleset for pool: %s", name) @@ -609,9 +594,12 @@ def launch( ) -> List[UUID]: """Launch all of the fuzzing templates""" - pool = None + pools: Dict[OS, Pool] = {} if unmanaged_pool is not None: - pool = unmanaged_pool.pool_name + pools[unmanaged_pool.the_os] = self.of.pools.get(unmanaged_pool.pool_name) + else: + for pool in self.of.pools.list(): + pools[pool.os] = pool job_ids = [] @@ -622,8 +610,8 @@ def launch( if config.os not in os_list: continue - if pool is None: - pool = self.build_pool_name(config.pool) + if config.os not in pools.keys(): + raise Exception(f"No pool for target: {target} ,os: {config.os}") self.logger.info("launching: %s", target) @@ -647,9 +635,12 @@ def launch( setup = Directory(os.path.join(setup, config.nested_setup_dir)) job: Optional[Job] = None +<<<<<<< 
HEAD +======= +>>>>>>> c69deed5 (Release 8.7.1 (hotfix) (#3459)) job = self.build_job( - duration, pool, target, config, setup, target_exe, inputs + duration, pools, target, config, setup, target_exe, inputs ) if config.inject_fake_regression and job is not None: @@ -665,7 +656,7 @@ def launch( def build_job( self, duration: int, - pool: PoolName, + pools: Dict[OS, Pool], target: str, config: Integration, setup: Optional[Directory], @@ -681,7 +672,7 @@ def build_job( self.project, target, BUILD, - pool, + pools[config.os].name, target_exe=target_exe, inputs=inputs, setup_dir=setup, @@ -706,7 +697,7 @@ def build_job( self.project, target, BUILD, - pool, + pools[config.os].name, target_dll=File(config.target_exe), inputs=inputs, setup_dir=setup, @@ -722,7 +713,7 @@ def build_job( self.project, target, BUILD, - pool, + pools[config.os].name, inputs=inputs, target_exe=target_exe, duration=duration, @@ -735,7 +726,7 @@ def build_job( self.project, target, BUILD, - pool_name=pool, + pool_name=pools[config.os].name, target_exe=target_exe, inputs=inputs, setup_dir=setup, @@ -750,7 +741,7 @@ def build_job( self.project, target, BUILD, - pool_name=pool, + pool_name=pools[config.os].name, target_exe=target_exe, inputs=inputs, setup_dir=setup, @@ -1281,11 +1272,6 @@ def check_logs_for_errors(self) -> None: if seen_errors: raise Exception("logs included errors") - def build_pool_name(self, os_type: str) -> PoolName: - return PoolName(f"testpool-{os_type}-{self.test_id}") - - -class Run(Command): def check_jobs( self, test_id: UUID, diff --git a/src/runtime-tools/linux/setup.sh b/src/runtime-tools/linux/setup.sh old mode 100644 new mode 100755 index 794e827f4d..f6859003b4 --- a/src/runtime-tools/linux/setup.sh +++ b/src/runtime-tools/linux/setup.sh @@ -18,14 +18,6 @@ export DOTNET_CLI_HOME="$DOTNET_ROOT" export ONEFUZZ_ROOT=/onefuzz export LLVM_SYMBOLIZER_PATH=/onefuzz/bin/llvm-symbolizer -# `logger` won't work on mariner unless we install this package first -if type yum > 
/dev/null 2> /dev/null; then - until yum install -y util-linux sudo; do - echo "yum failed. sleep 10s, then retrying" - sleep 10 - done -fi - logger "onefuzz: making directories" sudo mkdir -p /onefuzz/downloaded sudo chown -R $(whoami) /onefuzz @@ -142,53 +134,31 @@ if type apt > /dev/null 2> /dev/null; then sudo ln -f -s $(which llvm-symbolizer-12) $LLVM_SYMBOLIZER_PATH fi - # Needed to install dotnet + # Install dotnet until sudo apt install -y curl libicu-dev; do logger "apt failed, sleeping 10s then retrying" sleep 10 done -elif type yum > /dev/null 2> /dev/null; then - until yum install -y gdb gdb-gdbserver libunwind awk ca-certificates tar yum-utils shadow-utils cronie procps; do - echo "yum failed. sleep 10s, then retrying" - sleep 10 - done - - # Install updated Microsoft Open Management Infrastructure - github.com/microsoft/omi - yum-config-manager --add-repo=https://packages.microsoft.com/config/rhel/8/prod.repo 2>&1 | logger -s -i -t 'onefuzz-OMI-add-MS-repo' - yum install -y omi 2>&1 | logger -s -i -t 'onefuzz-OMI-install' + logger "downloading dotnet install" + curl --retry 10 -sSL https://dot.net/v1/dotnet-install.sh -o dotnet-install.sh 2>&1 | logger -s -i -t 'onefuzz-curl-dotnet-install' + chmod +x dotnet-install.sh - if ! [ -f ${LLVM_SYMBOLIZER_PATH} ]; then - until yum install -y llvm-12.0.1; do - echo "yum failed, sleeping 10s then retrying" - sleep 10 - done - - # If specifying symbolizer, exe name must be a "known symbolizer". - # Using `llvm-symbolizer` works for clang 8 .. 12. 
- sudo ln -f -s $(which llvm-symbolizer-12) $LLVM_SYMBOLIZER_PATH - fi + for version in "${DOTNET_VERSIONS[@]}"; do + logger "running dotnet install $version" + /bin/bash ./dotnet-install.sh --channel "$version" --install-dir "$DOTNET_ROOT" 2>&1 | logger -s -i -t 'onefuzz-dotnet-setup' + done + rm dotnet-install.sh + + logger "install dotnet tools" + pushd "$DOTNET_ROOT" + ls -lah 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' + "$DOTNET_ROOT"/dotnet tool install dotnet-dump --version 6.0.351802 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' + "$DOTNET_ROOT"/dotnet tool install dotnet-coverage --version 17.5 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' + "$DOTNET_ROOT"/dotnet tool install dotnet-sos --version 6.0.351802 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' + popd fi -# Install dotnet -logger "downloading dotnet install" -curl --retry 10 -sSL https://dot.net/v1/dotnet-install.sh -o dotnet-install.sh 2>&1 | logger -s -i -t 'onefuzz-curl-dotnet-install' -chmod +x dotnet-install.sh - -for version in "${DOTNET_VERSIONS[@]}"; do - logger "running dotnet install $version" - /bin/bash ./dotnet-install.sh --channel "$version" --install-dir "$DOTNET_ROOT" 2>&1 | logger -s -i -t 'onefuzz-dotnet-setup' -done -rm dotnet-install.sh - -logger "install dotnet tools" -pushd "$DOTNET_ROOT" -ls -lah 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' -"$DOTNET_ROOT"/dotnet tool install dotnet-dump --version 6.0.351802 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' -"$DOTNET_ROOT"/dotnet tool install dotnet-coverage --version 17.5 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' -"$DOTNET_ROOT"/dotnet tool install dotnet-sos --version 6.0.351802 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' -popd - if [ -v DOCKER_BUILD ]; then echo "building for docker" elif [ -d /etc/systemd/system ]; then From 
bab42565e9a30f31e931f09aa4532174ae978858 Mon Sep 17 00:00:00 2001 From: Adam <103067949+AdamL-Microsoft@users.noreply.github.com> Date: Wed, 30 Aug 2023 13:53:49 -0700 Subject: [PATCH 02/32] Revert "Release 8.7.1 (hotfix) (#3459)" (#3468) This reverts commit c69deed50e81cc1805f6f82ebb10513a211cbbe2. --- .devcontainer/devcontainer.json | 3 +- .github/workflows/ci.yml | 2 + .../ApiService/Functions/QueueJobResult.cs | 60 +++++++ .../ApiService/OneFuzzTypes/Model.cs | 45 +++++ src/ApiService/ApiService/Program.cs | 1 + .../ApiService/onefuzzlib/Config.cs | 1 + .../ApiService/onefuzzlib/Extension.cs | 44 ++--- .../onefuzzlib/JobResultOperations.cs | 121 +++++++++++++ .../ApiService/onefuzzlib/OnefuzzContext.cs | 2 + .../IntegrationTests/Fakes/TestContext.cs | 3 + src/agent/Cargo.lock | 16 ++ src/agent/Cargo.toml | 1 + src/agent/onefuzz-agent/src/config.rs | 12 ++ src/agent/onefuzz-agent/src/log_uploader.rs | 29 ---- src/agent/onefuzz-agent/src/work.rs | 5 +- src/agent/onefuzz-result/Cargo.toml | 18 ++ src/agent/onefuzz-result/src/lib.rs | 4 + src/agent/onefuzz-task/Cargo.toml | 1 + src/agent/onefuzz-task/src/local/cmd.rs | 42 +---- src/agent/onefuzz-task/src/local/common.rs | 26 +-- .../example_templates/libfuzzer_basic.yml | 34 ++-- .../src/local/generic_analysis.rs | 137 +-------------- .../src/local/generic_crash_report.rs | 138 +-------------- .../src/local/generic_generator.rs | 142 +-------------- src/agent/onefuzz-task/src/local/libfuzzer.rs | 161 +----------------- .../src/local/libfuzzer_crash_report.rs | 128 +------------- .../onefuzz-task/src/local/libfuzzer_merge.rs | 84 +-------- .../src/local/libfuzzer_regression.rs | 134 +-------------- .../src/local/libfuzzer_test_input.rs | 83 --------- src/agent/onefuzz-task/src/local/mod.rs | 1 - src/agent/onefuzz-task/src/local/radamsa.rs | 78 --------- src/agent/onefuzz-task/src/local/schema.json | 8 +- src/agent/onefuzz-task/src/local/template.rs | 13 +- .../onefuzz-task/src/local/test_input.rs | 86 ---------- 
.../src/tasks/analysis/generic.rs | 5 +- src/agent/onefuzz-task/src/tasks/config.rs | 20 +++ .../src/tasks/coverage/generic.rs | 8 +- .../onefuzz-task/src/tasks/fuzz/generator.rs | 7 +- .../src/tasks/fuzz/libfuzzer/common.rs | 49 ++++-- .../onefuzz-task/src/tasks/fuzz/supervisor.rs | 15 +- src/agent/onefuzz-task/src/tasks/heartbeat.rs | 2 +- .../onefuzz-task/src/tasks/merge/generic.rs | 2 +- .../src/tasks/merge/libfuzzer_merge.rs | 2 +- .../src/tasks/regression/common.rs | 15 +- .../src/tasks/regression/generic.rs | 3 +- .../src/tasks/regression/libfuzzer.rs | 3 +- .../src/tasks/report/crash_report.rs | 45 ++++- .../src/tasks/report/dotnet/generic.rs | 22 ++- .../onefuzz-task/src/tasks/report/generic.rs | 14 +- .../src/tasks/report/libfuzzer_report.rs | 5 + src/agent/onefuzz/Cargo.toml | 1 + src/agent/onefuzz/src/blob/url.rs | 23 ++- src/agent/onefuzz/src/syncdir.rs | 26 ++- .../bicep-templates/storageAccounts.bicep | 2 +- src/integration-tests/integration-test.py | 49 ++++-- src/runtime-tools/linux/setup.sh | 64 +++++-- 56 files changed, 664 insertions(+), 1381 deletions(-) create mode 100644 src/ApiService/ApiService/Functions/QueueJobResult.cs create mode 100644 src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs create mode 100644 src/agent/onefuzz-result/Cargo.toml create mode 100644 src/agent/onefuzz-result/src/lib.rs delete mode 100644 src/agent/onefuzz-task/src/local/radamsa.rs mode change 100755 => 100644 src/runtime-tools/linux/setup.sh diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 4059b3d7c1..d3fcf050ed 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -13,6 +13,7 @@ "**/target/**": true }, "lldb.executable": "/usr/bin/lldb", + "dotnet.server.useOmnisharp": true, "omnisharp.enableEditorConfigSupport": true, "omnisharp.enableRoslynAnalyzers": true, "python.defaultInterpreterPath": "/workspaces/onefuzz/src/venv/bin/python", @@ -48,4 +49,4 @@ "features": { 
"ghcr.io/devcontainers/features/azure-cli:1": {} } -} +} \ No newline at end of file diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 456491b6c7..5f07124dd7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -550,9 +550,11 @@ jobs: mkdir -p artifacts/linux-libfuzzer mkdir -p artifacts/linux-libfuzzer-with-options + mkdir -p artifacts/mariner-libfuzzer (cd libfuzzer ; make ) cp -r libfuzzer/fuzz.exe libfuzzer/seeds artifacts/linux-libfuzzer cp -r libfuzzer/fuzz.exe libfuzzer/seeds artifacts/linux-libfuzzer-with-options + cp -r libfuzzer/fuzz.exe libfuzzer/seeds artifacts/mariner-libfuzzer mkdir -p artifacts/linux-libfuzzer-regression (cd libfuzzer-regression ; make ) diff --git a/src/ApiService/ApiService/Functions/QueueJobResult.cs b/src/ApiService/ApiService/Functions/QueueJobResult.cs new file mode 100644 index 0000000000..d781a4d1e1 --- /dev/null +++ b/src/ApiService/ApiService/Functions/QueueJobResult.cs @@ -0,0 +1,60 @@ +using System.Text.Json; +using Microsoft.Azure.Functions.Worker; +using Microsoft.Extensions.Logging; +using Microsoft.OneFuzz.Service.OneFuzzLib.Orm; +namespace Microsoft.OneFuzz.Service.Functions; + + +public class QueueJobResult { + private readonly ILogger _log; + private readonly IOnefuzzContext _context; + + public QueueJobResult(ILogger logTracer, IOnefuzzContext context) { + _log = logTracer; + _context = context; + } + + [Function("QueueJobResult")] + public async Async.Task Run([QueueTrigger("job-result", Connection = "AzureWebJobsStorage")] string msg) { + + var _tasks = _context.TaskOperations; + var _jobs = _context.JobOperations; + + _log.LogInformation("job result: {msg}", msg); + var jr = JsonSerializer.Deserialize(msg, EntityConverter.GetJsonSerializerOptions()).EnsureNotNull($"wrong data {msg}"); + + var task = await _tasks.GetByTaskId(jr.TaskId); + if (task == null) { + _log.LogWarning("invalid {TaskId}", jr.TaskId); + return; + } + + var job = await _jobs.Get(task.JobId); + if (job 
== null) { + _log.LogWarning("invalid {JobId}", task.JobId); + return; + } + + JobResultData? data = jr.Data; + if (data == null) { + _log.LogWarning($"job result data is empty, throwing out: {jr}"); + return; + } + + var jobResultType = data.Type; + _log.LogInformation($"job result data type: {jobResultType}"); + + Dictionary value; + if (jr.Value.Count > 0) { + value = jr.Value; + } else { + _log.LogWarning($"job result value is empty, throwing out: {jr}"); + return; + } + + var jobResult = await _context.JobResultOperations.CreateOrUpdate(job.JobId, jobResultType, value); + if (!jobResult.IsOk) { + _log.LogError("failed to create or update with job result {JobId}", job.JobId); + } + } +} diff --git a/src/ApiService/ApiService/OneFuzzTypes/Model.cs b/src/ApiService/ApiService/OneFuzzTypes/Model.cs index 67cbea39b6..23811e9fe0 100644 --- a/src/ApiService/ApiService/OneFuzzTypes/Model.cs +++ b/src/ApiService/ApiService/OneFuzzTypes/Model.cs @@ -33,6 +33,19 @@ public enum HeartbeatType { TaskAlive, } +[SkipRename] +public enum JobResultType { + NewCrashingInput, + NoReproCrashingInput, + NewReport, + NewUniqueReport, + NewRegressionReport, + NewCoverage, + NewCrashDump, + CoverageData, + RuntimeStats, +} + public record HeartbeatData(HeartbeatType Type); public record TaskHeartbeatEntry( @@ -41,6 +54,16 @@ public record TaskHeartbeatEntry( Guid MachineId, HeartbeatData[] Data); +public record JobResultData(JobResultType Type); + +public record TaskJobResultEntry( + Guid TaskId, + Guid?
JobId, + Guid MachineId, + JobResultData Data, + Dictionary Value + ); + public record NodeHeartbeatEntry(Guid NodeId, HeartbeatData[] Data); public record NodeCommandStopIfFree(); @@ -895,6 +918,27 @@ public record SecretAddress(Uri Url) : ISecret { public record SecretData(ISecret Secret) { } +public record JobResult( + [PartitionKey][RowKey] Guid JobId, + string Project, + string Name, + double NewCrashingInput = 0, + double NoReproCrashingInput = 0, + double NewReport = 0, + double NewUniqueReport = 0, + double NewRegressionReport = 0, + double NewCrashDump = 0, + double InstructionsCovered = 0, + double TotalInstructions = 0, + double CoverageRate = 0, + double IterationCount = 0 +) : EntityBase() { + public JobResult(Guid JobId, string Project, string Name) : this( + JobId: JobId, + Project: Project, + Name: Name, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) { } +} + public record JobConfig( string Project, string Name, @@ -1061,6 +1105,7 @@ public record TaskUnitConfig( string? InstanceTelemetryKey, string? MicrosoftTelemetryKey, Uri HeartbeatQueue, + Uri JobResultQueue, Dictionary Tags ) { public Uri? 
inputQueue { get; set; } diff --git a/src/ApiService/ApiService/Program.cs b/src/ApiService/ApiService/Program.cs index 7db11a8569..f26463883b 100644 --- a/src/ApiService/ApiService/Program.cs +++ b/src/ApiService/ApiService/Program.cs @@ -180,6 +180,7 @@ public static async Async.Task Main() { .AddScoped() .AddScoped() .AddScoped() + .AddScoped() .AddScoped() .AddScoped() .AddScoped() diff --git a/src/ApiService/ApiService/onefuzzlib/Config.cs b/src/ApiService/ApiService/onefuzzlib/Config.cs index 71af317348..872cedbc01 100644 --- a/src/ApiService/ApiService/onefuzzlib/Config.cs +++ b/src/ApiService/ApiService/onefuzzlib/Config.cs @@ -71,6 +71,7 @@ private static BlobContainerSasPermissions ConvertPermissions(ContainerPermissio InstanceTelemetryKey: _serviceConfig.ApplicationInsightsInstrumentationKey, MicrosoftTelemetryKey: _serviceConfig.OneFuzzTelemetry, HeartbeatQueue: await _queue.GetQueueSas("task-heartbeat", StorageType.Config, QueueSasPermissions.Add) ?? throw new Exception("unable to get heartbeat queue sas"), + JobResultQueue: await _queue.GetQueueSas("job-result", StorageType.Config, QueueSasPermissions.Add) ?? throw new Exception("unable to get job result queue sas"), Tags: task.Config.Tags ??
new Dictionary() ); diff --git a/src/ApiService/ApiService/onefuzzlib/Extension.cs b/src/ApiService/ApiService/onefuzzlib/Extension.cs index 7995026eca..fbf62dd343 100644 --- a/src/ApiService/ApiService/onefuzzlib/Extension.cs +++ b/src/ApiService/ApiService/onefuzzlib/Extension.cs @@ -36,7 +36,9 @@ public async Async.Task> GenericExtensions(AzureLocati var extensions = new List(); var instanceConfig = await _context.ConfigOperations.Fetch(); - extensions.Add(await MonitorExtension(region, vmOs)); + if (vmOs == Os.Windows) { + extensions.Add(await MonitorExtension(region)); + } var depenency = DependencyExtension(region, vmOs); if (depenency is not null) { @@ -329,37 +331,21 @@ public async Async.Task AgentConfig(AzureLocation region, Os throw new NotSupportedException($"unsupported OS: {vmOs}"); } - public async Async.Task MonitorExtension(AzureLocation region, Os vmOs) { + public async Async.Task MonitorExtension(AzureLocation region) { var settings = await _context.LogAnalytics.GetMonitorSettings(); var extensionSettings = JsonSerializer.Serialize(new { WorkspaceId = settings.Id }, _extensionSerializerOptions); var protectedExtensionSettings = JsonSerializer.Serialize(new { WorkspaceKey = settings.Key }, _extensionSerializerOptions); - if (vmOs == Os.Windows) { - return new VMExtensionWrapper { - Location = region, - Name = "OMSExtension", - TypePropertiesType = "MicrosoftMonitoringAgent", - Publisher = "Microsoft.EnterpriseCloud.Monitoring", - TypeHandlerVersion = "1.0", - AutoUpgradeMinorVersion = true, - Settings = new BinaryData(extensionSettings), - ProtectedSettings = new BinaryData(protectedExtensionSettings), - EnableAutomaticUpgrade = false - }; - } else if (vmOs == Os.Linux) { - return new VMExtensionWrapper { - Location = region, - Name = "OmsAgentForLinux", - TypePropertiesType = "OmsAgentForLinux", - Publisher = "Microsoft.EnterpriseCloud.Monitoring", - TypeHandlerVersion = "1.0", - AutoUpgradeMinorVersion = true, - Settings = new 
BinaryData(extensionSettings), - ProtectedSettings = new BinaryData(protectedExtensionSettings), - EnableAutomaticUpgrade = false - }; - } else { - throw new NotSupportedException($"unsupported os: {vmOs}"); - } + return new VMExtensionWrapper { + Location = region, + Name = "OMSExtension", + TypePropertiesType = "MicrosoftMonitoringAgent", + Publisher = "Microsoft.EnterpriseCloud.Monitoring", + TypeHandlerVersion = "1.0", + AutoUpgradeMinorVersion = true, + Settings = new BinaryData(extensionSettings), + ProtectedSettings = new BinaryData(protectedExtensionSettings), + EnableAutomaticUpgrade = false + }; } diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs new file mode 100644 index 0000000000..1166cf91d4 --- /dev/null +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -0,0 +1,121 @@ +using ApiService.OneFuzzLib.Orm; +using Microsoft.Extensions.Logging; +using Polly; +namespace Microsoft.OneFuzz.Service; + +public interface IJobResultOperations : IOrm { + + Async.Task GetJobResult(Guid jobId); + Async.Task CreateOrUpdate(Guid jobId, JobResultType resultType, Dictionary resultValue); + +} +public class JobResultOperations : Orm, IJobResultOperations { + + public JobResultOperations(ILogger log, IOnefuzzContext context) + : base(log, context) { + } + + public async Async.Task GetJobResult(Guid jobId) { + return await SearchByPartitionKeys(new[] { jobId.ToString() }).SingleOrDefaultAsync(); + } + + private JobResult UpdateResult(JobResult result, JobResultType type, Dictionary resultValue) { + + var newResult = result; + double newValue; + switch (type) { + case JobResultType.NewCrashingInput: + newValue = result.NewCrashingInput + resultValue["count"]; + newResult = result with { NewCrashingInput = newValue }; + break; + case JobResultType.NewReport: + newValue = result.NewReport + resultValue["count"]; + newResult = result with { NewReport = newValue }; + break; + 
case JobResultType.NewUniqueReport: + newValue = result.NewUniqueReport + resultValue["count"]; + newResult = result with { NewUniqueReport = newValue }; + break; + case JobResultType.NewRegressionReport: + newValue = result.NewRegressionReport + resultValue["count"]; + newResult = result with { NewRegressionReport = newValue }; + break; + case JobResultType.NewCrashDump: + newValue = result.NewCrashDump + resultValue["count"]; + newResult = result with { NewCrashDump = newValue }; + break; + case JobResultType.CoverageData: + double newCovered = resultValue["covered"]; + double newTotalCovered = resultValue["features"]; + double newCoverageRate = resultValue["rate"]; + newResult = result with { InstructionsCovered = newCovered, TotalInstructions = newTotalCovered, CoverageRate = newCoverageRate }; + break; + case JobResultType.RuntimeStats: + double newTotalIterations = resultValue["total_count"]; + newResult = result with { IterationCount = newTotalIterations }; + break; + default: + _logTracer.LogWarning($"Invalid Field {type}."); + break; + } + _logTracer.LogInformation($"Attempting to log new result: {newResult}"); + return newResult; + } + + private async Async.Task TryUpdate(Job job, JobResultType resultType, Dictionary resultValue) { + var jobId = job.JobId; + + var jobResult = await GetJobResult(jobId); + + if (jobResult == null) { + _logTracer.LogInformation("Creating new JobResult for Job {JobId}", jobId); + + var entry = new JobResult(JobId: jobId, Project: job.Config.Project, Name: job.Config.Name); + + jobResult = UpdateResult(entry, resultType, resultValue); + + var r = await Insert(jobResult); + if (!r.IsOk) { + throw new InvalidOperationException($"failed to insert job result {jobResult.JobId}"); + } + _logTracer.LogInformation("created job result {JobId}", jobResult.JobId); + } else { + _logTracer.LogInformation("Updating existing JobResult entry for Job {JobId}", jobId); + + jobResult = UpdateResult(jobResult, resultType, resultValue); + + var r 
= await Update(jobResult); + if (!r.IsOk) { + throw new InvalidOperationException($"failed to insert job result {jobResult.JobId}"); + } + _logTracer.LogInformation("updated job result {JobId}", jobResult.JobId); + } + + return true; + } + + public async Async.Task CreateOrUpdate(Guid jobId, JobResultType resultType, Dictionary resultValue) { + + var job = await _context.JobOperations.Get(jobId); + if (job == null) { + return OneFuzzResultVoid.Error(ErrorCode.INVALID_REQUEST, "invalid job"); + } + + var success = false; + try { + _logTracer.LogInformation("attempt to update job result {JobId}", job.JobId); + var policy = Policy.Handle().WaitAndRetryAsync(50, _ => new TimeSpan(0, 0, 5)); + await policy.ExecuteAsync(async () => { + success = await TryUpdate(job, resultType, resultValue); + _logTracer.LogInformation("attempt {success}", success); + }); + return OneFuzzResultVoid.Ok; + } catch (Exception e) { + return OneFuzzResultVoid.Error(ErrorCode.UNABLE_TO_UPDATE, new string[] { + $"Unexpected failure when attempting to update job result for {job.JobId}", + $"Exception: {e}" + }); + } + } +} + diff --git a/src/ApiService/ApiService/onefuzzlib/OnefuzzContext.cs b/src/ApiService/ApiService/onefuzzlib/OnefuzzContext.cs index d877bfddbb..03c6322663 100644 --- a/src/ApiService/ApiService/onefuzzlib/OnefuzzContext.cs +++ b/src/ApiService/ApiService/onefuzzlib/OnefuzzContext.cs @@ -19,6 +19,7 @@ public interface IOnefuzzContext { IExtensions Extensions { get; } IIpOperations IpOperations { get; } IJobOperations JobOperations { get; } + IJobResultOperations JobResultOperations { get; } ILogAnalytics LogAnalytics { get; } INodeMessageOperations NodeMessageOperations { get; } INodeOperations NodeOperations { get; } @@ -83,6 +84,7 @@ public OnefuzzContext(IServiceProvider serviceProvider) { public IVmOperations VmOperations => _serviceProvider.GetRequiredService(); public ISecretsOperations SecretsOperations => _serviceProvider.GetRequiredService(); public IJobOperations 
JobOperations => _serviceProvider.GetRequiredService(); + public IJobResultOperations JobResultOperations => _serviceProvider.GetRequiredService(); public IScheduler Scheduler => _serviceProvider.GetRequiredService(); public IConfig Config => _serviceProvider.GetRequiredService(); public ILogAnalytics LogAnalytics => _serviceProvider.GetRequiredService(); diff --git a/src/ApiService/IntegrationTests/Fakes/TestContext.cs b/src/ApiService/IntegrationTests/Fakes/TestContext.cs index c46ff5fce7..66d121e746 100644 --- a/src/ApiService/IntegrationTests/Fakes/TestContext.cs +++ b/src/ApiService/IntegrationTests/Fakes/TestContext.cs @@ -32,6 +32,7 @@ public TestContext(IHttpClientFactory httpClientFactory, OneFuzzLoggerProvider p TaskOperations = new TaskOperations(provider.CreateLogger(), Cache, this); NodeOperations = new NodeOperations(provider.CreateLogger(), this); JobOperations = new JobOperations(provider.CreateLogger(), this); + JobResultOperations = new JobResultOperations(provider.CreateLogger(), this); NodeTasksOperations = new NodeTasksOperations(provider.CreateLogger(), this); TaskEventOperations = new TaskEventOperations(provider.CreateLogger(), this); NodeMessageOperations = new NodeMessageOperations(provider.CreateLogger(), this); @@ -57,6 +58,7 @@ public Async.Task InsertAll(params EntityBase[] objs) Node n => NodeOperations.Insert(n), Pool p => PoolOperations.Insert(p), Job j => JobOperations.Insert(j), + JobResult jr => JobResultOperations.Insert(jr), Repro r => ReproOperations.Insert(r), Scaleset ss => ScalesetOperations.Insert(ss), NodeTasks nt => NodeTasksOperations.Insert(nt), @@ -84,6 +86,7 @@ public Async.Task InsertAll(params EntityBase[] objs) public ITaskOperations TaskOperations { get; } public IJobOperations JobOperations { get; } + public IJobResultOperations JobResultOperations { get; } public INodeOperations NodeOperations { get; } public INodeTasksOperations NodeTasksOperations { get; } public ITaskEventOperations TaskEventOperations { 
get; } diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock index 6136357d65..eb35241201 100644 --- a/src/agent/Cargo.lock +++ b/src/agent/Cargo.lock @@ -2138,6 +2138,7 @@ dependencies = [ "log", "nix", "notify", + "onefuzz-result", "onefuzz-telemetry", "pete", "pretty_assertions", @@ -2212,6 +2213,20 @@ dependencies = [ "serde_json", ] +[[package]] +name = "onefuzz-result" +version = "0.2.0" +dependencies = [ + "anyhow", + "async-trait", + "log", + "onefuzz-telemetry", + "reqwest", + "serde", + "storage-queue", + "uuid", +] + [[package]] name = "onefuzz-task" version = "0.2.0" @@ -2241,6 +2256,7 @@ dependencies = [ "num_cpus", "onefuzz", "onefuzz-file-format", + "onefuzz-result", "onefuzz-telemetry", "path-absolutize", "pretty_assertions", diff --git a/src/agent/Cargo.toml b/src/agent/Cargo.toml index 2f4cea41a4..ce01ae880c 100644 --- a/src/agent/Cargo.toml +++ b/src/agent/Cargo.toml @@ -10,6 +10,7 @@ members = [ "onefuzz", "onefuzz-task", "onefuzz-agent", + "onefuzz-result", "onefuzz-file-format", "onefuzz-telemetry", "reqwest-retry", diff --git a/src/agent/onefuzz-agent/src/config.rs b/src/agent/onefuzz-agent/src/config.rs index 87edfb2c1b..fc623e72af 100644 --- a/src/agent/onefuzz-agent/src/config.rs +++ b/src/agent/onefuzz-agent/src/config.rs @@ -34,6 +34,8 @@ pub struct StaticConfig { pub heartbeat_queue: Option, + pub job_result_queue: Option, + pub instance_id: Uuid, #[serde(default = "default_as_true")] @@ -71,6 +73,8 @@ struct RawStaticConfig { pub heartbeat_queue: Option, + pub job_result_queue: Option, + pub instance_id: Uuid, #[serde(default = "default_as_true")] @@ -117,6 +121,7 @@ impl StaticConfig { microsoft_telemetry_key: config.microsoft_telemetry_key, instance_telemetry_key: config.instance_telemetry_key, heartbeat_queue: config.heartbeat_queue, + job_result_queue: config.job_result_queue, instance_id: config.instance_id, managed: config.managed, machine_identity, @@ -152,6 +157,12 @@ impl StaticConfig { None }; + let job_result_queue = if 
let Ok(key) = std::env::var("ONEFUZZ_JOB_RESULT") { + Some(Url::parse(&key)?) + } else { + None + }; + let instance_telemetry_key = if let Ok(key) = std::env::var("ONEFUZZ_INSTANCE_TELEMETRY_KEY") { Some(InstanceTelemetryKey::new(Uuid::parse_str(&key)?)) @@ -183,6 +194,7 @@ impl StaticConfig { instance_telemetry_key, microsoft_telemetry_key, heartbeat_queue, + job_result_queue, instance_id, managed: !is_unmanaged, machine_identity, diff --git a/src/agent/onefuzz-agent/src/log_uploader.rs b/src/agent/onefuzz-agent/src/log_uploader.rs index 6bccc0bef2..d424013421 100644 --- a/src/agent/onefuzz-agent/src/log_uploader.rs +++ b/src/agent/onefuzz-agent/src/log_uploader.rs @@ -210,32 +210,3 @@ async fn sync_file( blob_client.append_block(Body::from(f)).await?; Ok(len) } - -#[cfg(test)] -mod tests { - use std::io::Seek; - - use anyhow::Result; - use tokio::io::{AsyncReadExt, AsyncSeekExt}; - - #[allow(clippy::unused_io_amount)] - #[tokio::test] - #[ignore] - - async fn test_seek_behavior() -> Result<()> { - let path = "C:\\temp\\test.ps1"; - let mut std_file = std::fs::File::open(path)?; - std_file.seek(std::io::SeekFrom::Start(3))?; - - let mut tokio_file = tokio::fs::File::from_std(std_file); - - let buf = &mut [0u8; 5]; - tokio_file.read(buf).await?; - println!("******** buf {:?}", buf); - tokio_file.seek(std::io::SeekFrom::Start(0)).await?; - tokio_file.read(buf).await?; - println!("******** buf {:?}", buf); - - Ok(()) - } -} diff --git a/src/agent/onefuzz-agent/src/work.rs b/src/agent/onefuzz-agent/src/work.rs index b55d1d86a1..d0222744a7 100644 --- a/src/agent/onefuzz-agent/src/work.rs +++ b/src/agent/onefuzz-agent/src/work.rs @@ -91,7 +91,10 @@ impl WorkSet { pub fn setup_dir(&self) -> Result { let root = self.get_root_folder()?; - self.setup_url.as_path(root) + // Putting the setup container at the root for backward compatibility. 
+ // The path of setup folder can be used as part of the deduplication logic in the bug filing service + let setup_root = root.parent().ok_or_else(|| anyhow!("Invalid root"))?; + self.setup_url.as_path(setup_root) } pub fn extra_setup_dir(&self) -> Result> { diff --git a/src/agent/onefuzz-result/Cargo.toml b/src/agent/onefuzz-result/Cargo.toml new file mode 100644 index 0000000000..7c7de6615c --- /dev/null +++ b/src/agent/onefuzz-result/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "onefuzz-result" +version = "0.2.0" +authors = ["fuzzing@microsoft.com"] +edition = "2021" +publish = false +license = "MIT" + +[dependencies] +anyhow = { version = "1.0", features = ["backtrace"] } +async-trait = "0.1" +reqwest = "0.11" +serde = "1.0" +storage-queue = { path = "../storage-queue" } +uuid = { version = "1.4", features = ["serde", "v4"] } +onefuzz-telemetry = { path = "../onefuzz-telemetry" } +log = "0.4" + diff --git a/src/agent/onefuzz-result/src/lib.rs b/src/agent/onefuzz-result/src/lib.rs new file mode 100644 index 0000000000..dae666ca9a --- /dev/null +++ b/src/agent/onefuzz-result/src/lib.rs @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. 
+ +pub mod job_result; diff --git a/src/agent/onefuzz-task/Cargo.toml b/src/agent/onefuzz-task/Cargo.toml index 4c6d48d8a7..4b3e8e8c43 100644 --- a/src/agent/onefuzz-task/Cargo.toml +++ b/src/agent/onefuzz-task/Cargo.toml @@ -47,6 +47,7 @@ serde_json = "1.0" serde_yaml = "0.9.21" onefuzz = { path = "../onefuzz" } onefuzz-telemetry = { path = "../onefuzz-telemetry" } +onefuzz-result = { path = "../onefuzz-result" } path-absolutize = "3.1" reqwest-retry = { path = "../reqwest-retry" } strum = "0.25" diff --git a/src/agent/onefuzz-task/src/local/cmd.rs b/src/agent/onefuzz-task/src/local/cmd.rs index f1df4002c0..cb800d445e 100644 --- a/src/agent/onefuzz-task/src/local/cmd.rs +++ b/src/agent/onefuzz-task/src/local/cmd.rs @@ -4,11 +4,7 @@ use super::{create_template, template}; #[cfg(any(target_os = "linux", target_os = "windows"))] use crate::local::coverage; -use crate::local::{ - common::add_common_config, generic_analysis, generic_crash_report, generic_generator, - libfuzzer, libfuzzer_crash_report, libfuzzer_fuzz, libfuzzer_merge, libfuzzer_regression, - libfuzzer_test_input, radamsa, test_input, tui::TerminalUi, -}; +use crate::local::{common::add_common_config, libfuzzer_fuzz, tui::TerminalUi}; use anyhow::{Context, Result}; use clap::{Arg, ArgAction, Command}; @@ -20,19 +16,9 @@ use tokio::{select, time::timeout}; #[derive(Debug, PartialEq, Eq, EnumString, IntoStaticStr, EnumIter)] #[strum(serialize_all = "kebab-case")] enum Commands { - Radamsa, #[cfg(any(target_os = "linux", target_os = "windows"))] Coverage, LibfuzzerFuzz, - LibfuzzerMerge, - LibfuzzerCrashReport, - LibfuzzerTestInput, - LibfuzzerRegression, - Libfuzzer, - CrashReport, - Generator, - Analysis, - TestInput, Template, CreateTemplate, } @@ -68,23 +54,7 @@ pub async fn run(args: clap::ArgMatches) -> Result<()> { match command { #[cfg(any(target_os = "linux", target_os = "windows"))] Commands::Coverage => coverage::run(&sub_args, event_sender).await, - Commands::Radamsa => radamsa::run(&sub_args, 
event_sender).await, - Commands::LibfuzzerCrashReport => { - libfuzzer_crash_report::run(&sub_args, event_sender).await - } Commands::LibfuzzerFuzz => libfuzzer_fuzz::run(&sub_args, event_sender).await, - Commands::LibfuzzerMerge => libfuzzer_merge::run(&sub_args, event_sender).await, - Commands::LibfuzzerTestInput => { - libfuzzer_test_input::run(&sub_args, event_sender).await - } - Commands::LibfuzzerRegression => { - libfuzzer_regression::run(&sub_args, event_sender).await - } - Commands::Libfuzzer => libfuzzer::run(&sub_args, event_sender).await, - Commands::CrashReport => generic_crash_report::run(&sub_args, event_sender).await, - Commands::Generator => generic_generator::run(&sub_args, event_sender).await, - Commands::Analysis => generic_analysis::run(&sub_args, event_sender).await, - Commands::TestInput => test_input::run(&sub_args, event_sender).await, Commands::Template => { let config = sub_args .get_one::("config") @@ -141,17 +111,7 @@ pub fn args(name: &'static str) -> Command { let app = match subcommand { #[cfg(any(target_os = "linux", target_os = "windows"))] Commands::Coverage => coverage::args(subcommand.into()), - Commands::Radamsa => radamsa::args(subcommand.into()), - Commands::LibfuzzerCrashReport => libfuzzer_crash_report::args(subcommand.into()), Commands::LibfuzzerFuzz => libfuzzer_fuzz::args(subcommand.into()), - Commands::LibfuzzerMerge => libfuzzer_merge::args(subcommand.into()), - Commands::LibfuzzerTestInput => libfuzzer_test_input::args(subcommand.into()), - Commands::LibfuzzerRegression => libfuzzer_regression::args(subcommand.into()), - Commands::Libfuzzer => libfuzzer::args(subcommand.into()), - Commands::CrashReport => generic_crash_report::args(subcommand.into()), - Commands::Generator => generic_generator::args(subcommand.into()), - Commands::Analysis => generic_analysis::args(subcommand.into()), - Commands::TestInput => test_input::args(subcommand.into()), Commands::Template => Command::new("template") .about("uses the template 
to generate a run") .args(vec![Arg::new("config") diff --git a/src/agent/onefuzz-task/src/local/common.rs b/src/agent/onefuzz-task/src/local/common.rs index f8d7949e80..17940d799f 100644 --- a/src/agent/onefuzz-task/src/local/common.rs +++ b/src/agent/onefuzz-task/src/local/common.rs @@ -26,20 +26,10 @@ pub const INPUTS_DIR: &str = "inputs_dir"; pub const CRASHES_DIR: &str = "crashes_dir"; pub const CRASHDUMPS_DIR: &str = "crashdumps_dir"; pub const TARGET_WORKERS: &str = "target_workers"; -pub const REPORTS_DIR: &str = "reports_dir"; -pub const NO_REPRO_DIR: &str = "no_repro_dir"; pub const TARGET_TIMEOUT: &str = "target_timeout"; -pub const CHECK_RETRY_COUNT: &str = "check_retry_count"; -pub const DISABLE_CHECK_QUEUE: &str = "disable_check_queue"; -pub const UNIQUE_REPORTS_DIR: &str = "unique_reports_dir"; pub const COVERAGE_DIR: &str = "coverage_dir"; pub const READONLY_INPUTS: &str = "readonly_inputs_dir"; -pub const CHECK_ASAN_LOG: &str = "check_asan_log"; -pub const TOOLS_DIR: &str = "tools_dir"; -pub const RENAME_OUTPUT: &str = "rename_output"; pub const CHECK_FUZZER_HELP: &str = "check_fuzzer_help"; -pub const DISABLE_CHECK_DEBUGGER: &str = "disable_check_debugger"; -pub const REGRESSION_REPORTS_DIR: &str = "regression_reports_dir"; pub const TARGET_EXE: &str = "target_exe"; pub const TARGET_ENV: &str = "target_env"; @@ -47,17 +37,6 @@ pub const TARGET_OPTIONS: &str = "target_options"; // pub const SUPERVISOR_EXE: &str = "supervisor_exe"; // pub const SUPERVISOR_ENV: &str = "supervisor_env"; // pub const SUPERVISOR_OPTIONS: &str = "supervisor_options"; -pub const GENERATOR_EXE: &str = "generator_exe"; -pub const GENERATOR_ENV: &str = "generator_env"; -pub const GENERATOR_OPTIONS: &str = "generator_options"; - -pub const ANALYZER_EXE: &str = "analyzer_exe"; -pub const ANALYZER_OPTIONS: &str = "analyzer_options"; -pub const ANALYZER_ENV: &str = "analyzer_env"; -pub const ANALYSIS_DIR: &str = "analysis_dir"; -pub const ANALYSIS_INPUTS: &str = 
"analysis_inputs"; -pub const ANALYSIS_UNIQUE_INPUTS: &str = "analysis_unique_inputs"; -pub const PRESERVE_EXISTING_OUTPUTS: &str = "preserve_existing_outputs"; pub const CREATE_JOB_DIR: &str = "create_job_dir"; @@ -66,7 +45,6 @@ const WAIT_FOR_DIR_DELAY: Duration = Duration::from_secs(1); pub enum CmdType { Target, - Generator, // Supervisor, } @@ -90,7 +68,6 @@ pub fn get_cmd_exe(cmd_type: CmdType, args: &clap::ArgMatches) -> Result let name = match cmd_type { CmdType::Target => TARGET_EXE, // CmdType::Supervisor => SUPERVISOR_EXE, - CmdType::Generator => GENERATOR_EXE, }; args.get_one::(name) @@ -102,7 +79,6 @@ pub fn get_cmd_arg(cmd_type: CmdType, args: &clap::ArgMatches) -> Vec { let name = match cmd_type { CmdType::Target => TARGET_OPTIONS, // CmdType::Supervisor => SUPERVISOR_OPTIONS, - CmdType::Generator => GENERATOR_OPTIONS, }; args.get_many::(name) @@ -115,7 +91,6 @@ pub fn get_cmd_env(cmd_type: CmdType, args: &clap::ArgMatches) -> Result TARGET_ENV, // CmdType::Supervisor => SUPERVISOR_ENV, - CmdType::Generator => GENERATOR_ENV, }; get_hash_map(args, env_name) } @@ -265,6 +240,7 @@ pub async fn build_local_context( }, instance_telemetry_key: None, heartbeat_queue: None, + job_result_queue: None, microsoft_telemetry_key: None, logs: None, min_available_memory_mb: 0, diff --git a/src/agent/onefuzz-task/src/local/example_templates/libfuzzer_basic.yml b/src/agent/onefuzz-task/src/local/example_templates/libfuzzer_basic.yml index 7210893809..aba02c7991 100644 --- a/src/agent/onefuzz-task/src/local/example_templates/libfuzzer_basic.yml +++ b/src/agent/onefuzz-task/src/local/example_templates/libfuzzer_basic.yml @@ -5,28 +5,31 @@ # 2. 
Install llvm and export LLVM_SYMBOLIZER_PATH like we do in setup.sh +required_args: &required_args + target_exe: "REPLACE_ME" # The path to your target + inputs: &inputs "REPLACE_ME" # A folder containining your inputs + crashes: &crashes "REPLACE_ME" # The folder where you want the crashing inputs to be output + crashdumps: "REPLACE_ME" # The folder where you want the crash dumps to be output + coverage: "REPLACE_ME" # The folder where you want the code coverage to be output + regression_reports: "REPLACE_ME" # The folder where you want the regression reports to be output + target_args: &target_args + <<: *required_args target_env: {} - target_exe: "C:\\temp\\onefuzz\\integration\\windows-libfuzzer\\fuzz.exe" target_options: [] -inputs: &inputs "C:\\temp\\onefuzz\\integration\\windows-libfuzzer\\seeds" - tasks: - type: LibFuzzer <<: *target_args - inputs: *inputs - crashes: &crash "./crashes" readonly_inputs: [] check_fuzzer_help: true - - type: "Report" + - type: LibfuzzerRegression <<: *target_args - input_queue: *crash - crashes: *crash - reports: "./reports" - unique_reports: "./unique_reports" - no_repro: "./no_repro" + + - type: "LibfuzzerCrashReport" + <<: *target_args + input_queue: *crashes check_fuzzer_help: true - type: "Coverage" @@ -35,4 +38,11 @@ tasks: - "{input}" input_queue: *inputs readonly_inputs: [*inputs] - coverage: "./coverage" + + # The analysis task is optional in the libfuzzer_basic template + # - type: Analysis + # <<: *target_args + # analysis: "REPLACE_ME" # The folder where you want the analysis results to be output + # analyzer_exe: "REPLACE_ME" + # analyzer_options: [] + # analyzer_env: {} diff --git a/src/agent/onefuzz-task/src/local/generic_analysis.rs b/src/agent/onefuzz-task/src/local/generic_analysis.rs index a1bc714d1a..cbb31a1ff9 100644 --- a/src/agent/onefuzz-task/src/local/generic_analysis.rs +++ b/src/agent/onefuzz-task/src/local/generic_analysis.rs @@ -3,139 +3,13 @@ use std::{collections::HashMap, path::PathBuf}; -use 
crate::{ - local::common::{ - build_local_context, get_cmd_arg, get_cmd_exe, get_hash_map, get_synced_dir, CmdType, - SyncCountDirMonitor, UiEvent, ANALYSIS_DIR, ANALYZER_ENV, ANALYZER_EXE, ANALYZER_OPTIONS, - CRASHES_DIR, NO_REPRO_DIR, REPORTS_DIR, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, TOOLS_DIR, - UNIQUE_REPORTS_DIR, - }, - tasks::{ - analysis::generic::{run as run_analysis, Config}, - config::CommonConfig, - }, -}; +use crate::tasks::config::CommonConfig; use anyhow::Result; use async_trait::async_trait; -use clap::{Arg, Command}; -use flume::Sender; use schemars::JsonSchema; -use storage_queue::QueueClient; use super::template::{RunContext, Template}; -pub fn build_analysis_config( - args: &clap::ArgMatches, - input_queue: Option, - common: CommonConfig, - event_sender: Option>, -) -> Result { - let target_exe = get_cmd_exe(CmdType::Target, args)?.into(); - let target_options = get_cmd_arg(CmdType::Target, args); - - let analyzer_exe = args - .get_one::(ANALYZER_EXE) - .cloned() - .ok_or_else(|| format_err!("expected {ANALYZER_EXE}"))?; - - let analyzer_options = args - .get_many::(ANALYZER_OPTIONS) - .unwrap_or_default() - .map(|x| x.to_string()) - .collect(); - - let analyzer_env = get_hash_map(args, ANALYZER_ENV)?; - let analysis = get_synced_dir(ANALYSIS_DIR, common.job_id, common.task_id, args)? - .monitor_count(&event_sender)?; - let tools = get_synced_dir(TOOLS_DIR, common.job_id, common.task_id, args)?; - let crashes = if input_queue.is_none() { - get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)? 
- } else { - None - }; - let reports = get_synced_dir(REPORTS_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)?; - let no_repro = get_synced_dir(NO_REPRO_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)?; - let unique_reports = get_synced_dir(UNIQUE_REPORTS_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)?; - - let config = Config { - analyzer_exe, - analyzer_options, - analyzer_env, - target_exe, - target_options, - input_queue, - crashes, - analysis, - tools: Some(tools), - reports, - unique_reports, - no_repro, - common, - }; - - Ok(config) -} - -pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { - let context = build_local_context(args, true, event_sender.clone()).await?; - let config = build_analysis_config(args, None, context.common_config.clone(), event_sender)?; - run_analysis(config).await -} - -pub fn build_shared_args(required_task: bool) -> Vec { - vec![ - Arg::new(TARGET_EXE).long(TARGET_EXE).required(true), - Arg::new(TARGET_ENV) - .long(TARGET_ENV) - .requires(TARGET_EXE) - .num_args(0..), - Arg::new(TARGET_OPTIONS) - .long(TARGET_OPTIONS) - .default_value("{input}") - .value_delimiter(' ') - .help("Use a quoted string with space separation to denote multiple arguments"), - Arg::new(CRASHES_DIR) - .long(CRASHES_DIR) - .value_parser(value_parser!(PathBuf)), - Arg::new(ANALYZER_OPTIONS) - .long(ANALYZER_OPTIONS) - .requires(ANALYZER_EXE) - .value_delimiter(' ') - .help("Use a quoted string with space separation to denote multiple arguments"), - Arg::new(ANALYZER_ENV) - .long(ANALYZER_ENV) - .requires(ANALYZER_EXE) - .num_args(0..), - Arg::new(TOOLS_DIR) - .long(TOOLS_DIR) - .value_parser(value_parser!(PathBuf)), - Arg::new(ANALYZER_EXE) - .long(ANALYZER_EXE) - .requires(ANALYSIS_DIR) - .requires(CRASHES_DIR) - .required(required_task), - Arg::new(ANALYSIS_DIR) - .long(ANALYSIS_DIR) - .requires(ANALYZER_EXE) - 
.requires(CRASHES_DIR) - .required(required_task), - ] -} - -pub fn args(name: &'static str) -> Command { - Command::new(name) - .about("execute a local-only generic analysis") - .args(&build_shared_args(true)) -} - #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct Analysis { analyzer_exe: String, @@ -146,7 +20,7 @@ pub struct Analysis { input_queue: Option, crashes: Option, analysis: PathBuf, - tools: PathBuf, + tools: Option, reports: Option, unique_reports: Option, no_repro: Option, @@ -191,9 +65,10 @@ impl Template for Analysis { .and_then(|path| context.to_monitored_sync_dir("crashes", path).ok()), analysis: context.to_monitored_sync_dir("analysis", self.analysis.clone())?, - tools: context - .to_monitored_sync_dir("tools", self.tools.clone()) - .ok(), + tools: self + .tools + .as_ref() + .and_then(|path| context.to_monitored_sync_dir("tools", path).ok()), reports: self .reports diff --git a/src/agent/onefuzz-task/src/local/generic_crash_report.rs b/src/agent/onefuzz-task/src/local/generic_crash_report.rs index dc2773b341..91dec1ae44 100644 --- a/src/agent/onefuzz-task/src/local/generic_crash_report.rs +++ b/src/agent/onefuzz-task/src/local/generic_crash_report.rs @@ -3,150 +3,14 @@ use std::{collections::HashMap, path::PathBuf}; -use crate::{ - local::common::{ - build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir, CmdType, - SyncCountDirMonitor, UiEvent, CHECK_ASAN_LOG, CHECK_RETRY_COUNT, CRASHES_DIR, - DISABLE_CHECK_DEBUGGER, DISABLE_CHECK_QUEUE, NO_REPRO_DIR, REPORTS_DIR, TARGET_ENV, - TARGET_EXE, TARGET_OPTIONS, TARGET_TIMEOUT, UNIQUE_REPORTS_DIR, - }, - tasks::{ - config::CommonConfig, - report::generic::{Config, ReportTask}, - utils::default_bool_true, - }, -}; +use crate::tasks::{config::CommonConfig, utils::default_bool_true}; use anyhow::Result; use async_trait::async_trait; -use clap::{Arg, ArgAction, Command}; -use flume::Sender; use futures::future::OptionFuture; use schemars::JsonSchema; -use 
storage_queue::QueueClient; use super::template::{RunContext, Template}; -pub fn build_report_config( - args: &clap::ArgMatches, - input_queue: Option, - common: CommonConfig, - event_sender: Option>, -) -> Result { - let target_exe = get_cmd_exe(CmdType::Target, args)?.into(); - let target_env = get_cmd_env(CmdType::Target, args)?; - let target_options = get_cmd_arg(CmdType::Target, args); - - let crashes = Some(get_synced_dir( - CRASHES_DIR, - common.job_id, - common.task_id, - args, - )?) - .monitor_count(&event_sender)?; - let reports = get_synced_dir(REPORTS_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)?; - let no_repro = get_synced_dir(NO_REPRO_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)?; - - let unique_reports = Some(get_synced_dir( - UNIQUE_REPORTS_DIR, - common.job_id, - common.task_id, - args, - )?) - .monitor_count(&event_sender)?; - - let target_timeout = args.get_one::(TARGET_TIMEOUT).copied(); - - let check_retry_count = args - .get_one::(CHECK_RETRY_COUNT) - .copied() - .expect("has a default"); - - let check_queue = !args.get_flag(DISABLE_CHECK_QUEUE); - let check_asan_log = args.get_flag(CHECK_ASAN_LOG); - let check_debugger = !args.get_flag(DISABLE_CHECK_DEBUGGER); - - let config = Config { - target_exe, - target_env, - target_options, - target_timeout, - check_asan_log, - check_debugger, - check_retry_count, - check_queue, - crashes, - minimized_stack_depth: None, - input_queue, - no_repro, - reports, - unique_reports, - common, - }; - - Ok(config) -} - -pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { - let context = build_local_context(args, true, event_sender.clone()).await?; - let config = build_report_config(args, None, context.common_config.clone(), event_sender)?; - ReportTask::new(config).managed_run().await -} - -pub fn build_shared_args() -> Vec { - vec![ - Arg::new(TARGET_EXE).long(TARGET_EXE).required(true), - 
Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..), - Arg::new(TARGET_OPTIONS) - .default_value("{input}") - .long(TARGET_OPTIONS) - .value_delimiter(' ') - .help("Use a quoted string with space separation to denote multiple arguments"), - Arg::new(CRASHES_DIR) - .long(CRASHES_DIR) - .required(true) - .value_parser(value_parser!(PathBuf)), - Arg::new(REPORTS_DIR) - .long(REPORTS_DIR) - .required(false) - .value_parser(value_parser!(PathBuf)), - Arg::new(NO_REPRO_DIR) - .long(NO_REPRO_DIR) - .required(false) - .value_parser(value_parser!(PathBuf)), - Arg::new(UNIQUE_REPORTS_DIR) - .long(UNIQUE_REPORTS_DIR) - .value_parser(value_parser!(PathBuf)) - .required(true), - Arg::new(TARGET_TIMEOUT) - .long(TARGET_TIMEOUT) - .value_parser(value_parser!(u64)) - .default_value("30"), - Arg::new(CHECK_RETRY_COUNT) - .long(CHECK_RETRY_COUNT) - .value_parser(value_parser!(u64)) - .default_value("0"), - Arg::new(DISABLE_CHECK_QUEUE) - .action(ArgAction::SetTrue) - .long(DISABLE_CHECK_QUEUE), - Arg::new(CHECK_ASAN_LOG) - .action(ArgAction::SetTrue) - .long(CHECK_ASAN_LOG), - Arg::new(DISABLE_CHECK_DEBUGGER) - .action(ArgAction::SetTrue) - .long(DISABLE_CHECK_DEBUGGER), - ] -} - -pub fn args(name: &'static str) -> Command { - Command::new(name) - .about("execute a local-only generic crash report") - .args(&build_shared_args()) -} - #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct CrashReport { target_exe: PathBuf, diff --git a/src/agent/onefuzz-task/src/local/generic_generator.rs b/src/agent/onefuzz-task/src/local/generic_generator.rs index 68490cd29d..3c26af4cf8 100644 --- a/src/agent/onefuzz-task/src/local/generic_generator.rs +++ b/src/agent/onefuzz-task/src/local/generic_generator.rs @@ -3,154 +3,14 @@ use std::{collections::HashMap, path::PathBuf}; -use crate::{ - local::common::{ - build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir, - get_synced_dirs, CmdType, SyncCountDirMonitor, UiEvent, CHECK_ASAN_LOG, CHECK_RETRY_COUNT, - 
CRASHES_DIR, DISABLE_CHECK_DEBUGGER, GENERATOR_ENV, GENERATOR_EXE, GENERATOR_OPTIONS, - READONLY_INPUTS, RENAME_OUTPUT, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, TARGET_TIMEOUT, - TOOLS_DIR, - }, - tasks::{ - config::CommonConfig, - fuzz::generator::{Config, GeneratorTask}, - utils::default_bool_true, - }, -}; +use crate::tasks::{config::CommonConfig, utils::default_bool_true}; use anyhow::Result; use async_trait::async_trait; -use clap::{Arg, ArgAction, Command}; -use flume::Sender; use onefuzz::syncdir::SyncedDir; use schemars::JsonSchema; use super::template::{RunContext, Template}; -pub fn build_fuzz_config( - args: &clap::ArgMatches, - common: CommonConfig, - event_sender: Option>, -) -> Result { - let crashes = get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args)? - .monitor_count(&event_sender)?; - let target_exe = get_cmd_exe(CmdType::Target, args)?.into(); - let target_options = get_cmd_arg(CmdType::Target, args); - let target_env = get_cmd_env(CmdType::Target, args)?; - - let generator_exe = get_cmd_exe(CmdType::Generator, args)?; - let generator_options = get_cmd_arg(CmdType::Generator, args); - let generator_env = get_cmd_env(CmdType::Generator, args)?; - let readonly_inputs = get_synced_dirs(READONLY_INPUTS, common.job_id, common.task_id, args)? 
- .into_iter() - .map(|sd| sd.monitor_count(&event_sender)) - .collect::>>()?; - - let rename_output = args.get_flag(RENAME_OUTPUT); - let check_asan_log = args.get_flag(CHECK_ASAN_LOG); - let check_debugger = !args.get_flag(DISABLE_CHECK_DEBUGGER); - - let check_retry_count = args - .get_one::(CHECK_RETRY_COUNT) - .copied() - .expect("has a default"); - - let target_timeout = Some( - args.get_one::(TARGET_TIMEOUT) - .copied() - .expect("has a default"), - ); - - let tools = get_synced_dir(TOOLS_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)?; - - let ensemble_sync_delay = None; - - let config = Config { - generator_exe, - generator_env, - generator_options, - readonly_inputs, - crashes, - tools, - target_exe, - target_env, - target_options, - target_timeout, - check_asan_log, - check_debugger, - check_retry_count, - rename_output, - ensemble_sync_delay, - common, - }; - - Ok(config) -} - -pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { - let context = build_local_context(args, true, event_sender.clone()).await?; - let config = build_fuzz_config(args, context.common_config.clone(), event_sender)?; - GeneratorTask::new(config).run().await -} - -pub fn build_shared_args() -> Vec { - vec![ - Arg::new(TARGET_EXE).long(TARGET_EXE).required(true), - Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..), - Arg::new(TARGET_OPTIONS) - .default_value("{input}") - .long(TARGET_OPTIONS) - .value_delimiter(' ') - .help("Use a quoted string with space separation to denote multiple arguments"), - Arg::new(GENERATOR_EXE) - .long(GENERATOR_EXE) - .default_value("radamsa") - .required(true), - Arg::new(GENERATOR_ENV).long(GENERATOR_ENV).num_args(0..), - Arg::new(GENERATOR_OPTIONS) - .long(GENERATOR_OPTIONS) - .value_delimiter(' ') - .default_value("-H sha256 -o {generated_inputs}/input-%h.%s -n 100 -r {input_corpus}") - .help("Use a quoted string with space separation to denote multiple arguments"), - 
Arg::new(CRASHES_DIR) - .required(true) - .long(CRASHES_DIR) - .value_parser(value_parser!(PathBuf)), - Arg::new(READONLY_INPUTS) - .required(true) - .num_args(1..) - .value_parser(value_parser!(PathBuf)) - .long(READONLY_INPUTS), - Arg::new(TOOLS_DIR) - .long(TOOLS_DIR) - .value_parser(value_parser!(PathBuf)), - Arg::new(CHECK_RETRY_COUNT) - .long(CHECK_RETRY_COUNT) - .value_parser(value_parser!(u64)) - .default_value("0"), - Arg::new(CHECK_ASAN_LOG) - .action(ArgAction::SetTrue) - .long(CHECK_ASAN_LOG), - Arg::new(RENAME_OUTPUT) - .action(ArgAction::SetTrue) - .long(RENAME_OUTPUT), - Arg::new(TARGET_TIMEOUT) - .long(TARGET_TIMEOUT) - .value_parser(value_parser!(u64)) - .default_value("30"), - Arg::new(DISABLE_CHECK_DEBUGGER) - .action(ArgAction::SetTrue) - .long(DISABLE_CHECK_DEBUGGER), - ] -} - -pub fn args(name: &'static str) -> Command { - Command::new(name) - .about("execute a local-only generator fuzzing task") - .args(&build_shared_args()) -} - #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct Generator { generator_exe: String, diff --git a/src/agent/onefuzz-task/src/local/libfuzzer.rs b/src/agent/onefuzz-task/src/local/libfuzzer.rs index 12abae88b0..472a6ae9e8 100644 --- a/src/agent/onefuzz-task/src/local/libfuzzer.rs +++ b/src/agent/onefuzz-task/src/local/libfuzzer.rs @@ -1,168 +1,19 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. 
-#[cfg(any(target_os = "linux", target_os = "windows"))] -use crate::{ - local::{common::COVERAGE_DIR, coverage, coverage::build_shared_args as build_coverage_args}, - tasks::coverage::generic::CoverageTask, -}; -use crate::{ - local::{ - common::{ - build_local_context, wait_for_dir, DirectoryMonitorQueue, UiEvent, ANALYZER_EXE, - REGRESSION_REPORTS_DIR, UNIQUE_REPORTS_DIR, - }, - generic_analysis::{build_analysis_config, build_shared_args as build_analysis_args}, - libfuzzer_crash_report::{build_report_config, build_shared_args as build_crash_args}, - libfuzzer_fuzz::{build_fuzz_config, build_shared_args as build_fuzz_args}, - libfuzzer_regression::{ - build_regression_config, build_shared_args as build_regression_args, - }, - }, - tasks::{ - analysis::generic::run as run_analysis, - config::CommonConfig, - fuzz::libfuzzer::{common::default_workers, generic::LibFuzzerFuzzTask}, - regression::libfuzzer::LibFuzzerRegressionTask, - report::libfuzzer_report::ReportTask, - utils::default_bool_true, - }, +use crate::tasks::{ + config::CommonConfig, + fuzz::libfuzzer::{common::default_workers, generic::LibFuzzerFuzzTask}, + utils::default_bool_true, }; use anyhow::Result; use async_trait::async_trait; -use clap::Command; -use flume::Sender; -use onefuzz::{syncdir::SyncedDir, utils::try_wait_all_join_handles}; +use onefuzz::syncdir::SyncedDir; use schemars::JsonSchema; -use std::{ - collections::{HashMap, HashSet}, - path::PathBuf, -}; -use tokio::task::spawn; -use uuid::Uuid; +use std::{collections::HashMap, path::PathBuf}; use super::template::{RunContext, Template}; -pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { - let context = build_local_context(args, true, event_sender.clone()).await?; - let fuzz_config = build_fuzz_config(args, context.common_config.clone(), event_sender.clone())?; - let crash_dir = fuzz_config - .crashes - .remote_url()? 
- .as_file_path() - .expect("invalid crash dir remote location"); - - let fuzzer = LibFuzzerFuzzTask::new(fuzz_config)?; - let mut task_handles = vec![]; - - let fuzz_task = spawn(async move { fuzzer.run().await }); - - wait_for_dir(&crash_dir).await?; - - task_handles.push(fuzz_task); - - if args.contains_id(UNIQUE_REPORTS_DIR) { - let crash_report_input_monitor = - DirectoryMonitorQueue::start_monitoring(crash_dir.clone()).await?; - - let report_config = build_report_config( - args, - Some(crash_report_input_monitor.queue_client), - CommonConfig { - task_id: Uuid::new_v4(), - ..context.common_config.clone() - }, - event_sender.clone(), - )?; - - let mut report = ReportTask::new(report_config); - let report_task = spawn(async move { report.managed_run().await }); - - task_handles.push(report_task); - task_handles.push(crash_report_input_monitor.handle); - } - - #[cfg(any(target_os = "linux", target_os = "windows"))] - if args.contains_id(COVERAGE_DIR) { - let coverage_input_monitor = - DirectoryMonitorQueue::start_monitoring(crash_dir.clone()).await?; - let coverage_config = coverage::build_coverage_config( - args, - true, - Some(coverage_input_monitor.queue_client), - CommonConfig { - task_id: Uuid::new_v4(), - ..context.common_config.clone() - }, - event_sender.clone(), - )?; - - let mut coverage = CoverageTask::new(coverage_config); - let coverage_task = spawn(async move { coverage.run().await }); - - task_handles.push(coverage_task); - task_handles.push(coverage_input_monitor.handle); - } - - if args.contains_id(ANALYZER_EXE) { - let analysis_input_monitor = DirectoryMonitorQueue::start_monitoring(crash_dir).await?; - let analysis_config = build_analysis_config( - args, - Some(analysis_input_monitor.queue_client), - CommonConfig { - task_id: Uuid::new_v4(), - ..context.common_config.clone() - }, - event_sender.clone(), - )?; - let analysis_task = spawn(async move { run_analysis(analysis_config).await }); - - task_handles.push(analysis_task); - 
task_handles.push(analysis_input_monitor.handle); - } - - if args.contains_id(REGRESSION_REPORTS_DIR) { - let regression_config = build_regression_config( - args, - CommonConfig { - task_id: Uuid::new_v4(), - ..context.common_config.clone() - }, - event_sender, - )?; - let regression = LibFuzzerRegressionTask::new(regression_config); - let regression_task = spawn(async move { regression.run().await }); - task_handles.push(regression_task); - } - - try_wait_all_join_handles(task_handles).await?; - - Ok(()) -} - -pub fn args(name: &'static str) -> Command { - let mut app = Command::new(name).about("run a local libfuzzer & crash reporting task"); - - let mut used = HashSet::new(); - - for args in &[ - build_fuzz_args(), - build_crash_args(), - build_analysis_args(false), - #[cfg(any(target_os = "linux", target_os = "windows"))] - build_coverage_args(true), - build_regression_args(false), - ] { - for arg in args { - if used.insert(arg.get_id()) { - app = app.arg(arg); - } - } - } - - app -} - #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct LibFuzzer { inputs: PathBuf, diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs b/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs index be3326f749..9de1fc66ce 100644 --- a/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs +++ b/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs @@ -3,139 +3,13 @@ use std::{collections::HashMap, path::PathBuf}; -use crate::{ - local::common::{ - build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir, CmdType, - SyncCountDirMonitor, UiEvent, CHECK_FUZZER_HELP, CHECK_RETRY_COUNT, CRASHES_DIR, - DISABLE_CHECK_QUEUE, NO_REPRO_DIR, REPORTS_DIR, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, - TARGET_TIMEOUT, UNIQUE_REPORTS_DIR, - }, - tasks::{ - config::CommonConfig, - report::libfuzzer_report::{Config, ReportTask}, - utils::default_bool_true, - }, -}; +use crate::tasks::{config::CommonConfig, utils::default_bool_true}; 
use anyhow::Result; use async_trait::async_trait; -use clap::{Arg, ArgAction, Command}; -use flume::Sender; use futures::future::OptionFuture; use schemars::JsonSchema; -use storage_queue::QueueClient; use super::template::{RunContext, Template}; - -pub fn build_report_config( - args: &clap::ArgMatches, - input_queue: Option, - common: CommonConfig, - event_sender: Option>, -) -> Result { - let target_exe = get_cmd_exe(CmdType::Target, args)?.into(); - let target_env = get_cmd_env(CmdType::Target, args)?; - let target_options = get_cmd_arg(CmdType::Target, args); - - let crashes = get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)?; - let reports = get_synced_dir(REPORTS_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)?; - - let no_repro = get_synced_dir(NO_REPRO_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)?; - - let unique_reports = get_synced_dir(UNIQUE_REPORTS_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)?; - - let target_timeout = args.get_one::(TARGET_TIMEOUT).copied(); - - let check_retry_count = args - .get_one::(CHECK_RETRY_COUNT) - .copied() - .expect("has a default"); - - let check_queue = !args.get_flag(DISABLE_CHECK_QUEUE); - - let check_fuzzer_help = args.get_flag(CHECK_FUZZER_HELP); - - let crashes = if input_queue.is_none() { crashes } else { None }; - - let config = Config { - target_exe, - target_env, - target_options, - target_timeout, - check_retry_count, - check_fuzzer_help, - minimized_stack_depth: None, - input_queue, - check_queue, - crashes, - reports, - no_repro, - unique_reports, - common, - }; - - Ok(config) -} - -pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { - let context = build_local_context(args, true, event_sender.clone()).await?; - let config = build_report_config(args, None, context.common_config.clone(), event_sender)?; - 
ReportTask::new(config).managed_run().await -} - -pub fn build_shared_args() -> Vec { - vec![ - Arg::new(TARGET_EXE).long(TARGET_EXE).required(true), - Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..), - Arg::new(TARGET_OPTIONS) - .long(TARGET_OPTIONS) - .value_delimiter(' ') - .help("Use a quoted string with space separation to denote multiple arguments"), - Arg::new(CRASHES_DIR) - .long(CRASHES_DIR) - .required(true) - .value_parser(value_parser!(PathBuf)), - Arg::new(REPORTS_DIR) - .long(REPORTS_DIR) - .required(false) - .value_parser(value_parser!(PathBuf)), - Arg::new(NO_REPRO_DIR) - .long(NO_REPRO_DIR) - .required(false) - .value_parser(value_parser!(PathBuf)), - Arg::new(UNIQUE_REPORTS_DIR) - .long(UNIQUE_REPORTS_DIR) - .required(true) - .value_parser(value_parser!(PathBuf)), - Arg::new(TARGET_TIMEOUT) - .value_parser(value_parser!(u64)) - .long(TARGET_TIMEOUT), - Arg::new(CHECK_RETRY_COUNT) - .long(CHECK_RETRY_COUNT) - .value_parser(value_parser!(u64)) - .default_value("0"), - Arg::new(DISABLE_CHECK_QUEUE) - .action(ArgAction::SetTrue) - .long(DISABLE_CHECK_QUEUE), - Arg::new(CHECK_FUZZER_HELP) - .action(ArgAction::SetTrue) - .long(CHECK_FUZZER_HELP), - ] -} - -pub fn args(name: &'static str) -> Command { - Command::new(name) - .about("execute a local-only libfuzzer crash report task") - .args(&build_shared_args()) -} - #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct LibfuzzerCrashReport { target_exe: PathBuf, diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs b/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs index 1e128f0dfc..d4915e6b4c 100644 --- a/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs +++ b/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs @@ -3,97 +3,15 @@ use std::{collections::HashMap, path::PathBuf}; -use crate::{ - local::common::{ - build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir, - get_synced_dirs, CmdType, SyncCountDirMonitor, UiEvent, ANALYSIS_INPUTS, - 
ANALYSIS_UNIQUE_INPUTS, CHECK_FUZZER_HELP, INPUTS_DIR, PRESERVE_EXISTING_OUTPUTS, - TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, - }, - tasks::{ - config::CommonConfig, - merge::libfuzzer_merge::{spawn, Config}, - utils::default_bool_true, - }, -}; +use crate::tasks::{config::CommonConfig, utils::default_bool_true}; use anyhow::Result; use async_trait::async_trait; -use clap::{Arg, ArgAction, Command}; -use flume::Sender; use futures::future::OptionFuture; use onefuzz::syncdir::SyncedDir; use schemars::JsonSchema; -use storage_queue::QueueClient; use super::template::{RunContext, Template}; -pub fn build_merge_config( - args: &clap::ArgMatches, - input_queue: Option, - common: CommonConfig, - event_sender: Option>, -) -> Result { - let target_exe = get_cmd_exe(CmdType::Target, args)?.into(); - let target_env = get_cmd_env(CmdType::Target, args)?; - let target_options = get_cmd_arg(CmdType::Target, args); - let check_fuzzer_help = args.get_flag(CHECK_FUZZER_HELP); - let inputs = get_synced_dirs(ANALYSIS_INPUTS, common.job_id, common.task_id, args)? - .into_iter() - .map(|sd| sd.monitor_count(&event_sender)) - .collect::>>()?; - let unique_inputs = - get_synced_dir(ANALYSIS_UNIQUE_INPUTS, common.job_id, common.task_id, args)? 
- .monitor_count(&event_sender)?; - let preserve_existing_outputs = args - .get_one::(PRESERVE_EXISTING_OUTPUTS) - .copied() - .unwrap_or_default(); - - let config = Config { - target_exe, - target_env, - target_options, - input_queue, - inputs, - unique_inputs, - preserve_existing_outputs, - check_fuzzer_help, - common, - }; - - Ok(config) -} - -pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { - let context = build_local_context(args, true, event_sender.clone()).await?; - let config = build_merge_config(args, None, context.common_config.clone(), event_sender)?; - spawn(config).await -} - -pub fn build_shared_args() -> Vec { - vec![ - Arg::new(TARGET_EXE).long(TARGET_EXE).required(true), - Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..), - Arg::new(TARGET_OPTIONS) - .long(TARGET_OPTIONS) - .value_delimiter(' ') - .help("Use a quoted string with space separation to denote multiple arguments"), - Arg::new(CHECK_FUZZER_HELP) - .action(ArgAction::SetTrue) - .long(CHECK_FUZZER_HELP), - Arg::new(INPUTS_DIR) - .long(INPUTS_DIR) - .value_parser(value_parser!(PathBuf)) - .num_args(0..), - ] -} - -pub fn args(name: &'static str) -> Command { - Command::new(name) - .about("execute a local-only libfuzzer crash report task") - .args(&build_shared_args()) -} - #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct LibfuzzerMerge { target_exe: PathBuf, diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs b/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs index b8a5766e10..b53fb84c22 100644 --- a/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs +++ b/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs @@ -3,145 +3,13 @@ use std::{collections::HashMap, path::PathBuf}; -use crate::{ - local::common::{ - build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir, CmdType, - SyncCountDirMonitor, UiEvent, CHECK_FUZZER_HELP, CHECK_RETRY_COUNT, COVERAGE_DIR, - CRASHES_DIR, 
NO_REPRO_DIR, REGRESSION_REPORTS_DIR, REPORTS_DIR, TARGET_ENV, TARGET_EXE, - TARGET_OPTIONS, TARGET_TIMEOUT, UNIQUE_REPORTS_DIR, - }, - tasks::{ - config::CommonConfig, - regression::libfuzzer::{Config, LibFuzzerRegressionTask}, - utils::default_bool_true, - }, -}; +use crate::tasks::{config::CommonConfig, utils::default_bool_true}; use anyhow::Result; use async_trait::async_trait; -use clap::{Arg, ArgAction, Command}; -use flume::Sender; use schemars::JsonSchema; use super::template::{RunContext, Template}; -const REPORT_NAMES: &str = "report_names"; - -pub fn build_regression_config( - args: &clap::ArgMatches, - common: CommonConfig, - event_sender: Option>, -) -> Result { - let target_exe = get_cmd_exe(CmdType::Target, args)?.into(); - let target_env = get_cmd_env(CmdType::Target, args)?; - let target_options = get_cmd_arg(CmdType::Target, args); - let target_timeout = args.get_one::(TARGET_TIMEOUT).copied(); - let crashes = get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args)? - .monitor_count(&event_sender)?; - let regression_reports = - get_synced_dir(REGRESSION_REPORTS_DIR, common.job_id, common.task_id, args)? 
- .monitor_count(&event_sender)?; - let check_retry_count = args - .get_one::(CHECK_RETRY_COUNT) - .copied() - .expect("has a default value"); - - let reports = get_synced_dir(REPORTS_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)?; - let no_repro = get_synced_dir(NO_REPRO_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)?; - let unique_reports = get_synced_dir(UNIQUE_REPORTS_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)?; - - let report_list: Option> = args - .get_many::(REPORT_NAMES) - .map(|x| x.cloned().collect()); - - let check_fuzzer_help = args.get_flag(CHECK_FUZZER_HELP); - - let config = Config { - target_exe, - target_env, - target_options, - target_timeout, - check_fuzzer_help, - check_retry_count, - crashes, - regression_reports, - reports, - no_repro, - unique_reports, - readonly_inputs: None, - report_list, - minimized_stack_depth: None, - common, - }; - Ok(config) -} - -pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { - let context = build_local_context(args, true, event_sender.clone()).await?; - let config = build_regression_config(args, context.common_config.clone(), event_sender)?; - LibFuzzerRegressionTask::new(config).run().await -} - -pub fn build_shared_args(local_job: bool) -> Vec { - let mut args = vec![ - Arg::new(TARGET_EXE).long(TARGET_EXE).required(true), - Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..), - Arg::new(TARGET_OPTIONS) - .long(TARGET_OPTIONS) - .value_delimiter(' ') - .help("Use a quoted string with space separation to denote multiple arguments"), - Arg::new(COVERAGE_DIR) - .required(!local_job) - .long(COVERAGE_DIR) - .value_parser(value_parser!(PathBuf)), - Arg::new(CHECK_FUZZER_HELP) - .action(ArgAction::SetTrue) - .long(CHECK_FUZZER_HELP), - Arg::new(TARGET_TIMEOUT) - .long(TARGET_TIMEOUT) - .value_parser(value_parser!(u64)), - Arg::new(CRASHES_DIR) - .long(CRASHES_DIR) - 
.required(true) - .value_parser(value_parser!(PathBuf)), - Arg::new(REGRESSION_REPORTS_DIR) - .long(REGRESSION_REPORTS_DIR) - .required(local_job) - .value_parser(value_parser!(PathBuf)), - Arg::new(REPORTS_DIR) - .long(REPORTS_DIR) - .required(false) - .value_parser(value_parser!(PathBuf)), - Arg::new(NO_REPRO_DIR) - .long(NO_REPRO_DIR) - .required(false) - .value_parser(value_parser!(PathBuf)), - Arg::new(UNIQUE_REPORTS_DIR) - .long(UNIQUE_REPORTS_DIR) - .value_parser(value_parser!(PathBuf)) - .required(true), - Arg::new(CHECK_RETRY_COUNT) - .long(CHECK_RETRY_COUNT) - .value_parser(value_parser!(u64)) - .default_value("0"), - ]; - if local_job { - args.push(Arg::new(REPORT_NAMES).long(REPORT_NAMES).num_args(0..)) - } - args -} - -pub fn args(name: &'static str) -> Command { - Command::new(name) - .about("execute a local-only libfuzzer regression task") - .args(&build_shared_args(true)) -} - #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct LibfuzzerRegression { target_exe: PathBuf, diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs b/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs index 30f9c446c8..88c3cd1a3d 100644 --- a/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs +++ b/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs @@ -1,97 +1,14 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. 
-use crate::{ - local::common::{ - build_local_context, get_cmd_arg, get_cmd_env, CmdType, UiEvent, CHECK_RETRY_COUNT, - TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, TARGET_TIMEOUT, - }, - tasks::report::libfuzzer_report::{test_input, TestInputArgs}, -}; use anyhow::Result; use async_trait::async_trait; -use clap::{Arg, Command}; -use flume::Sender; use onefuzz::machine_id::MachineIdentity; use schemars::JsonSchema; use std::{collections::HashMap, path::PathBuf}; use super::template::{RunContext, Template}; -pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { - let context = build_local_context(args, true, event_sender).await?; - - let target_exe = args - .get_one::(TARGET_EXE) - .expect("marked as required"); - let target_env = get_cmd_env(CmdType::Target, args)?; - let target_options = get_cmd_arg(CmdType::Target, args); - let input = args - .get_one::("input") - .expect("marked as required"); - let target_timeout = args.get_one::(TARGET_TIMEOUT).copied(); - let check_retry_count = args - .get_one::(CHECK_RETRY_COUNT) - .copied() - .expect("has a default value"); - - let extra_setup_dir = context.common_config.extra_setup_dir.as_deref(); - let extra_output_dir = context - .common_config - .extra_output - .as_ref() - .map(|x| x.local_path.as_path()); - - let config = TestInputArgs { - target_exe: target_exe.as_path(), - target_env: &target_env, - target_options: &target_options, - input_url: None, - input: input.as_path(), - job_id: context.common_config.job_id, - task_id: context.common_config.task_id, - target_timeout, - check_retry_count, - setup_dir: &context.common_config.setup_dir, - extra_setup_dir, - extra_output_dir, - minimized_stack_depth: None, - machine_identity: context.common_config.machine_identity, - }; - - let result = test_input(config).await?; - println!("{}", serde_json::to_string_pretty(&result)?); - Ok(()) -} - -pub fn build_shared_args() -> Vec { - vec![ - Arg::new(TARGET_EXE).required(true), - Arg::new("input") - 
.required(true) - .value_parser(value_parser!(PathBuf)), - Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..), - Arg::new(TARGET_OPTIONS) - .default_value("{input}") - .long(TARGET_OPTIONS) - .value_delimiter(' ') - .help("Use a quoted string with space separation to denote multiple arguments"), - Arg::new(TARGET_TIMEOUT) - .long(TARGET_TIMEOUT) - .value_parser(value_parser!(u64)), - Arg::new(CHECK_RETRY_COUNT) - .long(CHECK_RETRY_COUNT) - .value_parser(value_parser!(u64)) - .default_value("0"), - ] -} - -pub fn args(name: &'static str) -> Command { - Command::new(name) - .about("test a libfuzzer application with a specific input") - .args(&build_shared_args()) -} - #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct LibfuzzerTestInput { input: PathBuf, diff --git a/src/agent/onefuzz-task/src/local/mod.rs b/src/agent/onefuzz-task/src/local/mod.rs index 9ea530f575..6020cb0fa6 100644 --- a/src/agent/onefuzz-task/src/local/mod.rs +++ b/src/agent/onefuzz-task/src/local/mod.rs @@ -15,7 +15,6 @@ pub mod libfuzzer_fuzz; pub mod libfuzzer_merge; pub mod libfuzzer_regression; pub mod libfuzzer_test_input; -pub mod radamsa; pub mod template; pub mod test_input; pub mod tui; diff --git a/src/agent/onefuzz-task/src/local/radamsa.rs b/src/agent/onefuzz-task/src/local/radamsa.rs deleted file mode 100644 index 4d84de027a..0000000000 --- a/src/agent/onefuzz-task/src/local/radamsa.rs +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. 
- -use crate::{ - local::{ - common::{build_local_context, DirectoryMonitorQueue, UiEvent}, - generic_crash_report::{build_report_config, build_shared_args as build_crash_args}, - generic_generator::{build_fuzz_config, build_shared_args as build_fuzz_args}, - }, - tasks::{config::CommonConfig, fuzz::generator::GeneratorTask, report::generic::ReportTask}, -}; -use anyhow::{Context, Result}; -use clap::Command; -use flume::Sender; -use onefuzz::utils::try_wait_all_join_handles; -use std::collections::HashSet; -use tokio::task::spawn; -use uuid::Uuid; - -pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { - let context = build_local_context(args, true, event_sender.clone()).await?; - let fuzz_config = build_fuzz_config(args, context.common_config.clone(), event_sender.clone())?; - let crash_dir = fuzz_config - .crashes - .remote_url()? - .as_file_path() - .ok_or_else(|| format_err!("invalid crash directory"))?; - - tokio::fs::create_dir_all(&crash_dir) - .await - .with_context(|| { - format!( - "unable to create crashes directory: {}", - crash_dir.display() - ) - })?; - - let fuzzer = GeneratorTask::new(fuzz_config); - let fuzz_task = spawn(async move { fuzzer.run().await }); - - let crash_report_input_monitor = DirectoryMonitorQueue::start_monitoring(crash_dir) - .await - .context("directory monitor failed")?; - let report_config = build_report_config( - args, - Some(crash_report_input_monitor.queue_client), - CommonConfig { - task_id: Uuid::new_v4(), - ..context.common_config.clone() - }, - event_sender, - )?; - let report_task = spawn(async move { ReportTask::new(report_config).managed_run().await }); - - try_wait_all_join_handles(vec![ - fuzz_task, - report_task, - crash_report_input_monitor.handle, - ]) - .await?; - - Ok(()) -} - -pub fn args(name: &'static str) -> Command { - let mut app = Command::new(name).about("run a local generator & crash reporting job"); - - let mut used = HashSet::new(); - for args in &[build_fuzz_args(), 
build_crash_args()] { - for arg in args { - if used.insert(arg.get_id()) { - app = app.arg(arg); - } - } - } - - app -} diff --git a/src/agent/onefuzz-task/src/local/schema.json b/src/agent/onefuzz-task/src/local/schema.json index 0a1f128e67..e5b00f6e17 100644 --- a/src/agent/onefuzz-task/src/local/schema.json +++ b/src/agent/onefuzz-task/src/local/schema.json @@ -126,7 +126,6 @@ "analyzer_options", "target_exe", "target_options", - "tools", "type" ], "properties": { @@ -182,7 +181,10 @@ } }, "tools": { - "type": "string" + "type": [ + "string", + "null" + ] }, "type": { "type": "string", @@ -893,4 +895,4 @@ ] } } -} +} \ No newline at end of file diff --git a/src/agent/onefuzz-task/src/local/template.rs b/src/agent/onefuzz-task/src/local/template.rs index adcca9bfa3..3393edd89a 100644 --- a/src/agent/onefuzz-task/src/local/template.rs +++ b/src/agent/onefuzz-task/src/local/template.rs @@ -199,6 +199,7 @@ pub async fn launch( job_id: Uuid::new_v4(), instance_id: Uuid::new_v4(), heartbeat_queue: None, + job_result_queue: None, instance_telemetry_key: None, microsoft_telemetry_key: None, logs: None, @@ -244,12 +245,10 @@ mod test { .expect("Couldn't find checked-in schema.json") .replace("\r\n", "\n"); - println!("{}", schema_str); - - assert_eq!( - schema_str.replace('\n', ""), - checked_in_schema.replace('\n', ""), - "The checked-in local fuzzing schema did not match the generated schema." - ); + if schema_str.replace('\n', "") != checked_in_schema.replace('\n', "") { + std::fs::write("src/local/new.schema.json", schema_str) + .expect("The schemas did not match but failed to write new schema to file."); + panic!("The checked-in local fuzzing schema did not match the generated schema. 
The generated schema can be found at src/local/new.schema.json"); + } } } diff --git a/src/agent/onefuzz-task/src/local/test_input.rs b/src/agent/onefuzz-task/src/local/test_input.rs index 6e59fb3ff5..0018494ec0 100644 --- a/src/agent/onefuzz-task/src/local/test_input.rs +++ b/src/agent/onefuzz-task/src/local/test_input.rs @@ -1,18 +1,8 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. -use crate::{ - local::common::{ - build_local_context, get_cmd_arg, get_cmd_env, CmdType, UiEvent, CHECK_ASAN_LOG, - CHECK_RETRY_COUNT, DISABLE_CHECK_DEBUGGER, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, - TARGET_TIMEOUT, - }, - tasks::report::generic::{test_input, TestInputArgs}, -}; use anyhow::Result; use async_trait::async_trait; -use clap::{Arg, ArgAction, Command}; -use flume::Sender; use onefuzz::machine_id::MachineIdentity; use schemars::JsonSchema; use std::{collections::HashMap, path::PathBuf}; @@ -20,82 +10,6 @@ use uuid::Uuid; use super::template::{RunContext, Template}; -pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { - let context = build_local_context(args, false, event_sender).await?; - - let target_exe = args - .get_one::(TARGET_EXE) - .expect("is marked required"); - let target_env = get_cmd_env(CmdType::Target, args)?; - let target_options = get_cmd_arg(CmdType::Target, args); - let input = args - .get_one::("input") - .expect("is marked required"); - let target_timeout = args.get_one::(TARGET_TIMEOUT).copied(); - let check_retry_count = args - .get_one::(CHECK_RETRY_COUNT) - .copied() - .expect("has default value"); - let check_asan_log = args.get_flag(CHECK_ASAN_LOG); - let check_debugger = !args.get_flag(DISABLE_CHECK_DEBUGGER); - - let config = TestInputArgs { - target_exe: target_exe.as_path(), - target_env: &target_env, - target_options: &target_options, - input_url: None, - input: input.as_path(), - job_id: context.common_config.job_id, - task_id: context.common_config.task_id, - target_timeout, - 
check_retry_count, - setup_dir: &context.common_config.setup_dir, - extra_setup_dir: context.common_config.extra_setup_dir.as_deref(), - minimized_stack_depth: None, - check_asan_log, - check_debugger, - machine_identity: context.common_config.machine_identity.clone(), - }; - - let result = test_input(config).await?; - println!("{}", serde_json::to_string_pretty(&result)?); - Ok(()) -} - -pub fn build_shared_args() -> Vec { - vec![ - Arg::new(TARGET_EXE).required(true), - Arg::new("input") - .required(true) - .value_parser(value_parser!(PathBuf)), - Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..), - Arg::new(TARGET_OPTIONS) - .default_value("{input}") - .long(TARGET_OPTIONS) - .value_delimiter(' ') - .help("Use a quoted string with space separation to denote multiple arguments"), - Arg::new(TARGET_TIMEOUT) - .long(TARGET_TIMEOUT) - .value_parser(value_parser!(u64)), - Arg::new(CHECK_RETRY_COUNT) - .long(CHECK_RETRY_COUNT) - .value_parser(value_parser!(u64)) - .default_value("0"), - Arg::new(CHECK_ASAN_LOG) - .action(ArgAction::SetTrue) - .long(CHECK_ASAN_LOG), - Arg::new(DISABLE_CHECK_DEBUGGER) - .action(ArgAction::SetTrue) - .long("disable_check_debugger"), - ] -} - -pub fn args(name: &'static str) -> Command { - Command::new(name) - .about("test an application with a specific input") - .args(&build_shared_args()) -} - #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct TestInput { input: PathBuf, diff --git a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs index 3ba068a614..05c6c3d169 100644 --- a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs @@ -65,6 +65,8 @@ pub async fn run(config: Config) -> Result<()> { tools.init_pull().await?; } + let job_result_client = config.common.init_job_result().await?; + // the tempdir is always created, however, the reports_path and // reports_monitor_future are only created if we 
have one of the three // report SyncedDir. The idea is that the option for where to write reports @@ -88,6 +90,7 @@ pub async fn run(config: Config) -> Result<()> { &config.unique_reports, &config.reports, &config.no_repro, + &job_result_client, ); ( Some(reports_dir.path().to_path_buf()), @@ -171,7 +174,7 @@ async fn poll_inputs( } message.delete().await?; } else { - warn!("no new candidate inputs found, sleeping"); + debug!("no new candidate inputs found, sleeping"); delay_with_jitter(EMPTY_QUEUE_DELAY).await; } } diff --git a/src/agent/onefuzz-task/src/tasks/config.rs b/src/agent/onefuzz-task/src/tasks/config.rs index 0848379d73..e29e0fd60d 100644 --- a/src/agent/onefuzz-task/src/tasks/config.rs +++ b/src/agent/onefuzz-task/src/tasks/config.rs @@ -14,6 +14,7 @@ use onefuzz::{ machine_id::MachineIdentity, syncdir::{SyncOperation, SyncedDir}, }; +use onefuzz_result::job_result::{init_job_result, TaskJobResultClient}; use onefuzz_telemetry::{ self as telemetry, Event::task_start, EventData, InstanceTelemetryKey, MicrosoftTelemetryKey, Role, @@ -50,6 +51,8 @@ pub struct CommonConfig { pub heartbeat_queue: Option, + pub job_result_queue: Option, + pub instance_telemetry_key: Option, pub microsoft_telemetry_key: Option, @@ -103,6 +106,23 @@ impl CommonConfig { None => Ok(None), } } + + pub async fn init_job_result(&self) -> Result> { + match &self.job_result_queue { + Some(url) => { + let result = init_job_result( + url.clone(), + self.task_id, + self.job_id, + self.machine_identity.machine_id, + self.machine_identity.machine_name.clone(), + ) + .await?; + Ok(Some(result)) + } + None => Ok(None), + } + } } #[derive(Debug, Deserialize)] diff --git a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs index 0b19f03122..704188293b 100644 --- a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs @@ -26,6 +26,8 @@ use onefuzz_file_format::coverage::{ 
binary::{v1::BinaryCoverageJson as BinaryCoverageJsonV1, BinaryCoverageJson}, source::{v1::SourceCoverageJson as SourceCoverageJsonV1, SourceCoverageJson}, }; +use onefuzz_result::job_result::JobResultData; +use onefuzz_result::job_result::{JobResultSender, TaskJobResultClient}; use onefuzz_telemetry::{event, warn, Event::coverage_data, Event::coverage_failed, EventData}; use storage_queue::{Message, QueueClient}; use tokio::fs; @@ -114,7 +116,7 @@ impl CoverageTask { let allowlist = self.load_target_allowlist().await?; let heartbeat = self.config.common.init_heartbeat(None).await?; - + let job_result = self.config.common.init_job_result().await?; let mut seen_inputs = false; let target_exe_path = @@ -129,6 +131,7 @@ impl CoverageTask { coverage, allowlist, heartbeat, + job_result, target_exe.to_string(), )?; @@ -223,6 +226,7 @@ struct TaskContext<'a> { module_allowlist: AllowList, source_allowlist: Arc, heartbeat: Option, + job_result: Option, cache: Arc, } @@ -232,6 +236,7 @@ impl<'a> TaskContext<'a> { coverage: BinaryCoverage, allowlist: TargetAllowList, heartbeat: Option, + job_result: Option, target_exe: String, ) -> Result { let cache = DebugInfoCache::new(allowlist.source_files.clone()); @@ -251,6 +256,7 @@ impl<'a> TaskContext<'a> { module_allowlist: allowlist.modules, source_allowlist: Arc::new(allowlist.source_files), heartbeat, + job_result, cache: Arc::new(cache), }) } diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs index d9116a1ed2..bd7511cac2 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs @@ -73,6 +73,7 @@ impl GeneratorTask { } let hb_client = self.config.common.init_heartbeat(None).await?; + let jr_client = self.config.common.init_job_result().await?; for dir in &self.config.readonly_inputs { dir.init_pull().await?; @@ -84,7 +85,10 @@ impl GeneratorTask { self.config.ensemble_sync_delay, ); - let 
crash_dir_monitor = self.config.crashes.monitor_results(new_result, false); + let crash_dir_monitor = self + .config + .crashes + .monitor_results(new_result, false, &jr_client); let fuzzer = self.fuzzing_loop(hb_client); @@ -298,6 +302,7 @@ mod tests { task_id: Default::default(), instance_id: Default::default(), heartbeat_queue: Default::default(), + job_result_queue: Default::default(), instance_telemetry_key: Default::default(), microsoft_telemetry_key: Default::default(), logs: Default::default(), diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs b/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs index 3336ed4d7a..32f3372958 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs @@ -1,7 +1,11 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. -use crate::tasks::{config::CommonConfig, heartbeat::HeartbeatSender, utils::default_bool_true}; +use crate::tasks::{ + config::CommonConfig, + heartbeat::{HeartbeatSender, TaskHeartbeatClient}, + utils::default_bool_true, +}; use anyhow::{Context, Result}; use arraydeque::{ArrayDeque, Wrapping}; use async_trait::async_trait; @@ -12,6 +16,7 @@ use onefuzz::{ process::ExitStatus, syncdir::{continuous_sync, SyncOperation::Pull, SyncedDir}, }; +use onefuzz_result::job_result::{JobResultData, JobResultSender, TaskJobResultClient}; use onefuzz_telemetry::{ Event::{new_coverage, new_crashdump, new_result, runtime_stats}, EventData, @@ -126,21 +131,31 @@ where self.verify().await?; let hb_client = self.config.common.init_heartbeat(None).await?; + let jr_client = self.config.common.init_job_result().await?; // To be scheduled. 
let resync = self.continuous_sync_inputs(); - let new_inputs = self.config.inputs.monitor_results(new_coverage, true); - let new_crashes = self.config.crashes.monitor_results(new_result, true); + + let new_inputs = self + .config + .inputs + .monitor_results(new_coverage, true, &jr_client); + let new_crashes = self + .config + .crashes + .monitor_results(new_result, true, &jr_client); let new_crashdumps = async { if let Some(crashdumps) = &self.config.crashdumps { - crashdumps.monitor_results(new_crashdump, true).await + crashdumps + .monitor_results(new_crashdump, true, &jr_client) + .await } else { Ok(()) } }; let (stats_sender, stats_receiver) = mpsc::unbounded_channel(); - let report_stats = report_runtime_stats(stats_receiver, hb_client); + let report_stats = report_runtime_stats(stats_receiver, &hb_client, &jr_client); let fuzzers = self.run_fuzzers(Some(&stats_sender)); futures::try_join!( resync, @@ -183,7 +198,7 @@ where .inputs .local_path .parent() - .ok_or_else(|| anyhow!("Invalid input path"))?; + .ok_or_else(|| anyhow!("invalid input path"))?; let temp_path = task_dir.join(".temp"); tokio::fs::create_dir_all(&temp_path).await?; let temp_dir = tempdir_in(temp_path)?; @@ -501,7 +516,7 @@ impl TotalStats { self.execs_sec = self.worker_stats.values().map(|x| x.execs_sec).sum(); } - fn report(&self) { + async fn report(&self, jr_client: &Option) { event!( runtime_stats; EventData::Count = self.count, @@ -513,6 +528,17 @@ impl TotalStats { EventData::Count = self.count, EventData::ExecsSecond = self.execs_sec ); + if let Some(jr_client) = jr_client { + let _ = jr_client + .send_direct( + JobResultData::RuntimeStats, + HashMap::from([ + ("total_count".to_string(), self.count as f64), + ("execs_sec".to_string(), self.execs_sec), + ]), + ) + .await; + } } } @@ -542,7 +568,8 @@ impl Timer { // are approximating nearest-neighbor interpolation on the runtime stats time series. 
async fn report_runtime_stats( mut stats_channel: mpsc::UnboundedReceiver, - heartbeat_client: impl HeartbeatSender, + heartbeat_client: &Option, + jr_client: &Option, ) -> Result<()> { // Cache the last-reported stats for a given worker. // @@ -551,7 +578,7 @@ async fn report_runtime_stats( let mut total = TotalStats::default(); // report all zeros to start - total.report(); + total.report(jr_client).await; let timer = Timer::new(RUNTIME_STATS_PERIOD); @@ -560,10 +587,10 @@ async fn report_runtime_stats( Some(stats) = stats_channel.recv() => { heartbeat_client.alive(); total.update(stats); - total.report() + total.report(jr_client).await } _ = timer.wait() => { - total.report() + total.report(jr_client).await } } } diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs index de1e1106ba..3f00e20b8d 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs @@ -79,7 +79,10 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> { remote_path: config.crashes.remote_path.clone(), }; crashes.init().await?; - let monitor_crashes = crashes.monitor_results(new_result, false); + + let jr_client = config.common.init_job_result().await?; + + let monitor_crashes = crashes.monitor_results(new_result, false, &jr_client); // setup crashdumps let (crashdump_dir, monitor_crashdumps) = { @@ -95,9 +98,12 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> { }; let monitor_dir = crashdump_dir.clone(); + let monitor_jr_client = config.common.init_job_result().await?; let monitor_crashdumps = async move { if let Some(crashdumps) = monitor_dir { - crashdumps.monitor_results(new_crashdump, false).await + crashdumps + .monitor_results(new_crashdump, false, &monitor_jr_client) + .await } else { Ok(()) } @@ -129,11 +135,13 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> { if let Some(no_repro) = &config.no_repro { 
no_repro.init().await?; } + let monitor_reports_future = monitor_reports( reports_dir.path(), &config.unique_reports, &config.reports, &config.no_repro, + &jr_client, ); let inputs = SyncedDir { @@ -156,7 +164,7 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> { delay_with_jitter(delay).await; } } - let monitor_inputs = inputs.monitor_results(new_coverage, false); + let monitor_inputs = inputs.monitor_results(new_coverage, false, &jr_client); let inputs_sync_cancellation = CancellationToken::new(); // never actually cancelled let inputs_sync_task = inputs.continuous_sync(Pull, config.ensemble_sync_delay, &inputs_sync_cancellation); @@ -444,6 +452,7 @@ mod tests { task_id: Default::default(), instance_id: Default::default(), heartbeat_queue: Default::default(), + job_result_queue: Default::default(), instance_telemetry_key: Default::default(), microsoft_telemetry_key: Default::default(), logs: Default::default(), diff --git a/src/agent/onefuzz-task/src/tasks/heartbeat.rs b/src/agent/onefuzz-task/src/tasks/heartbeat.rs index 515fa39d0c..e13b661909 100644 --- a/src/agent/onefuzz-task/src/tasks/heartbeat.rs +++ b/src/agent/onefuzz-task/src/tasks/heartbeat.rs @@ -1,8 +1,8 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. 
-use crate::onefuzz::heartbeat::HeartbeatClient; use anyhow::Result; +use onefuzz::heartbeat::HeartbeatClient; use reqwest::Url; use serde::{self, Deserialize, Serialize}; use std::time::Duration; diff --git a/src/agent/onefuzz-task/src/tasks/merge/generic.rs b/src/agent/onefuzz-task/src/tasks/merge/generic.rs index 4f2e8234a8..3b6a2094d8 100644 --- a/src/agent/onefuzz-task/src/tasks/merge/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/merge/generic.rs @@ -83,7 +83,7 @@ pub async fn spawn(config: &Config) -> Result<()> { } } } else { - warn!("no new candidate inputs found, sleeping"); + debug!("no new candidate inputs found, sleeping"); delay_with_jitter(EMPTY_QUEUE_DELAY).await; }; } diff --git a/src/agent/onefuzz-task/src/tasks/merge/libfuzzer_merge.rs b/src/agent/onefuzz-task/src/tasks/merge/libfuzzer_merge.rs index 1c334b3f18..2d53bc8c07 100644 --- a/src/agent/onefuzz-task/src/tasks/merge/libfuzzer_merge.rs +++ b/src/agent/onefuzz-task/src/tasks/merge/libfuzzer_merge.rs @@ -120,7 +120,7 @@ async fn process_message(config: &Config, input_queue: QueueClient) -> Result<() } Ok(()) } else { - warn!("no new candidate inputs found, sleeping"); + debug!("no new candidate inputs found, sleeping"); delay_with_jitter(EMPTY_QUEUE_DELAY).await; Ok(()) } diff --git a/src/agent/onefuzz-task/src/tasks/regression/common.rs b/src/agent/onefuzz-task/src/tasks/regression/common.rs index 60023cfa6e..b61a97df4c 100644 --- a/src/agent/onefuzz-task/src/tasks/regression/common.rs +++ b/src/agent/onefuzz-task/src/tasks/regression/common.rs @@ -2,12 +2,14 @@ // Licensed under the MIT License. 
use crate::tasks::{ + config::CommonConfig, heartbeat::{HeartbeatSender, TaskHeartbeatClient}, report::crash_report::{parse_report_file, CrashTestResult, RegressionReport}, }; use anyhow::{Context, Result}; use async_trait::async_trait; use onefuzz::syncdir::SyncedDir; +use onefuzz_result::job_result::TaskJobResultClient; use reqwest::Url; use std::path::PathBuf; @@ -24,7 +26,7 @@ pub trait RegressionHandler { /// Runs the regression task pub async fn run( - heartbeat_client: Option, + common_config: &CommonConfig, regression_reports: &SyncedDir, crashes: &SyncedDir, report_dirs: &[&SyncedDir], @@ -35,6 +37,9 @@ pub async fn run( info!("starting regression task"); regression_reports.init().await?; + let heartbeat_client = common_config.init_heartbeat(None).await?; + let job_result_client = common_config.init_job_result().await?; + handle_crash_reports( handler, crashes, @@ -42,6 +47,7 @@ pub async fn run( report_list, regression_reports, &heartbeat_client, + &job_result_client, ) .await .context("handling crash reports")?; @@ -52,6 +58,7 @@ pub async fn run( readonly_inputs, regression_reports, &heartbeat_client, + &job_result_client, ) .await .context("handling inputs")?; @@ -71,6 +78,7 @@ pub async fn handle_inputs( readonly_inputs: &SyncedDir, regression_reports: &SyncedDir, heartbeat_client: &Option, + job_result_client: &Option, ) -> Result<()> { readonly_inputs.init_pull().await?; let mut input_files = tokio::fs::read_dir(&readonly_inputs.local_path).await?; @@ -95,7 +103,7 @@ pub async fn handle_inputs( crash_test_result, original_crash_test_result: None, } - .save(None, regression_reports) + .save(None, regression_reports, job_result_client) .await? 
} @@ -109,6 +117,7 @@ pub async fn handle_crash_reports( report_list: &Option>, regression_reports: &SyncedDir, heartbeat_client: &Option, + job_result_client: &Option, ) -> Result<()> { // without crash report containers, skip this method if report_dirs.is_empty() { @@ -158,7 +167,7 @@ pub async fn handle_crash_reports( crash_test_result, original_crash_test_result: Some(original_crash_test_result), } - .save(Some(file_name), regression_reports) + .save(Some(file_name), regression_reports, job_result_client) .await? } } diff --git a/src/agent/onefuzz-task/src/tasks/regression/generic.rs b/src/agent/onefuzz-task/src/tasks/regression/generic.rs index 640e80db9a..8570208d59 100644 --- a/src/agent/onefuzz-task/src/tasks/regression/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/regression/generic.rs @@ -89,7 +89,6 @@ impl GenericRegressionTask { pub async fn run(&self) -> Result<()> { info!("Starting generic regression task"); - let heartbeat_client = self.config.common.init_heartbeat(None).await?; let mut report_dirs = vec![]; for dir in vec![ @@ -103,7 +102,7 @@ impl GenericRegressionTask { report_dirs.push(dir); } common::run( - heartbeat_client, + &self.config.common, &self.config.regression_reports, &self.config.crashes, &report_dirs, diff --git a/src/agent/onefuzz-task/src/tasks/regression/libfuzzer.rs b/src/agent/onefuzz-task/src/tasks/regression/libfuzzer.rs index 06dd7c00d9..e65f46bb64 100644 --- a/src/agent/onefuzz-task/src/tasks/regression/libfuzzer.rs +++ b/src/agent/onefuzz-task/src/tasks/regression/libfuzzer.rs @@ -103,9 +103,8 @@ impl LibFuzzerRegressionTask { report_dirs.push(dir); } - let heartbeat_client = self.config.common.init_heartbeat(None).await?; common::run( - heartbeat_client, + &self.config.common, &self.config.regression_reports, &self.config.crashes, &report_dirs, diff --git a/src/agent/onefuzz-task/src/tasks/report/crash_report.rs b/src/agent/onefuzz-task/src/tasks/report/crash_report.rs index 6ea6c845e7..9ae618ce93 100644 --- 
a/src/agent/onefuzz-task/src/tasks/report/crash_report.rs +++ b/src/agent/onefuzz-task/src/tasks/report/crash_report.rs @@ -3,6 +3,7 @@ use anyhow::{Context, Result}; use onefuzz::{blob::BlobUrl, monitor::DirectoryMonitor, syncdir::SyncedDir}; +use onefuzz_result::job_result::{JobResultData, JobResultSender, TaskJobResultClient}; use onefuzz_telemetry::{ Event::{ new_report, new_unable_to_reproduce, new_unique_report, regression_report, @@ -12,6 +13,7 @@ use onefuzz_telemetry::{ }; use serde::{Deserialize, Serialize}; use stacktrace_parser::CrashLog; +use std::collections::HashMap; use std::path::{Path, PathBuf}; use uuid::Uuid; @@ -111,6 +113,7 @@ impl RegressionReport { self, report_name: Option, regression_reports: &SyncedDir, + jr_client: &Option, ) -> Result<()> { let (event, name) = match &self.crash_test_result { CrashTestResult::CrashReport(report) => { @@ -126,6 +129,15 @@ impl RegressionReport { if upload_or_save_local(&self, &name, regression_reports).await? { event!(event; EventData::Path = name.clone()); metric!(event; 1.0; EventData::Path = name.clone()); + + if let Some(jr_client) = jr_client { + let _ = jr_client + .send_direct( + JobResultData::NewRegressionReport, + HashMap::from([("count".to_string(), 1.0)]), + ) + .await; + } } Ok(()) } @@ -149,6 +161,7 @@ impl CrashTestResult { unique_reports: &Option, reports: &Option, no_repro: &Option, + jr_client: &Option, ) -> Result<()> { match self { Self::CrashReport(report) => { @@ -166,6 +179,15 @@ impl CrashTestResult { if upload_or_save_local(&report, &name, unique_reports).await? 
{ event!(new_unique_report; EventData::Path = report.unique_blob_name()); metric!(new_unique_report; 1.0; EventData::Path = report.unique_blob_name()); + + if let Some(jr_client) = jr_client { + let _ = jr_client + .send_direct( + JobResultData::NewUniqueReport, + HashMap::from([("count".to_string(), 1.0)]), + ) + .await; + } } } @@ -174,6 +196,15 @@ impl CrashTestResult { if upload_or_save_local(&report, &name, reports).await? { event!(new_report; EventData::Path = report.blob_name()); metric!(new_report; 1.0; EventData::Path = report.blob_name()); + + if let Some(jr_client) = jr_client { + let _ = jr_client + .send_direct( + JobResultData::NewReport, + HashMap::from([("count".to_string(), 1.0)]), + ) + .await; + } } } } @@ -184,6 +215,15 @@ impl CrashTestResult { if upload_or_save_local(&report, &name, no_repro).await? { event!(new_unable_to_reproduce; EventData::Path = report.blob_name()); metric!(new_unable_to_reproduce; 1.0; EventData::Path = report.blob_name()); + + if let Some(jr_client) = jr_client { + let _ = jr_client + .send_direct( + JobResultData::NoReproCrashingInput, + HashMap::from([("count".to_string(), 1.0)]), + ) + .await; + } } } } @@ -332,6 +372,7 @@ pub async fn monitor_reports( unique_reports: &Option, reports: &Option, no_crash: &Option, + jr_client: &Option, ) -> Result<()> { if unique_reports.is_none() && reports.is_none() && no_crash.is_none() { debug!("no report directories configured"); @@ -342,7 +383,9 @@ pub async fn monitor_reports( while let Some(file) = monitor.next_file().await? 
{ let result = parse_report_file(file).await?; - result.save(unique_reports, reports, no_crash).await?; + result + .save(unique_reports, reports, no_crash, jr_client) + .await?; } Ok(()) diff --git a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs index 9b626a7d89..b8659845de 100644 --- a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs @@ -8,25 +8,25 @@ use std::{ sync::Arc, }; +use crate::tasks::report::crash_report::*; +use crate::tasks::report::dotnet::common::collect_exception_info; +use crate::tasks::{ + config::CommonConfig, + generic::input_poller::*, + heartbeat::{HeartbeatSender, TaskHeartbeatClient}, + utils::{default_bool_true, try_resolve_setup_relative_path}, +}; use anyhow::{Context, Result}; use async_trait::async_trait; use onefuzz::expand::Expand; use onefuzz::fs::set_executable; use onefuzz::{blob::BlobUrl, sha256, syncdir::SyncedDir}; +use onefuzz_result::job_result::TaskJobResultClient; use reqwest::Url; use serde::Deserialize; use storage_queue::{Message, QueueClient}; use tokio::fs; -use crate::tasks::report::crash_report::*; -use crate::tasks::report::dotnet::common::collect_exception_info; -use crate::tasks::{ - config::CommonConfig, - generic::input_poller::*, - heartbeat::{HeartbeatSender, TaskHeartbeatClient}, - utils::{default_bool_true, try_resolve_setup_relative_path}, -}; - const DOTNET_DUMP_TOOL_NAME: &str = "dotnet-dump"; #[derive(Debug, Deserialize)] @@ -114,15 +114,18 @@ impl DotnetCrashReportTask { pub struct AsanProcessor { config: Arc, heartbeat_client: Option, + job_result_client: Option, } impl AsanProcessor { pub async fn new(config: Arc) -> Result { let heartbeat_client = config.common.init_heartbeat(None).await?; + let job_result_client = config.common.init_job_result().await?; Ok(Self { config, heartbeat_client, + job_result_client, }) } @@ -260,6 +263,7 @@ impl Processor for 
AsanProcessor { &self.config.unique_reports, &self.config.reports, &self.config.no_repro, + &self.job_result_client, ) .await; diff --git a/src/agent/onefuzz-task/src/tasks/report/generic.rs b/src/agent/onefuzz-task/src/tasks/report/generic.rs index 9088f98acc..8ad259f0a5 100644 --- a/src/agent/onefuzz-task/src/tasks/report/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/report/generic.rs @@ -13,6 +13,7 @@ use async_trait::async_trait; use onefuzz::{ blob::BlobUrl, input_tester::Tester, machine_id::MachineIdentity, sha256, syncdir::SyncedDir, }; +use onefuzz_result::job_result::TaskJobResultClient; use reqwest::Url; use serde::Deserialize; use std::{ @@ -73,7 +74,9 @@ impl ReportTask { pub async fn managed_run(&mut self) -> Result<()> { info!("Starting generic crash report task"); let heartbeat_client = self.config.common.init_heartbeat(None).await?; - let mut processor = GenericReportProcessor::new(&self.config, heartbeat_client); + let job_result_client = self.config.common.init_job_result().await?; + let mut processor = + GenericReportProcessor::new(&self.config, heartbeat_client, job_result_client); #[allow(clippy::manual_flatten)] for entry in [ @@ -183,13 +186,19 @@ pub async fn test_input(args: TestInputArgs<'_>) -> Result { pub struct GenericReportProcessor<'a> { config: &'a Config, heartbeat_client: Option, + job_result_client: Option, } impl<'a> GenericReportProcessor<'a> { - pub fn new(config: &'a Config, heartbeat_client: Option) -> Self { + pub fn new( + config: &'a Config, + heartbeat_client: Option, + job_result_client: Option, + ) -> Self { Self { config, heartbeat_client, + job_result_client, } } @@ -239,6 +248,7 @@ impl<'a> Processor for GenericReportProcessor<'a> { &self.config.unique_reports, &self.config.reports, &self.config.no_repro, + &self.job_result_client, ) .await .context("saving report failed") diff --git a/src/agent/onefuzz-task/src/tasks/report/libfuzzer_report.rs b/src/agent/onefuzz-task/src/tasks/report/libfuzzer_report.rs index 
f18f638fa3..587ed2e3dc 100644 --- a/src/agent/onefuzz-task/src/tasks/report/libfuzzer_report.rs +++ b/src/agent/onefuzz-task/src/tasks/report/libfuzzer_report.rs @@ -13,6 +13,7 @@ use async_trait::async_trait; use onefuzz::{ blob::BlobUrl, libfuzzer::LibFuzzer, machine_id::MachineIdentity, sha256, syncdir::SyncedDir, }; +use onefuzz_result::job_result::TaskJobResultClient; use reqwest::Url; use serde::Deserialize; use std::{ @@ -196,15 +197,18 @@ pub async fn test_input(args: TestInputArgs<'_>) -> Result { pub struct AsanProcessor { config: Arc, heartbeat_client: Option, + job_result_client: Option, } impl AsanProcessor { pub async fn new(config: Arc) -> Result { let heartbeat_client = config.common.init_heartbeat(None).await?; + let job_result_client = config.common.init_job_result().await?; Ok(Self { config, heartbeat_client, + job_result_client, }) } @@ -257,6 +261,7 @@ impl Processor for AsanProcessor { &self.config.unique_reports, &self.config.reports, &self.config.no_repro, + &self.job_result_client, ) .await } diff --git a/src/agent/onefuzz/Cargo.toml b/src/agent/onefuzz/Cargo.toml index f834c7b6dc..55042607fa 100644 --- a/src/agent/onefuzz/Cargo.toml +++ b/src/agent/onefuzz/Cargo.toml @@ -44,6 +44,7 @@ tempfile = "3.8.0" process_control = "4.0" reqwest-retry = { path = "../reqwest-retry" } onefuzz-telemetry = { path = "../onefuzz-telemetry" } +onefuzz-result = { path = "../onefuzz-result" } stacktrace-parser = { path = "../stacktrace-parser" } backoff = { version = "0.4", features = ["tokio"] } diff --git a/src/agent/onefuzz/src/blob/url.rs b/src/agent/onefuzz/src/blob/url.rs index f55ffbb23a..134b59dea0 100644 --- a/src/agent/onefuzz/src/blob/url.rs +++ b/src/agent/onefuzz/src/blob/url.rs @@ -192,10 +192,15 @@ impl BlobContainerUrl { } pub fn as_path(&self, prefix: impl AsRef) -> Result { - let dir = self - .account() - .ok_or_else(|| anyhow!("Invalid container Url"))?; - Ok(prefix.as_ref().join(dir)) + match (self.account(), self.container()) { + 
(Some(account), Some(container)) => { + let mut path = PathBuf::new(); + path.push(account); + path.push(container); + Ok(prefix.as_ref().join(path)) + } + _ => bail!("Invalid container Url"), + } } } @@ -526,4 +531,14 @@ mod tests { "id:000000,sig:06,src:000000,op:havoc,rep:128" ); } + + #[test] + fn test_as_path() -> Result<()> { + let root = PathBuf::from(r"/onefuzz"); + let url = BlobContainerUrl::parse("https://myaccount.blob.core.windows.net/mycontainer")?; + let path = url.as_path(root)?; + assert_eq!(PathBuf::from(r"/onefuzz/myaccount/mycontainer"), path); + + Ok(()) + } } diff --git a/src/agent/onefuzz/src/syncdir.rs b/src/agent/onefuzz/src/syncdir.rs index 6ff4d4fc68..e170901bdc 100644 --- a/src/agent/onefuzz/src/syncdir.rs +++ b/src/agent/onefuzz/src/syncdir.rs @@ -11,10 +11,12 @@ use crate::{ }; use anyhow::{Context, Result}; use dunce::canonicalize; +use onefuzz_result::job_result::{JobResultData, JobResultSender, TaskJobResultClient}; use onefuzz_telemetry::{Event, EventData}; use reqwest::{StatusCode, Url}; use reqwest_retry::{RetryCheck, SendRetry, DEFAULT_RETRY_PERIOD, MAX_RETRY_ATTEMPTS}; use serde::{Deserialize, Serialize}; +use std::collections::HashMap; use std::{env::current_dir, path::PathBuf, str, time::Duration}; use tokio::{fs, select}; use tokio_util::sync::CancellationToken; @@ -241,6 +243,7 @@ impl SyncedDir { url: BlobContainerUrl, event: Event, ignore_dotfiles: bool, + jr_client: &Option, ) -> Result<()> { debug!("monitoring {}", path.display()); @@ -265,7 +268,6 @@ impl SyncedDir { if ignore_dotfiles && file_name_event_str.starts_with('.') { continue; } - event!(event.clone(); EventData::Path = file_name_event_str); metric!(event.clone(); 1.0; EventData::Path = file_name_str_metric_str); if let Some(jr_client) = jr_client { @@ -281,7 +283,11 @@ impl SyncedDir { Event::new_coverage => { jr_client .send_direct( +<<<<<<< HEAD JobResultData::NewCoverage, +======= + JobResultData::CoverageData, +>>>>>>> c8986aaa (Revert "Release 8.7.1 
(hotfix) (#3459)" (#3468)) HashMap::from([("count".to_string(), 1.0)]), ) .await; @@ -337,6 +343,9 @@ impl SyncedDir { event!(event.clone(); EventData::Path = file_name_event_str); metric!(event.clone(); 1.0; EventData::Path = file_name_str_metric_str); <<<<<<< HEAD +<<<<<<< HEAD +======= +>>>>>>> c8986aaa (Revert "Release 8.7.1 (hotfix) (#3459)" (#3468)) if let Some(jr_client) = jr_client { match event { Event::new_result => { @@ -350,7 +359,11 @@ impl SyncedDir { Event::new_coverage => { jr_client .send_direct( +<<<<<<< HEAD JobResultData::NewCoverage, +======= + JobResultData::CoverageData, +>>>>>>> c8986aaa (Revert "Release 8.7.1 (hotfix) (#3459)" (#3468)) HashMap::from([("count".to_string(), 1.0)]), ) .await; @@ -360,8 +373,11 @@ impl SyncedDir { } } } +<<<<<<< HEAD ======= >>>>>>> c69deed5 (Release 8.7.1 (hotfix) (#3459)) +======= +>>>>>>> c8986aaa (Revert "Release 8.7.1 (hotfix) (#3459)" (#3468)) if let Err(err) = uploader.upload(item.clone()).await { let error_message = format!( "Couldn't upload file. path:{} dir:{} err:{:?}", @@ -393,7 +409,12 @@ impl SyncedDir { /// The intent of this is to support use cases where we usually want a directory /// to be initialized, but a user-supplied binary, (such as AFL) logically owns /// a directory, and may reset it. 
- pub async fn monitor_results(&self, event: Event, ignore_dotfiles: bool) -> Result<()> { + pub async fn monitor_results( + &self, + event: Event, + ignore_dotfiles: bool, + job_result_client: &Option, + ) -> Result<()> { if let Some(url) = self.remote_path.clone() { loop { debug!("waiting to monitor {}", self.local_path.display()); @@ -412,6 +433,7 @@ impl SyncedDir { url.clone(), event.clone(), ignore_dotfiles, + job_result_client, ) .await?; } diff --git a/src/deployment/bicep-templates/storageAccounts.bicep b/src/deployment/bicep-templates/storageAccounts.bicep index 6a96cea6a0..27f2da21d8 100644 --- a/src/deployment/bicep-templates/storageAccounts.bicep +++ b/src/deployment/bicep-templates/storageAccounts.bicep @@ -33,7 +33,7 @@ var storageAccountFuncQueuesParams = [ 'update-queue' 'webhooks' 'signalr-events' - 'custom-metrics' + 'job-result' ] var fileChangesQueueIndex = 0 diff --git a/src/integration-tests/integration-test.py b/src/integration-tests/integration-test.py index edc43c8edd..1eada750c3 100755 --- a/src/integration-tests/integration-test.py +++ b/src/integration-tests/integration-test.py @@ -88,6 +88,7 @@ class Integration(BaseModel): target_method: Optional[str] setup_dir: Optional[str] target_env: Optional[Dict[str, str]] + pool: PoolName TARGETS: Dict[str, Integration] = { @@ -97,6 +98,7 @@ class Integration(BaseModel): target_exe="fuzz.exe", inputs="seeds", wait_for_files={ContainerType.unique_reports: 1}, + pool="linux", ), "linux-libfuzzer": Integration( template=TemplateType.libfuzzer, @@ -124,6 +126,7 @@ class Integration(BaseModel): "--only_asan_failures", "--write_test_file={extra_output_dir}/test.txt", ], + pool="linux", ), "linux-libfuzzer-with-options": Integration( template=TemplateType.libfuzzer, @@ -137,6 +140,7 @@ class Integration(BaseModel): }, reboot_after_setup=True, fuzzing_target_options=["-runs=10000000"], + pool="linux", ), "linux-libfuzzer-dlopen": Integration( template=TemplateType.libfuzzer, @@ -150,6 +154,7 @@ class 
Integration(BaseModel): }, reboot_after_setup=True, use_setup=True, + pool="linux", ), "linux-libfuzzer-linked-library": Integration( template=TemplateType.libfuzzer, @@ -163,6 +168,7 @@ class Integration(BaseModel): }, reboot_after_setup=True, use_setup=True, + pool="linux", ), "linux-libfuzzer-dotnet": Integration( template=TemplateType.libfuzzer_dotnet, @@ -180,6 +186,7 @@ class Integration(BaseModel): ContainerType.unique_reports: 1, }, test_repro=False, + pool="linux", ), "linux-libfuzzer-aarch64-crosscompile": Integration( template=TemplateType.libfuzzer_qemu_user, @@ -189,6 +196,7 @@ class Integration(BaseModel): use_setup=True, wait_for_files={ContainerType.inputs: 2, ContainerType.crashes: 1}, test_repro=False, + pool="linux", ), "linux-libfuzzer-rust": Integration( template=TemplateType.libfuzzer, @@ -196,6 +204,7 @@ class Integration(BaseModel): target_exe="fuzz_target_1", wait_for_files={ContainerType.unique_reports: 1, ContainerType.coverage: 1}, fuzzing_target_options=["--test:{extra_setup_dir}"], + pool="linux", ), "linux-trivial-crash": Integration( template=TemplateType.radamsa, @@ -204,6 +213,7 @@ class Integration(BaseModel): inputs="seeds", wait_for_files={ContainerType.unique_reports: 1}, inject_fake_regression=True, + pool="linux", ), "linux-trivial-crash-asan": Integration( template=TemplateType.radamsa, @@ -256,6 +266,7 @@ class Integration(BaseModel): "--only_asan_failures", "--write_test_file={extra_output_dir}/test.txt", ], + pool="windows", ), "windows-libfuzzer-linked-library": Integration( template=TemplateType.libfuzzer, @@ -268,6 +279,7 @@ class Integration(BaseModel): ContainerType.coverage: 1, }, use_setup=True, + pool="windows", ), "windows-libfuzzer-load-library": Integration( template=TemplateType.libfuzzer, @@ -280,6 +292,7 @@ class Integration(BaseModel): ContainerType.coverage: 1, }, use_setup=True, + pool="windows", ), "windows-libfuzzer-dotnet": Integration( template=TemplateType.libfuzzer_dotnet, @@ -297,6 +310,7 @@ class 
Integration(BaseModel): ContainerType.unique_reports: 1, }, test_repro=False, + pool="windows", ), "windows-trivial-crash": Integration( template=TemplateType.radamsa, @@ -305,6 +319,7 @@ class Integration(BaseModel): inputs="seeds", wait_for_files={ContainerType.unique_reports: 1}, inject_fake_regression=True, + pool="windows", ), } @@ -373,7 +388,7 @@ def try_info_get(data: Any) -> None: self.inject_log(self.start_log_marker) for entry in os_list: - name = PoolName(f"testpool-{entry.name}-{self.test_id}") + name = self.build_pool_name(entry.name) self.logger.info("creating pool: %s:%s", entry.name, name) self.of.pools.create(name, entry) self.logger.info("creating scaleset for pool: %s", name) @@ -594,12 +609,9 @@ def launch( ) -> List[UUID]: """Launch all of the fuzzing templates""" - pools: Dict[OS, Pool] = {} + pool = None if unmanaged_pool is not None: - pools[unmanaged_pool.the_os] = self.of.pools.get(unmanaged_pool.pool_name) - else: - for pool in self.of.pools.list(): - pools[pool.os] = pool + pool = unmanaged_pool.pool_name job_ids = [] @@ -610,8 +622,8 @@ def launch( if config.os not in os_list: continue - if config.os not in pools.keys(): - raise Exception(f"No pool for target: {target} ,os: {config.os}") + if pool is None: + pool = self.build_pool_name(config.pool) self.logger.info("launching: %s", target) @@ -636,11 +648,15 @@ def launch( job: Optional[Job] = None <<<<<<< HEAD +<<<<<<< HEAD ======= >>>>>>> c69deed5 (Release 8.7.1 (hotfix) (#3459)) +======= + +>>>>>>> c8986aaa (Revert "Release 8.7.1 (hotfix) (#3459)" (#3468)) job = self.build_job( - duration, pools, target, config, setup, target_exe, inputs + duration, pool, target, config, setup, target_exe, inputs ) if config.inject_fake_regression and job is not None: @@ -656,7 +672,7 @@ def launch( def build_job( self, duration: int, - pools: Dict[OS, Pool], + pool: PoolName, target: str, config: Integration, setup: Optional[Directory], @@ -672,7 +688,7 @@ def build_job( self.project, target, 
BUILD, - pools[config.os].name, + pool, target_exe=target_exe, inputs=inputs, setup_dir=setup, @@ -697,7 +713,7 @@ def build_job( self.project, target, BUILD, - pools[config.os].name, + pool, target_dll=File(config.target_exe), inputs=inputs, setup_dir=setup, @@ -713,7 +729,7 @@ def build_job( self.project, target, BUILD, - pools[config.os].name, + pool, inputs=inputs, target_exe=target_exe, duration=duration, @@ -726,7 +742,7 @@ def build_job( self.project, target, BUILD, - pool_name=pools[config.os].name, + pool_name=pool, target_exe=target_exe, inputs=inputs, setup_dir=setup, @@ -741,7 +757,7 @@ def build_job( self.project, target, BUILD, - pool_name=pools[config.os].name, + pool_name=pool, target_exe=target_exe, inputs=inputs, setup_dir=setup, @@ -1271,6 +1287,9 @@ def check_logs_for_errors(self) -> None: if seen_errors: raise Exception("logs included errors") + + def build_pool_name(self, os_type: str) -> PoolName: + return PoolName(f"testpool-{os_type}-{self.test_id}") def check_jobs( self, diff --git a/src/runtime-tools/linux/setup.sh b/src/runtime-tools/linux/setup.sh old mode 100755 new mode 100644 index f6859003b4..794e827f4d --- a/src/runtime-tools/linux/setup.sh +++ b/src/runtime-tools/linux/setup.sh @@ -18,6 +18,14 @@ export DOTNET_CLI_HOME="$DOTNET_ROOT" export ONEFUZZ_ROOT=/onefuzz export LLVM_SYMBOLIZER_PATH=/onefuzz/bin/llvm-symbolizer +# `logger` won't work on mariner unless we install this package first +if type yum > /dev/null 2> /dev/null; then + until yum install -y util-linux sudo; do + echo "yum failed. 
sleep 10s, then retrying" + sleep 10 + done +fi + logger "onefuzz: making directories" sudo mkdir -p /onefuzz/downloaded sudo chown -R $(whoami) /onefuzz @@ -134,31 +142,53 @@ if type apt > /dev/null 2> /dev/null; then sudo ln -f -s $(which llvm-symbolizer-12) $LLVM_SYMBOLIZER_PATH fi - # Install dotnet + # Needed to install dotnet until sudo apt install -y curl libicu-dev; do logger "apt failed, sleeping 10s then retrying" sleep 10 done +elif type yum > /dev/null 2> /dev/null; then + until yum install -y gdb gdb-gdbserver libunwind awk ca-certificates tar yum-utils shadow-utils cronie procps; do + echo "yum failed. sleep 10s, then retrying" + sleep 10 + done + + # Install updated Microsoft Open Management Infrastructure - github.com/microsoft/omi + yum-config-manager --add-repo=https://packages.microsoft.com/config/rhel/8/prod.repo 2>&1 | logger -s -i -t 'onefuzz-OMI-add-MS-repo' + yum install -y omi 2>&1 | logger -s -i -t 'onefuzz-OMI-install' - logger "downloading dotnet install" - curl --retry 10 -sSL https://dot.net/v1/dotnet-install.sh -o dotnet-install.sh 2>&1 | logger -s -i -t 'onefuzz-curl-dotnet-install' - chmod +x dotnet-install.sh - for version in "${DOTNET_VERSIONS[@]}"; do - logger "running dotnet install $version" - /bin/bash ./dotnet-install.sh --channel "$version" --install-dir "$DOTNET_ROOT" 2>&1 | logger -s -i -t 'onefuzz-dotnet-setup' - done - rm dotnet-install.sh - - logger "install dotnet tools" - pushd "$DOTNET_ROOT" - ls -lah 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' - "$DOTNET_ROOT"/dotnet tool install dotnet-dump --version 6.0.351802 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' - "$DOTNET_ROOT"/dotnet tool install dotnet-coverage --version 17.5 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' - "$DOTNET_ROOT"/dotnet tool install dotnet-sos --version 6.0.351802 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' - popd + if ! 
[ -f ${LLVM_SYMBOLIZER_PATH} ]; then + until yum install -y llvm-12.0.1; do + echo "yum failed, sleeping 10s then retrying" + sleep 10 + done + + # If specifying symbolizer, exe name must be a "known symbolizer". + # Using `llvm-symbolizer` works for clang 8 .. 12. + sudo ln -f -s $(which llvm-symbolizer-12) $LLVM_SYMBOLIZER_PATH + fi fi +# Install dotnet +logger "downloading dotnet install" +curl --retry 10 -sSL https://dot.net/v1/dotnet-install.sh -o dotnet-install.sh 2>&1 | logger -s -i -t 'onefuzz-curl-dotnet-install' +chmod +x dotnet-install.sh + +for version in "${DOTNET_VERSIONS[@]}"; do + logger "running dotnet install $version" + /bin/bash ./dotnet-install.sh --channel "$version" --install-dir "$DOTNET_ROOT" 2>&1 | logger -s -i -t 'onefuzz-dotnet-setup' +done +rm dotnet-install.sh + +logger "install dotnet tools" +pushd "$DOTNET_ROOT" +ls -lah 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' +"$DOTNET_ROOT"/dotnet tool install dotnet-dump --version 6.0.351802 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' +"$DOTNET_ROOT"/dotnet tool install dotnet-coverage --version 17.5 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' +"$DOTNET_ROOT"/dotnet tool install dotnet-sos --version 6.0.351802 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' +popd + if [ -v DOCKER_BUILD ]; then echo "building for docker" elif [ -d /etc/systemd/system ]; then From 6c5f9855bad21291401c7d4edd5cfa7a91c25f39 Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Tue, 26 Sep 2023 15:38:23 -0700 Subject: [PATCH 03/32] Add a ToExpand trait --- src/agent/onefuzz/src/expand.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/agent/onefuzz/src/expand.rs b/src/agent/onefuzz/src/expand.rs index 7f1813899f..fa6e60fd25 100644 --- a/src/agent/onefuzz/src/expand.rs +++ b/src/agent/onefuzz/src/expand.rs @@ -93,6 +93,10 @@ impl PlaceHolder { } } +pub trait ToExpand { + fn to_expand<'a>(&'a self) -> Expand<'a>; +} + pub 
struct Expand<'a> { values: HashMap<&'static str, ExpandedValue<'a>>, machine_identity: &'a MachineIdentity, From 5e61ac521c38baad179db20d80a08df6e4e18363 Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Tue, 26 Sep 2023 15:40:19 -0700 Subject: [PATCH 04/32] Implement ToExpand for CommonConfig --- src/agent/onefuzz-task/src/tasks/config.rs | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/src/agent/onefuzz-task/src/tasks/config.rs b/src/agent/onefuzz-task/src/tasks/config.rs index e29e0fd60d..80b59d3a67 100644 --- a/src/agent/onefuzz-task/src/tasks/config.rs +++ b/src/agent/onefuzz-task/src/tasks/config.rs @@ -12,7 +12,7 @@ use crate::tasks::{ use anyhow::{Context, Result}; use onefuzz::{ machine_id::MachineIdentity, - syncdir::{SyncOperation, SyncedDir}, + syncdir::{SyncOperation, SyncedDir}, expand::{ToExpand, Expand}, }; use onefuzz_result::job_result::{init_job_result, TaskJobResultClient}; use onefuzz_telemetry::{ @@ -125,6 +125,22 @@ impl CommonConfig { } } +impl ToExpand for CommonConfig { + fn to_expand<'a>(&'a self) -> Expand<'a> { + Expand::new(&self.machine_identity) + .machine_id() + .job_id(&self.job_id) + .task_id(&self.task_id) + .set_optional_ref(&self.instance_telemetry_key, Expand::instance_telemetry_key) + .set_optional_ref(&self.microsoft_telemetry_key, Expand::microsoft_telemetry_key) + .setup_dir(&self.setup_dir) + .set_optional_ref(&self.extra_setup_dir, Expand::extra_setup_dir) + .set_optional_ref(&self.extra_output, |expand, value| { + expand.extra_output_dir(value.local_path.as_path()) + }) + } +} + #[derive(Debug, Deserialize)] #[serde(tag = "task_type")] pub enum Config { From f1dbd8a6b127f795eb98c8ecb92cc794414472b7 Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Wed, 27 Sep 2023 16:22:15 -0700 Subject: [PATCH 05/32] Rename to GetExpand and implement for all task configs --- .../src/tasks/analysis/generic.rs | 71 +++++++++---------- src/agent/onefuzz-task/src/tasks/config.rs | 35 +++++---- 
.../onefuzz-task/src/tasks/merge/generic.rs | 42 +++++------ .../src/tasks/report/dotnet/generic.rs | 39 +++++++--- src/agent/onefuzz/src/expand.rs | 4 +- 5 files changed, 103 insertions(+), 88 deletions(-) diff --git a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs index 05c6c3d169..161849029b 100644 --- a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs @@ -6,6 +6,7 @@ use crate::tasks::{ utils::try_resolve_setup_relative_path, }; use anyhow::{Context, Result}; +use onefuzz::expand::GetExpand; use onefuzz::{az_copy, blob::url::BlobUrl}; use onefuzz::{ expand::Expand, @@ -47,6 +48,35 @@ pub struct Config { pub common: CommonConfig, } +impl GetExpand for Config { + fn get_expand<'a>(&'a self) -> Expand<'a> { + self.common.get_expand() + .analyzer_exe(&self.analyzer_exe) + .analyzer_options(&self.analyzer_options) + .target_exe(&self.target_exe) + .target_options(&self.target_options) + .output_dir(&self.analysis.local_path) + .set_optional( + self.tools.clone().map(|t| t.local_path), + Expand::tools_dir, + ) + .set_optional_ref(&self.reports, |expand, reports| { + expand.reports_dir(&reports.local_path.as_path()) + }) + .set_optional_ref(&self.crashes, |expand, crashes| { + expand + .set_optional_ref( + &crashes.remote_path.clone().and_then(|u| u.account()), + |expand, account| expand.crashes_account(account), + ) + .set_optional_ref( + &crashes.remote_path.clone().and_then(|u| u.container()), + |expand, container| expand.crashes_container(container), + ) + }) + } +} + pub async fn run(config: Config) -> Result<()> { let task_dir = config .analysis @@ -206,45 +236,10 @@ pub async fn run_tool( let target_exe = try_resolve_setup_relative_path(&config.common.setup_dir, &config.target_exe).await?; - let expand = Expand::new(&config.common.machine_identity) - .machine_id() - .input_path(&input) + let expand = config.get_expand() + 
.input_path(&input) // Only this one is dynamic, the other two should probably be a part of the config .target_exe(&target_exe) - .target_options(&config.target_options) - .analyzer_exe(&config.analyzer_exe) - .analyzer_options(&config.analyzer_options) - .output_dir(&config.analysis.local_path) - .setup_dir(&config.common.setup_dir) - .set_optional( - config.tools.clone().map(|t| t.local_path), - Expand::tools_dir, - ) - .set_optional_ref(&config.common.extra_setup_dir, Expand::extra_setup_dir) - .set_optional_ref(&config.common.extra_output, |expand, value| { - expand.extra_output_dir(value.local_path.as_path()) - }) - .job_id(&config.common.job_id) - .task_id(&config.common.task_id) - .set_optional_ref(&config.common.microsoft_telemetry_key, |tester, key| { - tester.microsoft_telemetry_key(key) - }) - .set_optional_ref(&config.common.instance_telemetry_key, |tester, key| { - tester.instance_telemetry_key(key) - }) - .set_optional_ref(reports_dir, |tester, reports_dir| { - tester.reports_dir(reports_dir) - }) - .set_optional_ref(&config.crashes, |tester, crashes| { - tester - .set_optional_ref( - &crashes.remote_path.clone().and_then(|u| u.account()), - |tester, account| tester.crashes_account(account), - ) - .set_optional_ref( - &crashes.remote_path.clone().and_then(|u| u.container()), - |tester, container| tester.crashes_container(container), - ) - }); + .set_optional_ref(reports_dir, Expand::reports_dir); let analyzer_path = expand.evaluate_value(&config.analyzer_exe)?; diff --git a/src/agent/onefuzz-task/src/tasks/config.rs b/src/agent/onefuzz-task/src/tasks/config.rs index 80b59d3a67..d0a24bc58c 100644 --- a/src/agent/onefuzz-task/src/tasks/config.rs +++ b/src/agent/onefuzz-task/src/tasks/config.rs @@ -12,7 +12,7 @@ use crate::tasks::{ use anyhow::{Context, Result}; use onefuzz::{ machine_id::MachineIdentity, - syncdir::{SyncOperation, SyncedDir}, expand::{ToExpand, Expand}, + syncdir::{SyncOperation, SyncedDir}, expand::{GetExpand, Expand}, }; use 
onefuzz_result::job_result::{init_job_result, TaskJobResultClient}; use onefuzz_telemetry::{ @@ -125,19 +125,28 @@ impl CommonConfig { } } -impl ToExpand for CommonConfig { - fn to_expand<'a>(&'a self) -> Expand<'a> { +impl GetExpand for CommonConfig { + fn get_expand<'a>(&'a self) -> Expand<'a> { Expand::new(&self.machine_identity) - .machine_id() - .job_id(&self.job_id) - .task_id(&self.task_id) - .set_optional_ref(&self.instance_telemetry_key, Expand::instance_telemetry_key) - .set_optional_ref(&self.microsoft_telemetry_key, Expand::microsoft_telemetry_key) - .setup_dir(&self.setup_dir) - .set_optional_ref(&self.extra_setup_dir, Expand::extra_setup_dir) - .set_optional_ref(&self.extra_output, |expand, value| { - expand.extra_output_dir(value.local_path.as_path()) - }) + .machine_id() + .job_id(&self.job_id) + .task_id(&self.task_id) + .set_optional_ref( + &self.instance_telemetry_key, + Expand::instance_telemetry_key + ) + .set_optional_ref( + &self.microsoft_telemetry_key, + Expand::microsoft_telemetry_key + ) + .setup_dir(&self.setup_dir) + .set_optional_ref( + &self.extra_setup_dir, + Expand::extra_setup_dir + ) + .set_optional_ref(&self.extra_output, |expand, extra_output| { + expand.extra_output_dir(extra_output.local_path.as_path()) + }) } } diff --git a/src/agent/onefuzz-task/src/tasks/merge/generic.rs b/src/agent/onefuzz-task/src/tasks/merge/generic.rs index 3b6a2094d8..db95716fcf 100644 --- a/src/agent/onefuzz-task/src/tasks/merge/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/merge/generic.rs @@ -8,7 +8,7 @@ use crate::tasks::{ }; use anyhow::{Context, Result}; use onefuzz::{ - expand::Expand, fs::set_executable, http::ResponseExt, jitter::delay_with_jitter, + expand::{Expand, GetExpand}, fs::set_executable, http::ResponseExt, jitter::delay_with_jitter, syncdir::SyncedDir, }; use reqwest::Url; @@ -33,13 +33,27 @@ pub struct Config { pub target_options_merge: bool, pub tools: SyncedDir, pub input_queue: Url, - pub inputs: SyncedDir, + pub inputs: 
SyncedDir, // is this input corpus, generated inputs, or neither? pub unique_inputs: SyncedDir, #[serde(flatten)] pub common: CommonConfig, } +impl GetExpand for Config { + fn get_expand<'a>(&'a self) -> Expand<'a> { + self.common.get_expand() + .input_marker(&self.supervisor_input_marker) + .input_corpus(&self.unique_inputs.local_path) + .target_exe(&self.target_exe) + .target_options(&self.target_options) + .supervisor_exe(&self.supervisor_exe) + .supervisor_options(&self.supervisor_options) + .tools_dir(&self.tools.local_path) + .generated_inputs(&self.inputs.local_path.as_path()) + } +} + pub async fn spawn(config: &Config) -> Result<()> { config.tools.init_pull().await?; set_executable(&config.tools.local_path).await?; @@ -129,29 +143,9 @@ async fn merge(config: &Config, output_dir: impl AsRef) -> Result<()> { let target_exe = try_resolve_setup_relative_path(&config.common.setup_dir, &config.target_exe).await?; - let expand = Expand::new(&config.common.machine_identity) - .machine_id() - .input_marker(&config.supervisor_input_marker) - .input_corpus(&config.unique_inputs.local_path) - .target_options(&config.target_options) - .supervisor_exe(&config.supervisor_exe) - .supervisor_options(&config.supervisor_options) + let expand = config.get_expand() .generated_inputs(output_dir) - .target_exe(&target_exe) - .setup_dir(&config.common.setup_dir) - .set_optional_ref(&config.common.extra_setup_dir, Expand::extra_setup_dir) - .set_optional_ref(&config.common.extra_output, |expand, value| { - expand.extra_output_dir(value.local_path.as_path()) - }) - .tools_dir(&config.tools.local_path) - .job_id(&config.common.job_id) - .task_id(&config.common.task_id) - .set_optional_ref(&config.common.microsoft_telemetry_key, |tester, key| { - tester.microsoft_telemetry_key(key) - }) - .set_optional_ref(&config.common.instance_telemetry_key, |tester, key| { - tester.instance_telemetry_key(key) - }); + .target_exe(&target_exe); let supervisor_path = 
expand.evaluate_value(&config.supervisor_exe)?; diff --git a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs index b8659845de..703b1c80bd 100644 --- a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs @@ -18,7 +18,7 @@ use crate::tasks::{ }; use anyhow::{Context, Result}; use async_trait::async_trait; -use onefuzz::expand::Expand; +use onefuzz::expand::{Expand, GetExpand}; use onefuzz::fs::set_executable; use onefuzz::{blob::BlobUrl, sha256, syncdir::SyncedDir}; use onefuzz_result::job_result::TaskJobResultClient; @@ -59,6 +59,31 @@ pub struct Config { pub common: CommonConfig, } +impl GetExpand for Config { + fn get_expand(&self) -> Expand { + let tools_dir = self.tools.local_path.to_string_lossy().into_owned(); + + self.common.get_expand() + .target_exe(&self.target_exe) + .target_options(&self.target_options) + .tools_dir(tools_dir) + .set_optional_ref(&self.reports, |expand, reports| { + expand.reports_dir(&reports.local_path.as_path()) + }) + .set_optional_ref(&self.crashes, |expand, crashes| { + expand + .set_optional_ref( + &crashes.remote_path.clone().and_then(|u| u.account()), + |expand, account| expand.crashes_account(account), + ) + .set_optional_ref( + &crashes.remote_path.clone().and_then(|u| u.container()), + |expand, container| expand.crashes_container(container), + ) + }) + } +} + pub struct DotnetCrashReportTask { config: Arc, pub poller: InputPoller, @@ -130,12 +155,10 @@ impl AsanProcessor { } async fn target_exe(&self) -> Result { - let tools_dir = self.config.tools.local_path.to_string_lossy().into_owned(); - // Try to expand `target_exe` with support for `{tools_dir}`. // // Allows using `LibFuzzerDotnetLoader.exe` from a shared tools container. 
- let expand = Expand::new(&self.config.common.machine_identity).tools_dir(tools_dir); + let expand = self.config.get_expand(); let expanded = expand.evaluate_value(self.config.target_exe.to_string_lossy())?; let expanded_path = Path::new(&expanded); @@ -183,13 +206,7 @@ impl AsanProcessor { let mut args = vec![target_exe]; args.extend(self.config.target_options.clone()); - let expand = Expand::new(&self.config.common.machine_identity) - .input_path(input) - .setup_dir(&self.config.common.setup_dir) - .set_optional_ref(&self.config.common.extra_setup_dir, Expand::extra_setup_dir) - .set_optional_ref(&self.config.common.extra_output, |expand, value| { - expand.extra_output_dir(value.local_path.as_path()) - }); + let expand = self.config.get_expand(); let expanded_args = expand.evaluate(&args)?; diff --git a/src/agent/onefuzz/src/expand.rs b/src/agent/onefuzz/src/expand.rs index fa6e60fd25..caa7262f09 100644 --- a/src/agent/onefuzz/src/expand.rs +++ b/src/agent/onefuzz/src/expand.rs @@ -93,8 +93,8 @@ impl PlaceHolder { } } -pub trait ToExpand { - fn to_expand<'a>(&'a self) -> Expand<'a>; +pub trait GetExpand { + fn get_expand<'a>(&'a self) -> Expand<'a>; } pub struct Expand<'a> { From f553c0c5d305e5104e2ed7baf471011d115702e9 Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Wed, 4 Oct 2023 14:11:07 -0700 Subject: [PATCH 06/32] Add proptest to the crate and a simple test function (broken) --- src/agent/Cargo.lock | 96 +++++++++++++- src/agent/onefuzz-task/Cargo.toml | 2 + src/agent/onefuzz-task/src/lib.rs | 2 + src/agent/onefuzz-task/src/tasks/config.rs | 63 +++++++++ src/agent/onefuzz-task/src/test_utils.rs | 143 +++++++++++++++++++++ 5 files changed, 304 insertions(+), 2 deletions(-) create mode 100644 src/agent/onefuzz-task/src/test_utils.rs diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock index eb35241201..af17d6ad2b 100644 --- a/src/agent/Cargo.lock +++ b/src/agent/Cargo.lock @@ -342,6 +342,21 @@ dependencies = [ "shlex", ] +[[package]] +name = "bit-set" 
+version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" + [[package]] name = "bitflags" version = "1.3.2" @@ -1776,6 +1791,12 @@ dependencies = [ "winapi 0.3.9", ] +[[package]] +name = "libm" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7012b1bbb0719e1097c47611d3898568c546d597c2e74d66f6087edd5233ff4" + [[package]] name = "libproc" version = "0.12.0" @@ -2080,6 +2101,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" dependencies = [ "autocfg", + "libm", ] [[package]] @@ -2245,6 +2267,7 @@ dependencies = [ "coverage", "crossterm 0.27.0", "debuggable-module", + "dunce", "env_logger", "flexi_logger", "flume", @@ -2260,6 +2283,7 @@ dependencies = [ "onefuzz-telemetry", "path-absolutize", "pretty_assertions", + "proptest", "ratatui", "regex", "reqwest", @@ -2592,6 +2616,26 @@ dependencies = [ "rustix 0.36.15", ] +[[package]] +name = "proptest" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e35c06b98bf36aba164cc17cb25f7e232f5c4aeea73baa14b8a9f0d92dbfa65" +dependencies = [ + "bit-set", + "bitflags 1.3.2", + "byteorder", + "lazy_static", + "num-traits", + "rand 0.8.5", + "rand_chacha 0.3.1", + "rand_xorshift", + "regex-syntax 0.6.29", + "rusty-fork", + "tempfile", + "unarray", +] + [[package]] name = "queue-file" version = "1.4.10" @@ -2602,6 +2646,12 @@ dependencies = [ "snafu", ] +[[package]] +name = "quick-error" +version = "1.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" + [[package]] name = "quick-xml" version = "0.30.0" @@ -2701,6 +2751,15 @@ dependencies = [ "rand_core 0.5.1", ] +[[package]] +name = "rand_xorshift" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" +dependencies = [ + "rand_core 0.6.4", +] + [[package]] name = "range-collections" version = "0.2.4" @@ -2765,7 +2824,7 @@ dependencies = [ "aho-corasick", "memchr", "regex-automata", - "regex-syntax", + "regex-syntax 0.7.4", ] [[package]] @@ -2776,9 +2835,15 @@ checksum = "fed1ceff11a1dddaee50c9dc8e4938bd106e9d89ae372f192311e7da498e3b69" dependencies = [ "aho-corasick", "memchr", - "regex-syntax", + "regex-syntax 0.7.4", ] +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + [[package]] name = "regex-syntax" version = "0.7.4" @@ -2911,6 +2976,18 @@ version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" +[[package]] +name = "rusty-fork" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f" +dependencies = [ + "fnv", + "quick-error", + "tempfile", + "wait-timeout", +] + [[package]] name = "ryu" version = "1.0.15" @@ -3676,6 +3753,12 @@ version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" +[[package]] +name = "unarray" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" + [[package]] name = "unicode-bidi" version = 
"0.3.13" @@ -3797,6 +3880,15 @@ version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" +[[package]] +name = "wait-timeout" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" +dependencies = [ + "libc", +] + [[package]] name = "waker-fn" version = "1.1.0" diff --git a/src/agent/onefuzz-task/Cargo.toml b/src/agent/onefuzz-task/Cargo.toml index 4b3e8e8c43..47d49351ef 100644 --- a/src/agent/onefuzz-task/Cargo.toml +++ b/src/agent/onefuzz-task/Cargo.toml @@ -85,4 +85,6 @@ schemars = { version = "0.8.12", features = ["uuid1"] } [dev-dependencies] pretty_assertions = "1.4" +proptest = "1.2.0" tempfile = "3.8" +dunce = "1.0" diff --git a/src/agent/onefuzz-task/src/lib.rs b/src/agent/onefuzz-task/src/lib.rs index 997eea549d..51eb0c3210 100644 --- a/src/agent/onefuzz-task/src/lib.rs +++ b/src/agent/onefuzz-task/src/lib.rs @@ -7,3 +7,5 @@ extern crate onefuzz_telemetry; pub mod local; pub mod tasks; +#[cfg(test)] +pub mod test_utils; diff --git a/src/agent/onefuzz-task/src/tasks/config.rs b/src/agent/onefuzz-task/src/tasks/config.rs index d0a24bc58c..2c4e76310a 100644 --- a/src/agent/onefuzz-task/src/tasks/config.rs +++ b/src/agent/onefuzz-task/src/tasks/config.rs @@ -389,3 +389,66 @@ impl Config { Ok(()) } } + +#[cfg(test)] +mod tests { + use proptest::prelude::*; + use onefuzz::expand::GetExpand; + use std::collections::HashMap; + use crate::test_utils::arbitraries::*; + + use super::CommonConfig; + + proptest! 
{ + // generate an arbitrary config + // map the expanded values from the config to their expander names + // verify that the get_expand() result has all the same values as are in the config + #[test] + fn test_get_expand_values_match_config( + config in any::(), + ) { + let expand = config.get_expand(); + + // for now, use a hardcoded list of parameter names that the config supplies + let mut params = HashMap::from([ + ("machine_id", config.machine_identity.machine_id.to_string()), + ("job_id", config.job_id.to_string()), + ("task_id", config.task_id.to_string()), + ("setup_dir", dunce::canonicalize(config.setup_dir.clone()).unwrap().to_string_lossy().to_string()), + ]); + // Look for a shorthand way of doing these optional hashmap inserts + // config.instance_telemetry_key.iter().for_each(|key| { + // params.insert("instance_telemetry_key", key.to_string()); + // }); + match &config.instance_telemetry_key { + Some(key) => { + params.insert("instance_telemetry_key", key.clone().to_string()); + }, + None => {}, + } + match &config.microsoft_telemetry_key { + Some(key) => { + params.insert("microsoft_telemetry_key", key.clone().to_string()); + }, + None => {}, + } + match &config.extra_setup_dir { + Some(dir) => { + params.insert("extra_setup_dir", dunce::canonicalize(dir).unwrap().to_string_lossy().to_string()); + }, + None => {}, + } + match &config.extra_output { + Some(dir) => { + params.insert("extra_output_dir", dunce::canonicalize(&dir.local_path).unwrap().to_string_lossy().to_string()); + }, + None => {}, + } + + for (param, expected) in params.iter() { + let evaluated = expand.evaluate_value(format!("{{{param}}}")).unwrap(); + assert_eq!(evaluated, *expected); + } + } + } +} diff --git a/src/agent/onefuzz-task/src/test_utils.rs b/src/agent/onefuzz-task/src/test_utils.rs new file mode 100644 index 0000000000..d8ad0fbe5b --- /dev/null +++ b/src/agent/onefuzz-task/src/test_utils.rs @@ -0,0 +1,143 @@ +#[cfg(test)] +pub mod arbitraries { + use 
std::path::PathBuf; + + use onefuzz::{blob::BlobContainerUrl, machine_id::MachineIdentity, syncdir::SyncedDir}; + use onefuzz_telemetry::{InstanceTelemetryKey, MicrosoftTelemetryKey}; + use proptest::{option, prelude::*}; + use reqwest::Url; + use uuid::Uuid; + + use crate::tasks::config::CommonConfig; + + prop_compose! { + fn arb_uuid()( + uuid in "[0-9a-f]{8}-([0-9a-f]{4}-){3}[0-9a-f]{12}" + ) -> Uuid { + Uuid::parse_str(&uuid).unwrap() + } + } + + prop_compose! { + fn arb_instance_telemetry_key()( + uuid in arb_uuid() + ) -> InstanceTelemetryKey { + InstanceTelemetryKey::new(uuid) + } + } + + prop_compose! { + fn arb_microsoft_telemetry_key()( + uuid in arb_uuid() + ) -> MicrosoftTelemetryKey { + MicrosoftTelemetryKey::new(uuid) + } + } + + prop_compose! { + fn arb_url()( + url in r"https?://(www\.)?[-a-zA-Z0-9]{1,256}\.[a-zA-Z0-9]{1,6}([-a-zA-Z0-9]*)" + ) -> Url { + match Url::parse(&url) { + Ok(url) => url, + Err(err) => panic!("invalid url generated ({}): {}", err, url), + } + } + } + + prop_compose! { + // Todo: consider a better way to generate a path + fn arb_pathbuf()( + path in "src" + ) -> PathBuf { + PathBuf::from(path) + } + } + + prop_compose! { + fn arb_machine_identity()( + machine_id in arb_uuid(), + machine_name in ".*", + scaleset_name in ".*", + ) -> MachineIdentity { + MachineIdentity { + machine_id, + machine_name, + scaleset_name: Some(scaleset_name), + } + } + } + + fn arb_blob_container_url() -> impl Strategy { + prop_oneof![ + arb_url().prop_map(BlobContainerUrl::BlobContainer), + arb_pathbuf().prop_map(BlobContainerUrl::Path), + ] + } + + prop_compose! { + fn arb_synced_dir()( + local_path in arb_pathbuf(), + remote_path in option::of(arb_blob_container_url()), + ) -> SyncedDir { + SyncedDir { + local_path, + remote_path, + } + } + } + + prop_compose! 
{ + fn arb_common_config(tag_limit: usize)( + job_id in arb_uuid(), + task_id in arb_uuid(), + instance_id in arb_uuid(), + heartbeat_queue in option::of(arb_url()), + job_result_queue in option::of(arb_url()), + instance_telemetry_key in option::of(arb_instance_telemetry_key()), // consider implementing Arbitrary for these types for a canonical way to generate them + microsoft_telemetry_key in option::of(arb_microsoft_telemetry_key()), // We can probably derive Arbitrary if it's implemented for the composing types like Url + logs in option::of(arb_url()), + setup_dir in arb_pathbuf(), + extra_setup_dir in option::of(arb_pathbuf()), + extra_output in option::of(arb_synced_dir()), + min_available_memory_mb in any::(), + machine_identity in arb_machine_identity(), + tags in prop::collection::hash_map(".*", ".*", tag_limit), + from_agent_to_task_endpoint in ".*", + from_task_to_agent_endpoint in ".*", + ) -> CommonConfig { + CommonConfig { + job_id, + task_id, + instance_id, + heartbeat_queue, + job_result_queue, + instance_telemetry_key, + microsoft_telemetry_key, + logs, + setup_dir, + extra_setup_dir, + extra_output, + min_available_memory_mb, + machine_identity, + tags, + from_agent_to_task_endpoint, + from_task_to_agent_endpoint, + } + } + } + + impl Arbitrary for CommonConfig { + type Parameters = (); + type Strategy = BoxedStrategy; + + fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { + arb_common_config(10).boxed() + } + } + + // Make a trait out of this and add it to a common test module + impl CommonConfig { + // Get all the fields from the type that are passed to the expander + } +} From aa7415f90d9d385822f26f18581a19a68125d1f0 Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Wed, 4 Oct 2023 15:22:48 -0700 Subject: [PATCH 07/32] Fix import issues --- src/agent/onefuzz-task/src/tasks/config.rs | 33 ++++++---------------- src/agent/onefuzz-task/src/test_utils.rs | 1 - 2 files changed, 8 insertions(+), 26 deletions(-) diff --git 
a/src/agent/onefuzz-task/src/tasks/config.rs b/src/agent/onefuzz-task/src/tasks/config.rs index 2c4e76310a..4ee71d9350 100644 --- a/src/agent/onefuzz-task/src/tasks/config.rs +++ b/src/agent/onefuzz-task/src/tasks/config.rs @@ -395,7 +395,6 @@ mod tests { use proptest::prelude::*; use onefuzz::expand::GetExpand; use std::collections::HashMap; - use crate::test_utils::arbitraries::*; use super::CommonConfig; @@ -416,33 +415,17 @@ mod tests { ("task_id", config.task_id.to_string()), ("setup_dir", dunce::canonicalize(config.setup_dir.clone()).unwrap().to_string_lossy().to_string()), ]); - // Look for a shorthand way of doing these optional hashmap inserts - // config.instance_telemetry_key.iter().for_each(|key| { - // params.insert("instance_telemetry_key", key.to_string()); - // }); - match &config.instance_telemetry_key { - Some(key) => { - params.insert("instance_telemetry_key", key.clone().to_string()); - }, - None => {}, + if let Some(key) = &config.instance_telemetry_key { + params.insert("instance_telemetry_key", key.to_string()); } - match &config.microsoft_telemetry_key { - Some(key) => { - params.insert("microsoft_telemetry_key", key.clone().to_string()); - }, - None => {}, + if let Some(key) = &config.microsoft_telemetry_key { + params.insert("microsoft_telemetry_key", key.clone().to_string()); } - match &config.extra_setup_dir { - Some(dir) => { - params.insert("extra_setup_dir", dunce::canonicalize(dir).unwrap().to_string_lossy().to_string()); - }, - None => {}, + if let Some(dir) = &config.extra_setup_dir { + params.insert("extra_setup_dir", dunce::canonicalize(dir).unwrap().to_string_lossy().to_string()); } - match &config.extra_output { - Some(dir) => { - params.insert("extra_output_dir", dunce::canonicalize(&dir.local_path).unwrap().to_string_lossy().to_string()); - }, - None => {}, + if let Some(dir) = &config.extra_output { + params.insert("extra_output_dir", dunce::canonicalize(&dir.local_path).unwrap().to_string_lossy().to_string()); } for (param, 
expected) in params.iter() { diff --git a/src/agent/onefuzz-task/src/test_utils.rs b/src/agent/onefuzz-task/src/test_utils.rs index d8ad0fbe5b..29f8ce16d4 100644 --- a/src/agent/onefuzz-task/src/test_utils.rs +++ b/src/agent/onefuzz-task/src/test_utils.rs @@ -1,4 +1,3 @@ -#[cfg(test)] pub mod arbitraries { use std::path::PathBuf; From c0ac5e309b53d7f85a2bb7130bbeb4ff4e710406 Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Wed, 4 Oct 2023 17:39:06 -0700 Subject: [PATCH 08/32] Add config tests for analysis task --- .../{test_utils.rs => config_test_utils.rs} | 75 +++++++++++++++++-- src/agent/onefuzz-task/src/lib.rs | 2 +- .../src/tasks/analysis/generic.rs | 52 +++++++++++++ src/agent/onefuzz-task/src/tasks/config.rs | 58 +++++++------- 4 files changed, 151 insertions(+), 36 deletions(-) rename src/agent/onefuzz-task/src/{test_utils.rs => config_test_utils.rs} (61%) diff --git a/src/agent/onefuzz-task/src/test_utils.rs b/src/agent/onefuzz-task/src/config_test_utils.rs similarity index 61% rename from src/agent/onefuzz-task/src/test_utils.rs rename to src/agent/onefuzz-task/src/config_test_utils.rs index 29f8ce16d4..22c83e60db 100644 --- a/src/agent/onefuzz-task/src/test_utils.rs +++ b/src/agent/onefuzz-task/src/config_test_utils.rs @@ -1,3 +1,13 @@ +use onefuzz::expand::{GetExpand, PlaceHolder}; + +// Moving this trait method into the GetExpand trait--and returning `Vec<(PlaceHolder, Box)>` instead +// would let us use define a default implementation for `get_expand()` while also coupling the expand values we +// test with those we give to the expander. +// It seems to me like a non-trivial (and perhaps bad) design change though. +pub trait GetExpandFields: GetExpand { + fn get_expand_fields(&self) -> Vec<(PlaceHolder, String)>; +} + pub mod arbitraries { use std::path::PathBuf; @@ -7,7 +17,7 @@ pub mod arbitraries { use reqwest::Url; use uuid::Uuid; - use crate::tasks::config::CommonConfig; + use crate::tasks::{config::CommonConfig, analysis}; prop_compose! 
{ fn arb_uuid()( @@ -85,9 +95,20 @@ pub mod arbitraries { } } } + + prop_compose! { + fn arb_string_vec_no_vars()( + // I don't know how to figure out the expected value of the target options if they could contain variables (e.g. {machine_id}) + // This should be fine since this isn't used to test nested expansion + options in prop::collection::vec("[^{}]*", 10), + ) -> Vec { + options + } + } + prop_compose! { - fn arb_common_config(tag_limit: usize)( + fn arb_common_config()( job_id in arb_uuid(), task_id in arb_uuid(), instance_id in arb_uuid(), @@ -101,7 +122,7 @@ pub mod arbitraries { extra_output in option::of(arb_synced_dir()), min_available_memory_mb in any::(), machine_identity in arb_machine_identity(), - tags in prop::collection::hash_map(".*", ".*", tag_limit), + tags in prop::collection::hash_map(".*", ".*", 10), from_agent_to_task_endpoint in ".*", from_task_to_agent_endpoint in ".*", ) -> CommonConfig { @@ -131,12 +152,50 @@ pub mod arbitraries { type Strategy = BoxedStrategy; fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { - arb_common_config(10).boxed() + arb_common_config().boxed() } } - - // Make a trait out of this and add it to a common test module - impl CommonConfig { - // Get all the fields from the type that are passed to the expander + + prop_compose! 
{ + fn arb_analysis_config()( + analyzer_exe in Just("src/lib.rs".to_string()), + analyzer_options in arb_string_vec_no_vars(), + analyzer_env in prop::collection::hash_map(".*", ".*", 10), + target_exe in arb_pathbuf(), + target_options in arb_string_vec_no_vars(), + input_queue in Just(None), + crashes in option::of(arb_synced_dir()), + analysis in arb_synced_dir(), + tools in option::of(arb_synced_dir()), + reports in option::of(arb_synced_dir()), + unique_reports in option::of(arb_synced_dir()), + no_repro in option::of(arb_synced_dir()), + common in arb_common_config(), + ) -> analysis::generic::Config { + analysis::generic::Config { + analyzer_exe, + analyzer_options, + analyzer_env, + target_exe, + target_options, + input_queue, + crashes, + analysis, + tools, + reports, + unique_reports, + no_repro, + common, + } + } + } + + impl Arbitrary for analysis::generic::Config { + type Parameters = (); + type Strategy = BoxedStrategy; + + fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { + arb_analysis_config().boxed() + } } } diff --git a/src/agent/onefuzz-task/src/lib.rs b/src/agent/onefuzz-task/src/lib.rs index 51eb0c3210..9e01e5e049 100644 --- a/src/agent/onefuzz-task/src/lib.rs +++ b/src/agent/onefuzz-task/src/lib.rs @@ -8,4 +8,4 @@ extern crate onefuzz_telemetry; pub mod local; pub mod tasks; #[cfg(test)] -pub mod test_utils; +pub mod config_test_utils; diff --git a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs index 161849029b..4ed14a4324 100644 --- a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs @@ -268,3 +268,55 @@ pub async fn run_tool( .with_context(|| format!("analyzer failed to run: {analyzer_path}"))?; Ok(()) } + +#[cfg(test)] +mod tests { + use proptest::prelude::*; + use onefuzz::expand::{GetExpand, PlaceHolder}; + + use crate::config_test_utils::GetExpandFields; + + use super::Config; + + impl GetExpandFields 
for Config { + fn get_expand_fields(&self) -> Vec<(PlaceHolder, String)> { + let mut params = self.common.get_expand_fields(); + params.push((PlaceHolder::AnalyzerExe, dunce::canonicalize(&self.analyzer_exe).unwrap().to_string_lossy().to_string())); + params.push((PlaceHolder::AnalyzerOptions, self.analyzer_options.join(" "))); + params.push((PlaceHolder::TargetExe, dunce::canonicalize(&self.target_exe).unwrap().to_string_lossy().to_string())); + params.push((PlaceHolder::TargetOptions, self.target_options.join(" "))); + params.push((PlaceHolder::OutputDir, dunce::canonicalize(&self.analysis.local_path).unwrap().to_string_lossy().to_string())); + if let Some(tools) = &self.tools { + params.push((PlaceHolder::ToolsDir, dunce::canonicalize(&tools.local_path).unwrap().to_string_lossy().to_string())); + } + if let Some(reports) = &self.reports { + params.push((PlaceHolder::ReportsDir, dunce::canonicalize(&reports.local_path).unwrap().to_string_lossy().to_string())); + } + if let Some(crashes) = &self.crashes { + if let Some(account) = crashes.remote_path.clone().and_then(|u| u.account()) { + params.push((PlaceHolder::CrashesAccount, account)); + } + if let Some(container) = crashes.remote_path.clone().and_then(|u| u.container()) { + params.push((PlaceHolder::CrashesContainer, container)); + } + } + + params + } + } + + proptest! 
{ + #[test] + fn test_get_expand_values_match_config( + config in any::(), + ) { + let expand = config.get_expand(); + let params = config.get_expand_fields(); + + for (param, expected) in params.iter() { + let evaluated = expand.evaluate_value(format!("{}", param.get_string())).unwrap(); + assert_eq!(evaluated, *expected, "placeholder {} did not match expected value", param.get_string()); + } + } + } +} diff --git a/src/agent/onefuzz-task/src/tasks/config.rs b/src/agent/onefuzz-task/src/tasks/config.rs index 4ee71d9350..cfbb9fa09c 100644 --- a/src/agent/onefuzz-task/src/tasks/config.rs +++ b/src/agent/onefuzz-task/src/tasks/config.rs @@ -131,6 +131,7 @@ impl GetExpand for CommonConfig { .machine_id() .job_id(&self.job_id) .task_id(&self.task_id) + .setup_dir(&self.setup_dir) .set_optional_ref( &self.instance_telemetry_key, Expand::instance_telemetry_key @@ -139,7 +140,6 @@ impl GetExpand for CommonConfig { &self.microsoft_telemetry_key, Expand::microsoft_telemetry_key ) - .setup_dir(&self.setup_dir) .set_optional_ref( &self.extra_setup_dir, Expand::extra_setup_dir @@ -393,43 +393,47 @@ impl Config { #[cfg(test)] mod tests { use proptest::prelude::*; - use onefuzz::expand::GetExpand; - use std::collections::HashMap; + use onefuzz::expand::{GetExpand, PlaceHolder}; + + use crate::config_test_utils::GetExpandFields; use super::CommonConfig; + impl GetExpandFields for CommonConfig { + fn get_expand_fields(&self) -> Vec<(PlaceHolder, String)> { + let mut params = vec![ + (PlaceHolder::MachineId, self.machine_identity.machine_id.to_string()), + (PlaceHolder::JobId, self.job_id.to_string()), + (PlaceHolder::TaskId, self.task_id.to_string()), + (PlaceHolder::SetupDir, dunce::canonicalize(&self.setup_dir).unwrap().to_string_lossy().to_string()), + ]; + if let Some(key) = &self.instance_telemetry_key { + params.push((PlaceHolder::InstanceTelemetryKey, key.to_string())); + } + if let Some(key) = &self.microsoft_telemetry_key { + 
params.push((PlaceHolder::MicrosoftTelemetryKey, key.clone().to_string())); + } + if let Some(dir) = &self.extra_setup_dir { + params.push((PlaceHolder::ExtraSetupDir, dunce::canonicalize(dir).unwrap().to_string_lossy().to_string())); + } + if let Some(dir) = &self.extra_output { + params.push((PlaceHolder::ExtraOutputDir, dunce::canonicalize(&dir.local_path).unwrap().to_string_lossy().to_string())); + } + + params + } + } + proptest! { - // generate an arbitrary config - // map the expanded values from the config to their expander names - // verify that the get_expand() result has all the same values as are in the config #[test] fn test_get_expand_values_match_config( config in any::(), ) { let expand = config.get_expand(); - - // for now, use a hardcoded list of parameter names that the config supplies - let mut params = HashMap::from([ - ("machine_id", config.machine_identity.machine_id.to_string()), - ("job_id", config.job_id.to_string()), - ("task_id", config.task_id.to_string()), - ("setup_dir", dunce::canonicalize(config.setup_dir.clone()).unwrap().to_string_lossy().to_string()), - ]); - if let Some(key) = &config.instance_telemetry_key { - params.insert("instance_telemetry_key", key.to_string()); - } - if let Some(key) = &config.microsoft_telemetry_key { - params.insert("microsoft_telemetry_key", key.clone().to_string()); - } - if let Some(dir) = &config.extra_setup_dir { - params.insert("extra_setup_dir", dunce::canonicalize(dir).unwrap().to_string_lossy().to_string()); - } - if let Some(dir) = &config.extra_output { - params.insert("extra_output_dir", dunce::canonicalize(&dir.local_path).unwrap().to_string_lossy().to_string()); - } + let params = config.get_expand_fields(); for (param, expected) in params.iter() { - let evaluated = expand.evaluate_value(format!("{{{param}}}")).unwrap(); + let evaluated = expand.evaluate_value(format!("{}", param.get_string())).unwrap(); assert_eq!(evaluated, *expected); } } From 5a83b095177babd77ef9ee6d407ac29c6867755d 
Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Thu, 5 Oct 2023 08:52:28 -0700 Subject: [PATCH 09/32] Implement tests for merge task --- .../onefuzz-task/src/config_test_utils.rs | 46 ++++++++++++++++++- .../onefuzz-task/src/tasks/merge/generic.rs | 43 ++++++++++++++++- 2 files changed, 86 insertions(+), 3 deletions(-) diff --git a/src/agent/onefuzz-task/src/config_test_utils.rs b/src/agent/onefuzz-task/src/config_test_utils.rs index 22c83e60db..dc668c1cf8 100644 --- a/src/agent/onefuzz-task/src/config_test_utils.rs +++ b/src/agent/onefuzz-task/src/config_test_utils.rs @@ -17,7 +17,7 @@ pub mod arbitraries { use reqwest::Url; use uuid::Uuid; - use crate::tasks::{config::CommonConfig, analysis}; + use crate::tasks::{config::CommonConfig, analysis, merge}; prop_compose! { fn arb_uuid()( @@ -45,7 +45,8 @@ pub mod arbitraries { prop_compose! { fn arb_url()( - url in r"https?://(www\.)?[-a-zA-Z0-9]{1,256}\.[a-zA-Z0-9]{1,6}([-a-zA-Z0-9]*)" + // Don't use this for any url that isn't just being used for a string comparison (as for the config tests) + url in r"https?://(www\.)?[-a-zA-Z0-9]{1,256}\.[a-zA-Z]{1,6}([-a-zA-Z]*)" ) -> Url { match Url::parse(&url) { Ok(url) => url, @@ -198,4 +199,45 @@ pub mod arbitraries { arb_analysis_config().boxed() } } + + prop_compose! 
{ + fn arb_merge_config()( + supervisor_exe in Just("src/lib.rs".to_string()), + supervisor_options in arb_string_vec_no_vars(), + supervisor_env in prop::collection::hash_map(".*", ".*", 10), + supervisor_input_marker in ".*", + target_exe in arb_pathbuf(), + target_options in arb_string_vec_no_vars(), + target_options_merge in any::(), + tools in arb_synced_dir(), + input_queue in arb_url(), + inputs in arb_synced_dir(), + unique_inputs in arb_synced_dir(), + common in arb_common_config(), + ) -> merge::generic::Config { + merge::generic::Config { + supervisor_exe, + supervisor_options, + supervisor_env, + supervisor_input_marker, + target_exe, + target_options, + target_options_merge, + tools, + input_queue, + inputs, + unique_inputs, + common, + } + } + } + + impl Arbitrary for merge::generic::Config { + type Parameters = (); + type Strategy = BoxedStrategy; + + fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { + arb_merge_config().boxed() + } + } } diff --git a/src/agent/onefuzz-task/src/tasks/merge/generic.rs b/src/agent/onefuzz-task/src/tasks/merge/generic.rs index db95716fcf..4b79bfb86f 100644 --- a/src/agent/onefuzz-task/src/tasks/merge/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/merge/generic.rs @@ -50,7 +50,7 @@ impl GetExpand for Config { .supervisor_exe(&self.supervisor_exe) .supervisor_options(&self.supervisor_options) .tools_dir(&self.tools.local_path) - .generated_inputs(&self.inputs.local_path.as_path()) + .generated_inputs(&self.inputs.local_path) } } @@ -175,3 +175,44 @@ async fn merge(config: &Config, output_dir: impl AsRef) -> Result<()> { cmd.spawn()?.wait_with_output().await?; Ok(()) } + +#[cfg(test)] +mod tests { + use proptest::prelude::*; + use onefuzz::expand::{GetExpand, PlaceHolder}; + + use crate::config_test_utils::GetExpandFields; + + use super::Config; + + impl GetExpandFields for Config { + fn get_expand_fields(&self) -> Vec<(PlaceHolder, String)> { + let mut params = self.common.get_expand_fields(); + 
params.push((PlaceHolder::Input, self.supervisor_input_marker.clone())); + params.push((PlaceHolder::InputCorpus, dunce::canonicalize(&self.unique_inputs.local_path).unwrap().to_string_lossy().to_string())); + params.push((PlaceHolder::TargetExe, dunce::canonicalize(&self.target_exe).unwrap().to_string_lossy().to_string())); + params.push((PlaceHolder::TargetOptions, self.target_options.join(" "))); + params.push((PlaceHolder::SupervisorExe, dunce::canonicalize(&self.supervisor_exe).unwrap().to_string_lossy().to_string())); + params.push((PlaceHolder::SupervisorOptions, self.supervisor_options.join(" "))); + params.push((PlaceHolder::ToolsDir, dunce::canonicalize(&self.tools.local_path).unwrap().to_string_lossy().to_string())); + params.push((PlaceHolder::GeneratedInputs, dunce::canonicalize(&self.inputs.local_path).unwrap().to_string_lossy().to_string())); + + params + } + } + + proptest! { + #[test] + fn test_get_expand_values_match_config( + config in any::(), + ) { + let expand = config.get_expand(); + let params = config.get_expand_fields(); + + for (param, expected) in params.iter() { + let evaluated = expand.evaluate_value(format!("{}", param.get_string())).unwrap(); + assert_eq!(evaluated, *expected, "placeholder {} did not match expected value", param.get_string()); + } + } + } +} From d15ab703fad60799ff5817e91eb3f20732635656 Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Thu, 5 Oct 2023 14:22:35 -0700 Subject: [PATCH 10/32] Return Result from get_expand() and implement for the rest of the configs --- .../onefuzz-task/src/config_test_utils.rs | 232 ++++++++- .../src/tasks/analysis/generic.rs | 13 +- src/agent/onefuzz-task/src/tasks/config.rs | 13 +- .../onefuzz-task/src/tasks/coverage/dotnet.rs | 69 ++- .../src/tasks/coverage/generic.rs | 63 ++- .../onefuzz-task/src/tasks/fuzz/generator.rs | 288 ++++++----- .../onefuzz-task/src/tasks/fuzz/supervisor.rs | 479 ++++++++++-------- .../onefuzz-task/src/tasks/merge/generic.rs | 19 +- 
.../src/tasks/report/dotnet/generic.rs | 60 ++- src/agent/onefuzz/src/expand.rs | 2 +- 10 files changed, 859 insertions(+), 379 deletions(-) diff --git a/src/agent/onefuzz-task/src/config_test_utils.rs b/src/agent/onefuzz-task/src/config_test_utils.rs index dc668c1cf8..fe27b74890 100644 --- a/src/agent/onefuzz-task/src/config_test_utils.rs +++ b/src/agent/onefuzz-task/src/config_test_utils.rs @@ -17,7 +17,7 @@ pub mod arbitraries { use reqwest::Url; use uuid::Uuid; - use crate::tasks::{config::CommonConfig, analysis, merge}; + use crate::tasks::{config::CommonConfig, analysis, merge, coverage, report, fuzz}; prop_compose! { fn arb_uuid()( @@ -46,7 +46,8 @@ pub mod arbitraries { prop_compose! { fn arb_url()( // Don't use this for any url that isn't just being used for a string comparison (as for the config tests) - url in r"https?://(www\.)?[-a-zA-Z0-9]{1,256}\.[a-zA-Z]{1,6}([-a-zA-Z]*)" + // basically all that matters here is that we generate a parsable url + url in r"https?://(www\.)?[-a-zA-Z0-9]{1,256}\.com" ) -> Url { match Url::parse(&url) { Ok(url) => url, @@ -123,7 +124,7 @@ pub mod arbitraries { extra_output in option::of(arb_synced_dir()), min_available_memory_mb in any::(), machine_identity in arb_machine_identity(), - tags in prop::collection::hash_map(".*", ".*", 10), + tags in prop::collection::hash_map(".*", ".*", 3), from_agent_to_task_endpoint in ".*", from_task_to_agent_endpoint in ".*", ) -> CommonConfig { @@ -240,4 +241,229 @@ pub mod arbitraries { arb_merge_config().boxed() } } + + prop_compose! 
{ + fn arb_coverage_config()( + target_exe in arb_pathbuf(), + target_env in prop::collection::hash_map(".*", ".*", 10), + target_options in arb_string_vec_no_vars(), + target_timeout in option::of(any::()), + coverage_filter in option::of(".*"), + module_allowlist in option::of(".*"), + source_allowlist in option::of(".*"), + input_queue in Just(None), + readonly_inputs in prop::collection::vec(arb_synced_dir(), 10), + coverage in arb_synced_dir(), + common in arb_common_config(), + ) -> coverage::generic::Config { + coverage::generic::Config { + target_exe, + target_env, + target_options, + target_timeout, + coverage_filter, + module_allowlist, + source_allowlist, + input_queue, + readonly_inputs, + coverage, + common, + } + } + } + + impl Arbitrary for coverage::generic::Config { + type Parameters = (); + type Strategy = BoxedStrategy; + + fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { + arb_coverage_config().boxed() + } + } + + prop_compose! { + fn arb_dotnet_coverage_config()( + target_exe in arb_pathbuf(), + target_env in prop::collection::hash_map(".*", ".*", 10), + target_options in arb_string_vec_no_vars(), + target_timeout in option::of(any::()), + input_queue in Just(None), + readonly_inputs in prop::collection::vec(arb_synced_dir(), 10), + coverage in arb_synced_dir(), + tools in arb_synced_dir(), + common in arb_common_config(), + ) -> coverage::dotnet::Config { + coverage::dotnet::Config { + target_exe, + target_env, + target_options, + target_timeout, + input_queue, + readonly_inputs, + coverage, + tools, + common, + } + } + } + + impl Arbitrary for coverage::dotnet::Config { + type Parameters = (); + type Strategy = BoxedStrategy; + + fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { + arb_dotnet_coverage_config().boxed() + } + } + + prop_compose! 
{ + fn arb_dotnet_report_config()( + target_exe in arb_pathbuf(), + target_env in prop::collection::hash_map(".*", ".*", 10), + target_options in arb_string_vec_no_vars(), + target_timeout in option::of(any::()), + input_queue in Just(None), + crashes in option::of(arb_synced_dir()), + reports in option::of(arb_synced_dir()), + unique_reports in option::of(arb_synced_dir()), + no_repro in option::of(arb_synced_dir()), + tools in arb_synced_dir(), + check_fuzzer_help in any::(), + check_retry_count in any::(), + minimized_stack_depth in option::of(any::()), + check_queue in any::(), + common in arb_common_config(), + ) -> report::dotnet::generic::Config { + report::dotnet::generic::Config { + target_exe, + target_env, + target_options, + target_timeout, + input_queue, + crashes, + reports, + unique_reports, + no_repro, + tools, + check_fuzzer_help, + check_retry_count, + minimized_stack_depth, + check_queue, + common, + } + } + } + + impl Arbitrary for report::dotnet::generic::Config { + type Parameters = (); + type Strategy = BoxedStrategy; + + fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { + arb_dotnet_report_config().boxed() + } + } + + prop_compose! 
{ + fn arb_generator_fuzz_config()( + generator_exe in Just("src/lib.rs".to_string()), + generator_env in prop::collection::hash_map(".*", ".*", 10), + generator_options in arb_string_vec_no_vars(), + readonly_inputs in prop::collection::vec(arb_synced_dir(), 10), + crashes in arb_synced_dir(), + tools in option::of(arb_synced_dir()), + target_exe in arb_pathbuf(), + target_env in prop::collection::hash_map(".*", ".*", 10), + target_options in arb_string_vec_no_vars(), + target_timeout in option::of(any::()), + check_asan_log in any::(), + check_debugger in any::(), + check_retry_count in any::(), + rename_output in any::(), + ensemble_sync_delay in option::of(any::()), + common in arb_common_config(), + ) -> fuzz::generator::Config { + fuzz::generator::Config { + generator_exe, + generator_env, + generator_options, + readonly_inputs, + crashes, + tools, + target_exe, + target_env, + target_options, + target_timeout, + check_asan_log, + check_debugger, + check_retry_count, + rename_output, + ensemble_sync_delay, + common, + } + } + } + + impl Arbitrary for fuzz::generator::Config { + type Parameters = (); + type Strategy = BoxedStrategy; + + fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { + arb_generator_fuzz_config().boxed() + } + } + + prop_compose! 
{ + fn arb_supervisor_config()( + inputs in arb_synced_dir(), + crashes in arb_synced_dir(), + crashdumps in option::of(arb_synced_dir()), + supervisor_exe in Just("src/lib.rs".to_string()), + supervisor_env in prop::collection::hash_map(".*", ".*", 0), + supervisor_options in arb_string_vec_no_vars(), + supervisor_input_marker in option::of(".*"), + target_exe in option::of(arb_pathbuf()), + target_options in option::of(arb_string_vec_no_vars()), + tools in option::of(arb_synced_dir()), + wait_for_files in Just(None), + stats_file in Just(None), + stats_format in Just(None), + ensemble_sync_delay in Just(None), + reports in option::of(arb_synced_dir()), + unique_reports in Just(None), + no_repro in Just(None), + coverage in option::of(arb_synced_dir()), + common in arb_common_config(), + ) -> fuzz::supervisor::SupervisorConfig { + fuzz::supervisor::SupervisorConfig { + inputs, + crashes, + crashdumps, + supervisor_exe, + supervisor_env, + supervisor_options, + supervisor_input_marker, + target_exe, + target_options, + tools, + wait_for_files, + stats_file, + stats_format, + ensemble_sync_delay, + reports, + unique_reports, + no_repro, + coverage, + common, + } + } + } + + impl Arbitrary for fuzz::supervisor::SupervisorConfig { + type Parameters = (); + type Strategy = BoxedStrategy; + + fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { + arb_supervisor_config().boxed() + } + } } diff --git a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs index 4ed14a4324..e54f796d77 100644 --- a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs @@ -49,8 +49,9 @@ pub struct Config { } impl GetExpand for Config { - fn get_expand<'a>(&'a self) -> Expand<'a> { - self.common.get_expand() + fn get_expand<'a>(&'a self) -> Result> { + Ok( + self.common.get_expand()? 
.analyzer_exe(&self.analyzer_exe) .analyzer_options(&self.analyzer_options) .target_exe(&self.target_exe) @@ -74,6 +75,7 @@ impl GetExpand for Config { |expand, container| expand.crashes_container(container), ) }) + ) } } @@ -236,7 +238,7 @@ pub async fn run_tool( let target_exe = try_resolve_setup_relative_path(&config.common.setup_dir, &config.target_exe).await?; - let expand = config.get_expand() + let expand = config.get_expand()? .input_path(&input) // Only this one is dynamic, the other two should probably be a part of the config .target_exe(&target_exe) .set_optional_ref(reports_dir, Expand::reports_dir); @@ -310,7 +312,10 @@ mod tests { fn test_get_expand_values_match_config( config in any::(), ) { - let expand = config.get_expand(); + let expand = match config.get_expand() { + Ok(expand) => expand, + Err(err) => panic!("error getting expand: {}", err), + }; let params = config.get_expand_fields(); for (param, expected) in params.iter() { diff --git a/src/agent/onefuzz-task/src/tasks/config.rs b/src/agent/onefuzz-task/src/tasks/config.rs index cfbb9fa09c..f440121f6a 100644 --- a/src/agent/onefuzz-task/src/tasks/config.rs +++ b/src/agent/onefuzz-task/src/tasks/config.rs @@ -126,8 +126,9 @@ impl CommonConfig { } impl GetExpand for CommonConfig { - fn get_expand<'a>(&'a self) -> Expand<'a> { - Expand::new(&self.machine_identity) + fn get_expand<'a>(&'a self) -> Result> { + Ok( + Expand::new(&self.machine_identity) .machine_id() .job_id(&self.job_id) .task_id(&self.task_id) @@ -147,6 +148,7 @@ impl GetExpand for CommonConfig { .set_optional_ref(&self.extra_output, |expand, extra_output| { expand.extra_output_dir(extra_output.local_path.as_path()) }) + ) } } @@ -429,7 +431,12 @@ mod tests { fn test_get_expand_values_match_config( config in any::(), ) { - let expand = config.get_expand(); + // This function implementation is repeated across all config tests + // There might be a way to share it by taking advantage of the `GetExpandFields` trait, but I'm not sure 
how + let expand = match config.get_expand() { + Ok(expand) => expand, + Err(err) => panic!("error getting expand: {}", err), + }; let params = config.get_expand_fields(); for (param, expected) in params.iter() { diff --git a/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs b/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs index 93afd1dfd7..146bdaf092 100644 --- a/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs +++ b/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs @@ -4,7 +4,7 @@ use anyhow::{Context, Result}; use async_trait::async_trait; use onefuzz::{ - expand::{Expand, PlaceHolder}, + expand::{Expand, PlaceHolder, GetExpand}, monitor::DirectoryMonitor, syncdir::SyncedDir, }; @@ -56,6 +56,18 @@ impl Config { } } +impl GetExpand for Config { + fn get_expand<'a>(&'a self) -> Result> { + Ok( + self.common.get_expand()? + .target_exe(&self.target_exe) + .target_options(&self.target_options) + .coverage_dir(&self.coverage.local_path) + .tools_dir(self.tools.local_path.to_string_lossy().into_owned()) + ) + } +} + pub struct DotnetCoverageTask { config: Config, poller: InputPoller, @@ -263,7 +275,7 @@ impl<'a> TaskContext<'a> { async fn target_exe(&self) -> Result { let tools_dir = self.config.tools.local_path.to_string_lossy().into_owned(); - // Try to expand `target_exe` with support for `{tools_dir}`. + // Try to expand `target_exe` with support for `{tools_dir}` and the rest. // // Allows using `LibFuzzerDotnetLoader.exe` from a shared tools container. let expand = Expand::new(&self.config.common.machine_identity).tools_dir(tools_dir); @@ -293,18 +305,9 @@ impl<'a> TaskContext<'a> { async fn command_for_input(&self, input: &Path) -> Result { let target_exe = self.target_exe().await?; - let expand = Expand::new(&self.config.common.machine_identity) - .machine_id() + let expand = self.config.get_expand()? 
.input_path(input) - .job_id(&self.config.common.job_id) - .setup_dir(&self.config.common.setup_dir) - .set_optional_ref(&self.config.common.extra_setup_dir, Expand::extra_setup_dir) - .set_optional_ref(&self.config.common.extra_output, |expand, value| { - expand.extra_output_dir(value.local_path.as_path()) - }) - .target_exe(&target_exe) - .target_options(&self.config.target_options) - .task_id(&self.config.common.task_id); + .target_exe(&target_exe); let dotnet_coverage_path = &self.dotnet_coverage_path; let dotnet_path = &self.dotnet_path; @@ -458,3 +461,43 @@ impl<'a> Processor for TaskContext<'a> { Ok(()) } } + +#[cfg(test)] +mod tests { + use proptest::prelude::*; + use onefuzz::expand::{GetExpand, PlaceHolder}; + + use crate::config_test_utils::GetExpandFields; + + use super::Config; + + impl GetExpandFields for Config { + fn get_expand_fields(&self) -> Vec<(PlaceHolder, String)> { + let mut params = self.common.get_expand_fields(); + params.push((PlaceHolder::TargetExe, dunce::canonicalize(&self.target_exe).unwrap().to_string_lossy().to_string())); + params.push((PlaceHolder::TargetOptions, self.target_options.join(" "))); + params.push((PlaceHolder::CoverageDir, dunce::canonicalize(&self.coverage.local_path).unwrap().to_string_lossy().to_string())); + params.push((PlaceHolder::ToolsDir, dunce::canonicalize(&self.tools.local_path).unwrap().to_string_lossy().to_string())); + + params + } + } + + proptest! 
{ + #[test] + fn test_get_expand_values_match_config( + config in any::(), + ) { + let expand = match config.get_expand() { + Ok(expand) => expand, + Err(err) => panic!("error getting expand: {}", err), + }; + let params = config.get_expand_fields(); + + for (param, expected) in params.iter() { + let evaluated = expand.evaluate_value(format!("{}", param.get_string())).unwrap(); + assert_eq!(evaluated, *expected, "placeholder {} did not match expected value", param.get_string()); + } + } + } +} diff --git a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs index 704188293b..3c53288b15 100644 --- a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs @@ -20,7 +20,7 @@ use debuggable_module::loader::Loader; use debuggable_module::path::FilePath; use debuggable_module::Module; use onefuzz::env::LD_LIBRARY_PATH; -use onefuzz::expand::{Expand, PlaceHolder}; +use onefuzz::expand::{Expand, PlaceHolder, GetExpand}; use onefuzz::syncdir::SyncedDir; use onefuzz_file_format::coverage::{ binary::{v1::BinaryCoverageJson as BinaryCoverageJsonV1, BinaryCoverageJson}, @@ -82,6 +82,16 @@ impl Config { } } +impl GetExpand for Config { + fn get_expand<'a>(&'a self) -> Result> { + Ok( + self.common.get_expand()? 
+ .target_options(&self.target_options) + .coverage_dir(&self.coverage.local_path) + ) + } +} + pub struct CoverageTask { config: Config, poller: InputPoller, @@ -348,18 +358,9 @@ impl<'a> TaskContext<'a> { try_resolve_setup_relative_path(&self.config.common.setup_dir, &self.config.target_exe) .await?; - let expand = Expand::new(&self.config.common.machine_identity) - .machine_id() - .input_path(input) - .job_id(&self.config.common.job_id) - .setup_dir(&self.config.common.setup_dir) - .set_optional_ref(&self.config.common.extra_setup_dir, Expand::extra_setup_dir) - .set_optional_ref(&self.config.common.extra_output, |expand, value| { - expand.extra_output_dir(value.local_path.as_path()) - }) + let expand = self.config.get_expand()? .target_exe(&target_exe) - .target_options(&self.config.target_options) - .task_id(&self.config.common.task_id); + .input_path(input); let mut cmd = Command::new(&target_exe); @@ -603,3 +604,41 @@ impl CoverageStats { stats } } + +#[cfg(test)] +mod tests { + use proptest::prelude::*; + use onefuzz::expand::{GetExpand, PlaceHolder}; + + use crate::config_test_utils::GetExpandFields; + + use super::Config; + + impl GetExpandFields for Config { + fn get_expand_fields(&self) -> Vec<(PlaceHolder, String)> { + let mut params = self.common.get_expand_fields(); + params.push((PlaceHolder::TargetOptions, self.target_options.join(" "))); + params.push((PlaceHolder::CoverageDir, dunce::canonicalize(&self.coverage.local_path).unwrap().to_string_lossy().to_string())); + + params + } + } + + proptest! 
{ + #[test] + fn test_get_expand_values_match_config( + config in any::(), + ) { + let expand = match config.get_expand() { + Ok(expand) => expand, + Err(err) => panic!("error getting expand: {}", err), + }; + let params = config.get_expand_fields(); + + for (param, expected) in params.iter() { + let evaluated = expand.evaluate_value(format!("{}", param.get_string())).unwrap(); + assert_eq!(evaluated, *expected, "placeholder {} did not match expected value", param.get_string()); + } + } + } +} diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs index bd7511cac2..3dd2eacd66 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs @@ -8,7 +8,7 @@ use crate::tasks::{ }; use anyhow::{Context, Result}; use onefuzz::{ - expand::Expand, + expand::{Expand, GetExpand}, fs::set_executable, input_tester::Tester, process::monitor_process, @@ -51,6 +51,22 @@ pub struct Config { pub common: CommonConfig, } +impl GetExpand for Config { + fn get_expand<'a>(&'a self) -> Result> { + Ok( + self.common.get_expand()? 
+ .generator_exe(&self.generator_exe) + .generator_options(&self.generator_options) + .crashes(&self.crashes.local_path) + .target_exe(&self.target_exe) + .target_options(&self.target_options) + .set_optional_ref(&self.tools, |expand, tools| { + expand.tools_dir(&tools.local_path) + }) + ) + } +} + pub struct GeneratorTask { config: Config, } @@ -169,29 +185,9 @@ impl GeneratorTask { ) -> Result<()> { utils::reset_tmp_dir(&output_dir).await?; let (mut generator, generator_path) = { - let expand = Expand::new(&self.config.common.machine_identity) - .machine_id() - .setup_dir(&self.config.common.setup_dir) - .set_optional_ref(&self.config.common.extra_setup_dir, Expand::extra_setup_dir) - .set_optional_ref(&self.config.common.extra_output, |expand, value| { - expand.extra_output_dir(value.local_path.as_path()) - }) + let expand = self.config.get_expand()? .generated_inputs(&output_dir) - .input_corpus(&corpus_dir) - .generator_exe(&self.config.generator_exe) - .generator_options(&self.config.generator_options) - .job_id(&self.config.common.job_id) - .task_id(&self.config.common.task_id) - .set_optional_ref( - &self.config.common.microsoft_telemetry_key, - |tester, key| tester.microsoft_telemetry_key(key), - ) - .set_optional_ref(&self.config.common.instance_telemetry_key, |tester, key| { - tester.instance_telemetry_key(key) - }) - .set_optional_ref(&self.config.tools, |expand, tools| { - expand.tools_dir(&tools.local_path) - }); + .input_corpus(&corpus_dir); let generator_path = expand.evaluate_value(&self.config.generator_exe)?; @@ -225,109 +221,153 @@ impl GeneratorTask { } } +#[cfg(test)] mod tests { - #[tokio::test] - #[cfg(target_os = "linux")] - #[ignore] - async fn test_radamsa_linux() -> anyhow::Result<()> { - use super::{Config, GeneratorTask}; - use crate::tasks::config::CommonConfig; - use onefuzz::blob::BlobContainerUrl; - use onefuzz::syncdir::SyncedDir; - use reqwest::Url; - use std::collections::HashMap; - use std::env; - use tempfile::tempdir; - - let 
crashes_temp = tempfile::tempdir()?; - let crashes: &std::path::Path = crashes_temp.path(); - - let inputs_temp = tempfile::tempdir()?; - let inputs: &std::path::Path = inputs_temp.path(); - let input_file = inputs.join("seed.txt"); - tokio::fs::write(input_file, "test").await?; - - let generator_options: Vec = vec![ - "-o", - "{generated_inputs}/input-%n-%s", - "-n", - "100", - "-r", - "{input_corpus}", - ] - .iter() - .map(|p| p.to_string()) - .collect(); - - let radamsa_path = env::var("ONEFUZZ_TEST_RADAMSA_LINUX")?; - let radamsa_as_path = std::path::Path::new(&radamsa_path); - let radamsa_dir = radamsa_as_path.parent().unwrap(); - - let readonly_inputs_local = tempfile::tempdir().unwrap().path().into(); - let crashes_local = tempfile::tempdir().unwrap().path().into(); - let tools_local = tempfile::tempdir().unwrap().path().into(); - let config = Config { - generator_exe: String::from("{tools_dir}/radamsa"), - generator_options, - readonly_inputs: vec![SyncedDir { - local_path: readonly_inputs_local, - remote_path: Some(BlobContainerUrl::parse( - Url::from_directory_path(inputs).unwrap(), - )?), - }], - crashes: SyncedDir { - local_path: crashes_local, - remote_path: Some(BlobContainerUrl::parse( - Url::from_directory_path(crashes).unwrap(), - )?), - }, - tools: Some(SyncedDir { - local_path: tools_local, - remote_path: Some(BlobContainerUrl::parse( - Url::from_directory_path(radamsa_dir).unwrap(), - )?), - }), - target_exe: Default::default(), - target_env: Default::default(), - target_options: Default::default(), - target_timeout: None, - check_asan_log: false, - check_debugger: false, - rename_output: false, - ensemble_sync_delay: None, - generator_env: HashMap::default(), - check_retry_count: 0, - common: CommonConfig { - job_id: Default::default(), - task_id: Default::default(), - instance_id: Default::default(), - heartbeat_queue: Default::default(), - job_result_queue: Default::default(), - instance_telemetry_key: Default::default(), - 
microsoft_telemetry_key: Default::default(), - logs: Default::default(), - setup_dir: Default::default(), - extra_setup_dir: Default::default(), - extra_output: Default::default(), - min_available_memory_mb: Default::default(), - machine_identity: onefuzz::machine_id::MachineIdentity { - machine_id: uuid::Uuid::new_v4(), - machine_name: "test".to_string(), - scaleset_name: None, - }, - tags: Default::default(), - from_agent_to_task_endpoint: "/".to_string(), - from_task_to_agent_endpoint: "/".to_string(), - }, - }; - let task = GeneratorTask::new(config); + use proptest::prelude::*; + use onefuzz::expand::{GetExpand, PlaceHolder}; + + use crate::config_test_utils::GetExpandFields; + + use super::Config; + + impl GetExpandFields for Config { + fn get_expand_fields(&self) -> Vec<(PlaceHolder, String)> { + let mut params = self.common.get_expand_fields(); + params.push((PlaceHolder::GeneratorExe, dunce::canonicalize(&self.generator_exe).unwrap().to_string_lossy().to_string())); + params.push((PlaceHolder::GeneratorOptions, self.generator_options.join(" "))); + params.push((PlaceHolder::Crashes, dunce::canonicalize(&self.crashes.local_path).unwrap().to_string_lossy().to_string())); + params.push((PlaceHolder::TargetExe, dunce::canonicalize(&self.target_exe).unwrap().to_string_lossy().to_string())); + params.push((PlaceHolder::TargetOptions, self.target_options.join(" "))); + if let Some(dir) = &self.tools { + params.push((PlaceHolder::ToolsDir, dunce::canonicalize(&dir.local_path).unwrap().to_string_lossy().to_string())); + } - let generated_inputs = tempdir()?; - task.generate_inputs(inputs.to_path_buf(), generated_inputs.path()) - .await?; + params + } + } - let count = std::fs::read_dir(generated_inputs.path())?.count(); - assert_eq!(count, 100, "No inputs generated"); - Ok(()) + proptest! 
{ + #[test] + fn test_get_expand_values_match_config( + config in any::(), + ) { + let expand = match config.get_expand() { + Ok(expand) => expand, + Err(err) => panic!("error getting expand: {}", err), + }; + let params = config.get_expand_fields(); + + for (param, expected) in params.iter() { + let evaluated = expand.evaluate_value(format!("{}", param.get_string())).unwrap(); + assert_eq!(evaluated, *expected, "placeholder {} did not match expected value", param.get_string()); + } + } + } + + #[cfg(target_os = "linux")] + mod linux { + #[tokio::test] + #[ignore] + async fn test_radamsa_linux() -> anyhow::Result<()> { + use super::{Config, GeneratorTask}; + use crate::tasks::config::CommonConfig; + use onefuzz::blob::BlobContainerUrl; + use onefuzz::syncdir::SyncedDir; + use reqwest::Url; + use std::collections::HashMap; + use std::env; + use tempfile::tempdir; + + let crashes_temp = tempfile::tempdir()?; + let crashes: &std::path::Path = crashes_temp.path(); + + let inputs_temp = tempfile::tempdir()?; + let inputs: &std::path::Path = inputs_temp.path(); + let input_file = inputs.join("seed.txt"); + tokio::fs::write(input_file, "test").await?; + + let generator_options: Vec = vec![ + "-o", + "{generated_inputs}/input-%n-%s", + "-n", + "100", + "-r", + "{input_corpus}", + ] + .iter() + .map(|p| p.to_string()) + .collect(); + + let radamsa_path = env::var("ONEFUZZ_TEST_RADAMSA_LINUX")?; + let radamsa_as_path = std::path::Path::new(&radamsa_path); + let radamsa_dir = radamsa_as_path.parent().unwrap(); + + let readonly_inputs_local = tempfile::tempdir().unwrap().path().into(); + let crashes_local = tempfile::tempdir().unwrap().path().into(); + let tools_local = tempfile::tempdir().unwrap().path().into(); + let config = Config { + generator_exe: String::from("{tools_dir}/radamsa"), + generator_options, + readonly_inputs: vec![SyncedDir { + local_path: readonly_inputs_local, + remote_path: Some(BlobContainerUrl::parse( + Url::from_directory_path(inputs).unwrap(), + )?), 
+ }], + crashes: SyncedDir { + local_path: crashes_local, + remote_path: Some(BlobContainerUrl::parse( + Url::from_directory_path(crashes).unwrap(), + )?), + }, + tools: Some(SyncedDir { + local_path: tools_local, + remote_path: Some(BlobContainerUrl::parse( + Url::from_directory_path(radamsa_dir).unwrap(), + )?), + }), + target_exe: Default::default(), + target_env: Default::default(), + target_options: Default::default(), + target_timeout: None, + check_asan_log: false, + check_debugger: false, + rename_output: false, + ensemble_sync_delay: None, + generator_env: HashMap::default(), + check_retry_count: 0, + common: CommonConfig { + job_id: Default::default(), + task_id: Default::default(), + instance_id: Default::default(), + heartbeat_queue: Default::default(), + job_result_queue: Default::default(), + instance_telemetry_key: Default::default(), + microsoft_telemetry_key: Default::default(), + logs: Default::default(), + setup_dir: Default::default(), + extra_setup_dir: Default::default(), + extra_output: Default::default(), + min_available_memory_mb: Default::default(), + machine_identity: onefuzz::machine_id::MachineIdentity { + machine_id: uuid::Uuid::new_v4(), + machine_name: "test".to_string(), + scaleset_name: None, + }, + tags: Default::default(), + from_agent_to_task_endpoint: "/".to_string(), + from_task_to_agent_endpoint: "/".to_string(), + }, + }; + let task = GeneratorTask::new(config); + + let generated_inputs = tempdir()?; + task.generate_inputs(inputs.to_path_buf(), generated_inputs.path()) + .await?; + + let count = std::fs::read_dir(generated_inputs.path())?.count(); + assert_eq!(count, 100, "No inputs generated"); + Ok(()) + } } } diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs index 3f00e20b8d..c58b884c7e 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs @@ -11,7 +11,7 @@ use crate::tasks::{ }; use 
anyhow::{Context, Error, Result}; use onefuzz::{ - expand::Expand, + expand::{Expand, GetExpand}, fs::{has_files, set_executable, OwnedDir}, jitter::delay_with_jitter, process::monitor_process, @@ -61,6 +61,44 @@ pub struct SupervisorConfig { pub common: CommonConfig, } +impl GetExpand for SupervisorConfig { + fn get_expand<'a>(&'a self) -> Result> { + Ok( + self.common.get_expand()? + .input_corpus(&self.inputs.local_path) + .supervisor_exe(&self.supervisor_exe) + .supervisor_options(&self.supervisor_options) + .set_optional_ref(&self.target_exe, Expand::target_exe) + .set_optional_ref(&self.supervisor_input_marker, |expand, input_marker| { + expand.input_marker(input_marker) + }) + .set_optional_ref(&self.target_options, |expand, target_options| { + expand.target_options(&target_options) + }) + .set_optional_ref(&self.tools, |expand, tools| { + expand.tools_dir(&tools.local_path) + }) + .set_optional_ref(&self.coverage, |expand, coverage| { + expand.coverage_dir(&coverage.local_path) + }) + .set_optional_ref(&self.crashdumps, |expand, crashdumps| { + expand.crashdumps(&crashdumps.local_path) + }) + .set_optional_ref(&self.reports, |expand, reports| { + expand.reports_dir(&reports.local_path) + }) + .set_optional_ref( + &self.crashes.remote_path.clone().and_then(|u| u.account()), + |expand, account| expand.crashes_account(account), + ) + .set_optional_ref( + &self.crashes.remote_path.clone().and_then(|u| u.container()), + |expand, container| expand.crashes_container(container), + ) + ) + } +} + const HEARTBEAT_PERIOD: Duration = Duration::from_secs(60); pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> { @@ -252,57 +290,17 @@ async fn start_supervisor( None }; - let expand = Expand::new(&config.common.machine_identity) - .machine_id() - .supervisor_exe(&config.supervisor_exe) - .supervisor_options(&config.supervisor_options) + let expand = config.get_expand()? 
.runtime_dir(&runtime_dir) .crashes(&crashes.local_path) - .set_optional_ref(&crashdumps, |expand, crashdumps| { - expand.crashdumps(&crashdumps.local_path) - }) - .input_corpus(&inputs.local_path) + .input_corpus(&inputs.local_path) // And this one too... .reports_dir(reports_dir) - .setup_dir(&config.common.setup_dir) - .set_optional_ref(&config.common.extra_setup_dir, Expand::extra_setup_dir) - .set_optional_ref(&config.common.extra_output, |expand, value| { - expand.extra_output_dir(value.local_path.as_path()) - }) - .job_id(&config.common.job_id) - .task_id(&config.common.task_id) - .set_optional_ref(&config.tools, |expand, tools| { - expand.tools_dir(&tools.local_path) - }) - .set_optional_ref(&config.coverage, |expand, coverage| { - expand.coverage_dir(&coverage.local_path) + .set_optional_ref(&crashdumps, |expand, crashdumps| { // Why isn't this value in the config? + expand.crashdumps(&crashdumps.local_path) }) .set_optional_ref(&target_exe, |expand, target_exe| { - expand.target_exe(target_exe) - }) - .set_optional_ref(&config.supervisor_input_marker, |expand, input_marker| { - expand.input_marker(input_marker) - }) - .set_optional_ref(&config.target_options, |expand, target_options| { - expand.target_options(target_options) - }) - .set_optional_ref(&config.common.microsoft_telemetry_key, |expand, key| { - expand.microsoft_telemetry_key(key) - }) - .set_optional_ref(&config.common.instance_telemetry_key, |expand, key| { - expand.instance_telemetry_key(key) - }) - .set_optional_ref( - &config.crashes.remote_path.clone().and_then(|u| u.account()), - |expand, account| expand.crashes_account(account), - ) - .set_optional_ref( - &config - .crashes - .remote_path - .clone() - .and_then(|u| u.container()), - |expand, container| expand.crashes_container(container), - ); + expand.target_exe(&target_exe) + }); let supervisor_path = expand.evaluate_value(&config.supervisor_exe)?; let mut cmd = Command::new(supervisor_path); @@ -328,177 +326,242 @@ async fn 
start_supervisor( } #[cfg(test)] -#[cfg(target_os = "linux")] mod tests { - use super::*; - use crate::tasks::stats::afl::read_stats; - use onefuzz::blob::BlobContainerUrl; - use onefuzz::machine_id::MachineIdentity; - use onefuzz::process::monitor_process; - use onefuzz_telemetry::EventData; - use reqwest::Url; - use std::collections::HashMap; - use std::env; - use std::time::Instant; - - const MAX_FUZZ_TIME_SECONDS: u64 = 120; - - async fn has_stats(path: &PathBuf) -> bool { - if let Ok(stats) = read_stats(path).await { - for entry in stats { - if matches!(entry, EventData::ExecsSecond(x) if x > 0.0) { - return true; - } - } - false - } else { - false - } - } - - #[tokio::test] - #[cfg_attr(not(feature = "integration_test"), ignore)] - async fn test_fuzzer_linux() { - let runtime_dir = tempfile::tempdir().unwrap(); - - let supervisor_exe = if let Ok(x) = env::var("ONEFUZZ_TEST_AFL_LINUX_FUZZER") { - x - } else { - warn!("Unable to test AFL integration"); - return; - }; + use proptest::prelude::*; + use onefuzz::expand::{GetExpand, PlaceHolder}; - let target_exe = if let Ok(x) = env::var("ONEFUZZ_TEST_AFL_LINUX_TEST_BINARY") { - Some(x.into()) - } else { - warn!("Unable to test AFL integration"); - return; - }; + use crate::config_test_utils::GetExpandFields; - let reports_dir_temp = tempfile::tempdir().unwrap(); - let reports_dir = reports_dir_temp.path().into(); + use super::SupervisorConfig; - let fault_dir_temp = tempfile::tempdir().unwrap(); - let crashes_local = tempfile::tempdir().unwrap().path().into(); - let crashes = SyncedDir { - local_path: crashes_local, - remote_path: Some( - BlobContainerUrl::parse(Url::from_directory_path(fault_dir_temp).unwrap()).unwrap(), - ), - }; + impl GetExpandFields for SupervisorConfig { + fn get_expand_fields(&self) -> Vec<(PlaceHolder, String)> { + let mut params = self.common.get_expand_fields(); + params.push((PlaceHolder::InputCorpus, 
dunce::canonicalize(&self.inputs.local_path).unwrap().to_string_lossy().to_string())); + params.push((PlaceHolder::SupervisorExe, dunce::canonicalize(&self.supervisor_exe).unwrap().to_string_lossy().to_string())); + params.push((PlaceHolder::SupervisorOptions, self.supervisor_options.join(" "))); + if let Some(target_exe) = &self.target_exe { + params.push((PlaceHolder::TargetExe, dunce::canonicalize(&target_exe).unwrap().to_string_lossy().to_string())); + } + if let Some(input_marker) = &self.supervisor_input_marker { + params.push((PlaceHolder::Input, input_marker.clone())); + } + if let Some(target_options) = &self.target_options { + params.push((PlaceHolder::TargetOptions, target_options.join(" "))); + } + if let Some(tools) = &self.tools { + params.push((PlaceHolder::ToolsDir, dunce::canonicalize(&tools.local_path).unwrap().to_string_lossy().to_string())); + } + if let Some(coverage) = &self.coverage { + params.push((PlaceHolder::CoverageDir, dunce::canonicalize(&coverage.local_path).unwrap().to_string_lossy().to_string())); + } + if let Some(crashdumps) = &self.crashdumps { + params.push((PlaceHolder::Crashdumps, dunce::canonicalize(&crashdumps.local_path).unwrap().to_string_lossy().to_string())); + } + if let Some(reports) = &self.reports { + params.push((PlaceHolder::ReportsDir, dunce::canonicalize(&reports.local_path).unwrap().to_string_lossy().to_string())); + } + if let Some(account) = &self.crashes.remote_path.clone().and_then(|u| u.account()) { + params.push((PlaceHolder::CrashesAccount, account.clone())); + } + if let Some(container) = &self.crashes.remote_path.clone().and_then(|u| u.container()) { + params.push((PlaceHolder::CrashesContainer, container.clone())); + } - let crashdumps_dir_temp = tempfile::tempdir().unwrap(); - let crashdumps_local = tempfile::tempdir().unwrap().path().into(); - let crashdumps = SyncedDir { - local_path: crashdumps_local, - remote_path: Some( - 
BlobContainerUrl::parse(Url::from_directory_path(crashdumps_dir_temp).unwrap()) - .unwrap(), - ), - }; + params + } + } - let corpus_dir_local = tempfile::tempdir().unwrap().path().into(); - let corpus_dir_temp = tempfile::tempdir().unwrap(); - let corpus_dir = SyncedDir { - local_path: corpus_dir_local, - remote_path: Some( - BlobContainerUrl::parse(Url::from_directory_path(corpus_dir_temp).unwrap()) - .unwrap(), - ), - }; - let seed_file_name = corpus_dir.local_path.join("seed.txt"); - tokio::fs::write(seed_file_name, "xyz").await.unwrap(); - - let target_options = Some(vec!["{input}".to_owned()]); - let supervisor_env = HashMap::new(); - let supervisor_options: Vec<_> = vec![ - "-d", - "-i", - "{input_corpus}", - "-o", - "{crashes}", - "--", - "{target_exe}", - "{target_options}", - ] - .iter() - .map(|p| p.to_string()) - .collect(); - - // AFL input marker - let supervisor_input_marker = Some("@@".to_owned()); - - let config = SupervisorConfig { - supervisor_exe, - supervisor_env, - supervisor_options, - supervisor_input_marker, - target_exe, - target_options, - inputs: corpus_dir.clone(), - crashes: crashes.clone(), - crashdumps: Some(crashdumps.clone()), - tools: None, - wait_for_files: None, - stats_file: None, - stats_format: None, - ensemble_sync_delay: None, - reports: None, - unique_reports: None, - no_repro: None, - coverage: None, - common: CommonConfig { - job_id: Default::default(), - task_id: Default::default(), - instance_id: Default::default(), - heartbeat_queue: Default::default(), - job_result_queue: Default::default(), - instance_telemetry_key: Default::default(), - microsoft_telemetry_key: Default::default(), - logs: Default::default(), - setup_dir: Default::default(), - extra_setup_dir: Default::default(), - extra_output: Default::default(), - min_available_memory_mb: Default::default(), - machine_identity: MachineIdentity { - machine_id: uuid::Uuid::new_v4(), - machine_name: "test".to_string(), - scaleset_name: None, - }, - tags: 
Default::default(), - from_agent_to_task_endpoint: "/".to_string(), - from_task_to_agent_endpoint: "/".to_string(), - }, - }; + proptest! { + #[test] + fn test_get_expand_values_match_config( + config in any::(), + ) { + let expand = match config.get_expand() { + Ok(expand) => expand, + Err(err) => panic!("error getting expand: {}", err), + }; + let params = config.get_expand_fields(); - let process = start_supervisor( - runtime_dir, - &config, - &crashes, - Some(&crashdumps), - &corpus_dir, - reports_dir, - ) - .await - .unwrap(); - - let notify = Notify::new(); - let _fuzzing_monitor = - monitor_process(process, "supervisor".to_string(), false, Some(¬ify)); - let stat_output = crashes.local_path.join("fuzzer_stats"); - let start = Instant::now(); - loop { - if has_stats(&stat_output).await { - break; + for (param, expected) in params.iter() { + let evaluated = expand.evaluate_value(format!("{}", param.get_string())).unwrap(); + assert_eq!(evaluated, *expected, "placeholder {} did not match expected value", param.get_string()); } + } + } - if start.elapsed().as_secs() > MAX_FUZZ_TIME_SECONDS { - panic!( - "afl did not generate stats in {} seconds", - MAX_FUZZ_TIME_SECONDS - ); + #[cfg(target_os = "linux")] + mod linux { + use super::*; + use crate::tasks::stats::afl::read_stats; + use onefuzz::blob::BlobContainerUrl; + use onefuzz::machine_id::MachineIdentity; + use onefuzz::process::monitor_process; + use onefuzz_telemetry::EventData; + use reqwest::Url; + use std::collections::HashMap; + use std::env; + use std::time::Instant; + + const MAX_FUZZ_TIME_SECONDS: u64 = 120; + + async fn has_stats(path: &PathBuf) -> bool { + if let Ok(stats) = read_stats(path).await { + for entry in stats { + if matches!(entry, EventData::ExecsSecond(x) if x > 0.0) { + return true; + } + } + false + } else { + false + } + } + + #[tokio::test] + #[cfg_attr(not(feature = "integration_test"), ignore)] + async fn test_fuzzer_linux() { + let runtime_dir = tempfile::tempdir().unwrap(); + + 
let supervisor_exe = if let Ok(x) = env::var("ONEFUZZ_TEST_AFL_LINUX_FUZZER") { + x + } else { + warn!("Unable to test AFL integration"); + return; + }; + + let target_exe = if let Ok(x) = env::var("ONEFUZZ_TEST_AFL_LINUX_TEST_BINARY") { + Some(x.into()) + } else { + warn!("Unable to test AFL integration"); + return; + }; + + let reports_dir_temp = tempfile::tempdir().unwrap(); + let reports_dir = reports_dir_temp.path().into(); + + let fault_dir_temp = tempfile::tempdir().unwrap(); + let crashes_local = tempfile::tempdir().unwrap().path().into(); + let crashes = SyncedDir { + local_path: crashes_local, + remote_path: Some( + BlobContainerUrl::parse(Url::from_directory_path(fault_dir_temp).unwrap()).unwrap(), + ), + }; + + let crashdumps_dir_temp = tempfile::tempdir().unwrap(); + let crashdumps_local = tempfile::tempdir().unwrap().path().into(); + let crashdumps = SyncedDir { + local_path: crashdumps_local, + remote_path: Some( + BlobContainerUrl::parse(Url::from_directory_path(crashdumps_dir_temp).unwrap()) + .unwrap(), + ), + }; + + let corpus_dir_local = tempfile::tempdir().unwrap().path().into(); + let corpus_dir_temp = tempfile::tempdir().unwrap(); + let corpus_dir = SyncedDir { + local_path: corpus_dir_local, + remote_path: Some( + BlobContainerUrl::parse(Url::from_directory_path(corpus_dir_temp).unwrap()) + .unwrap(), + ), + }; + let seed_file_name = corpus_dir.local_path.join("seed.txt"); + tokio::fs::write(seed_file_name, "xyz").await.unwrap(); + + let target_options = Some(vec!["{input}".to_owned()]); + let supervisor_env = HashMap::new(); + let supervisor_options: Vec<_> = vec![ + "-d", + "-i", + "{input_corpus}", + "-o", + "{crashes}", + "--", + "{target_exe}", + "{target_options}", + ] + .iter() + .map(|p| p.to_string()) + .collect(); + + // AFL input marker + let supervisor_input_marker = Some("@@".to_owned()); + + let config = SupervisorConfig { + supervisor_exe, + supervisor_env, + supervisor_options, + supervisor_input_marker, + target_exe, + 
target_options, + inputs: corpus_dir.clone(), + crashes: crashes.clone(), + crashdumps: Some(crashdumps.clone()), + tools: None, + wait_for_files: None, + stats_file: None, + stats_format: None, + ensemble_sync_delay: None, + reports: None, + unique_reports: None, + no_repro: None, + coverage: None, + common: CommonConfig { + job_id: Default::default(), + task_id: Default::default(), + instance_id: Default::default(), + heartbeat_queue: Default::default(), + job_result_queue: Default::default(), + instance_telemetry_key: Default::default(), + microsoft_telemetry_key: Default::default(), + logs: Default::default(), + setup_dir: Default::default(), + extra_setup_dir: Default::default(), + extra_output: Default::default(), + min_available_memory_mb: Default::default(), + machine_identity: MachineIdentity { + machine_id: uuid::Uuid::new_v4(), + machine_name: "test".to_string(), + scaleset_name: None, + }, + tags: Default::default(), + from_agent_to_task_endpoint: "/".to_string(), + from_task_to_agent_endpoint: "/".to_string(), + }, + }; + + let process = start_supervisor( + runtime_dir, + &config, + &crashes, + Some(&crashdumps), + &corpus_dir, + reports_dir, + ) + .await + .unwrap(); + + let notify = Notify::new(); + let _fuzzing_monitor = + monitor_process(process, "supervisor".to_string(), false, Some(¬ify)); + let stat_output = crashes.local_path.join("fuzzer_stats"); + let start = Instant::now(); + loop { + if has_stats(&stat_output).await { + break; + } + + if start.elapsed().as_secs() > MAX_FUZZ_TIME_SECONDS { + panic!( + "afl did not generate stats in {} seconds", + MAX_FUZZ_TIME_SECONDS + ); + } + tokio::time::sleep(std::time::Duration::from_secs(1)).await; } - tokio::time::sleep(std::time::Duration::from_secs(1)).await; } } } diff --git a/src/agent/onefuzz-task/src/tasks/merge/generic.rs b/src/agent/onefuzz-task/src/tasks/merge/generic.rs index 4b79bfb86f..dfadd3cd9b 100644 --- a/src/agent/onefuzz-task/src/tasks/merge/generic.rs +++ 
b/src/agent/onefuzz-task/src/tasks/merge/generic.rs @@ -33,7 +33,7 @@ pub struct Config { pub target_options_merge: bool, pub tools: SyncedDir, pub input_queue: Url, - pub inputs: SyncedDir, // is this input corpus, generated inputs, or neither? + pub inputs: SyncedDir, pub unique_inputs: SyncedDir, #[serde(flatten)] @@ -41,16 +41,18 @@ pub struct Config { } impl GetExpand for Config { - fn get_expand<'a>(&'a self) -> Expand<'a> { - self.common.get_expand() + fn get_expand<'a>(&'a self) -> Result> { + Ok( + self.common.get_expand()? .input_marker(&self.supervisor_input_marker) - .input_corpus(&self.unique_inputs.local_path) + .input_corpus(&self.unique_inputs.local_path) // TODO: verify that this is correct (should it be self.inputs.local_path?) .target_exe(&self.target_exe) .target_options(&self.target_options) .supervisor_exe(&self.supervisor_exe) .supervisor_options(&self.supervisor_options) - .tools_dir(&self.tools.local_path) + .tools_dir(self.tools.local_path.to_string_lossy().into_owned()) .generated_inputs(&self.inputs.local_path) + ) } } @@ -143,7 +145,7 @@ async fn merge(config: &Config, output_dir: impl AsRef) -> Result<()> { let target_exe = try_resolve_setup_relative_path(&config.common.setup_dir, &config.target_exe).await?; - let expand = config.get_expand() + let expand = config.get_expand()? 
.generated_inputs(output_dir) .target_exe(&target_exe); @@ -206,7 +208,10 @@ mod tests { fn test_get_expand_values_match_config( config in any::(), ) { - let expand = config.get_expand(); + let expand = match config.get_expand() { + Ok(expand) => expand, + Err(err) => panic!("error getting expand: {}", err), + }; let params = config.get_expand_fields(); for (param, expected) in params.iter() { diff --git a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs index 703b1c80bd..1e01971251 100644 --- a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs @@ -60,10 +60,11 @@ pub struct Config { } impl GetExpand for Config { - fn get_expand(&self) -> Expand { + fn get_expand<'a>(&'a self) -> Result> { let tools_dir = self.tools.local_path.to_string_lossy().into_owned(); - self.common.get_expand() + Ok( + self.common.get_expand()? .target_exe(&self.target_exe) .target_options(&self.target_options) .tools_dir(tools_dir) @@ -81,6 +82,7 @@ impl GetExpand for Config { |expand, container| expand.crashes_container(container), ) }) + ) } } @@ -158,7 +160,7 @@ impl AsanProcessor { // Try to expand `target_exe` with support for `{tools_dir}`. // // Allows using `LibFuzzerDotnetLoader.exe` from a shared tools container. 
- let expand = self.config.get_expand(); + let expand = self.config.get_expand()?; let expanded = expand.evaluate_value(self.config.target_exe.to_string_lossy())?; let expanded_path = Path::new(&expanded); @@ -206,7 +208,7 @@ impl AsanProcessor { let mut args = vec![target_exe]; args.extend(self.config.target_options.clone()); - let expand = self.config.get_expand(); + let expand = self.config.get_expand()?; let expanded_args = expand.evaluate(&args)?; @@ -295,3 +297,53 @@ impl Processor for AsanProcessor { Ok(()) } } + +#[cfg(test)] +mod tests { + use proptest::prelude::*; + use onefuzz::expand::{GetExpand, PlaceHolder}; + + use crate::config_test_utils::GetExpandFields; + + use super::Config; + + impl GetExpandFields for Config { + fn get_expand_fields(&self) -> Vec<(PlaceHolder, String)> { + let mut params = self.common.get_expand_fields(); + params.push((PlaceHolder::TargetExe, dunce::canonicalize(&self.target_exe).unwrap().to_string_lossy().to_string())); + params.push((PlaceHolder::TargetOptions, self.target_options.join(" "))); + params.push((PlaceHolder::ToolsDir, dunce::canonicalize(&self.tools.local_path).unwrap().to_string_lossy().to_string())); + if let Some(reports) = &self.reports { + params.push((PlaceHolder::ReportsDir, dunce::canonicalize(&reports.local_path).unwrap().to_string_lossy().to_string())); + } + if let Some(crashes) = &self.crashes { + if let Some(account) = crashes.remote_path.clone().and_then(|u| u.account()) { + params.push((PlaceHolder::CrashesAccount, account)); + } + if let Some(container) = crashes.remote_path.clone().and_then(|u| u.container()) { + params.push((PlaceHolder::CrashesContainer, container)); + } + } + + params + } + } + + proptest! 
{ + #[test] + fn test_get_expand_values_match_config( + config in any::(), + ) { + let expand = match config.get_expand() { + Ok(expand) => expand, + Err(err) => panic!("error getting expand: {}", err), + }; + let params = config.get_expand_fields(); + + for (param, expected) in params.iter() { + let evaluated = expand.evaluate_value(format!("{}", param.get_string())).unwrap(); + assert_eq!(evaluated, *expected, "placeholder {} did not match expected value", param.get_string()); + } + } + } +} diff --git a/src/agent/onefuzz/src/expand.rs b/src/agent/onefuzz/src/expand.rs index caa7262f09..407a5adef1 100644 --- a/src/agent/onefuzz/src/expand.rs +++ b/src/agent/onefuzz/src/expand.rs @@ -94,7 +94,7 @@ impl PlaceHolder { } pub trait GetExpand { - fn get_expand<'a>(&'a self) -> Expand<'a>; + fn get_expand<'a>(&'a self) -> Result>; } pub struct Expand<'a> { From 66ef8a3829e335dc75bede7dfda877028c676cbe Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Thu, 5 Oct 2023 15:08:07 -0700 Subject: [PATCH 11/32] Resolve merge conflict --- src/agent/onefuzz/src/syncdir.rs | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/src/agent/onefuzz/src/syncdir.rs b/src/agent/onefuzz/src/syncdir.rs index e170901bdc..efd8f8e0e3 100644 --- a/src/agent/onefuzz/src/syncdir.rs +++ b/src/agent/onefuzz/src/syncdir.rs @@ -283,11 +283,7 @@ impl SyncedDir { Event::new_coverage => { jr_client .send_direct( -<<<<<<< HEAD JobResultData::NewCoverage, -======= - JobResultData::CoverageData, ->>>>>>> c8986aaa (Revert "Release 8.7.1 (hotfix) (#3459)" (#3468)) HashMap::from([("count".to_string(), 1.0)]), ) .await; @@ -342,10 +338,6 @@ impl SyncedDir { event!(event.clone(); EventData::Path = file_name_event_str); metric!(event.clone(); 1.0; EventData::Path = file_name_str_metric_str); -<<<<<<< HEAD -<<<<<<< HEAD -======= ->>>>>>> c8986aaa (Revert "Release 8.7.1 (hotfix) (#3459)" (#3468)) if let Some(jr_client) = jr_client { match event { Event::new_result => { @@ -359,11 +351,7 @@ impl 
SyncedDir { Event::new_coverage => { jr_client .send_direct( -<<<<<<< HEAD JobResultData::NewCoverage, -======= - JobResultData::CoverageData, ->>>>>>> c8986aaa (Revert "Release 8.7.1 (hotfix) (#3459)" (#3468)) HashMap::from([("count".to_string(), 1.0)]), ) .await; @@ -373,11 +361,6 @@ impl SyncedDir { } } } -<<<<<<< HEAD -======= ->>>>>>> c69deed5 (Release 8.7.1 (hotfix) (#3459)) -======= ->>>>>>> c8986aaa (Revert "Release 8.7.1 (hotfix) (#3459)" (#3468)) if let Err(err) = uploader.upload(item.clone()).await { let error_message = format!( "Couldn't upload file. path:{} dir:{} err:{:?}", From 5925b97e1117f674cead5363d6362fa9c2a6cfeb Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Thu, 5 Oct 2023 15:20:45 -0700 Subject: [PATCH 12/32] Undo comment --- src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs b/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs index 146bdaf092..ed6035c35b 100644 --- a/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs +++ b/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs @@ -275,7 +275,7 @@ impl<'a> TaskContext<'a> { async fn target_exe(&self) -> Result { let tools_dir = self.config.tools.local_path.to_string_lossy().into_owned(); - // Try to expand `target_exe` with support for `{tools_dir}` and the rest. + // Try to expand `target_exe` with support for `{tools_dir}`. // // Allows using `LibFuzzerDotnetLoader.exe` from a shared tools container. 
let expand = Expand::new(&self.config.common.machine_identity).tools_dir(tools_dir); From 19715c0f0b7f701c8eb3be274fd660647e472c4f Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Thu, 5 Oct 2023 15:26:56 -0700 Subject: [PATCH 13/32] Update comments --- src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs | 4 ++-- src/agent/onefuzz-task/src/tasks/merge/generic.rs | 6 ++---- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs index c58b884c7e..c8546c3727 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs @@ -293,9 +293,9 @@ async fn start_supervisor( let expand = config.get_expand()? .runtime_dir(&runtime_dir) .crashes(&crashes.local_path) - .input_corpus(&inputs.local_path) // And this one too... + .input_corpus(&inputs.local_path) // Why isn't this value in the config? It's not super clear to me from looking at the calling code. .reports_dir(reports_dir) - .set_optional_ref(&crashdumps, |expand, crashdumps| { // Why isn't this value in the config? + .set_optional_ref(&crashdumps, |expand, crashdumps| { // And this one too... expand.crashdumps(&crashdumps.local_path) }) .set_optional_ref(&target_exe, |expand, target_exe| { diff --git a/src/agent/onefuzz-task/src/tasks/merge/generic.rs b/src/agent/onefuzz-task/src/tasks/merge/generic.rs index dfadd3cd9b..a7f7643e99 100644 --- a/src/agent/onefuzz-task/src/tasks/merge/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/merge/generic.rs @@ -33,7 +33,7 @@ pub struct Config { pub target_options_merge: bool, pub tools: SyncedDir, pub input_queue: Url, - pub inputs: SyncedDir, + pub inputs: SyncedDir, // Is this something we can pass to the expander? pub unique_inputs: SyncedDir, #[serde(flatten)] @@ -45,13 +45,12 @@ impl GetExpand for Config { Ok( self.common.get_expand()? 
.input_marker(&self.supervisor_input_marker) - .input_corpus(&self.unique_inputs.local_path) // TODO: verify that this is correct (should it be self.inputs.local_path?) + .input_corpus(&self.unique_inputs.local_path) .target_exe(&self.target_exe) .target_options(&self.target_options) .supervisor_exe(&self.supervisor_exe) .supervisor_options(&self.supervisor_options) .tools_dir(self.tools.local_path.to_string_lossy().into_owned()) - .generated_inputs(&self.inputs.local_path) ) } } @@ -197,7 +196,6 @@ mod tests { params.push((PlaceHolder::SupervisorExe, dunce::canonicalize(&self.supervisor_exe).unwrap().to_string_lossy().to_string())); params.push((PlaceHolder::SupervisorOptions, self.supervisor_options.join(" "))); params.push((PlaceHolder::ToolsDir, dunce::canonicalize(&self.tools.local_path).unwrap().to_string_lossy().to_string())); - params.push((PlaceHolder::GeneratedInputs, dunce::canonicalize(&self.inputs.local_path).unwrap().to_string_lossy().to_string())); params } From 27792f4c764df8d9d9352f1dae86b736a78521b9 Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Thu, 5 Oct 2023 15:29:58 -0700 Subject: [PATCH 14/32] Undo unintentional changes --- src/integration-tests/integration-test.py | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/src/integration-tests/integration-test.py b/src/integration-tests/integration-test.py index 1eada750c3..bf2418ce30 100755 --- a/src/integration-tests/integration-test.py +++ b/src/integration-tests/integration-test.py @@ -647,14 +647,7 @@ def launch( setup = Directory(os.path.join(setup, config.nested_setup_dir)) job: Optional[Job] = None -<<<<<<< HEAD -<<<<<<< HEAD - -======= ->>>>>>> c69deed5 (Release 8.7.1 (hotfix) (#3459)) -======= - ->>>>>>> c8986aaa (Revert "Release 8.7.1 (hotfix) (#3459)" (#3468)) + job = self.build_job( duration, pool, target, config, setup, target_exe, inputs ) @@ -1287,10 +1280,11 @@ def check_logs_for_errors(self) -> None: if seen_errors: raise Exception("logs included 
errors") - + def build_pool_name(self, os_type: str) -> PoolName: return PoolName(f"testpool-{os_type}-{self.test_id}") +class Run(Command): def check_jobs( self, test_id: UUID, From c3ca50e009e55a8990079dc44c876294f0e90d50 Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Thu, 5 Oct 2023 15:30:37 -0700 Subject: [PATCH 15/32] Add missing newline --- src/integration-tests/integration-test.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/integration-tests/integration-test.py b/src/integration-tests/integration-test.py index bf2418ce30..b71b0b7378 100755 --- a/src/integration-tests/integration-test.py +++ b/src/integration-tests/integration-test.py @@ -1284,6 +1284,7 @@ def check_logs_for_errors(self) -> None: def build_pool_name(self, os_type: str) -> PoolName: return PoolName(f"testpool-{os_type}-{self.test_id}") + class Run(Command): def check_jobs( self, From 245117ab218b605693607c34fb1bdc106dc14e21 Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Fri, 6 Oct 2023 15:33:42 -0700 Subject: [PATCH 16/32] Fix build checks --- .../onefuzz-task/src/config_test_utils.rs | 27 ++--- src/agent/onefuzz-task/src/lib.rs | 4 +- .../src/tasks/analysis/generic.rs | 63 +++++++--- src/agent/onefuzz-task/src/tasks/config.rs | 52 +++++--- .../onefuzz-task/src/tasks/coverage/dotnet.rs | 40 +++++-- .../src/tasks/coverage/generic.rs | 24 ++-- .../onefuzz-task/src/tasks/fuzz/generator.rs | 67 ++++++++--- .../onefuzz-task/src/tasks/fuzz/supervisor.rs | 112 +++++++++++++----- .../onefuzz-task/src/tasks/merge/generic.rs | 55 +++++++-- .../src/tasks/report/dotnet/generic.rs | 34 ++++-- 10 files changed, 338 insertions(+), 140 deletions(-) diff --git a/src/agent/onefuzz-task/src/config_test_utils.rs b/src/agent/onefuzz-task/src/config_test_utils.rs index fe27b74890..a2498d02a6 100644 --- a/src/agent/onefuzz-task/src/config_test_utils.rs +++ b/src/agent/onefuzz-task/src/config_test_utils.rs @@ -16,9 +16,9 @@ pub mod arbitraries { use proptest::{option, prelude::*}; use reqwest::Url; 
use uuid::Uuid; - - use crate::tasks::{config::CommonConfig, analysis, merge, coverage, report, fuzz}; - + + use crate::tasks::{analysis, config::CommonConfig, coverage, fuzz, merge, report}; + prop_compose! { fn arb_uuid()( uuid in "[0-9a-f]{8}-([0-9a-f]{4}-){3}[0-9a-f]{12}" @@ -26,7 +26,7 @@ pub mod arbitraries { Uuid::parse_str(&uuid).unwrap() } } - + prop_compose! { fn arb_instance_telemetry_key()( uuid in arb_uuid() @@ -34,7 +34,7 @@ pub mod arbitraries { InstanceTelemetryKey::new(uuid) } } - + prop_compose! { fn arb_microsoft_telemetry_key()( uuid in arb_uuid() @@ -42,7 +42,7 @@ pub mod arbitraries { MicrosoftTelemetryKey::new(uuid) } } - + prop_compose! { fn arb_url()( // Don't use this for any url that isn't just being used for a string comparison (as for the config tests) @@ -55,7 +55,7 @@ pub mod arbitraries { } } } - + prop_compose! { // Todo: consider a better way to generate a path fn arb_pathbuf()( @@ -64,7 +64,7 @@ pub mod arbitraries { PathBuf::from(path) } } - + prop_compose! { fn arb_machine_identity()( machine_id in arb_uuid(), @@ -78,14 +78,14 @@ pub mod arbitraries { } } } - + fn arb_blob_container_url() -> impl Strategy { prop_oneof![ arb_url().prop_map(BlobContainerUrl::BlobContainer), arb_pathbuf().prop_map(BlobContainerUrl::Path), ] } - + prop_compose! { fn arb_synced_dir()( local_path in arb_pathbuf(), @@ -107,7 +107,6 @@ pub mod arbitraries { options } } - prop_compose! { fn arb_common_config()( @@ -148,11 +147,11 @@ pub mod arbitraries { } } } - + impl Arbitrary for CommonConfig { type Parameters = (); type Strategy = BoxedStrategy; - + fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { arb_common_config().boxed() } @@ -315,7 +314,7 @@ pub mod arbitraries { arb_dotnet_coverage_config().boxed() } } - + prop_compose! 
{ fn arb_dotnet_report_config()( target_exe in arb_pathbuf(), diff --git a/src/agent/onefuzz-task/src/lib.rs b/src/agent/onefuzz-task/src/lib.rs index 9e01e5e049..08babfba75 100644 --- a/src/agent/onefuzz-task/src/lib.rs +++ b/src/agent/onefuzz-task/src/lib.rs @@ -5,7 +5,7 @@ extern crate clap; #[macro_use] extern crate onefuzz_telemetry; -pub mod local; -pub mod tasks; #[cfg(test)] pub mod config_test_utils; +pub mod local; +pub mod tasks; diff --git a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs index e54f796d77..249408e900 100644 --- a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs @@ -50,17 +50,15 @@ pub struct Config { impl GetExpand for Config { fn get_expand<'a>(&'a self) -> Result> { - Ok( - self.common.get_expand()? + Ok(self + .common + .get_expand()? .analyzer_exe(&self.analyzer_exe) .analyzer_options(&self.analyzer_options) .target_exe(&self.target_exe) .target_options(&self.target_options) .output_dir(&self.analysis.local_path) - .set_optional( - self.tools.clone().map(|t| t.local_path), - Expand::tools_dir, - ) + .set_optional(self.tools.clone().map(|t| t.local_path), Expand::tools_dir) .set_optional_ref(&self.reports, |expand, reports| { expand.reports_dir(&reports.local_path.as_path()) }) @@ -74,8 +72,7 @@ impl GetExpand for Config { &crashes.remote_path.clone().and_then(|u| u.container()), |expand, container| expand.crashes_container(container), ) - }) - ) + })) } } @@ -238,7 +235,8 @@ pub async fn run_tool( let target_exe = try_resolve_setup_relative_path(&config.common.setup_dir, &config.target_exe).await?; - let expand = config.get_expand()? + let expand = config + .get_expand()? 
.input_path(&input) // Only this one is dynamic, the other two should probably be a part of the config .target_exe(&target_exe) .set_optional_ref(reports_dir, Expand::reports_dir); @@ -273,8 +271,8 @@ pub async fn run_tool( #[cfg(test)] mod tests { - use proptest::prelude::*; use onefuzz::expand::{GetExpand, PlaceHolder}; + use proptest::prelude::*; use crate::config_test_utils::GetExpandFields; @@ -283,16 +281,49 @@ mod tests { impl GetExpandFields for Config { fn get_expand_fields(&self) -> Vec<(PlaceHolder, String)> { let mut params = self.common.get_expand_fields(); - params.push((PlaceHolder::AnalyzerExe, dunce::canonicalize(&self.analyzer_exe).unwrap().to_string_lossy().to_string())); - params.push((PlaceHolder::AnalyzerOptions, self.analyzer_options.join(" "))); - params.push((PlaceHolder::TargetExe, dunce::canonicalize(&self.target_exe).unwrap().to_string_lossy().to_string())); + params.push(( + PlaceHolder::AnalyzerExe, + dunce::canonicalize(&self.analyzer_exe) + .unwrap() + .to_string_lossy() + .to_string(), + )); + params.push(( + PlaceHolder::AnalyzerOptions, + self.analyzer_options.join(" "), + )); + params.push(( + PlaceHolder::TargetExe, + dunce::canonicalize(&self.target_exe) + .unwrap() + .to_string_lossy() + .to_string(), + )); params.push((PlaceHolder::TargetOptions, self.target_options.join(" "))); - params.push((PlaceHolder::OutputDir, dunce::canonicalize(&self.analysis.local_path).unwrap().to_string_lossy().to_string())); + params.push(( + PlaceHolder::OutputDir, + dunce::canonicalize(&self.analysis.local_path) + .unwrap() + .to_string_lossy() + .to_string(), + )); if let Some(tools) = &self.tools { - params.push((PlaceHolder::ToolsDir, dunce::canonicalize(&tools.local_path).unwrap().to_string_lossy().to_string())); + params.push(( + PlaceHolder::ToolsDir, + dunce::canonicalize(&tools.local_path) + .unwrap() + .to_string_lossy() + .to_string(), + )); } if let Some(reports) = &self.reports { - params.push((PlaceHolder::ReportsDir, 
dunce::canonicalize(&reports.local_path).unwrap().to_string_lossy().to_string())); + params.push(( + PlaceHolder::ReportsDir, + dunce::canonicalize(&reports.local_path) + .unwrap() + .to_string_lossy() + .to_string(), + )); } if let Some(crashes) = &self.crashes { if let Some(account) = crashes.remote_path.clone().and_then(|u| u.account()) { diff --git a/src/agent/onefuzz-task/src/tasks/config.rs b/src/agent/onefuzz-task/src/tasks/config.rs index f440121f6a..af56db4e8f 100644 --- a/src/agent/onefuzz-task/src/tasks/config.rs +++ b/src/agent/onefuzz-task/src/tasks/config.rs @@ -11,8 +11,9 @@ use crate::tasks::{ }; use anyhow::{Context, Result}; use onefuzz::{ + expand::{Expand, GetExpand}, machine_id::MachineIdentity, - syncdir::{SyncOperation, SyncedDir}, expand::{GetExpand, Expand}, + syncdir::{SyncOperation, SyncedDir}, }; use onefuzz_result::job_result::{init_job_result, TaskJobResultClient}; use onefuzz_telemetry::{ @@ -127,28 +128,20 @@ impl CommonConfig { impl GetExpand for CommonConfig { fn get_expand<'a>(&'a self) -> Result> { - Ok( - Expand::new(&self.machine_identity) + Ok(Expand::new(&self.machine_identity) .machine_id() .job_id(&self.job_id) .task_id(&self.task_id) .setup_dir(&self.setup_dir) - .set_optional_ref( - &self.instance_telemetry_key, - Expand::instance_telemetry_key - ) + .set_optional_ref(&self.instance_telemetry_key, Expand::instance_telemetry_key) .set_optional_ref( &self.microsoft_telemetry_key, - Expand::microsoft_telemetry_key - ) - .set_optional_ref( - &self.extra_setup_dir, - Expand::extra_setup_dir + Expand::microsoft_telemetry_key, ) + .set_optional_ref(&self.extra_setup_dir, Expand::extra_setup_dir) .set_optional_ref(&self.extra_output, |expand, extra_output| { expand.extra_output_dir(extra_output.local_path.as_path()) - }) - ) + })) } } @@ -394,8 +387,8 @@ impl Config { #[cfg(test)] mod tests { - use proptest::prelude::*; use onefuzz::expand::{GetExpand, PlaceHolder}; + use proptest::prelude::*; use 
crate::config_test_utils::GetExpandFields; @@ -404,10 +397,19 @@ mod tests { impl GetExpandFields for CommonConfig { fn get_expand_fields(&self) -> Vec<(PlaceHolder, String)> { let mut params = vec![ - (PlaceHolder::MachineId, self.machine_identity.machine_id.to_string()), + ( + PlaceHolder::MachineId, + self.machine_identity.machine_id.to_string(), + ), (PlaceHolder::JobId, self.job_id.to_string()), (PlaceHolder::TaskId, self.task_id.to_string()), - (PlaceHolder::SetupDir, dunce::canonicalize(&self.setup_dir).unwrap().to_string_lossy().to_string()), + ( + PlaceHolder::SetupDir, + dunce::canonicalize(&self.setup_dir) + .unwrap() + .to_string_lossy() + .to_string(), + ), ]; if let Some(key) = &self.instance_telemetry_key { params.push((PlaceHolder::InstanceTelemetryKey, key.to_string())); @@ -416,10 +418,22 @@ mod tests { params.push((PlaceHolder::MicrosoftTelemetryKey, key.clone().to_string())); } if let Some(dir) = &self.extra_setup_dir { - params.push((PlaceHolder::ExtraSetupDir, dunce::canonicalize(dir).unwrap().to_string_lossy().to_string())); + params.push(( + PlaceHolder::ExtraSetupDir, + dunce::canonicalize(dir) + .unwrap() + .to_string_lossy() + .to_string(), + )); } if let Some(dir) = &self.extra_output { - params.push((PlaceHolder::ExtraOutputDir, dunce::canonicalize(&dir.local_path).unwrap().to_string_lossy().to_string())); + params.push(( + PlaceHolder::ExtraOutputDir, + dunce::canonicalize(&dir.local_path) + .unwrap() + .to_string_lossy() + .to_string(), + )); } params diff --git a/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs b/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs index ed6035c35b..8460937b96 100644 --- a/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs +++ b/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs @@ -4,7 +4,7 @@ use anyhow::{Context, Result}; use async_trait::async_trait; use onefuzz::{ - expand::{Expand, PlaceHolder, GetExpand}, + expand::{Expand, GetExpand, PlaceHolder}, monitor::DirectoryMonitor, 
syncdir::SyncedDir, }; @@ -58,13 +58,13 @@ impl Config { impl GetExpand for Config { fn get_expand<'a>(&'a self) -> Result> { - Ok( - self.common.get_expand()? + Ok(self + .common + .get_expand()? .target_exe(&self.target_exe) .target_options(&self.target_options) .coverage_dir(&self.coverage.local_path) - .tools_dir(self.tools.local_path.to_string_lossy().into_owned()) - ) + .tools_dir(self.tools.local_path.to_string_lossy().into_owned())) } } @@ -305,7 +305,9 @@ impl<'a> TaskContext<'a> { async fn command_for_input(&self, input: &Path) -> Result { let target_exe = self.target_exe().await?; - let expand = self.config.get_expand()? + let expand = self + .config + .get_expand()? .input_path(input) .target_exe(&target_exe); @@ -464,8 +466,8 @@ impl<'a> Processor for TaskContext<'a> { #[cfg(test)] mod tests { - use proptest::prelude::*; use onefuzz::expand::{GetExpand, PlaceHolder}; + use proptest::prelude::*; use crate::config_test_utils::GetExpandFields; @@ -474,10 +476,28 @@ mod tests { impl GetExpandFields for Config { fn get_expand_fields(&self) -> Vec<(PlaceHolder, String)> { let mut params = self.common.get_expand_fields(); - params.push((PlaceHolder::TargetExe, dunce::canonicalize(&self.target_exe).unwrap().to_string_lossy().to_string())); + params.push(( + PlaceHolder::TargetExe, + dunce::canonicalize(&self.target_exe) + .unwrap() + .to_string_lossy() + .to_string(), + )); params.push((PlaceHolder::TargetOptions, self.target_options.join(" "))); - params.push((PlaceHolder::CoverageDir, dunce::canonicalize(&self.coverage.local_path).unwrap().to_string_lossy().to_string())); - params.push((PlaceHolder::ToolsDir, dunce::canonicalize(&self.tools.local_path).unwrap().to_string_lossy().to_string())); + params.push(( + PlaceHolder::CoverageDir, + dunce::canonicalize(&self.coverage.local_path) + .unwrap() + .to_string_lossy() + .to_string(), + )); + params.push(( + PlaceHolder::ToolsDir, + dunce::canonicalize(&self.tools.local_path) + .unwrap() + .to_string_lossy() + 
.to_string(), + )); params } diff --git a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs index 3c53288b15..0221db393e 100644 --- a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs @@ -20,7 +20,7 @@ use debuggable_module::loader::Loader; use debuggable_module::path::FilePath; use debuggable_module::Module; use onefuzz::env::LD_LIBRARY_PATH; -use onefuzz::expand::{Expand, PlaceHolder, GetExpand}; +use onefuzz::expand::{Expand, GetExpand, PlaceHolder}; use onefuzz::syncdir::SyncedDir; use onefuzz_file_format::coverage::{ binary::{v1::BinaryCoverageJson as BinaryCoverageJsonV1, BinaryCoverageJson}, @@ -84,11 +84,11 @@ impl Config { impl GetExpand for Config { fn get_expand<'a>(&'a self) -> Result> { - Ok( - self.common.get_expand()? + Ok(self + .common + .get_expand()? .target_options(&self.target_options) - .coverage_dir(&self.coverage.local_path) - ) + .coverage_dir(&self.coverage.local_path)) } } @@ -358,7 +358,9 @@ impl<'a> TaskContext<'a> { try_resolve_setup_relative_path(&self.config.common.setup_dir, &self.config.target_exe) .await?; - let expand = self.config.get_expand()? + let expand = self + .config + .get_expand()? 
.target_exe(&target_exe) .input_path(input); @@ -607,8 +609,8 @@ impl CoverageStats { #[cfg(test)] mod tests { - use proptest::prelude::*; use onefuzz::expand::{GetExpand, PlaceHolder}; + use proptest::prelude::*; use crate::config_test_utils::GetExpandFields; @@ -618,7 +620,13 @@ mod tests { fn get_expand_fields(&self) -> Vec<(PlaceHolder, String)> { let mut params = self.common.get_expand_fields(); params.push((PlaceHolder::TargetOptions, self.target_options.join(" "))); - params.push((PlaceHolder::CoverageDir, dunce::canonicalize(&self.coverage.local_path).unwrap().to_string_lossy().to_string())); + params.push(( + PlaceHolder::CoverageDir, + dunce::canonicalize(&self.coverage.local_path) + .unwrap() + .to_string_lossy() + .to_string(), + )); params } diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs index 3dd2eacd66..05d9f45965 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs @@ -53,8 +53,9 @@ pub struct Config { impl GetExpand for Config { fn get_expand<'a>(&'a self) -> Result> { - Ok( - self.common.get_expand()? + Ok(self + .common + .get_expand()? .generator_exe(&self.generator_exe) .generator_options(&self.generator_options) .crashes(&self.crashes.local_path) @@ -62,8 +63,7 @@ impl GetExpand for Config { .target_options(&self.target_options) .set_optional_ref(&self.tools, |expand, tools| { expand.tools_dir(&tools.local_path) - }) - ) + })) } } @@ -185,7 +185,9 @@ impl GeneratorTask { ) -> Result<()> { utils::reset_tmp_dir(&output_dir).await?; let (mut generator, generator_path) = { - let expand = self.config.get_expand()? + let expand = self + .config + .get_expand()? 
.generated_inputs(&output_dir) .input_corpus(&corpus_dir); @@ -223,8 +225,8 @@ impl GeneratorTask { #[cfg(test)] mod tests { - use proptest::prelude::*; use onefuzz::expand::{GetExpand, PlaceHolder}; + use proptest::prelude::*; use crate::config_test_utils::GetExpandFields; @@ -233,13 +235,40 @@ mod tests { impl GetExpandFields for Config { fn get_expand_fields(&self) -> Vec<(PlaceHolder, String)> { let mut params = self.common.get_expand_fields(); - params.push((PlaceHolder::GeneratorExe, dunce::canonicalize(&self.generator_exe).unwrap().to_string_lossy().to_string())); - params.push((PlaceHolder::GeneratorOptions, self.generator_options.join(" "))); - params.push((PlaceHolder::Crashes, dunce::canonicalize(&self.crashes.local_path).unwrap().to_string_lossy().to_string())); - params.push((PlaceHolder::TargetExe, dunce::canonicalize(&self.target_exe).unwrap().to_string_lossy().to_string())); + params.push(( + PlaceHolder::GeneratorExe, + dunce::canonicalize(&self.generator_exe) + .unwrap() + .to_string_lossy() + .to_string(), + )); + params.push(( + PlaceHolder::GeneratorOptions, + self.generator_options.join(" "), + )); + params.push(( + PlaceHolder::Crashes, + dunce::canonicalize(&self.crashes.local_path) + .unwrap() + .to_string_lossy() + .to_string(), + )); + params.push(( + PlaceHolder::TargetExe, + dunce::canonicalize(&self.target_exe) + .unwrap() + .to_string_lossy() + .to_string(), + )); params.push((PlaceHolder::TargetOptions, self.target_options.join(" "))); if let Some(dir) = &self.tools { - params.push((PlaceHolder::ToolsDir, dunce::canonicalize(&dir.local_path).unwrap().to_string_lossy().to_string())); + params.push(( + PlaceHolder::ToolsDir, + dunce::canonicalize(&dir.local_path) + .unwrap() + .to_string_lossy() + .to_string(), + )); } params @@ -263,7 +292,7 @@ mod tests { } } } - + #[cfg(target_os = "linux")] mod linux { #[tokio::test] @@ -277,15 +306,15 @@ mod tests { use std::collections::HashMap; use std::env; use tempfile::tempdir; - + let 
crashes_temp = tempfile::tempdir()?; let crashes: &std::path::Path = crashes_temp.path(); - + let inputs_temp = tempfile::tempdir()?; let inputs: &std::path::Path = inputs_temp.path(); let input_file = inputs.join("seed.txt"); tokio::fs::write(input_file, "test").await?; - + let generator_options: Vec = vec![ "-o", "{generated_inputs}/input-%n-%s", @@ -297,11 +326,11 @@ mod tests { .iter() .map(|p| p.to_string()) .collect(); - + let radamsa_path = env::var("ONEFUZZ_TEST_RADAMSA_LINUX")?; let radamsa_as_path = std::path::Path::new(&radamsa_path); let radamsa_dir = radamsa_as_path.parent().unwrap(); - + let readonly_inputs_local = tempfile::tempdir().unwrap().path().into(); let crashes_local = tempfile::tempdir().unwrap().path().into(); let tools_local = tempfile::tempdir().unwrap().path().into(); @@ -360,11 +389,11 @@ mod tests { }, }; let task = GeneratorTask::new(config); - + let generated_inputs = tempdir()?; task.generate_inputs(inputs.to_path_buf(), generated_inputs.path()) .await?; - + let count = std::fs::read_dir(generated_inputs.path())?.count(); assert_eq!(count, 100, "No inputs generated"); Ok(()) diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs index c8546c3727..5fa24b4335 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs @@ -63,8 +63,9 @@ pub struct SupervisorConfig { impl GetExpand for SupervisorConfig { fn get_expand<'a>(&'a self) -> Result> { - Ok( - self.common.get_expand()? + Ok(self + .common + .get_expand()? 
.input_corpus(&self.inputs.local_path) .supervisor_exe(&self.supervisor_exe) .supervisor_options(&self.supervisor_options) @@ -85,7 +86,7 @@ impl GetExpand for SupervisorConfig { expand.crashdumps(&crashdumps.local_path) }) .set_optional_ref(&self.reports, |expand, reports| { - expand.reports_dir(&reports.local_path) + expand.reports_dir(&reports.local_path) }) .set_optional_ref( &self.crashes.remote_path.clone().and_then(|u| u.account()), @@ -94,8 +95,7 @@ impl GetExpand for SupervisorConfig { .set_optional_ref( &self.crashes.remote_path.clone().and_then(|u| u.container()), |expand, container| expand.crashes_container(container), - ) - ) + )) } } @@ -290,12 +290,14 @@ async fn start_supervisor( None }; - let expand = config.get_expand()? + let expand = config + .get_expand()? .runtime_dir(&runtime_dir) .crashes(&crashes.local_path) .input_corpus(&inputs.local_path) // Why isn't this value in the config? It's not super clear to me from looking at the calling code. .reports_dir(reports_dir) - .set_optional_ref(&crashdumps, |expand, crashdumps| { // And this one too... + .set_optional_ref(&crashdumps, |expand, crashdumps| { + // And this one too... 
expand.crashdumps(&crashdumps.local_path) }) .set_optional_ref(&target_exe, |expand, target_exe| { @@ -327,8 +329,8 @@ async fn start_supervisor( #[cfg(test)] mod tests { - use proptest::prelude::*; use onefuzz::expand::{GetExpand, PlaceHolder}; + use proptest::prelude::*; use crate::config_test_utils::GetExpandFields; @@ -337,11 +339,32 @@ mod tests { impl GetExpandFields for SupervisorConfig { fn get_expand_fields(&self) -> Vec<(PlaceHolder, String)> { let mut params = self.common.get_expand_fields(); - params.push((PlaceHolder::InputCorpus, dunce::canonicalize(&self.inputs.local_path).unwrap().to_string_lossy().to_string())); - params.push((PlaceHolder::SupervisorExe, dunce::canonicalize(&self.supervisor_exe).unwrap().to_string_lossy().to_string())); - params.push((PlaceHolder::SupervisorOptions, self.supervisor_options.join(" "))); + params.push(( + PlaceHolder::InputCorpus, + dunce::canonicalize(&self.inputs.local_path) + .unwrap() + .to_string_lossy() + .to_string(), + )); + params.push(( + PlaceHolder::SupervisorExe, + dunce::canonicalize(&self.supervisor_exe) + .unwrap() + .to_string_lossy() + .to_string(), + )); + params.push(( + PlaceHolder::SupervisorOptions, + self.supervisor_options.join(" "), + )); if let Some(target_exe) = &self.target_exe { - params.push((PlaceHolder::TargetExe, dunce::canonicalize(&target_exe).unwrap().to_string_lossy().to_string())); + params.push(( + PlaceHolder::TargetExe, + dunce::canonicalize(&target_exe) + .unwrap() + .to_string_lossy() + .to_string(), + )); } if let Some(input_marker) = &self.supervisor_input_marker { params.push((PlaceHolder::Input, input_marker.clone())); @@ -350,16 +373,40 @@ mod tests { params.push((PlaceHolder::TargetOptions, target_options.join(" "))); } if let Some(tools) = &self.tools { - params.push((PlaceHolder::ToolsDir, dunce::canonicalize(&tools.local_path).unwrap().to_string_lossy().to_string())); + params.push(( + PlaceHolder::ToolsDir, + dunce::canonicalize(&tools.local_path) + .unwrap() + 
.to_string_lossy() + .to_string(), + )); } if let Some(coverage) = &self.coverage { - params.push((PlaceHolder::CoverageDir, dunce::canonicalize(&coverage.local_path).unwrap().to_string_lossy().to_string())); + params.push(( + PlaceHolder::CoverageDir, + dunce::canonicalize(&coverage.local_path) + .unwrap() + .to_string_lossy() + .to_string(), + )); } if let Some(crashdumps) = &self.crashdumps { - params.push((PlaceHolder::Crashdumps, dunce::canonicalize(&crashdumps.local_path).unwrap().to_string_lossy().to_string())); + params.push(( + PlaceHolder::Crashdumps, + dunce::canonicalize(&crashdumps.local_path) + .unwrap() + .to_string_lossy() + .to_string(), + )); } if let Some(reports) = &self.reports { - params.push((PlaceHolder::ReportsDir, dunce::canonicalize(&reports.local_path).unwrap().to_string_lossy().to_string())); + params.push(( + PlaceHolder::ReportsDir, + dunce::canonicalize(&reports.local_path) + .unwrap() + .to_string_lossy() + .to_string(), + )); } if let Some(account) = &self.crashes.remote_path.clone().and_then(|u| u.account()) { params.push((PlaceHolder::CrashesAccount, account.clone())); @@ -402,9 +449,9 @@ mod tests { use std::collections::HashMap; use std::env; use std::time::Instant; - + const MAX_FUZZ_TIME_SECONDS: u64 = 120; - + async fn has_stats(path: &PathBuf) -> bool { if let Ok(stats) = read_stats(path).await { for entry in stats { @@ -417,38 +464,39 @@ mod tests { false } } - + #[tokio::test] #[cfg_attr(not(feature = "integration_test"), ignore)] async fn test_fuzzer_linux() { let runtime_dir = tempfile::tempdir().unwrap(); - + let supervisor_exe = if let Ok(x) = env::var("ONEFUZZ_TEST_AFL_LINUX_FUZZER") { x } else { warn!("Unable to test AFL integration"); return; }; - + let target_exe = if let Ok(x) = env::var("ONEFUZZ_TEST_AFL_LINUX_TEST_BINARY") { Some(x.into()) } else { warn!("Unable to test AFL integration"); return; }; - + let reports_dir_temp = tempfile::tempdir().unwrap(); let reports_dir = reports_dir_temp.path().into(); - + 
let fault_dir_temp = tempfile::tempdir().unwrap(); let crashes_local = tempfile::tempdir().unwrap().path().into(); let crashes = SyncedDir { local_path: crashes_local, remote_path: Some( - BlobContainerUrl::parse(Url::from_directory_path(fault_dir_temp).unwrap()).unwrap(), + BlobContainerUrl::parse(Url::from_directory_path(fault_dir_temp).unwrap()) + .unwrap(), ), }; - + let crashdumps_dir_temp = tempfile::tempdir().unwrap(); let crashdumps_local = tempfile::tempdir().unwrap().path().into(); let crashdumps = SyncedDir { @@ -458,7 +506,7 @@ mod tests { .unwrap(), ), }; - + let corpus_dir_local = tempfile::tempdir().unwrap().path().into(); let corpus_dir_temp = tempfile::tempdir().unwrap(); let corpus_dir = SyncedDir { @@ -470,7 +518,7 @@ mod tests { }; let seed_file_name = corpus_dir.local_path.join("seed.txt"); tokio::fs::write(seed_file_name, "xyz").await.unwrap(); - + let target_options = Some(vec!["{input}".to_owned()]); let supervisor_env = HashMap::new(); let supervisor_options: Vec<_> = vec![ @@ -486,10 +534,10 @@ mod tests { .iter() .map(|p| p.to_string()) .collect(); - + // AFL input marker let supervisor_input_marker = Some("@@".to_owned()); - + let config = SupervisorConfig { supervisor_exe, supervisor_env, @@ -532,7 +580,7 @@ mod tests { from_task_to_agent_endpoint: "/".to_string(), }, }; - + let process = start_supervisor( runtime_dir, &config, @@ -543,7 +591,7 @@ mod tests { ) .await .unwrap(); - + let notify = Notify::new(); let _fuzzing_monitor = monitor_process(process, "supervisor".to_string(), false, Some(¬ify)); @@ -553,7 +601,7 @@ mod tests { if has_stats(&stat_output).await { break; } - + if start.elapsed().as_secs() > MAX_FUZZ_TIME_SECONDS { panic!( "afl did not generate stats in {} seconds", diff --git a/src/agent/onefuzz-task/src/tasks/merge/generic.rs b/src/agent/onefuzz-task/src/tasks/merge/generic.rs index a7f7643e99..174f64a8f9 100644 --- a/src/agent/onefuzz-task/src/tasks/merge/generic.rs +++ 
b/src/agent/onefuzz-task/src/tasks/merge/generic.rs @@ -8,7 +8,10 @@ use crate::tasks::{ }; use anyhow::{Context, Result}; use onefuzz::{ - expand::{Expand, GetExpand}, fs::set_executable, http::ResponseExt, jitter::delay_with_jitter, + expand::{Expand, GetExpand}, + fs::set_executable, + http::ResponseExt, + jitter::delay_with_jitter, syncdir::SyncedDir, }; use reqwest::Url; @@ -42,16 +45,16 @@ pub struct Config { impl GetExpand for Config { fn get_expand<'a>(&'a self) -> Result> { - Ok( - self.common.get_expand()? + Ok(self + .common + .get_expand()? .input_marker(&self.supervisor_input_marker) .input_corpus(&self.unique_inputs.local_path) .target_exe(&self.target_exe) .target_options(&self.target_options) .supervisor_exe(&self.supervisor_exe) .supervisor_options(&self.supervisor_options) - .tools_dir(self.tools.local_path.to_string_lossy().into_owned()) - ) + .tools_dir(self.tools.local_path.to_string_lossy().into_owned())) } } @@ -144,7 +147,8 @@ async fn merge(config: &Config, output_dir: impl AsRef) -> Result<()> { let target_exe = try_resolve_setup_relative_path(&config.common.setup_dir, &config.target_exe).await?; - let expand = config.get_expand()? + let expand = config + .get_expand()? 
.generated_inputs(output_dir) .target_exe(&target_exe); @@ -179,8 +183,8 @@ async fn merge(config: &Config, output_dir: impl AsRef) -> Result<()> { #[cfg(test)] mod tests { - use proptest::prelude::*; use onefuzz::expand::{GetExpand, PlaceHolder}; + use proptest::prelude::*; use crate::config_test_utils::GetExpandFields; @@ -190,12 +194,39 @@ mod tests { fn get_expand_fields(&self) -> Vec<(PlaceHolder, String)> { let mut params = self.common.get_expand_fields(); params.push((PlaceHolder::Input, self.supervisor_input_marker.clone())); - params.push((PlaceHolder::InputCorpus, dunce::canonicalize(&self.unique_inputs.local_path).unwrap().to_string_lossy().to_string())); - params.push((PlaceHolder::TargetExe, dunce::canonicalize(&self.target_exe).unwrap().to_string_lossy().to_string())); + params.push(( + PlaceHolder::InputCorpus, + dunce::canonicalize(&self.unique_inputs.local_path) + .unwrap() + .to_string_lossy() + .to_string(), + )); + params.push(( + PlaceHolder::TargetExe, + dunce::canonicalize(&self.target_exe) + .unwrap() + .to_string_lossy() + .to_string(), + )); params.push((PlaceHolder::TargetOptions, self.target_options.join(" "))); - params.push((PlaceHolder::SupervisorExe, dunce::canonicalize(&self.supervisor_exe).unwrap().to_string_lossy().to_string())); - params.push((PlaceHolder::SupervisorOptions, self.supervisor_options.join(" "))); - params.push((PlaceHolder::ToolsDir, dunce::canonicalize(&self.tools.local_path).unwrap().to_string_lossy().to_string())); + params.push(( + PlaceHolder::SupervisorExe, + dunce::canonicalize(&self.supervisor_exe) + .unwrap() + .to_string_lossy() + .to_string(), + )); + params.push(( + PlaceHolder::SupervisorOptions, + self.supervisor_options.join(" "), + )); + params.push(( + PlaceHolder::ToolsDir, + dunce::canonicalize(&self.tools.local_path) + .unwrap() + .to_string_lossy() + .to_string(), + )); params } diff --git a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs 
b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs index 1e01971251..0468664764 100644 --- a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs @@ -63,8 +63,9 @@ impl GetExpand for Config { fn get_expand<'a>(&'a self) -> Result> { let tools_dir = self.tools.local_path.to_string_lossy().into_owned(); - Ok( - self.common.get_expand()? + Ok(self + .common + .get_expand()? .target_exe(&self.target_exe) .target_options(&self.target_options) .tools_dir(tools_dir) @@ -81,8 +82,7 @@ impl GetExpand for Config { &crashes.remote_path.clone().and_then(|u| u.container()), |expand, container| expand.crashes_container(container), ) - }) - ) + })) } } @@ -300,8 +300,8 @@ impl Processor for AsanProcessor { #[cfg(test)] mod tests { - use proptest::prelude::*; use onefuzz::expand::{GetExpand, PlaceHolder}; + use proptest::prelude::*; use crate::config_test_utils::GetExpandFields; @@ -310,11 +310,29 @@ mod tests { impl GetExpandFields for Config { fn get_expand_fields(&self) -> Vec<(PlaceHolder, String)> { let mut params = self.common.get_expand_fields(); - params.push((PlaceHolder::TargetExe, dunce::canonicalize(&self.target_exe).unwrap().to_string_lossy().to_string())); + params.push(( + PlaceHolder::TargetExe, + dunce::canonicalize(&self.target_exe) + .unwrap() + .to_string_lossy() + .to_string(), + )); params.push((PlaceHolder::TargetOptions, self.target_options.join(" "))); - params.push((PlaceHolder::ToolsDir, dunce::canonicalize(&self.tools.local_path).unwrap().to_string_lossy().to_string())); + params.push(( + PlaceHolder::ToolsDir, + dunce::canonicalize(&self.tools.local_path) + .unwrap() + .to_string_lossy() + .to_string(), + )); if let Some(reports) = &self.reports { - params.push((PlaceHolder::ReportsDir, dunce::canonicalize(&reports.local_path).unwrap().to_string_lossy().to_string())); + params.push(( + PlaceHolder::ReportsDir, + dunce::canonicalize(&reports.local_path) + .unwrap() + 
.to_string_lossy() + .to_string(), + )); } if let Some(crashes) = &self.crashes { if let Some(account) = crashes.remote_path.clone().and_then(|u| u.account()) { From d35e767a353792f518b544b1b2fadb13dcb05a0f Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Fri, 6 Oct 2023 15:53:15 -0700 Subject: [PATCH 17/32] Fix build checks - lifetime elision --- src/agent/onefuzz-task/src/tasks/analysis/generic.rs | 2 +- src/agent/onefuzz-task/src/tasks/config.rs | 2 +- src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs | 2 +- src/agent/onefuzz-task/src/tasks/coverage/generic.rs | 2 +- src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs | 2 +- src/agent/onefuzz-task/src/tasks/merge/generic.rs | 2 +- src/agent/onefuzz/src/expand.rs | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs index 249408e900..131948b4d3 100644 --- a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs @@ -49,7 +49,7 @@ pub struct Config { } impl GetExpand for Config { - fn get_expand<'a>(&'a self) -> Result> { + fn get_expand(&self) -> Result> { Ok(self .common .get_expand()? 
diff --git a/src/agent/onefuzz-task/src/tasks/config.rs b/src/agent/onefuzz-task/src/tasks/config.rs index af56db4e8f..af864a20aa 100644 --- a/src/agent/onefuzz-task/src/tasks/config.rs +++ b/src/agent/onefuzz-task/src/tasks/config.rs @@ -127,7 +127,7 @@ impl CommonConfig { } impl GetExpand for CommonConfig { - fn get_expand<'a>(&'a self) -> Result> { + fn get_expand(&self) -> Result> { Ok(Expand::new(&self.machine_identity) .machine_id() .job_id(&self.job_id) diff --git a/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs b/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs index 8460937b96..017f91c5dd 100644 --- a/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs +++ b/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs @@ -57,7 +57,7 @@ impl Config { } impl GetExpand for Config { - fn get_expand<'a>(&'a self) -> Result> { + fn get_expand(&self) -> Result> { Ok(self .common .get_expand()? diff --git a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs index 0221db393e..9ee1d2a238 100644 --- a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs @@ -83,7 +83,7 @@ impl Config { } impl GetExpand for Config { - fn get_expand<'a>(&'a self) -> Result> { + fn get_expand(&self) -> Result> { Ok(self .common .get_expand()? diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs index 5fa24b4335..d03ed4b8f2 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs @@ -62,7 +62,7 @@ pub struct SupervisorConfig { } impl GetExpand for SupervisorConfig { - fn get_expand<'a>(&'a self) -> Result> { + fn get_expand(&self) -> Result> { Ok(self .common .get_expand()? 
diff --git a/src/agent/onefuzz-task/src/tasks/merge/generic.rs b/src/agent/onefuzz-task/src/tasks/merge/generic.rs index 174f64a8f9..ae8ef45af3 100644 --- a/src/agent/onefuzz-task/src/tasks/merge/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/merge/generic.rs @@ -44,7 +44,7 @@ pub struct Config { } impl GetExpand for Config { - fn get_expand<'a>(&'a self) -> Result> { + fn get_expand(&self) -> Result> { Ok(self .common .get_expand()? diff --git a/src/agent/onefuzz/src/expand.rs b/src/agent/onefuzz/src/expand.rs index 407a5adef1..8cdd150f4f 100644 --- a/src/agent/onefuzz/src/expand.rs +++ b/src/agent/onefuzz/src/expand.rs @@ -94,7 +94,7 @@ impl PlaceHolder { } pub trait GetExpand { - fn get_expand<'a>(&'a self) -> Result>; + fn get_expand(&self) -> Result>; } pub struct Expand<'a> { From de995b6dea31a85f12f3955f820b1e2b9bbe52b2 Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Fri, 6 Oct 2023 16:27:16 -0700 Subject: [PATCH 18/32] Fix linux test imports --- .../onefuzz-task/src/tasks/fuzz/generator.rs | 22 +++++++++---------- .../onefuzz-task/src/tasks/fuzz/supervisor.rs | 4 ++-- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs index 05d9f45965..459d16b225 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs @@ -52,7 +52,7 @@ pub struct Config { } impl GetExpand for Config { - fn get_expand<'a>(&'a self) -> Result> { + fn get_expand(&self) -> Result> { Ok(self .common .get_expand()? 
@@ -293,20 +293,20 @@ mod tests { } } - #[cfg(target_os = "linux")] + // #[cfg(target_os = "linux")] mod linux { + use super::super::{Config, GeneratorTask}; + use crate::tasks::config::CommonConfig; + use onefuzz::blob::BlobContainerUrl; + use onefuzz::syncdir::SyncedDir; + use reqwest::Url; + use std::collections::HashMap; + use std::env; + use tempfile::tempdir; + #[tokio::test] #[ignore] async fn test_radamsa_linux() -> anyhow::Result<()> { - use super::{Config, GeneratorTask}; - use crate::tasks::config::CommonConfig; - use onefuzz::blob::BlobContainerUrl; - use onefuzz::syncdir::SyncedDir; - use reqwest::Url; - use std::collections::HashMap; - use std::env; - use tempfile::tempdir; - let crashes_temp = tempfile::tempdir()?; let crashes: &std::path::Path = crashes_temp.path(); diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs index d03ed4b8f2..8a73bd8090 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs @@ -437,9 +437,9 @@ mod tests { } } - #[cfg(target_os = "linux")] + // #[cfg(target_os = "linux")] mod linux { - use super::*; + use super::super::*; use crate::tasks::stats::afl::read_stats; use onefuzz::blob::BlobContainerUrl; use onefuzz::machine_id::MachineIdentity; From b0e4163307d68636c4e66039b5f52e86fd1a3fb6 Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Mon, 9 Oct 2023 09:23:32 -0700 Subject: [PATCH 19/32] Fix indented line --- src/agent/onefuzz-task/src/tasks/fuzz/generator.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs index 459d16b225..60edb4b9a6 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs @@ -303,7 +303,7 @@ mod tests { use std::collections::HashMap; use std::env; use tempfile::tempdir; - + #[tokio::test] 
#[ignore] async fn test_radamsa_linux() -> anyhow::Result<()> { From 63b36cb065cfbdaeb93b2d0d5b5f51c67d0d8096 Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Mon, 9 Oct 2023 10:04:50 -0700 Subject: [PATCH 20/32] Fix build checks --- src/agent/onefuzz-task/src/tasks/analysis/generic.rs | 4 ++-- src/agent/onefuzz-task/src/tasks/config.rs | 2 +- src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs | 2 +- src/agent/onefuzz-task/src/tasks/coverage/generic.rs | 2 +- src/agent/onefuzz-task/src/tasks/fuzz/generator.rs | 2 +- src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs | 8 ++++---- src/agent/onefuzz-task/src/tasks/merge/generic.rs | 2 +- src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs | 6 +++--- 8 files changed, 14 insertions(+), 14 deletions(-) diff --git a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs index 131948b4d3..88bccc923a 100644 --- a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs @@ -60,7 +60,7 @@ impl GetExpand for Config { .output_dir(&self.analysis.local_path) .set_optional(self.tools.clone().map(|t| t.local_path), Expand::tools_dir) .set_optional_ref(&self.reports, |expand, reports| { - expand.reports_dir(&reports.local_path.as_path()) + expand.reports_dir(reports.local_path.as_path()) }) .set_optional_ref(&self.crashes, |expand, crashes| { expand @@ -350,7 +350,7 @@ mod tests { let params = config.get_expand_fields(); for (param, expected) in params.iter() { - let evaluated = expand.evaluate_value(format!("{}", param.get_string())).unwrap(); + let evaluated = expand.evaluate_value(param.get_string()).unwrap(); assert_eq!(evaluated, *expected, "placeholder {} did not match expected value", param.get_string()); } } diff --git a/src/agent/onefuzz-task/src/tasks/config.rs b/src/agent/onefuzz-task/src/tasks/config.rs index af864a20aa..0abc55497d 100644 --- a/src/agent/onefuzz-task/src/tasks/config.rs +++ 
b/src/agent/onefuzz-task/src/tasks/config.rs @@ -454,7 +454,7 @@ mod tests { let params = config.get_expand_fields(); for (param, expected) in params.iter() { - let evaluated = expand.evaluate_value(format!("{}", param.get_string())).unwrap(); + let evaluated = expand.evaluate_value(param.get_string()).unwrap(); assert_eq!(evaluated, *expected); } } diff --git a/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs b/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs index 017f91c5dd..160eeb4f2b 100644 --- a/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs +++ b/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs @@ -515,7 +515,7 @@ mod tests { let params = config.get_expand_fields(); for (param, expected) in params.iter() { - let evaluated = expand.evaluate_value(format!("{}", param.get_string())).unwrap(); + let evaluated = expand.evaluate_value(param.get_string()).unwrap(); assert_eq!(evaluated, *expected, "placeholder {} did not match expected value", param.get_string()); } } diff --git a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs index 9ee1d2a238..d0eb31745e 100644 --- a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs @@ -644,7 +644,7 @@ mod tests { let params = config.get_expand_fields(); for (param, expected) in params.iter() { - let evaluated = expand.evaluate_value(format!("{}", param.get_string())).unwrap(); + let evaluated = expand.evaluate_value(param.get_string()).unwrap(); assert_eq!(evaluated, *expected, "placeholder {} did not match expected value", param.get_string()); } } diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs index 60edb4b9a6..d6faebb1a1 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs @@ -287,7 +287,7 @@ mod tests { let params = config.get_expand_fields(); for (param, 
expected) in params.iter() { - let evaluated = expand.evaluate_value(format!("{}", param.get_string())).unwrap(); + let evaluated = expand.evaluate_value(param.get_string()).unwrap(); assert_eq!(evaluated, *expected, "placeholder {} did not match expected value", param.get_string()); } } diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs index 8a73bd8090..707297b16c 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs @@ -74,7 +74,7 @@ impl GetExpand for SupervisorConfig { expand.input_marker(input_marker) }) .set_optional_ref(&self.target_options, |expand, target_options| { - expand.target_options(&target_options) + expand.target_options(target_options) }) .set_optional_ref(&self.tools, |expand, tools| { expand.tools_dir(&tools.local_path) @@ -301,7 +301,7 @@ async fn start_supervisor( expand.crashdumps(&crashdumps.local_path) }) .set_optional_ref(&target_exe, |expand, target_exe| { - expand.target_exe(&target_exe) + expand.target_exe(target_exe) }); let supervisor_path = expand.evaluate_value(&config.supervisor_exe)?; @@ -360,7 +360,7 @@ mod tests { if let Some(target_exe) = &self.target_exe { params.push(( PlaceHolder::TargetExe, - dunce::canonicalize(&target_exe) + dunce::canonicalize(target_exe) .unwrap() .to_string_lossy() .to_string(), @@ -431,7 +431,7 @@ mod tests { let params = config.get_expand_fields(); for (param, expected) in params.iter() { - let evaluated = expand.evaluate_value(format!("{}", param.get_string())).unwrap(); + let evaluated = expand.evaluate_value(param.get_string()).unwrap(); assert_eq!(evaluated, *expected, "placeholder {} did not match expected value", param.get_string()); } } diff --git a/src/agent/onefuzz-task/src/tasks/merge/generic.rs b/src/agent/onefuzz-task/src/tasks/merge/generic.rs index ae8ef45af3..a582ec8bc1 100644 --- a/src/agent/onefuzz-task/src/tasks/merge/generic.rs +++ 
b/src/agent/onefuzz-task/src/tasks/merge/generic.rs @@ -244,7 +244,7 @@ mod tests { let params = config.get_expand_fields(); for (param, expected) in params.iter() { - let evaluated = expand.evaluate_value(format!("{}", param.get_string())).unwrap(); + let evaluated = expand.evaluate_value(param.get_string()).unwrap(); assert_eq!(evaluated, *expected, "placeholder {} did not match expected value", param.get_string()); } } diff --git a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs index 0468664764..e9905f687d 100644 --- a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs @@ -60,7 +60,7 @@ pub struct Config { } impl GetExpand for Config { - fn get_expand<'a>(&'a self) -> Result> { + fn get_expand(&self) -> Result> { let tools_dir = self.tools.local_path.to_string_lossy().into_owned(); Ok(self @@ -70,7 +70,7 @@ impl GetExpand for Config { .target_options(&self.target_options) .tools_dir(tools_dir) .set_optional_ref(&self.reports, |expand, reports| { - expand.reports_dir(&reports.local_path.as_path()) + expand.reports_dir(reports.local_path.as_path()) }) .set_optional_ref(&self.crashes, |expand, crashes| { expand @@ -359,7 +359,7 @@ mod tests { let params = config.get_expand_fields(); for (param, expected) in params.iter() { - let evaluated = expand.evaluate_value(format!("{}", param.get_string())).unwrap(); + let evaluated = expand.evaluate_value(param.get_string()).unwrap(); assert_eq!(evaluated, *expected, "placeholder {} did not match expected value", param.get_string()); } } From 367a3e3625e89330198b89a8104dd29fdf164e02 Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Tue, 10 Oct 2023 14:37:29 -0700 Subject: [PATCH 21/32] Replace common test code with a declarative macro --- .../onefuzz-task/src/config_test_utils.rs | 24 ++++++++++++++++++- src/agent/onefuzz-task/src/lib.rs | 1 + .../src/tasks/analysis/generic.rs | 18 
+------------- src/agent/onefuzz-task/src/tasks/config.rs | 20 +--------------- .../onefuzz-task/src/tasks/coverage/dotnet.rs | 18 +------------- .../src/tasks/coverage/generic.rs | 18 +------------- .../onefuzz-task/src/tasks/fuzz/generator.rs | 20 ++-------------- .../onefuzz-task/src/tasks/fuzz/supervisor.rs | 20 ++-------------- .../onefuzz-task/src/tasks/merge/generic.rs | 18 +------------- .../src/tasks/report/dotnet/generic.rs | 18 +------------- 10 files changed, 34 insertions(+), 141 deletions(-) diff --git a/src/agent/onefuzz-task/src/config_test_utils.rs b/src/agent/onefuzz-task/src/config_test_utils.rs index a2498d02a6..1fd260ae7d 100644 --- a/src/agent/onefuzz-task/src/config_test_utils.rs +++ b/src/agent/onefuzz-task/src/config_test_utils.rs @@ -1,6 +1,6 @@ use onefuzz::expand::{GetExpand, PlaceHolder}; -// Moving this trait method into the GetExpand trait--and returning `Vec<(PlaceHolder, Box)>` instead +// Moving this trait method into the GetExpand trait, and returning `Vec<(PlaceHolder, Box)>` instead, // would let us use define a default implementation for `get_expand()` while also coupling the expand values we // test with those we give to the expander. // It seems to me like a non-trivial (and perhaps bad) design change though. @@ -8,6 +8,28 @@ pub trait GetExpandFields: GetExpand { fn get_expand_fields(&self) -> Vec<(PlaceHolder, String)>; } +macro_rules! config_test { + ($t:ty) => { + proptest! 
{ + #[test] + fn test_get_expand_values_match_config( + config in any::<$t>(), + ) { + let expand = match config.get_expand() { + Ok(expand) => expand, + Err(err) => panic!("error getting expand: {}", err), + }; + let params = config.get_expand_fields(); + + for (param, expected) in params.iter() { + let evaluated = expand.evaluate_value(param.get_string()).unwrap(); + assert_eq!(evaluated, *expected, "placeholder {} did not match expected value", param.get_string()); + } + } + } + } +} + pub mod arbitraries { use std::path::PathBuf; diff --git a/src/agent/onefuzz-task/src/lib.rs b/src/agent/onefuzz-task/src/lib.rs index 08babfba75..cd5a14073d 100644 --- a/src/agent/onefuzz-task/src/lib.rs +++ b/src/agent/onefuzz-task/src/lib.rs @@ -6,6 +6,7 @@ extern crate clap; extern crate onefuzz_telemetry; #[cfg(test)] +#[macro_use] pub mod config_test_utils; pub mod local; pub mod tasks; diff --git a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs index 88bccc923a..cc3578f19b 100644 --- a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs @@ -338,21 +338,5 @@ mod tests { } } - proptest! 
{ - #[test] - fn test_get_expand_values_match_config( - config in any::(), - ) { - let expand = match config.get_expand() { - Ok(expand) => expand, - Err(err) => panic!("error getting expand: {}", err), - }; - let params = config.get_expand_fields(); - - for (param, expected) in params.iter() { - let evaluated = expand.evaluate_value(param.get_string()).unwrap(); - assert_eq!(evaluated, *expected, "placeholder {} did not match expected value", param.get_string()); - } - } - } + config_test!(Config); } diff --git a/src/agent/onefuzz-task/src/tasks/config.rs b/src/agent/onefuzz-task/src/tasks/config.rs index 0abc55497d..e379e50f70 100644 --- a/src/agent/onefuzz-task/src/tasks/config.rs +++ b/src/agent/onefuzz-task/src/tasks/config.rs @@ -440,23 +440,5 @@ mod tests { } } - proptest! { - #[test] - fn test_get_expand_values_match_config( - config in any::(), - ) { - // This function implementation is repeated across all config tests - // There might be a way to share it by taking advantage of the `GetExpandFields` trait, but I'm not sure how - let expand = match config.get_expand() { - Ok(expand) => expand, - Err(err) => panic!("error getting expand: {}", err), - }; - let params = config.get_expand_fields(); - - for (param, expected) in params.iter() { - let evaluated = expand.evaluate_value(param.get_string()).unwrap(); - assert_eq!(evaluated, *expected); - } - } - } + config_test!(CommonConfig); } diff --git a/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs b/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs index 160eeb4f2b..41dcf44182 100644 --- a/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs +++ b/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs @@ -503,21 +503,5 @@ mod tests { } } - proptest! 
{ - #[test] - fn test_get_expand_values_match_config( - config in any::(), - ) { - let expand = match config.get_expand() { - Ok(expand) => expand, - Err(err) => panic!("error getting expand: {}", err), - }; - let params = config.get_expand_fields(); - - for (param, expected) in params.iter() { - let evaluated = expand.evaluate_value(param.get_string()).unwrap(); - assert_eq!(evaluated, *expected, "placeholder {} did not match expected value", param.get_string()); - } - } - } + config_test!(Config); } diff --git a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs index d0eb31745e..5c549c45d8 100644 --- a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs @@ -632,21 +632,5 @@ mod tests { } } - proptest! { - #[test] - fn test_get_expand_values_match_config( - config in any::(), - ) { - let expand = match config.get_expand() { - Ok(expand) => expand, - Err(err) => panic!("error getting expand: {}", err), - }; - let params = config.get_expand_fields(); - - for (param, expected) in params.iter() { - let evaluated = expand.evaluate_value(param.get_string()).unwrap(); - assert_eq!(evaluated, *expected, "placeholder {} did not match expected value", param.get_string()); - } - } - } + config_test!(Config); } diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs index d6faebb1a1..5140e41f5e 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs @@ -275,25 +275,9 @@ mod tests { } } - proptest! 
{ - #[test] - fn test_get_expand_values_match_config( - config in any::(), - ) { - let expand = match config.get_expand() { - Ok(expand) => expand, - Err(err) => panic!("error getting expand: {}", err), - }; - let params = config.get_expand_fields(); - - for (param, expected) in params.iter() { - let evaluated = expand.evaluate_value(param.get_string()).unwrap(); - assert_eq!(evaluated, *expected, "placeholder {} did not match expected value", param.get_string()); - } - } - } + config_test!(Config); - // #[cfg(target_os = "linux")] + #[cfg(target_os = "linux")] mod linux { use super::super::{Config, GeneratorTask}; use crate::tasks::config::CommonConfig; diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs index 707297b16c..d6b04becbf 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs @@ -419,25 +419,9 @@ mod tests { } } - proptest! { - #[test] - fn test_get_expand_values_match_config( - config in any::(), - ) { - let expand = match config.get_expand() { - Ok(expand) => expand, - Err(err) => panic!("error getting expand: {}", err), - }; - let params = config.get_expand_fields(); - - for (param, expected) in params.iter() { - let evaluated = expand.evaluate_value(param.get_string()).unwrap(); - assert_eq!(evaluated, *expected, "placeholder {} did not match expected value", param.get_string()); - } - } - } + config_test!(SupervisorConfig); - // #[cfg(target_os = "linux")] + #[cfg(target_os = "linux")] mod linux { use super::super::*; use crate::tasks::stats::afl::read_stats; diff --git a/src/agent/onefuzz-task/src/tasks/merge/generic.rs b/src/agent/onefuzz-task/src/tasks/merge/generic.rs index a582ec8bc1..d702cbc3d4 100644 --- a/src/agent/onefuzz-task/src/tasks/merge/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/merge/generic.rs @@ -232,21 +232,5 @@ mod tests { } } - proptest! 
{ - #[test] - fn test_get_expand_values_match_config( - config in any::(), - ) { - let expand = match config.get_expand() { - Ok(expand) => expand, - Err(err) => panic!("error getting expand: {}", err), - }; - let params = config.get_expand_fields(); - - for (param, expected) in params.iter() { - let evaluated = expand.evaluate_value(param.get_string()).unwrap(); - assert_eq!(evaluated, *expected, "placeholder {} did not match expected value", param.get_string()); - } - } - } + config_test!(Config); } diff --git a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs index e9905f687d..be92716829 100644 --- a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs @@ -347,21 +347,5 @@ mod tests { } } - proptest! { - #[test] - fn test_get_expand_values_match_config( - config in any::(), - ) { - let expand = match config.get_expand() { - Ok(expand) => expand, - Err(err) => panic!("error getting expand: {}", err), - }; - let params = config.get_expand_fields(); - - for (param, expected) in params.iter() { - let evaluated = expand.evaluate_value(param.get_string()).unwrap(); - assert_eq!(evaluated, *expected, "placeholder {} did not match expected value", param.get_string()); - } - } - } + config_test!(Config); } From 3fe1098b20d06162ba0be5404fd52d9554130510 Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Tue, 10 Oct 2023 14:46:36 -0700 Subject: [PATCH 22/32] Format code --- src/agent/onefuzz-task/src/config_test_utils.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/agent/onefuzz-task/src/config_test_utils.rs b/src/agent/onefuzz-task/src/config_test_utils.rs index 1fd260ae7d..8257901ef7 100644 --- a/src/agent/onefuzz-task/src/config_test_utils.rs +++ b/src/agent/onefuzz-task/src/config_test_utils.rs @@ -20,7 +20,7 @@ macro_rules! 
config_test { Err(err) => panic!("error getting expand: {}", err), }; let params = config.get_expand_fields(); - + for (param, expected) in params.iter() { let evaluated = expand.evaluate_value(param.get_string()).unwrap(); assert_eq!(evaluated, *expected, "placeholder {} did not match expected value", param.get_string()); From 7f0a510df7d3108b42ce92aa4c7027aa2c0a89e5 Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Wed, 11 Oct 2023 16:08:09 -0700 Subject: [PATCH 23/32] Simplify URL arbitrary even further to guarantee that it's parseable --- src/agent/onefuzz-task/src/config_test_utils.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/agent/onefuzz-task/src/config_test_utils.rs b/src/agent/onefuzz-task/src/config_test_utils.rs index 8257901ef7..dbd26bd68a 100644 --- a/src/agent/onefuzz-task/src/config_test_utils.rs +++ b/src/agent/onefuzz-task/src/config_test_utils.rs @@ -69,7 +69,7 @@ pub mod arbitraries { fn arb_url()( // Don't use this for any url that isn't just being used for a string comparison (as for the config tests) // basically all that matters here is that we generate a parsable url - url in r"https?://(www\.)?[-a-zA-Z0-9]{1,256}\.com" + url in r"https?://(www\.)?[a-zA-Z0-9]{1,64}\.com" ) -> Url { match Url::parse(&url) { Ok(url) => url, From fecaee49fd2bd10ca5cc6cdace0303dbe31334b3 Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Tue, 17 Oct 2023 18:00:51 -0700 Subject: [PATCH 24/32] Upgrade proptest version --- src/agent/Cargo.lock | 20 +++++++------------- src/agent/onefuzz-task/Cargo.toml | 2 +- 2 files changed, 8 insertions(+), 14 deletions(-) diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock index af17d6ad2b..d71771494c 100644 --- a/src/agent/Cargo.lock +++ b/src/agent/Cargo.lock @@ -2618,19 +2618,19 @@ dependencies = [ [[package]] name = "proptest" -version = "1.2.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4e35c06b98bf36aba164cc17cb25f7e232f5c4aeea73baa14b8a9f0d92dbfa65" +checksum = "7c003ac8c77cb07bb74f5f198bce836a689bcd5a42574612bf14d17bfd08c20e" dependencies = [ "bit-set", - "bitflags 1.3.2", - "byteorder", + "bit-vec", + "bitflags 2.3.3", "lazy_static", "num-traits", "rand 0.8.5", "rand_chacha 0.3.1", "rand_xorshift", - "regex-syntax 0.6.29", + "regex-syntax", "rusty-fork", "tempfile", "unarray", @@ -2824,7 +2824,7 @@ dependencies = [ "aho-corasick", "memchr", "regex-automata", - "regex-syntax 0.7.4", + "regex-syntax", ] [[package]] @@ -2835,15 +2835,9 @@ checksum = "fed1ceff11a1dddaee50c9dc8e4938bd106e9d89ae372f192311e7da498e3b69" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.7.4", + "regex-syntax", ] -[[package]] -name = "regex-syntax" -version = "0.6.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" - [[package]] name = "regex-syntax" version = "0.7.4" diff --git a/src/agent/onefuzz-task/Cargo.toml b/src/agent/onefuzz-task/Cargo.toml index 47d49351ef..8d12c1cacb 100644 --- a/src/agent/onefuzz-task/Cargo.toml +++ b/src/agent/onefuzz-task/Cargo.toml @@ -85,6 +85,6 @@ schemars = { version = "0.8.12", features = ["uuid1"] } [dev-dependencies] pretty_assertions = "1.4" -proptest = "1.2.0" +proptest = "1.3.1" tempfile = "3.8" dunce = "1.0" From aa30c6817354c01a0d925d3c13ecaf052804cb5c Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Wed, 18 Oct 2023 08:24:13 -0700 Subject: [PATCH 25/32] Implement Default for CommonConfig --- src/agent/onefuzz-task/src/local/common.rs | 12 +------ src/agent/onefuzz-task/src/local/template.rs | 11 +------ src/agent/onefuzz-task/src/tasks/config.rs | 31 +++++++++++++++++++ .../onefuzz-task/src/tasks/fuzz/generator.rs | 23 +------------- .../onefuzz-task/src/tasks/fuzz/supervisor.rs | 24 +------------- 5 files changed, 35 insertions(+), 66 deletions(-) diff --git a/src/agent/onefuzz-task/src/local/common.rs 
b/src/agent/onefuzz-task/src/local/common.rs index 17940d799f..16c1e326fe 100644 --- a/src/agent/onefuzz-task/src/local/common.rs +++ b/src/agent/onefuzz-task/src/local/common.rs @@ -231,22 +231,12 @@ pub async fn build_local_context( task_id, instance_id, setup_dir, - extra_setup_dir: None, - extra_output: None, machine_identity: MachineIdentity { machine_id: Uuid::nil(), machine_name: "local".to_string(), scaleset_name: None, }, - instance_telemetry_key: None, - heartbeat_queue: None, - job_result_queue: None, - microsoft_telemetry_key: None, - logs: None, - min_available_memory_mb: 0, - tags: Default::default(), - from_agent_to_task_endpoint: "/".to_string(), - from_task_to_agent_endpoint: "/".to_string(), + ..Default::default() }; let current_dir = current_dir()?; diff --git a/src/agent/onefuzz-task/src/local/template.rs b/src/agent/onefuzz-task/src/local/template.rs index 3393edd89a..7b3838e5cd 100644 --- a/src/agent/onefuzz-task/src/local/template.rs +++ b/src/agent/onefuzz-task/src/local/template.rs @@ -195,14 +195,8 @@ pub async fn launch( let task_group: TaskGroup = serde_yaml::from_value(value)?; let common = CommonConfig { - task_id: Uuid::nil(), job_id: Uuid::new_v4(), instance_id: Uuid::new_v4(), - heartbeat_queue: None, - job_result_queue: None, - instance_telemetry_key: None, - microsoft_telemetry_key: None, - logs: None, setup_dir: task_group.common.setup_dir.unwrap_or_default(), extra_setup_dir: task_group.common.extra_setup_dir, min_available_memory_mb: crate::tasks::config::default_min_available_memory_mb(), @@ -211,10 +205,7 @@ pub async fn launch( machine_name: "local".to_string(), scaleset_name: None, }, - tags: Default::default(), - from_agent_to_task_endpoint: "/".to_string(), - from_task_to_agent_endpoint: "/".to_string(), - extra_output: None, + ..Default::default() }; let mut context = RunContext::new(common, event_sender); diff --git a/src/agent/onefuzz-task/src/tasks/config.rs b/src/agent/onefuzz-task/src/tasks/config.rs index 
e379e50f70..b74d8f65e9 100644 --- a/src/agent/onefuzz-task/src/tasks/config.rs +++ b/src/agent/onefuzz-task/src/tasks/config.rs @@ -126,6 +126,37 @@ impl CommonConfig { } } +impl Default for CommonConfig { + /// Returns an instance with Default:default() values for all fields besides: + /// - `machine_identity`: with a generated id, "test" for machine name, and None for scaleset name + /// - `from_agent_to_task_endpoint`: with a value of "/" + /// - `from_task_to_agent_endpoint`: with a value of "/" + fn default() -> Self { + Self { + job_id: Default::default(), + task_id: Default::default(), + instance_id: Default::default(), + heartbeat_queue: Default::default(), + job_result_queue: Default::default(), + instance_telemetry_key: Default::default(), + microsoft_telemetry_key: Default::default(), + logs: Default::default(), + setup_dir: Default::default(), + extra_setup_dir: Default::default(), + extra_output: Default::default(), + min_available_memory_mb: Default::default(), + machine_identity: MachineIdentity { + machine_id: uuid::Uuid::new_v4(), + machine_name: "test".to_string(), + scaleset_name: None, + }, + tags: Default::default(), + from_agent_to_task_endpoint: "/".to_string(), + from_task_to_agent_endpoint: "/".to_string(), + } + } +} + impl GetExpand for CommonConfig { fn get_expand(&self) -> Result> { Ok(Expand::new(&self.machine_identity) diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs index 5140e41f5e..5356eb5d1e 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs @@ -349,28 +349,7 @@ mod tests { ensemble_sync_delay: None, generator_env: HashMap::default(), check_retry_count: 0, - common: CommonConfig { - job_id: Default::default(), - task_id: Default::default(), - instance_id: Default::default(), - heartbeat_queue: Default::default(), - job_result_queue: Default::default(), - instance_telemetry_key: 
Default::default(), - microsoft_telemetry_key: Default::default(), - logs: Default::default(), - setup_dir: Default::default(), - extra_setup_dir: Default::default(), - extra_output: Default::default(), - min_available_memory_mb: Default::default(), - machine_identity: onefuzz::machine_id::MachineIdentity { - machine_id: uuid::Uuid::new_v4(), - machine_name: "test".to_string(), - scaleset_name: None, - }, - tags: Default::default(), - from_agent_to_task_endpoint: "/".to_string(), - from_task_to_agent_endpoint: "/".to_string(), - }, + common: Default::default(), }; let task = GeneratorTask::new(config); diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs index d6b04becbf..2e8561d98a 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs @@ -426,7 +426,6 @@ mod tests { use super::super::*; use crate::tasks::stats::afl::read_stats; use onefuzz::blob::BlobContainerUrl; - use onefuzz::machine_id::MachineIdentity; use onefuzz::process::monitor_process; use onefuzz_telemetry::EventData; use reqwest::Url; @@ -541,28 +540,7 @@ mod tests { unique_reports: None, no_repro: None, coverage: None, - common: CommonConfig { - job_id: Default::default(), - task_id: Default::default(), - instance_id: Default::default(), - heartbeat_queue: Default::default(), - job_result_queue: Default::default(), - instance_telemetry_key: Default::default(), - microsoft_telemetry_key: Default::default(), - logs: Default::default(), - setup_dir: Default::default(), - extra_setup_dir: Default::default(), - extra_output: Default::default(), - min_available_memory_mb: Default::default(), - machine_identity: MachineIdentity { - machine_id: uuid::Uuid::new_v4(), - machine_name: "test".to_string(), - scaleset_name: None, - }, - tags: Default::default(), - from_agent_to_task_endpoint: "/".to_string(), - from_task_to_agent_endpoint: "/".to_string(), - }, + common: 
Default::default(), }; let process = start_supervisor( From 9337a78656faf160746c5e924aff8e7ec171d4f3 Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Wed, 18 Oct 2023 09:15:19 -0700 Subject: [PATCH 26/32] Remove the GetExpand trait --- .../onefuzz-task/src/config_test_utils.rs | 4 +- .../src/tasks/analysis/generic.rs | 7 ++-- src/agent/onefuzz-task/src/tasks/config.rs | 40 +++++++++---------- .../onefuzz-task/src/tasks/coverage/dotnet.rs | 8 ++-- .../src/tasks/coverage/generic.rs | 8 ++-- .../onefuzz-task/src/tasks/fuzz/generator.rs | 8 ++-- .../onefuzz-task/src/tasks/fuzz/supervisor.rs | 8 ++-- .../onefuzz-task/src/tasks/merge/generic.rs | 8 ++-- .../src/tasks/report/dotnet/generic.rs | 8 ++-- src/agent/onefuzz/src/expand.rs | 4 -- 10 files changed, 47 insertions(+), 56 deletions(-) diff --git a/src/agent/onefuzz-task/src/config_test_utils.rs b/src/agent/onefuzz-task/src/config_test_utils.rs index dbd26bd68a..15a5987f3a 100644 --- a/src/agent/onefuzz-task/src/config_test_utils.rs +++ b/src/agent/onefuzz-task/src/config_test_utils.rs @@ -1,10 +1,10 @@ -use onefuzz::expand::{GetExpand, PlaceHolder}; +use onefuzz::expand::PlaceHolder; // Moving this trait method into the GetExpand trait, and returning `Vec<(PlaceHolder, Box)>` instead, // would let us use define a default implementation for `get_expand()` while also coupling the expand values we // test with those we give to the expander. // It seems to me like a non-trivial (and perhaps bad) design change though. 
-pub trait GetExpandFields: GetExpand { +pub trait GetExpandFields { fn get_expand_fields(&self) -> Vec<(PlaceHolder, String)>; } diff --git a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs index cc3578f19b..390f20c7f2 100644 --- a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs @@ -6,7 +6,6 @@ use crate::tasks::{ utils::try_resolve_setup_relative_path, }; use anyhow::{Context, Result}; -use onefuzz::expand::GetExpand; use onefuzz::{az_copy, blob::url::BlobUrl}; use onefuzz::{ expand::Expand, @@ -48,8 +47,8 @@ pub struct Config { pub common: CommonConfig, } -impl GetExpand for Config { - fn get_expand(&self) -> Result> { +impl Config { + pub fn get_expand(&self) -> Result> { Ok(self .common .get_expand()? @@ -271,7 +270,7 @@ pub async fn run_tool( #[cfg(test)] mod tests { - use onefuzz::expand::{GetExpand, PlaceHolder}; + use onefuzz::expand::PlaceHolder; use proptest::prelude::*; use crate::config_test_utils::GetExpandFields; diff --git a/src/agent/onefuzz-task/src/tasks/config.rs b/src/agent/onefuzz-task/src/tasks/config.rs index b74d8f65e9..52b8790131 100644 --- a/src/agent/onefuzz-task/src/tasks/config.rs +++ b/src/agent/onefuzz-task/src/tasks/config.rs @@ -11,7 +11,7 @@ use crate::tasks::{ }; use anyhow::{Context, Result}; use onefuzz::{ - expand::{Expand, GetExpand}, + expand::Expand, machine_id::MachineIdentity, syncdir::{SyncOperation, SyncedDir}, }; @@ -124,6 +124,23 @@ impl CommonConfig { None => Ok(None), } } + + pub fn get_expand(&self) -> Result> { + Ok(Expand::new(&self.machine_identity) + .machine_id() + .job_id(&self.job_id) + .task_id(&self.task_id) + .setup_dir(&self.setup_dir) + .set_optional_ref(&self.instance_telemetry_key, Expand::instance_telemetry_key) + .set_optional_ref( + &self.microsoft_telemetry_key, + Expand::microsoft_telemetry_key, + ) + .set_optional_ref(&self.extra_setup_dir, Expand::extra_setup_dir) + 
.set_optional_ref(&self.extra_output, |expand, extra_output| { + expand.extra_output_dir(extra_output.local_path.as_path()) + })) + } } impl Default for CommonConfig { @@ -157,25 +174,6 @@ impl Default for CommonConfig { } } -impl GetExpand for CommonConfig { - fn get_expand(&self) -> Result> { - Ok(Expand::new(&self.machine_identity) - .machine_id() - .job_id(&self.job_id) - .task_id(&self.task_id) - .setup_dir(&self.setup_dir) - .set_optional_ref(&self.instance_telemetry_key, Expand::instance_telemetry_key) - .set_optional_ref( - &self.microsoft_telemetry_key, - Expand::microsoft_telemetry_key, - ) - .set_optional_ref(&self.extra_setup_dir, Expand::extra_setup_dir) - .set_optional_ref(&self.extra_output, |expand, extra_output| { - expand.extra_output_dir(extra_output.local_path.as_path()) - })) - } -} - #[derive(Debug, Deserialize)] #[serde(tag = "task_type")] pub enum Config { @@ -418,7 +416,7 @@ impl Config { #[cfg(test)] mod tests { - use onefuzz::expand::{GetExpand, PlaceHolder}; + use onefuzz::expand::PlaceHolder; use proptest::prelude::*; use crate::config_test_utils::GetExpandFields; diff --git a/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs b/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs index 41dcf44182..389d35414a 100644 --- a/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs +++ b/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs @@ -4,7 +4,7 @@ use anyhow::{Context, Result}; use async_trait::async_trait; use onefuzz::{ - expand::{Expand, GetExpand, PlaceHolder}, + expand::{Expand, PlaceHolder}, monitor::DirectoryMonitor, syncdir::SyncedDir, }; @@ -56,8 +56,8 @@ impl Config { } } -impl GetExpand for Config { - fn get_expand(&self) -> Result> { +impl Config { + pub fn get_expand(&self) -> Result> { Ok(self .common .get_expand()? 
@@ -466,7 +466,7 @@ impl<'a> Processor for TaskContext<'a> { #[cfg(test)] mod tests { - use onefuzz::expand::{GetExpand, PlaceHolder}; + use onefuzz::expand::PlaceHolder; use proptest::prelude::*; use crate::config_test_utils::GetExpandFields; diff --git a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs index 5c549c45d8..7fd5b1cd03 100644 --- a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs @@ -20,7 +20,7 @@ use debuggable_module::loader::Loader; use debuggable_module::path::FilePath; use debuggable_module::Module; use onefuzz::env::LD_LIBRARY_PATH; -use onefuzz::expand::{Expand, GetExpand, PlaceHolder}; +use onefuzz::expand::{Expand, PlaceHolder}; use onefuzz::syncdir::SyncedDir; use onefuzz_file_format::coverage::{ binary::{v1::BinaryCoverageJson as BinaryCoverageJsonV1, BinaryCoverageJson}, @@ -80,10 +80,8 @@ impl Config { .map(Duration::from_secs) .unwrap_or(DEFAULT_TARGET_TIMEOUT) } -} -impl GetExpand for Config { - fn get_expand(&self) -> Result> { + pub fn get_expand(&self) -> Result> { Ok(self .common .get_expand()? 
@@ -609,7 +607,7 @@ impl CoverageStats { #[cfg(test)] mod tests { - use onefuzz::expand::{GetExpand, PlaceHolder}; + use onefuzz::expand::PlaceHolder; use proptest::prelude::*; use crate::config_test_utils::GetExpandFields; diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs index 5356eb5d1e..09572867b2 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs @@ -8,7 +8,7 @@ use crate::tasks::{ }; use anyhow::{Context, Result}; use onefuzz::{ - expand::{Expand, GetExpand}, + expand::Expand, fs::set_executable, input_tester::Tester, process::monitor_process, @@ -51,8 +51,8 @@ pub struct Config { pub common: CommonConfig, } -impl GetExpand for Config { - fn get_expand(&self) -> Result> { +impl Config { + pub fn get_expand(&self) -> Result> { Ok(self .common .get_expand()? @@ -225,7 +225,7 @@ impl GeneratorTask { #[cfg(test)] mod tests { - use onefuzz::expand::{GetExpand, PlaceHolder}; + use onefuzz::expand::PlaceHolder; use proptest::prelude::*; use crate::config_test_utils::GetExpandFields; diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs index 2e8561d98a..8a2bb51938 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs @@ -11,7 +11,7 @@ use crate::tasks::{ }; use anyhow::{Context, Error, Result}; use onefuzz::{ - expand::{Expand, GetExpand}, + expand::Expand, fs::{has_files, set_executable, OwnedDir}, jitter::delay_with_jitter, process::monitor_process, @@ -61,8 +61,8 @@ pub struct SupervisorConfig { pub common: CommonConfig, } -impl GetExpand for SupervisorConfig { - fn get_expand(&self) -> Result> { +impl SupervisorConfig { + pub fn get_expand(&self) -> Result> { Ok(self .common .get_expand()? 
@@ -329,7 +329,7 @@ async fn start_supervisor( #[cfg(test)] mod tests { - use onefuzz::expand::{GetExpand, PlaceHolder}; + use onefuzz::expand::PlaceHolder; use proptest::prelude::*; use crate::config_test_utils::GetExpandFields; diff --git a/src/agent/onefuzz-task/src/tasks/merge/generic.rs b/src/agent/onefuzz-task/src/tasks/merge/generic.rs index d702cbc3d4..868c4b2907 100644 --- a/src/agent/onefuzz-task/src/tasks/merge/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/merge/generic.rs @@ -8,7 +8,7 @@ use crate::tasks::{ }; use anyhow::{Context, Result}; use onefuzz::{ - expand::{Expand, GetExpand}, + expand::Expand, fs::set_executable, http::ResponseExt, jitter::delay_with_jitter, @@ -43,8 +43,8 @@ pub struct Config { pub common: CommonConfig, } -impl GetExpand for Config { - fn get_expand(&self) -> Result> { +impl Config { + pub fn get_expand(&self) -> Result> { Ok(self .common .get_expand()? @@ -183,7 +183,7 @@ async fn merge(config: &Config, output_dir: impl AsRef) -> Result<()> { #[cfg(test)] mod tests { - use onefuzz::expand::{GetExpand, PlaceHolder}; + use onefuzz::expand::PlaceHolder; use proptest::prelude::*; use crate::config_test_utils::GetExpandFields; diff --git a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs index be92716829..ff3a144828 100644 --- a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs @@ -18,7 +18,7 @@ use crate::tasks::{ }; use anyhow::{Context, Result}; use async_trait::async_trait; -use onefuzz::expand::{Expand, GetExpand}; +use onefuzz::expand::Expand; use onefuzz::fs::set_executable; use onefuzz::{blob::BlobUrl, sha256, syncdir::SyncedDir}; use onefuzz_result::job_result::TaskJobResultClient; @@ -59,8 +59,8 @@ pub struct Config { pub common: CommonConfig, } -impl GetExpand for Config { - fn get_expand(&self) -> Result> { +impl Config { + pub fn get_expand(&self) -> Result> { let tools_dir 
= self.tools.local_path.to_string_lossy().into_owned(); Ok(self @@ -300,7 +300,7 @@ impl Processor for AsanProcessor { #[cfg(test)] mod tests { - use onefuzz::expand::{GetExpand, PlaceHolder}; + use onefuzz::expand::PlaceHolder; use proptest::prelude::*; use crate::config_test_utils::GetExpandFields; diff --git a/src/agent/onefuzz/src/expand.rs b/src/agent/onefuzz/src/expand.rs index 8cdd150f4f..7f1813899f 100644 --- a/src/agent/onefuzz/src/expand.rs +++ b/src/agent/onefuzz/src/expand.rs @@ -93,10 +93,6 @@ impl PlaceHolder { } } -pub trait GetExpand { - fn get_expand(&self) -> Result>; -} - pub struct Expand<'a> { values: HashMap<&'static str, ExpandedValue<'a>>, machine_identity: &'a MachineIdentity, From fc00e569f07df012cb19fe474028705107852ca5 Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Wed, 18 Oct 2023 09:22:09 -0700 Subject: [PATCH 27/32] Return Expand instead of Result --- src/agent/onefuzz-task/src/config_test_utils.rs | 5 +---- src/agent/onefuzz-task/src/tasks/analysis/generic.rs | 10 +++++----- src/agent/onefuzz-task/src/tasks/config.rs | 6 +++--- src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs | 10 +++++----- src/agent/onefuzz-task/src/tasks/coverage/generic.rs | 10 +++++----- src/agent/onefuzz-task/src/tasks/fuzz/generator.rs | 10 +++++----- src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs | 10 +++++----- src/agent/onefuzz-task/src/tasks/merge/generic.rs | 10 +++++----- .../onefuzz-task/src/tasks/report/dotnet/generic.rs | 12 ++++++------ 9 files changed, 40 insertions(+), 43 deletions(-) diff --git a/src/agent/onefuzz-task/src/config_test_utils.rs b/src/agent/onefuzz-task/src/config_test_utils.rs index 15a5987f3a..1b36ce50bf 100644 --- a/src/agent/onefuzz-task/src/config_test_utils.rs +++ b/src/agent/onefuzz-task/src/config_test_utils.rs @@ -15,10 +15,7 @@ macro_rules! 
config_test { fn test_get_expand_values_match_config( config in any::<$t>(), ) { - let expand = match config.get_expand() { - Ok(expand) => expand, - Err(err) => panic!("error getting expand: {}", err), - }; + let expand = config.get_expand(); let params = config.get_expand_fields(); for (param, expected) in params.iter() { diff --git a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs index 390f20c7f2..cd3a5219b2 100644 --- a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs @@ -48,10 +48,10 @@ pub struct Config { } impl Config { - pub fn get_expand(&self) -> Result> { - Ok(self + pub fn get_expand(&self) -> Expand<'_> { + self .common - .get_expand()? + .get_expand() .analyzer_exe(&self.analyzer_exe) .analyzer_options(&self.analyzer_options) .target_exe(&self.target_exe) @@ -71,7 +71,7 @@ impl Config { &crashes.remote_path.clone().and_then(|u| u.container()), |expand, container| expand.crashes_container(container), ) - })) + }) } } @@ -235,7 +235,7 @@ pub async fn run_tool( try_resolve_setup_relative_path(&config.common.setup_dir, &config.target_exe).await?; let expand = config - .get_expand()? 
+ .get_expand() .input_path(&input) // Only this one is dynamic, the other two should probably be a part of the config .target_exe(&target_exe) .set_optional_ref(reports_dir, Expand::reports_dir); diff --git a/src/agent/onefuzz-task/src/tasks/config.rs b/src/agent/onefuzz-task/src/tasks/config.rs index 52b8790131..c259b413f8 100644 --- a/src/agent/onefuzz-task/src/tasks/config.rs +++ b/src/agent/onefuzz-task/src/tasks/config.rs @@ -125,8 +125,8 @@ impl CommonConfig { } } - pub fn get_expand(&self) -> Result> { - Ok(Expand::new(&self.machine_identity) + pub fn get_expand(&self) -> Expand<'_> { + Expand::new(&self.machine_identity) .machine_id() .job_id(&self.job_id) .task_id(&self.task_id) @@ -139,7 +139,7 @@ impl CommonConfig { .set_optional_ref(&self.extra_setup_dir, Expand::extra_setup_dir) .set_optional_ref(&self.extra_output, |expand, extra_output| { expand.extra_output_dir(extra_output.local_path.as_path()) - })) + }) } } diff --git a/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs b/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs index 389d35414a..053e68bb2a 100644 --- a/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs +++ b/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs @@ -57,14 +57,14 @@ impl Config { } impl Config { - pub fn get_expand(&self) -> Result> { - Ok(self + pub fn get_expand(&self) -> Expand<'_> { + self .common - .get_expand()? + .get_expand() .target_exe(&self.target_exe) .target_options(&self.target_options) .coverage_dir(&self.coverage.local_path) - .tools_dir(self.tools.local_path.to_string_lossy().into_owned())) + .tools_dir(self.tools.local_path.to_string_lossy().into_owned()) } } @@ -307,7 +307,7 @@ impl<'a> TaskContext<'a> { let expand = self .config - .get_expand()? 
+ .get_expand() .input_path(input) .target_exe(&target_exe); diff --git a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs index 7fd5b1cd03..692d455f7e 100644 --- a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs @@ -81,12 +81,12 @@ impl Config { .unwrap_or(DEFAULT_TARGET_TIMEOUT) } - pub fn get_expand(&self) -> Result> { - Ok(self + pub fn get_expand(&self) -> Expand<'_> { + self .common - .get_expand()? + .get_expand() .target_options(&self.target_options) - .coverage_dir(&self.coverage.local_path)) + .coverage_dir(&self.coverage.local_path) } } @@ -358,7 +358,7 @@ impl<'a> TaskContext<'a> { let expand = self .config - .get_expand()? + .get_expand() .target_exe(&target_exe) .input_path(input); diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs index 09572867b2..d91cc1857f 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs @@ -52,10 +52,10 @@ pub struct Config { } impl Config { - pub fn get_expand(&self) -> Result> { - Ok(self + pub fn get_expand(&self) -> Expand<'_> { + self .common - .get_expand()? + .get_expand() .generator_exe(&self.generator_exe) .generator_options(&self.generator_options) .crashes(&self.crashes.local_path) @@ -63,7 +63,7 @@ impl Config { .target_options(&self.target_options) .set_optional_ref(&self.tools, |expand, tools| { expand.tools_dir(&tools.local_path) - })) + }) } } @@ -187,7 +187,7 @@ impl GeneratorTask { let (mut generator, generator_path) = { let expand = self .config - .get_expand()? 
+ .get_expand() .generated_inputs(&output_dir) .input_corpus(&corpus_dir); diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs index 8a2bb51938..446c9ba1b7 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs @@ -62,10 +62,10 @@ pub struct SupervisorConfig { } impl SupervisorConfig { - pub fn get_expand(&self) -> Result> { - Ok(self + pub fn get_expand(&self) -> Expand<'_> { + self .common - .get_expand()? + .get_expand() .input_corpus(&self.inputs.local_path) .supervisor_exe(&self.supervisor_exe) .supervisor_options(&self.supervisor_options) @@ -95,7 +95,7 @@ impl SupervisorConfig { .set_optional_ref( &self.crashes.remote_path.clone().and_then(|u| u.container()), |expand, container| expand.crashes_container(container), - )) + ) } } @@ -291,7 +291,7 @@ async fn start_supervisor( }; let expand = config - .get_expand()? + .get_expand() .runtime_dir(&runtime_dir) .crashes(&crashes.local_path) .input_corpus(&inputs.local_path) // Why isn't this value in the config? It's not super clear to me from looking at the calling code. diff --git a/src/agent/onefuzz-task/src/tasks/merge/generic.rs b/src/agent/onefuzz-task/src/tasks/merge/generic.rs index 868c4b2907..ce0a3526ed 100644 --- a/src/agent/onefuzz-task/src/tasks/merge/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/merge/generic.rs @@ -44,17 +44,17 @@ pub struct Config { } impl Config { - pub fn get_expand(&self) -> Result> { - Ok(self + pub fn get_expand(&self) -> Expand<'_> { + self .common - .get_expand()? 
+ .get_expand() .input_marker(&self.supervisor_input_marker) .input_corpus(&self.unique_inputs.local_path) .target_exe(&self.target_exe) .target_options(&self.target_options) .supervisor_exe(&self.supervisor_exe) .supervisor_options(&self.supervisor_options) - .tools_dir(self.tools.local_path.to_string_lossy().into_owned())) + .tools_dir(self.tools.local_path.to_string_lossy().into_owned()) } } @@ -148,7 +148,7 @@ async fn merge(config: &Config, output_dir: impl AsRef) -> Result<()> { try_resolve_setup_relative_path(&config.common.setup_dir, &config.target_exe).await?; let expand = config - .get_expand()? + .get_expand() .generated_inputs(output_dir) .target_exe(&target_exe); diff --git a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs index ff3a144828..1db729cd06 100644 --- a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs @@ -60,12 +60,12 @@ pub struct Config { } impl Config { - pub fn get_expand(&self) -> Result> { + pub fn get_expand(&self) -> Expand<'_> { let tools_dir = self.tools.local_path.to_string_lossy().into_owned(); - Ok(self + self .common - .get_expand()? + .get_expand() .target_exe(&self.target_exe) .target_options(&self.target_options) .tools_dir(tools_dir) @@ -82,7 +82,7 @@ impl Config { &crashes.remote_path.clone().and_then(|u| u.container()), |expand, container| expand.crashes_container(container), ) - })) + }) } } @@ -160,7 +160,7 @@ impl AsanProcessor { // Try to expand `target_exe` with support for `{tools_dir}`. // // Allows using `LibFuzzerDotnetLoader.exe` from a shared tools container. 
- let expand = self.config.get_expand()?; + let expand = self.config.get_expand(); let expanded = expand.evaluate_value(self.config.target_exe.to_string_lossy())?; let expanded_path = Path::new(&expanded); @@ -208,7 +208,7 @@ impl AsanProcessor { let mut args = vec![target_exe]; args.extend(self.config.target_options.clone()); - let expand = self.config.get_expand()?; + let expand = self.config.get_expand(); let expanded_args = expand.evaluate(&args)?; From d907473e72ace14d54f3807fae03d0ee8df0c8ca Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Wed, 18 Oct 2023 09:24:38 -0700 Subject: [PATCH 28/32] cargo fmt --- src/agent/onefuzz-task/src/tasks/analysis/generic.rs | 3 +-- src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs | 3 +-- src/agent/onefuzz-task/src/tasks/coverage/generic.rs | 3 +-- src/agent/onefuzz-task/src/tasks/fuzz/generator.rs | 3 +-- src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs | 3 +-- src/agent/onefuzz-task/src/tasks/merge/generic.rs | 8 ++------ src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs | 3 +-- 7 files changed, 8 insertions(+), 18 deletions(-) diff --git a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs index cd3a5219b2..0ef97f236f 100644 --- a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs @@ -49,8 +49,7 @@ pub struct Config { impl Config { pub fn get_expand(&self) -> Expand<'_> { - self - .common + self.common .get_expand() .analyzer_exe(&self.analyzer_exe) .analyzer_options(&self.analyzer_options) diff --git a/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs b/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs index 053e68bb2a..b78e99b1f1 100644 --- a/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs +++ b/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs @@ -58,8 +58,7 @@ impl Config { impl Config { pub fn get_expand(&self) -> Expand<'_> { - self - .common + self.common .get_expand() 
.target_exe(&self.target_exe) .target_options(&self.target_options) diff --git a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs index 692d455f7e..2ebc748010 100644 --- a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs @@ -82,8 +82,7 @@ impl Config { } pub fn get_expand(&self) -> Expand<'_> { - self - .common + self.common .get_expand() .target_options(&self.target_options) .coverage_dir(&self.coverage.local_path) diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs index d91cc1857f..74026e7e9d 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs @@ -53,8 +53,7 @@ pub struct Config { impl Config { pub fn get_expand(&self) -> Expand<'_> { - self - .common + self.common .get_expand() .generator_exe(&self.generator_exe) .generator_options(&self.generator_options) diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs index 446c9ba1b7..a7b7ee6087 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs @@ -63,8 +63,7 @@ pub struct SupervisorConfig { impl SupervisorConfig { pub fn get_expand(&self) -> Expand<'_> { - self - .common + self.common .get_expand() .input_corpus(&self.inputs.local_path) .supervisor_exe(&self.supervisor_exe) diff --git a/src/agent/onefuzz-task/src/tasks/merge/generic.rs b/src/agent/onefuzz-task/src/tasks/merge/generic.rs index ce0a3526ed..abb53f36be 100644 --- a/src/agent/onefuzz-task/src/tasks/merge/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/merge/generic.rs @@ -8,10 +8,7 @@ use crate::tasks::{ }; use anyhow::{Context, Result}; use onefuzz::{ - expand::Expand, - fs::set_executable, - http::ResponseExt, - jitter::delay_with_jitter, + expand::Expand, 
fs::set_executable, http::ResponseExt, jitter::delay_with_jitter, syncdir::SyncedDir, }; use reqwest::Url; @@ -45,8 +42,7 @@ pub struct Config { impl Config { pub fn get_expand(&self) -> Expand<'_> { - self - .common + self.common .get_expand() .input_marker(&self.supervisor_input_marker) .input_corpus(&self.unique_inputs.local_path) diff --git a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs index 1db729cd06..036b20d028 100644 --- a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs @@ -63,8 +63,7 @@ impl Config { pub fn get_expand(&self) -> Expand<'_> { let tools_dir = self.tools.local_path.to_string_lossy().into_owned(); - self - .common + self.common .get_expand() .target_exe(&self.target_exe) .target_options(&self.target_options) From 304c737e31e6eae2216274f06a22bc25eb5e8756 Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Wed, 18 Oct 2023 09:33:42 -0700 Subject: [PATCH 29/32] Remove unused import --- src/agent/onefuzz-task/src/tasks/fuzz/generator.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs index 74026e7e9d..8e27c4fb0b 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs @@ -279,7 +279,6 @@ mod tests { #[cfg(target_os = "linux")] mod linux { use super::super::{Config, GeneratorTask}; - use crate::tasks::config::CommonConfig; use onefuzz::blob::BlobContainerUrl; use onefuzz::syncdir::SyncedDir; use reqwest::Url; From d9a86b6ce68679c9e156fd7b4841dcbce2c207d5 Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Wed, 18 Oct 2023 15:53:56 -0700 Subject: [PATCH 30/32] Fix PR comments (see full message for details) - Stop initializing target_exe in get_expand() for the dotnet coverage task since it's overwritten when the expander is used. 
- Expand the inputs field from the generic merge task as generated_inputs. --- src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs | 1 - src/agent/onefuzz-task/src/tasks/merge/generic.rs | 3 ++- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs b/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs index b78e99b1f1..1085deac7f 100644 --- a/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs +++ b/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs @@ -60,7 +60,6 @@ impl Config { pub fn get_expand(&self) -> Expand<'_> { self.common .get_expand() - .target_exe(&self.target_exe) .target_options(&self.target_options) .coverage_dir(&self.coverage.local_path) .tools_dir(self.tools.local_path.to_string_lossy().into_owned()) diff --git a/src/agent/onefuzz-task/src/tasks/merge/generic.rs b/src/agent/onefuzz-task/src/tasks/merge/generic.rs index abb53f36be..a6cf430b51 100644 --- a/src/agent/onefuzz-task/src/tasks/merge/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/merge/generic.rs @@ -33,7 +33,7 @@ pub struct Config { pub target_options_merge: bool, pub tools: SyncedDir, pub input_queue: Url, - pub inputs: SyncedDir, // Is this something we can pass to the expander? 
+ pub inputs: SyncedDir, pub unique_inputs: SyncedDir, #[serde(flatten)] @@ -46,6 +46,7 @@ impl Config { .get_expand() .input_marker(&self.supervisor_input_marker) .input_corpus(&self.unique_inputs.local_path) + .generated_inputs(&self.inputs.local_path) .target_exe(&self.target_exe) .target_options(&self.target_options) .supervisor_exe(&self.supervisor_exe) From a0743dca2344ba3ccc11c5725ac84f4d7a2cd246 Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Wed, 18 Oct 2023 17:20:00 -0700 Subject: [PATCH 31/32] Update dotnet coverage task Expander tests --- src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs | 7 ------- 1 file changed, 7 deletions(-) diff --git a/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs b/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs index 1085deac7f..8eed445a3d 100644 --- a/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs +++ b/src/agent/onefuzz-task/src/tasks/coverage/dotnet.rs @@ -474,13 +474,6 @@ mod tests { impl GetExpandFields for Config { fn get_expand_fields(&self) -> Vec<(PlaceHolder, String)> { let mut params = self.common.get_expand_fields(); - params.push(( - PlaceHolder::TargetExe, - dunce::canonicalize(&self.target_exe) - .unwrap() - .to_string_lossy() - .to_string(), - )); params.push((PlaceHolder::TargetOptions, self.target_options.join(" "))); params.push(( PlaceHolder::CoverageDir, From cbd6f26b072ae494cec99ccd756ad8cea71dea20 Mon Sep 17 00:00:00 2001 From: Kanan Boubion Date: Thu, 19 Oct 2023 11:01:50 -0700 Subject: [PATCH 32/32] Remove incorrect expansion variable from generic merge task --- src/agent/onefuzz-task/src/tasks/merge/generic.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/agent/onefuzz-task/src/tasks/merge/generic.rs b/src/agent/onefuzz-task/src/tasks/merge/generic.rs index a6cf430b51..aea191d136 100644 --- a/src/agent/onefuzz-task/src/tasks/merge/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/merge/generic.rs @@ -46,7 +46,6 @@ impl Config { .get_expand() 
.input_marker(&self.supervisor_input_marker) .input_corpus(&self.unique_inputs.local_path) - .generated_inputs(&self.inputs.local_path) .target_exe(&self.target_exe) .target_options(&self.target_options) .supervisor_exe(&self.supervisor_exe)