From c69deed50e81cc1805f6f82ebb10513a211cbbe2 Mon Sep 17 00:00:00 2001
From: Adam <103067949+AdamL-Microsoft@users.noreply.github.com>
Date: Tue, 29 Aug 2023 12:57:19 -0700
Subject: [PATCH 01/88] Release 8.7.1 (hotfix) (#3459)

* Remove the retention policy setting (#3452)

---------

Co-authored-by: Cheick Keita
---
 .devcontainer/devcontainer.json               |   3 +-
 .github/workflows/ci.yml                      |   2 -
 CHANGELOG.md                                  |   6 +
 CURRENT_VERSION                               |   2 +-
 .../ApiService/Functions/QueueJobResult.cs    |  60 -------
 .../ApiService/OneFuzzTypes/Model.cs          |  45 -----
 src/ApiService/ApiService/Program.cs          |   1 -
 .../ApiService/onefuzzlib/Config.cs           |   1 -
 .../ApiService/onefuzzlib/Extension.cs        |  44 +++--
 .../onefuzzlib/JobResultOperations.cs         | 121 -------------
 .../ApiService/onefuzzlib/OnefuzzContext.cs   |   2 -
 .../IntegrationTests/Fakes/TestContext.cs     |   3 -
 src/agent/Cargo.lock                          |  16 --
 src/agent/Cargo.toml                          |   1 -
 src/agent/onefuzz-agent/src/config.rs         |  12 --
 src/agent/onefuzz-agent/src/log_uploader.rs   |  29 ++++
 src/agent/onefuzz-agent/src/work.rs           |   5 +-
 src/agent/onefuzz-result/Cargo.toml           |  18 --
 src/agent/onefuzz-result/src/job_result.rs    | 129 --------------
 src/agent/onefuzz-result/src/lib.rs           |   4 -
 src/agent/onefuzz-task/Cargo.toml             |   1 -
 src/agent/onefuzz-task/src/local/cmd.rs       |  42 ++++-
 src/agent/onefuzz-task/src/local/common.rs    |  26 ++-
 .../example_templates/libfuzzer_basic.yml     |  34 ++--
 .../src/local/generic_analysis.rs             | 137 ++++++++++++++-
 .../src/local/generic_crash_report.rs         | 138 ++++++++++++++-
 .../src/local/generic_generator.rs            | 142 ++++++++++++++-
 src/agent/onefuzz-task/src/local/libfuzzer.rs | 161 +++++++++++++++++-
 .../src/local/libfuzzer_crash_report.rs       | 128 +++++++++++++-
 .../onefuzz-task/src/local/libfuzzer_merge.rs |  84 ++++++++-
 .../src/local/libfuzzer_regression.rs         | 134 ++++++++++++++-
 .../src/local/libfuzzer_test_input.rs         |  83 +++++++++
 src/agent/onefuzz-task/src/local/mod.rs       |   1 +
 src/agent/onefuzz-task/src/local/radamsa.rs   |  78 +++++++++
 src/agent/onefuzz-task/src/local/schema.json  |   8 +-
 src/agent/onefuzz-task/src/local/template.rs  |  13 +-
 .../onefuzz-task/src/local/test_input.rs      |  86 ++++++++++
 .../src/tasks/analysis/generic.rs             |   5 +-
 src/agent/onefuzz-task/src/tasks/config.rs    |  20 ---
 .../src/tasks/coverage/generic.rs             |  19 +--
 .../onefuzz-task/src/tasks/fuzz/generator.rs  |   7 +-
 .../src/tasks/fuzz/libfuzzer/common.rs        |  49 ++----
 .../onefuzz-task/src/tasks/fuzz/supervisor.rs |  15 +-
 src/agent/onefuzz-task/src/tasks/heartbeat.rs |   2 +-
 .../onefuzz-task/src/tasks/merge/generic.rs   |   2 +-
 .../src/tasks/merge/libfuzzer_merge.rs        |   2 +-
 .../src/tasks/regression/common.rs            |  15 +-
 .../src/tasks/regression/generic.rs           |   3 +-
 .../src/tasks/regression/libfuzzer.rs         |   3 +-
 .../src/tasks/report/crash_report.rs          |  45 +----
 .../src/tasks/report/dotnet/generic.rs        |  22 +--
 .../onefuzz-task/src/tasks/report/generic.rs  |  14 +-
 .../src/tasks/report/libfuzzer_report.rs      |   5 -
 src/agent/onefuzz/Cargo.toml                  |   1 -
 src/agent/onefuzz/src/blob/url.rs             |  23 +--
 src/agent/onefuzz/src/syncdir.rs              |  66 +------
 .../bicep-templates/storageAccounts.bicep     |   2 +-
 src/integration-tests/integration-test.py     |  77 ++------
 src/runtime-tools/linux/setup.sh              |  64 ++-----
 59 files changed, 1389 insertions(+), 872 deletions(-)
 delete mode 100644 src/ApiService/ApiService/Functions/QueueJobResult.cs
 delete mode 100644 src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs
 delete mode 100644 src/agent/onefuzz-result/Cargo.toml
 delete mode 100644 src/agent/onefuzz-result/src/job_result.rs
 delete mode 100644 src/agent/onefuzz-result/src/lib.rs
 create mode 100644 src/agent/onefuzz-task/src/local/radamsa.rs
 mode change 100644 => 100755 src/runtime-tools/linux/setup.sh

diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index d3fcf050ed..4059b3d7c1 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -13,7 +13,6 @@
         "**/target/**": true
       },
       "lldb.executable": "/usr/bin/lldb",
-      "dotnet.server.useOmnisharp": true,
      "omnisharp.enableEditorConfigSupport": true,
      "omnisharp.enableRoslynAnalyzers": true,
      "python.defaultInterpreterPath": "/workspaces/onefuzz/src/venv/bin/python",
@@ -49,4 +48,4 @@
   "features": {
     "ghcr.io/devcontainers/features/azure-cli:1": {}
   }
-}
\ No newline at end of file
+}
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 2dd85d7c92..12824fd182 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -542,11 +542,9 @@ jobs:
           mkdir -p artifacts/linux-libfuzzer
           mkdir -p artifacts/linux-libfuzzer-with-options
-          mkdir -p artifacts/mariner-libfuzzer
           (cd libfuzzer ; make )
           cp -r libfuzzer/fuzz.exe libfuzzer/seeds artifacts/linux-libfuzzer
           cp -r libfuzzer/fuzz.exe libfuzzer/seeds artifacts/linux-libfuzzer-with-options
-          cp -r libfuzzer/fuzz.exe libfuzzer/seeds artifacts/mariner-libfuzzer

           mkdir -p artifacts/linux-libfuzzer-regression
           (cd libfuzzer-regression ; make )
diff --git a/CHANGELOG.md b/CHANGELOG.md
index be4779ad77..8d46ea2a0e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,12 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

+## 8.7.1
+
+### Fixed
+
+* Service: Removed deprecated Azure retention policy setting that was causing scaleset deployment errors [#3452](https://github.com/microsoft/onefuzz/pull/3452)
+
 ## 8.7.0

 ### Added
diff --git a/CURRENT_VERSION b/CURRENT_VERSION
index c0bcaebe8f..efeecbe2c5 100644
--- a/CURRENT_VERSION
+++ b/CURRENT_VERSION
@@ -1 +1 @@
-8.7.0
\ No newline at end of file
+8.7.1
\ No newline at end of file
diff --git a/src/ApiService/ApiService/Functions/QueueJobResult.cs b/src/ApiService/ApiService/Functions/QueueJobResult.cs
deleted file mode 100644
index d781a4d1e1..0000000000
--- a/src/ApiService/ApiService/Functions/QueueJobResult.cs
+++ /dev/null
@@ -1,60 +0,0 @@
-using System.Text.Json;
-using Microsoft.Azure.Functions.Worker;
-using Microsoft.Extensions.Logging;
-using Microsoft.OneFuzz.Service.OneFuzzLib.Orm;
-namespace Microsoft.OneFuzz.Service.Functions;
-
-
-public class QueueJobResult {
-    private readonly ILogger _log;
-    private readonly IOnefuzzContext _context;
-
-    public QueueJobResult(ILogger<QueueJobResult> logTracer, IOnefuzzContext context) {
-        _log = logTracer;
-        _context = context;
-    }
-
-    [Function("QueueJobResult")]
-    public async Async.Task Run([QueueTrigger("job-result", Connection = "AzureWebJobsStorage")] string msg) {
-
-        var _tasks = _context.TaskOperations;
-        var _jobs = _context.JobOperations;
-
-        _log.LogInformation("job result: {msg}", msg);
-        var jr = JsonSerializer.Deserialize<TaskJobResultEntry>(msg, EntityConverter.GetJsonSerializerOptions()).EnsureNotNull($"wrong data {msg}");
-
-        var task = await _tasks.GetByTaskId(jr.TaskId);
-        if (task == null) {
-            _log.LogWarning("invalid {TaskId}", jr.TaskId);
-            return;
-        }
-
-        var job = await _jobs.Get(task.JobId);
-        if (job == null) {
-            _log.LogWarning("invalid {JobId}", task.JobId);
-            return;
-        }
-
-        JobResultData? data = jr.Data;
-        if (data == null) {
-            _log.LogWarning($"job result data is empty, throwing out: {jr}");
-            return;
-        }
-
-        var jobResultType = data.Type;
-        _log.LogInformation($"job result data type: {jobResultType}");
-
-        Dictionary<string, double> value;
-        if (jr.Value.Count > 0) {
-            value = jr.Value;
-        } else {
-            _log.LogWarning($"job result data is empty, throwing out: {jr}");
-            return;
-        }
-
-        var jobResult = await _context.JobResultOperations.CreateOrUpdate(job.JobId, jobResultType, value);
-        if (!jobResult.IsOk) {
-            _log.LogError("failed to create or update with job result {JobId}", job.JobId);
-        }
-    }
-}
diff --git a/src/ApiService/ApiService/OneFuzzTypes/Model.cs b/src/ApiService/ApiService/OneFuzzTypes/Model.cs
index b839f52ddc..e430c1448c 100644
--- a/src/ApiService/ApiService/OneFuzzTypes/Model.cs
+++ b/src/ApiService/ApiService/OneFuzzTypes/Model.cs
@@ -33,19 +33,6 @@ public enum HeartbeatType {
     TaskAlive,
 }

-[SkipRename]
-public enum JobResultType {
-    NewCrashingInput,
-    NoReproCrashingInput,
-    NewReport,
-    NewUniqueReport,
-    NewRegressionReport,
-    NewCoverage,
-    NewCrashDump,
-    CoverageData,
-    RuntimeStats,
-}
-
 public record HeartbeatData(HeartbeatType Type);

 public record TaskHeartbeatEntry(
@@ -54,16 +41,6 @@ public record TaskHeartbeatEntry(
     Guid MachineId,
     HeartbeatData[] Data);

-public record JobResultData(JobResultType Type);
-
-public record TaskJobResultEntry(
-    Guid TaskId,
-    Guid? JobId,
-    Guid MachineId,
-    JobResultData Data,
-    Dictionary<string, double> Value
-    );
-
 public record NodeHeartbeatEntry(Guid NodeId, HeartbeatData[] Data);

 public record NodeCommandStopIfFree();
@@ -915,27 +892,6 @@ public record SecretAddress(Uri Url) : ISecret {
 public record SecretData(ISecret Secret) {
 }

-public record JobResult(
-    [PartitionKey][RowKey] Guid JobId,
-    string Project,
-    string Name,
-    double NewCrashingInput = 0,
-    double NoReproCrashingInput = 0,
-    double NewReport = 0,
-    double NewUniqueReport = 0,
-    double NewRegressionReport = 0,
-    double NewCrashDump = 0,
-    double InstructionsCovered = 0,
-    double TotalInstructions = 0,
-    double CoverageRate = 0,
-    double IterationCount = 0
-) : EntityBase() {
-    public JobResult(Guid JobId, string Project, string Name) : this(
-        JobId: JobId,
-        Project: Project,
-        Name: Name, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) { }
-}
-
 public record JobConfig(
     string Project,
     string Name,
@@ -1100,7 +1056,6 @@ public record TaskUnitConfig(
     string? InstanceTelemetryKey,
     string? MicrosoftTelemetryKey,
     Uri HeartbeatQueue,
-    Uri JobResultQueue,
     Dictionary<string, string> Tags
 ) {
     public Uri? inputQueue { get; set; }
diff --git a/src/ApiService/ApiService/Program.cs b/src/ApiService/ApiService/Program.cs
index d5ee30b45e..f425c00809 100644
--- a/src/ApiService/ApiService/Program.cs
+++ b/src/ApiService/ApiService/Program.cs
@@ -118,7 +118,6 @@ public static async Async.Task Main() {
         .AddScoped()
         .AddScoped()
         .AddScoped()
-        .AddScoped<IJobResultOperations, JobResultOperations>()
         .AddScoped()
         .AddScoped()
         .AddScoped()
diff --git a/src/ApiService/ApiService/onefuzzlib/Config.cs b/src/ApiService/ApiService/onefuzzlib/Config.cs
index 872cedbc01..71af317348 100644
--- a/src/ApiService/ApiService/onefuzzlib/Config.cs
+++ b/src/ApiService/ApiService/onefuzzlib/Config.cs
@@ -71,7 +71,6 @@ private static BlobContainerSasPermissions ConvertPermissions(ContainerPermissio
             InstanceTelemetryKey: _serviceConfig.ApplicationInsightsInstrumentationKey,
             MicrosoftTelemetryKey: _serviceConfig.OneFuzzTelemetry,
             HeartbeatQueue: await _queue.GetQueueSas("task-heartbeat", StorageType.Config, QueueSasPermissions.Add) ?? throw new Exception("unable to get heartbeat queue sas"),
-            JobResultQueue: await _queue.GetQueueSas("job-result", StorageType.Config, QueueSasPermissions.Add) ?? throw new Exception("unable to get heartbeat queue sas"),
             Tags: task.Config.Tags ?? new Dictionary<string, string>()
         );
diff --git a/src/ApiService/ApiService/onefuzzlib/Extension.cs b/src/ApiService/ApiService/onefuzzlib/Extension.cs
index fbf62dd343..7995026eca 100644
--- a/src/ApiService/ApiService/onefuzzlib/Extension.cs
+++ b/src/ApiService/ApiService/onefuzzlib/Extension.cs
@@ -36,9 +36,7 @@ public async Async.Task<IList<VMExtensionWrapper>> GenericExtensions(AzureLocati
         var extensions = new List<VMExtensionWrapper>();

         var instanceConfig = await _context.ConfigOperations.Fetch();
-        if (vmOs == Os.Windows) {
-            extensions.Add(await MonitorExtension(region));
-        }
+        extensions.Add(await MonitorExtension(region, vmOs));

         var depenency = DependencyExtension(region, vmOs);
         if (depenency is not null) {
@@ -331,21 +329,37 @@ public async Async.Task AgentConfig(AzureLocation region, Os
         throw new NotSupportedException($"unsupported OS: {vmOs}");
     }

-    public async Async.Task<VMExtensionWrapper> MonitorExtension(AzureLocation region) {
+    public async Async.Task<VMExtensionWrapper> MonitorExtension(AzureLocation region, Os vmOs) {
         var settings = await _context.LogAnalytics.GetMonitorSettings();
         var extensionSettings = JsonSerializer.Serialize(new { WorkspaceId = settings.Id }, _extensionSerializerOptions);
         var protectedExtensionSettings = JsonSerializer.Serialize(new { WorkspaceKey = settings.Key }, _extensionSerializerOptions);

-        return new VMExtensionWrapper {
-            Location = region,
-            Name = "OMSExtension",
-            TypePropertiesType = "MicrosoftMonitoringAgent",
-            Publisher = "Microsoft.EnterpriseCloud.Monitoring",
-            TypeHandlerVersion = "1.0",
-            AutoUpgradeMinorVersion = true,
-            Settings = new BinaryData(extensionSettings),
-            ProtectedSettings = new BinaryData(protectedExtensionSettings),
-            EnableAutomaticUpgrade = false
-        };
+        if (vmOs == Os.Windows) {
+            return new VMExtensionWrapper {
+                Location = region,
+                Name = "OMSExtension",
+                TypePropertiesType = "MicrosoftMonitoringAgent",
+                Publisher = "Microsoft.EnterpriseCloud.Monitoring",
+                TypeHandlerVersion = "1.0",
+                AutoUpgradeMinorVersion = true,
+                Settings = new BinaryData(extensionSettings),
+                ProtectedSettings = new BinaryData(protectedExtensionSettings),
+                EnableAutomaticUpgrade = false
+            };
+        } else if (vmOs == Os.Linux) {
+            return new VMExtensionWrapper {
+                Location = region,
+                Name = "OmsAgentForLinux",
+                TypePropertiesType = "OmsAgentForLinux",
+                Publisher = "Microsoft.EnterpriseCloud.Monitoring",
+                TypeHandlerVersion = "1.0",
+                AutoUpgradeMinorVersion = true,
+                Settings = new BinaryData(extensionSettings),
+                ProtectedSettings = new BinaryData(protectedExtensionSettings),
+                EnableAutomaticUpgrade = false
+            };
+        } else {
+            throw new NotSupportedException($"unsupported os: {vmOs}");
+        }
     }
diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs
deleted file mode 100644
index 1166cf91d4..0000000000
--- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs
+++ /dev/null
@@ -1,121 +0,0 @@
-using ApiService.OneFuzzLib.Orm;
-using Microsoft.Extensions.Logging;
-using Polly;
-namespace Microsoft.OneFuzz.Service;
-
-public interface IJobResultOperations : IOrm<JobResult> {
-
-    Async.Task<JobResult?> GetJobResult(Guid jobId);
-    Async.Task<OneFuzzResultVoid> CreateOrUpdate(Guid jobId, JobResultType resultType, Dictionary<string, double> resultValue);
-
-}
-public class JobResultOperations : Orm<JobResult>, IJobResultOperations {
-
-    public JobResultOperations(ILogger<JobResultOperations> log, IOnefuzzContext context)
-        : base(log, context) {
-    }
-
-    public async Async.Task<JobResult?> GetJobResult(Guid jobId) {
-        return await SearchByPartitionKeys(new[] { jobId.ToString() }).SingleOrDefaultAsync();
-    }
-
-    private JobResult UpdateResult(JobResult result, JobResultType type, Dictionary<string, double> resultValue) {
-
-        var newResult = result;
-        double newValue;
-        switch (type) {
-            case JobResultType.NewCrashingInput:
-                newValue = result.NewCrashingInput + resultValue["count"];
-                newResult = result with { NewCrashingInput = newValue };
-                break;
-            case JobResultType.NewReport:
-                newValue = result.NewReport + resultValue["count"];
-                newResult = result with { NewReport = newValue };
-                break;
-            case JobResultType.NewUniqueReport:
-                newValue = result.NewUniqueReport + resultValue["count"];
-                newResult = result with { NewUniqueReport = newValue };
-                break;
-            case JobResultType.NewRegressionReport:
-                newValue = result.NewRegressionReport + resultValue["count"];
-                newResult = result with { NewRegressionReport = newValue };
-                break;
-            case JobResultType.NewCrashDump:
-                newValue = result.NewCrashDump + resultValue["count"];
-                newResult = result with { NewCrashDump = newValue };
-                break;
-            case JobResultType.CoverageData:
-                double newCovered = resultValue["covered"];
-                double newTotalCovered = resultValue["features"];
-                double newCoverageRate = resultValue["rate"];
-                newResult = result with { InstructionsCovered = newCovered, TotalInstructions = newTotalCovered, CoverageRate = newCoverageRate };
-                break;
-            case JobResultType.RuntimeStats:
-                double newTotalIterations = resultValue["total_count"];
-                newResult = result with { IterationCount = newTotalIterations };
-                break;
-            default:
-                _logTracer.LogWarning($"Invalid Field {type}.");
-                break;
-        }
-        _logTracer.LogInformation($"Attempting to log new result: {newResult}");
-        return newResult;
-    }
-
-    private async Async.Task<bool> TryUpdate(Job job, JobResultType resultType, Dictionary<string, double> resultValue) {
-        var jobId = job.JobId;
-
-        var jobResult = await GetJobResult(jobId);
-
-        if (jobResult == null) {
-            _logTracer.LogInformation("Creating new JobResult for Job {JobId}", jobId);
-
-            var entry = new JobResult(JobId: jobId, Project: job.Config.Project, Name: job.Config.Name);
-
-            jobResult = UpdateResult(entry, resultType, resultValue);
-
-            var r = await Insert(jobResult);
-            if (!r.IsOk) {
-                throw new InvalidOperationException($"failed to insert job result {jobResult.JobId}");
-            }
-            _logTracer.LogInformation("created job result {JobId}", jobResult.JobId);
-        } else {
-            _logTracer.LogInformation("Updating existing JobResult entry for Job {JobId}", jobId);
-
-            jobResult = UpdateResult(jobResult, resultType, resultValue);
-
-            var r = await Update(jobResult);
-            if (!r.IsOk) {
-                throw new InvalidOperationException($"failed to insert job result {jobResult.JobId}");
-            }
-            _logTracer.LogInformation("updated job result {JobId}", jobResult.JobId);
-        }
-
-        return true;
-    }
-
-    public async Async.Task<OneFuzzResultVoid> CreateOrUpdate(Guid jobId, JobResultType resultType, Dictionary<string, double> resultValue) {
-
-        var job = await _context.JobOperations.Get(jobId);
-        if (job == null) {
-            return OneFuzzResultVoid.Error(ErrorCode.INVALID_REQUEST, "invalid job");
-        }
-
-        var success = false;
-        try {
-            _logTracer.LogInformation("attempt to update job result {JobId}", job.JobId);
-            var policy = Policy.Handle<InvalidOperationException>().WaitAndRetryAsync(50, _ => new TimeSpan(0, 0, 5));
-            await policy.ExecuteAsync(async () => {
-                success = await TryUpdate(job, resultType, resultValue);
-                _logTracer.LogInformation("attempt {success}", success);
-            });
-            return OneFuzzResultVoid.Ok;
-        } catch (Exception e) {
-            return OneFuzzResultVoid.Error(ErrorCode.UNABLE_TO_UPDATE, new string[] {
-                $"Unexpected failure when attempting to update job result for {job.JobId}",
-                $"Exception: {e}"
-            });
-        }
-    }
-}
-
diff --git a/src/ApiService/ApiService/onefuzzlib/OnefuzzContext.cs b/src/ApiService/ApiService/onefuzzlib/OnefuzzContext.cs
index 03c6322663..d877bfddbb 100644
--- a/src/ApiService/ApiService/onefuzzlib/OnefuzzContext.cs
+++ b/src/ApiService/ApiService/onefuzzlib/OnefuzzContext.cs
@@ -19,7 +19,6 @@ public interface IOnefuzzContext {
     IExtensions Extensions { get; }
     IIpOperations IpOperations { get; }
     IJobOperations JobOperations { get; }
-    IJobResultOperations JobResultOperations { get; }
     ILogAnalytics LogAnalytics { get; }
     INodeMessageOperations NodeMessageOperations { get; }
     INodeOperations NodeOperations { get; }
@@ -84,7 +83,6 @@ public OnefuzzContext(IServiceProvider serviceProvider) {
     public IVmOperations VmOperations => _serviceProvider.GetRequiredService<IVmOperations>();
     public ISecretsOperations SecretsOperations => _serviceProvider.GetRequiredService<ISecretsOperations>();
     public IJobOperations JobOperations => _serviceProvider.GetRequiredService<IJobOperations>();
-    public IJobResultOperations JobResultOperations => _serviceProvider.GetRequiredService<IJobResultOperations>();
     public IScheduler Scheduler => _serviceProvider.GetRequiredService<IScheduler>();
     public IConfig Config => _serviceProvider.GetRequiredService<IConfig>();
     public ILogAnalytics LogAnalytics => _serviceProvider.GetRequiredService<ILogAnalytics>();
diff --git a/src/ApiService/IntegrationTests/Fakes/TestContext.cs b/src/ApiService/IntegrationTests/Fakes/TestContext.cs
index 66d121e746..c46ff5fce7 100644
--- a/src/ApiService/IntegrationTests/Fakes/TestContext.cs
+++ b/src/ApiService/IntegrationTests/Fakes/TestContext.cs
@@ -32,7 +32,6 @@ public TestContext(IHttpClientFactory httpClientFactory, OneFuzzLoggerProvider p
         TaskOperations = new TaskOperations(provider.CreateLogger<TaskOperations>(), Cache, this);
         NodeOperations = new NodeOperations(provider.CreateLogger<NodeOperations>(), this);
         JobOperations = new JobOperations(provider.CreateLogger<JobOperations>(), this);
-        JobResultOperations = new JobResultOperations(provider.CreateLogger<JobResultOperations>(), this);
         NodeTasksOperations = new NodeTasksOperations(provider.CreateLogger<NodeTasksOperations>(), this);
         TaskEventOperations = new TaskEventOperations(provider.CreateLogger<TaskEventOperations>(), this);
         NodeMessageOperations = new NodeMessageOperations(provider.CreateLogger<NodeMessageOperations>(), this);
@@ -58,7 +57,6 @@ public Async.Task InsertAll(params EntityBase[] objs)
             Node n => NodeOperations.Insert(n),
             Pool p => PoolOperations.Insert(p),
             Job j => JobOperations.Insert(j),
-            JobResult jr => JobResultOperations.Insert(jr),
             Repro r => ReproOperations.Insert(r),
             Scaleset ss => ScalesetOperations.Insert(ss),
             NodeTasks nt => NodeTasksOperations.Insert(nt),
@@ -86,7 +84,6 @@ public Async.Task InsertAll(params EntityBase[] objs)
     public ITaskOperations TaskOperations { get; }
     public IJobOperations JobOperations { get; }
-    public IJobResultOperations JobResultOperations { get; }
     public INodeOperations NodeOperations { get; }
     public INodeTasksOperations NodeTasksOperations { get; }
     public ITaskEventOperations TaskEventOperations { get; }
diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock
index 254684be97..a1d86e7d25 100644
--- a/src/agent/Cargo.lock
+++ b/src/agent/Cargo.lock
@@ -2123,7 +2123,6 @@ dependencies = [
  "log",
  "nix",
  "notify",
- "onefuzz-result",
  "onefuzz-telemetry",
  "pete",
  "pretty_assertions",
@@ -2198,20 +2197,6 @@ dependencies = [
  "serde_json",
 ]

-[[package]]
-name = "onefuzz-result"
-version = "0.2.0"
-dependencies = [
- "anyhow",
"async-trait", - "log", - "onefuzz-telemetry", - "reqwest", - "serde", - "storage-queue", - "uuid", -] - [[package]] name = "onefuzz-task" version = "0.2.0" @@ -2241,7 +2226,6 @@ dependencies = [ "num_cpus", "onefuzz", "onefuzz-file-format", - "onefuzz-result", "onefuzz-telemetry", "path-absolutize", "pretty_assertions", diff --git a/src/agent/Cargo.toml b/src/agent/Cargo.toml index ce01ae880c..2f4cea41a4 100644 --- a/src/agent/Cargo.toml +++ b/src/agent/Cargo.toml @@ -10,7 +10,6 @@ members = [ "onefuzz", "onefuzz-task", "onefuzz-agent", - "onefuzz-result", "onefuzz-file-format", "onefuzz-telemetry", "reqwest-retry", diff --git a/src/agent/onefuzz-agent/src/config.rs b/src/agent/onefuzz-agent/src/config.rs index fc623e72af..87edfb2c1b 100644 --- a/src/agent/onefuzz-agent/src/config.rs +++ b/src/agent/onefuzz-agent/src/config.rs @@ -34,8 +34,6 @@ pub struct StaticConfig { pub heartbeat_queue: Option, - pub job_result_queue: Option, - pub instance_id: Uuid, #[serde(default = "default_as_true")] @@ -73,8 +71,6 @@ struct RawStaticConfig { pub heartbeat_queue: Option, - pub job_result_queue: Option, - pub instance_id: Uuid, #[serde(default = "default_as_true")] @@ -121,7 +117,6 @@ impl StaticConfig { microsoft_telemetry_key: config.microsoft_telemetry_key, instance_telemetry_key: config.instance_telemetry_key, heartbeat_queue: config.heartbeat_queue, - job_result_queue: config.job_result_queue, instance_id: config.instance_id, managed: config.managed, machine_identity, @@ -157,12 +152,6 @@ impl StaticConfig { None }; - let job_result_queue = if let Ok(key) = std::env::var("ONEFUZZ_JOB_RESULT") { - Some(Url::parse(&key)?) - } else { - None - }; - let instance_telemetry_key = if let Ok(key) = std::env::var("ONEFUZZ_INSTANCE_TELEMETRY_KEY") { Some(InstanceTelemetryKey::new(Uuid::parse_str(&key)?)) @@ -194,7 +183,6 @@ impl StaticConfig { instance_telemetry_key, microsoft_telemetry_key, heartbeat_queue, - job_result_queue, instance_id, managed: !is_unmanaged, machine_identity, diff --git a/src/agent/onefuzz-agent/src/log_uploader.rs b/src/agent/onefuzz-agent/src/log_uploader.rs index d424013421..6bccc0bef2 100644 --- a/src/agent/onefuzz-agent/src/log_uploader.rs +++ b/src/agent/onefuzz-agent/src/log_uploader.rs @@ -210,3 +210,32 @@ async fn sync_file( blob_client.append_block(Body::from(f)).await?; Ok(len) } + +#[cfg(test)] +mod tests { + use std::io::Seek; + + use anyhow::Result; + use tokio::io::{AsyncReadExt, AsyncSeekExt}; + + #[allow(clippy::unused_io_amount)] + #[tokio::test] + #[ignore] + + async fn test_seek_behavior() -> Result<()> { + let path = "C:\\temp\\test.ps1"; + let mut std_file = std::fs::File::open(path)?; + std_file.seek(std::io::SeekFrom::Start(3))?; + + let mut tokio_file = tokio::fs::File::from_std(std_file); + + let buf = &mut [0u8; 5]; + tokio_file.read(buf).await?; + println!("******** buf {:?}", buf); + tokio_file.seek(std::io::SeekFrom::Start(0)).await?; + tokio_file.read(buf).await?; + println!("******** buf {:?}", buf); + + Ok(()) + } +} diff --git a/src/agent/onefuzz-agent/src/work.rs b/src/agent/onefuzz-agent/src/work.rs index d0222744a7..b55d1d86a1 100644 --- a/src/agent/onefuzz-agent/src/work.rs +++ b/src/agent/onefuzz-agent/src/work.rs @@ -91,10 +91,7 @@ impl WorkSet { pub fn setup_dir(&self) -> Result { let root = self.get_root_folder()?; - // Putting the setup container at the root for backward compatibility. 
- // The path of setup folder can be used as part of the deduplication logic in the bug filing service - let setup_root = root.parent().ok_or_else(|| anyhow!("Invalid root"))?; - self.setup_url.as_path(setup_root) + self.setup_url.as_path(root) } pub fn extra_setup_dir(&self) -> Result> { diff --git a/src/agent/onefuzz-result/Cargo.toml b/src/agent/onefuzz-result/Cargo.toml deleted file mode 100644 index 7c7de6615c..0000000000 --- a/src/agent/onefuzz-result/Cargo.toml +++ /dev/null @@ -1,18 +0,0 @@ -[package] -name = "onefuzz-result" -version = "0.2.0" -authors = ["fuzzing@microsoft.com"] -edition = "2021" -publish = false -license = "MIT" - -[dependencies] -anyhow = { version = "1.0", features = ["backtrace"] } -async-trait = "0.1" -reqwest = "0.11" -serde = "1.0" -storage-queue = { path = "../storage-queue" } -uuid = { version = "1.4", features = ["serde", "v4"] } -onefuzz-telemetry = { path = "../onefuzz-telemetry" } -log = "0.4" - diff --git a/src/agent/onefuzz-result/src/job_result.rs b/src/agent/onefuzz-result/src/job_result.rs deleted file mode 100644 index b305eca2cb..0000000000 --- a/src/agent/onefuzz-result/src/job_result.rs +++ /dev/null @@ -1,129 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. - -use anyhow::Result; -use async_trait::async_trait; -use onefuzz_telemetry::warn; -use reqwest::Url; -use serde::{self, Deserialize, Serialize}; -use std::collections::HashMap; -use std::sync::Arc; -use storage_queue::QueueClient; -use uuid::Uuid; - -#[derive(Debug, Deserialize, Serialize, Hash, Eq, PartialEq, Clone)] -#[serde(tag = "type")] -pub enum JobResultData { - NewCrashingInput, - NoReproCrashingInput, - NewReport, - NewUniqueReport, - NewRegressionReport, - NewCoverage, - NewCrashDump, - CoverageData, - RuntimeStats, -} - -#[derive(Debug, Deserialize, Serialize, Clone)] -struct JobResult { - task_id: Uuid, - job_id: Uuid, - machine_id: Uuid, - machine_name: String, - data: JobResultData, - value: HashMap, -} - -#[derive(Clone)] -pub struct TaskContext { - task_id: Uuid, - job_id: Uuid, - machine_id: Uuid, - machine_name: String, -} - -pub struct JobResultContext { - pub state: TaskContext, - pub queue_client: QueueClient, -} - -pub struct JobResultClient { - pub context: Arc>, -} - -impl JobResultClient { - pub fn init_job_result( - context: TaskContext, - queue_url: Url, - ) -> Result> - where - TaskContext: Send + Sync + 'static, - { - let context = Arc::new(JobResultContext { - state: context, - queue_client: QueueClient::new(queue_url)?, - }); - - Ok(JobResultClient { context }) - } -} - -pub type TaskJobResultClient = JobResultClient; - -pub async fn init_job_result( - queue_url: Url, - task_id: Uuid, - job_id: Uuid, - machine_id: Uuid, - machine_name: String, -) -> Result { - let hb = JobResultClient::init_job_result( - TaskContext { - task_id, - job_id, - machine_id, - machine_name, - }, - queue_url, - )?; - Ok(hb) -} - -#[async_trait] -pub trait JobResultSender { - async fn send_direct(&self, data: JobResultData, value: HashMap); -} - -#[async_trait] -impl JobResultSender for TaskJobResultClient { - async fn send_direct(&self, data: JobResultData, value: HashMap) { - let task_id = self.context.state.task_id; - let job_id = self.context.state.job_id; - let machine_id = self.context.state.machine_id; - let machine_name = self.context.state.machine_name.clone(); - - let _ = self - .context - .queue_client - .enqueue(JobResult { - task_id, - job_id, - machine_id, - machine_name, - data, - value, - }) - .await; - } -} - -#[async_trait] 
-impl JobResultSender for Option<TaskJobResultClient> {
-    async fn send_direct(&self, data: JobResultData, value: HashMap<String, f64>) {
-        match self {
-            Some(client) => client.send_direct(data, value).await,
-            None => warn!("Failed to send Job Result message data from agent."),
-        }
-    }
-}
diff --git a/src/agent/onefuzz-result/src/lib.rs b/src/agent/onefuzz-result/src/lib.rs
deleted file mode 100644
index dae666ca9a..0000000000
--- a/src/agent/onefuzz-result/src/lib.rs
+++ /dev/null
@@ -1,4 +0,0 @@
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT License.
-
-pub mod job_result;
diff --git a/src/agent/onefuzz-task/Cargo.toml b/src/agent/onefuzz-task/Cargo.toml
index 4e0bd381b0..0ad2f9aa4f 100644
--- a/src/agent/onefuzz-task/Cargo.toml
+++ b/src/agent/onefuzz-task/Cargo.toml
@@ -39,7 +39,6 @@ serde_json = "1.0"
 serde_yaml = "0.9.21"
 onefuzz = { path = "../onefuzz" }
 onefuzz-telemetry = { path = "../onefuzz-telemetry" }
-onefuzz-result = { path = "../onefuzz-result" }
 path-absolutize = "3.1"
 reqwest-retry = { path = "../reqwest-retry" }
 strum = "0.25"
diff --git a/src/agent/onefuzz-task/src/local/cmd.rs b/src/agent/onefuzz-task/src/local/cmd.rs
index eabefb71ee..80fd51a96b 100644
--- a/src/agent/onefuzz-task/src/local/cmd.rs
+++ b/src/agent/onefuzz-task/src/local/cmd.rs
@@ -3,7 +3,11 @@
 #[cfg(any(target_os = "linux", target_os = "windows"))]
 use crate::local::coverage;
-use crate::local::{common::add_common_config, libfuzzer_fuzz, tui::TerminalUi};
+use crate::local::{
+    common::add_common_config, generic_analysis, generic_crash_report, generic_generator,
+    libfuzzer, libfuzzer_crash_report, libfuzzer_fuzz, libfuzzer_merge, libfuzzer_regression,
+    libfuzzer_test_input, radamsa, test_input, tui::TerminalUi,
+};
 use anyhow::{Context, Result};
 use clap::{Arg, ArgAction, Command};
 use std::time::Duration;
@@ -17,9 +21,19 @@ use super::template;
 #[derive(Debug, PartialEq, Eq, EnumString, IntoStaticStr, EnumIter)]
 #[strum(serialize_all = "kebab-case")]
 enum Commands {
+    Radamsa,
     #[cfg(any(target_os = "linux", target_os = "windows"))]
     Coverage,
     LibfuzzerFuzz,
+    LibfuzzerMerge,
+    LibfuzzerCrashReport,
+    LibfuzzerTestInput,
+    LibfuzzerRegression,
+    Libfuzzer,
+    CrashReport,
+    Generator,
+    Analysis,
+    TestInput,
     Template,
 }
@@ -54,7 +68,23 @@ pub async fn run(args: clap::ArgMatches) -> Result<()> {
     match command {
         #[cfg(any(target_os = "linux", target_os = "windows"))]
         Commands::Coverage => coverage::run(&sub_args, event_sender).await,
+        Commands::Radamsa => radamsa::run(&sub_args, event_sender).await,
+        Commands::LibfuzzerCrashReport => {
+            libfuzzer_crash_report::run(&sub_args, event_sender).await
+        }
         Commands::LibfuzzerFuzz => libfuzzer_fuzz::run(&sub_args, event_sender).await,
+        Commands::LibfuzzerMerge => libfuzzer_merge::run(&sub_args, event_sender).await,
+        Commands::LibfuzzerTestInput => {
+            libfuzzer_test_input::run(&sub_args, event_sender).await
+        }
+        Commands::LibfuzzerRegression => {
+            libfuzzer_regression::run(&sub_args, event_sender).await
+        }
+        Commands::Libfuzzer => libfuzzer::run(&sub_args, event_sender).await,
+        Commands::CrashReport => generic_crash_report::run(&sub_args, event_sender).await,
+        Commands::Generator => generic_generator::run(&sub_args, event_sender).await,
+        Commands::Analysis => generic_analysis::run(&sub_args, event_sender).await,
+        Commands::TestInput => test_input::run(&sub_args, event_sender).await,
         Commands::Template => {
             let config = sub_args
                 .get_one::("config")
@@ -110,7 +140,17 @@ pub fn args(name: &'static str) -> Command {
         let app = match subcommand {
             #[cfg(any(target_os = "linux", target_os = "windows"))]
             Commands::Coverage => coverage::args(subcommand.into()),
+            Commands::Radamsa => radamsa::args(subcommand.into()),
+            Commands::LibfuzzerCrashReport => libfuzzer_crash_report::args(subcommand.into()),
             Commands::LibfuzzerFuzz => libfuzzer_fuzz::args(subcommand.into()),
+            Commands::LibfuzzerMerge => libfuzzer_merge::args(subcommand.into()),
+            Commands::LibfuzzerTestInput => libfuzzer_test_input::args(subcommand.into()),
+            Commands::LibfuzzerRegression => libfuzzer_regression::args(subcommand.into()),
+            Commands::Libfuzzer => libfuzzer::args(subcommand.into()),
+            Commands::CrashReport => generic_crash_report::args(subcommand.into()),
+            Commands::Generator => generic_generator::args(subcommand.into()),
+            Commands::Analysis => generic_analysis::args(subcommand.into()),
+            Commands::TestInput => test_input::args(subcommand.into()),
             Commands::Template => Command::new("template")
                 .about("uses the template to generate a run")
                 .args(vec![Arg::new("config")
diff --git a/src/agent/onefuzz-task/src/local/common.rs b/src/agent/onefuzz-task/src/local/common.rs
index 17940d799f..f8d7949e80 100644
--- a/src/agent/onefuzz-task/src/local/common.rs
+++ b/src/agent/onefuzz-task/src/local/common.rs
@@ -26,10 +26,20 @@ pub const INPUTS_DIR: &str = "inputs_dir";
 pub const CRASHES_DIR: &str = "crashes_dir";
 pub const CRASHDUMPS_DIR: &str = "crashdumps_dir";
 pub const TARGET_WORKERS: &str = "target_workers";
+pub const REPORTS_DIR: &str = "reports_dir";
+pub const NO_REPRO_DIR: &str = "no_repro_dir";
 pub const TARGET_TIMEOUT: &str = "target_timeout";
+pub const CHECK_RETRY_COUNT: &str = "check_retry_count";
+pub const DISABLE_CHECK_QUEUE: &str = "disable_check_queue";
+pub const UNIQUE_REPORTS_DIR: &str = "unique_reports_dir";
 pub const COVERAGE_DIR: &str = "coverage_dir";
 pub const READONLY_INPUTS: &str = "readonly_inputs_dir";
+pub const CHECK_ASAN_LOG: &str = "check_asan_log";
+pub const TOOLS_DIR: &str = "tools_dir";
+pub const RENAME_OUTPUT: &str = "rename_output";
 pub const CHECK_FUZZER_HELP: &str = "check_fuzzer_help";
+pub const DISABLE_CHECK_DEBUGGER: &str = "disable_check_debugger";
+pub const REGRESSION_REPORTS_DIR: &str = "regression_reports_dir";

 pub const TARGET_EXE: &str = "target_exe";
 pub const TARGET_ENV: &str = "target_env";
@@ -37,6 +47,17 @@ pub const TARGET_OPTIONS: &str = "target_options";
 // pub const SUPERVISOR_EXE: &str = "supervisor_exe";
 // pub const SUPERVISOR_ENV: &str = "supervisor_env";
 // pub const SUPERVISOR_OPTIONS: &str = "supervisor_options";
+pub const GENERATOR_EXE: &str = "generator_exe";
+pub const GENERATOR_ENV: &str = "generator_env";
+pub const GENERATOR_OPTIONS: &str = "generator_options";
+
+pub const ANALYZER_EXE: &str = "analyzer_exe";
+pub const ANALYZER_OPTIONS: &str = "analyzer_options";
+pub const ANALYZER_ENV: &str = "analyzer_env";
+pub const ANALYSIS_DIR: &str = "analysis_dir";
+pub const ANALYSIS_INPUTS: &str = "analysis_inputs";
+pub const ANALYSIS_UNIQUE_INPUTS: &str = "analysis_unique_inputs";
+pub const PRESERVE_EXISTING_OUTPUTS: &str = "preserve_existing_outputs";

 pub const CREATE_JOB_DIR: &str = "create_job_dir";
@@ -45,6 +66,7 @@ const WAIT_FOR_DIR_DELAY: Duration = Duration::from_secs(1);

 pub enum CmdType {
     Target,
+    Generator,
     // Supervisor,
 }
@@ -68,6 +90,7 @@ pub fn get_cmd_exe(cmd_type: CmdType, args: &clap::ArgMatches) -> Result<String>
     let name = match cmd_type {
         CmdType::Target => TARGET_EXE,
         // CmdType::Supervisor => SUPERVISOR_EXE,
+        CmdType::Generator => GENERATOR_EXE,
     };

     args.get_one::<String>(name)
@@ -79,6 +102,7 @@ pub fn get_cmd_arg(cmd_type: CmdType, args: &clap::ArgMatches) -> Vec<String> {
     let name = match cmd_type {
         CmdType::Target => TARGET_OPTIONS,
         // CmdType::Supervisor => SUPERVISOR_OPTIONS,
+        CmdType::Generator => GENERATOR_OPTIONS,
     };

     args.get_many::<String>(name)
@@ -91,6 +115,7 @@ pub fn get_cmd_env(cmd_type: CmdType, args: &clap::ArgMatches) -> Result<HashMap<String, String>> {
     let env_name = match cmd_type {
         CmdType::Target => TARGET_ENV,
         // CmdType::Supervisor => SUPERVISOR_ENV,
+        CmdType::Generator => GENERATOR_ENV,
     };
     get_hash_map(args, env_name)
 }
@@ -240,7 +265,6 @@ pub async fn build_local_context(
         },
         instance_telemetry_key: None,
         heartbeat_queue: None,
-        job_result_queue: None,
        microsoft_telemetry_key: None,
         logs: None,
         min_available_memory_mb: 0,
diff --git a/src/agent/onefuzz-task/src/local/example_templates/libfuzzer_basic.yml b/src/agent/onefuzz-task/src/local/example_templates/libfuzzer_basic.yml
index aba02c7991..7210893809 100644
--- a/src/agent/onefuzz-task/src/local/example_templates/libfuzzer_basic.yml
+++ b/src/agent/onefuzz-task/src/local/example_templates/libfuzzer_basic.yml
@@ -5,31 +5,28 @@

 # 2. Install llvm and export LLVM_SYMBOLIZER_PATH like we do in setup.sh

-required_args: &required_args
-  target_exe: "REPLACE_ME" # The path to your target
-  inputs: &inputs "REPLACE_ME" # A folder containining your inputs
-  crashes: &crashes "REPLACE_ME" # The folder where you want the crashing inputs to be output
-  crashdumps: "REPLACE_ME" # The folder where you want the crash dumps to be output
-  coverage: "REPLACE_ME" # The folder where you want the code coverage to be output
-  regression_reports: "REPLACE_ME" # The folder where you want the regression reports to be output
-
 target_args: &target_args
-  <<: *required_args
   target_env: {}
+  target_exe: "C:\\temp\\onefuzz\\integration\\windows-libfuzzer\\fuzz.exe"
   target_options: []

+inputs: &inputs "C:\\temp\\onefuzz\\integration\\windows-libfuzzer\\seeds"
+
 tasks:
   - type: LibFuzzer
     <<: *target_args
+    inputs: *inputs
+    crashes: &crash "./crashes"
     readonly_inputs: []
     check_fuzzer_help: true

-  - type: LibfuzzerRegression
+  - type: "Report"
     <<: *target_args
-
-  - type: "LibfuzzerCrashReport"
-    <<: *target_args
-    input_queue: *crashes
+    input_queue: *crash
+    crashes: *crash
+    reports: "./reports"
+    unique_reports: "./unique_reports"
+    no_repro: "./no_repro"
     check_fuzzer_help: true

   - type: "Coverage"
     <<: *target_args
     target_options:
       - "{input}"
     input_queue: *inputs
     readonly_inputs: [*inputs]
-
-  # The analysis task is optional in the libfuzzer_basic template
-  # - type: Analysis
-  #   <<: *target_args
-  #   analysis: "REPLACE_ME" # The folder where you want the analysis results to be output
-  #   analyzer_exe: "REPLACE_ME"
-  #   analyzer_options: []
-  #   analyzer_env: {}
+    coverage: "./coverage"
diff --git a/src/agent/onefuzz-task/src/local/generic_analysis.rs b/src/agent/onefuzz-task/src/local/generic_analysis.rs
index 429e7b0e3b..3d3e2fafc8 100644
--- a/src/agent/onefuzz-task/src/local/generic_analysis.rs
+++ b/src/agent/onefuzz-task/src/local/generic_analysis.rs
@@ -3,13 +3,139 @@

 use std::{collections::HashMap, path::PathBuf};

-use crate::tasks::config::CommonConfig;
+use crate::{
+    local::common::{
+        build_local_context, get_cmd_arg, get_cmd_exe, get_hash_map, get_synced_dir, CmdType,
+        SyncCountDirMonitor, UiEvent, ANALYSIS_DIR, ANALYZER_ENV, ANALYZER_EXE, ANALYZER_OPTIONS,
+        CRASHES_DIR, NO_REPRO_DIR, REPORTS_DIR, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, TOOLS_DIR,
+        UNIQUE_REPORTS_DIR,
+    },
+    tasks::{
+        analysis::generic::{run as run_analysis, Config},
+        config::CommonConfig,
+    },
+};
 use anyhow::Result;
 use async_trait::async_trait;
+use clap::{Arg, Command};
+use flume::Sender;
 use schemars::JsonSchema;
+use storage_queue::QueueClient;

 use super::template::{RunContext, Template};

+pub fn build_analysis_config(
+    args: &clap::ArgMatches,
+    input_queue: Option<QueueClient>,
+    common: CommonConfig,
+    event_sender: Option<Sender<UiEvent>>,
+) -> Result<Config> {
+    let target_exe = get_cmd_exe(CmdType::Target, args)?.into();
+    let target_options = get_cmd_arg(CmdType::Target, args);
+
+    let analyzer_exe = args
+        .get_one::<String>(ANALYZER_EXE)
+        .cloned()
+        .ok_or_else(|| format_err!("expected {ANALYZER_EXE}"))?;
+
+    let analyzer_options = args
+        .get_many::<String>(ANALYZER_OPTIONS)
+        .unwrap_or_default()
+        .map(|x| x.to_string())
+        .collect();
+
+    let analyzer_env = get_hash_map(args, ANALYZER_ENV)?;
+    let analysis = get_synced_dir(ANALYSIS_DIR, common.job_id, common.task_id, args)?
+        .monitor_count(&event_sender)?;
+    let tools = get_synced_dir(TOOLS_DIR, common.job_id, common.task_id, args)?;
+    let crashes = if input_queue.is_none() {
+        get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args)
+            .ok()
+            .monitor_count(&event_sender)?
+    } else {
+        None
+    };
+    let reports = get_synced_dir(REPORTS_DIR, common.job_id, common.task_id, args)
+        .ok()
+        .monitor_count(&event_sender)?;
+    let no_repro = get_synced_dir(NO_REPRO_DIR, common.job_id, common.task_id, args)
+        .ok()
+        .monitor_count(&event_sender)?;
+    let unique_reports = get_synced_dir(UNIQUE_REPORTS_DIR, common.job_id, common.task_id, args)
+        .ok()
+        .monitor_count(&event_sender)?;
+
+    let config = Config {
+        analyzer_exe,
+        analyzer_options,
+        analyzer_env,
+        target_exe,
+        target_options,
+        input_queue,
+        crashes,
+        analysis,
+        tools: Some(tools),
+        reports,
+        unique_reports,
+        no_repro,
+        common,
+    };
+
+    Ok(config)
+}
+
+pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
+    let context = build_local_context(args, true, event_sender.clone()).await?;
+    let config = build_analysis_config(args, None, context.common_config.clone(), event_sender)?;
+    run_analysis(config).await
+}
+
+pub fn build_shared_args(required_task: bool) -> Vec<Arg> {
+    vec![
+        Arg::new(TARGET_EXE).long(TARGET_EXE).required(true),
+        Arg::new(TARGET_ENV)
+            .long(TARGET_ENV)
+            .requires(TARGET_EXE)
+            .num_args(0..),
+        Arg::new(TARGET_OPTIONS)
+            .long(TARGET_OPTIONS)
+            .default_value("{input}")
+            .value_delimiter(' ')
+            .help("Use a quoted string with space separation to denote multiple arguments"),
+        Arg::new(CRASHES_DIR)
+            .long(CRASHES_DIR)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(ANALYZER_OPTIONS)
+            .long(ANALYZER_OPTIONS)
+            .requires(ANALYZER_EXE)
+            .value_delimiter(' ')
+            .help("Use a quoted string with space separation to denote multiple arguments"),
+        Arg::new(ANALYZER_ENV)
+            .long(ANALYZER_ENV)
+            .requires(ANALYZER_EXE)
+            .num_args(0..),
+        Arg::new(TOOLS_DIR)
+            .long(TOOLS_DIR)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(ANALYZER_EXE)
+            .long(ANALYZER_EXE)
+            .requires(ANALYSIS_DIR)
+            .requires(CRASHES_DIR)
+            .required(required_task),
+        Arg::new(ANALYSIS_DIR)
+            .long(ANALYSIS_DIR)
+            .requires(ANALYZER_EXE)
+            .requires(CRASHES_DIR)
+            .required(required_task),
+    ]
+}
+
+pub fn args(name: &'static str) -> Command {
+    Command::new(name)
+        .about("execute a local-only generic analysis")
+        .args(&build_shared_args(true))
+}
+
 #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
 pub struct Analysis {
     analyzer_exe: String,
@@ -20,7 +146,7 @@ pub struct Analysis {
     input_queue: Option<PathBuf>,
     crashes: Option<PathBuf>,
     analysis: PathBuf,
-    tools: Option<PathBuf>,
+    tools: PathBuf,
     reports: Option<PathBuf>,
     unique_reports: Option<PathBuf>,
     no_repro: Option<PathBuf>,
@@ -49,10 +175,9 @@ impl Template for Analysis {
                 .and_then(|path| context.to_monitored_sync_dir("crashes", path).ok()),

             analysis: context.to_monitored_sync_dir("analysis", self.analysis.clone())?,
-            tools: self
-                .tools
-                .as_ref()
-                .and_then(|path| context.to_monitored_sync_dir("tools", path).ok()),
+            tools: context
+                .to_monitored_sync_dir("tools", self.tools.clone())
+                .ok(),

             reports: self
                 .reports
diff --git a/src/agent/onefuzz-task/src/local/generic_crash_report.rs b/src/agent/onefuzz-task/src/local/generic_crash_report.rs
index 347a8cac76..6b0e2fccad 100644
--- a/src/agent/onefuzz-task/src/local/generic_crash_report.rs
+++ b/src/agent/onefuzz-task/src/local/generic_crash_report.rs
@@ -3,14 +3,150 @@

 use std::{collections::HashMap, path::PathBuf};

-use crate::tasks::{config::CommonConfig, utils::default_bool_true};
+use crate::{
+    local::common::{
+        build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir, CmdType,
+        SyncCountDirMonitor, UiEvent, CHECK_ASAN_LOG, CHECK_RETRY_COUNT, CRASHES_DIR,
+        DISABLE_CHECK_DEBUGGER, DISABLE_CHECK_QUEUE, NO_REPRO_DIR, REPORTS_DIR, TARGET_ENV,
+        TARGET_EXE, TARGET_OPTIONS, TARGET_TIMEOUT, UNIQUE_REPORTS_DIR,
+    },
+    tasks::{
+        config::CommonConfig,
+        report::generic::{Config, ReportTask},
+        utils::default_bool_true,
+    },
+};
 use anyhow::Result;
 use async_trait::async_trait;
+use clap::{Arg, ArgAction, Command};
+use flume::Sender;
 use futures::future::OptionFuture;
 use schemars::JsonSchema;
+use storage_queue::QueueClient;

 use super::template::{RunContext, Template};

+pub fn build_report_config(
+    args: &clap::ArgMatches,
+    input_queue: Option<QueueClient>,
+    common: CommonConfig,
+    event_sender: Option<Sender<UiEvent>>,
+) -> Result<Config> {
+    let target_exe = get_cmd_exe(CmdType::Target, args)?.into();
+    let target_env = get_cmd_env(CmdType::Target, args)?;
+    let target_options = get_cmd_arg(CmdType::Target, args);
+
+    let crashes = Some(get_synced_dir(
+        CRASHES_DIR,
+        common.job_id,
+        common.task_id,
+        args,
+    )?)
+    .monitor_count(&event_sender)?;
+    let reports = get_synced_dir(REPORTS_DIR, common.job_id, common.task_id, args)
+        .ok()
+        .monitor_count(&event_sender)?;
+    let no_repro = get_synced_dir(NO_REPRO_DIR, common.job_id, common.task_id, args)
+        .ok()
+        .monitor_count(&event_sender)?;
+
+    let unique_reports = Some(get_synced_dir(
+        UNIQUE_REPORTS_DIR,
+        common.job_id,
+        common.task_id,
+        args,
+    )?)
+    .monitor_count(&event_sender)?;
+
+    let target_timeout = args.get_one::<u64>(TARGET_TIMEOUT).copied();
+
+    let check_retry_count = args
+        .get_one::<u64>(CHECK_RETRY_COUNT)
+        .copied()
+        .expect("has a default");
+
+    let check_queue = !args.get_flag(DISABLE_CHECK_QUEUE);
+    let check_asan_log = args.get_flag(CHECK_ASAN_LOG);
+    let check_debugger = !args.get_flag(DISABLE_CHECK_DEBUGGER);
+
+    let config = Config {
+        target_exe,
+        target_env,
+        target_options,
+        target_timeout,
+        check_asan_log,
+        check_debugger,
+        check_retry_count,
+        check_queue,
+        crashes,
+        minimized_stack_depth: None,
+        input_queue,
+        no_repro,
+        reports,
+        unique_reports,
+        common,
+    };
+
+    Ok(config)
+}
+
+pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
+    let context = build_local_context(args, true, event_sender.clone()).await?;
+    let config = build_report_config(args, None, context.common_config.clone(), event_sender)?;
+    ReportTask::new(config).managed_run().await
+}
+
+pub fn build_shared_args() -> Vec<Arg> {
+    vec![
+        Arg::new(TARGET_EXE).long(TARGET_EXE).required(true),
+        Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..),
+        Arg::new(TARGET_OPTIONS)
+            .default_value("{input}")
+            .long(TARGET_OPTIONS)
+            .value_delimiter(' ')
+            .help("Use a quoted string with space separation to denote multiple arguments"),
+        Arg::new(CRASHES_DIR)
+            .long(CRASHES_DIR)
+            .required(true)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(REPORTS_DIR)
+            .long(REPORTS_DIR)
+            .required(false)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(NO_REPRO_DIR)
+            .long(NO_REPRO_DIR)
+            .required(false)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(UNIQUE_REPORTS_DIR)
+            .long(UNIQUE_REPORTS_DIR)
+            .value_parser(value_parser!(PathBuf))
+            .required(true),
+        Arg::new(TARGET_TIMEOUT)
+            .long(TARGET_TIMEOUT)
+            .value_parser(value_parser!(u64))
+            .default_value("30"),
+        Arg::new(CHECK_RETRY_COUNT)
+            .long(CHECK_RETRY_COUNT)
+            .value_parser(value_parser!(u64))
+            .default_value("0"),
+        Arg::new(DISABLE_CHECK_QUEUE)
+            .action(ArgAction::SetTrue)
+            .long(DISABLE_CHECK_QUEUE),
+        Arg::new(CHECK_ASAN_LOG)
+            .action(ArgAction::SetTrue)
+            .long(CHECK_ASAN_LOG),
+        Arg::new(DISABLE_CHECK_DEBUGGER)
+            .action(ArgAction::SetTrue)
+            .long(DISABLE_CHECK_DEBUGGER),
+    ]
+}
+
+pub fn args(name: &'static str) -> Command {
+    Command::new(name)
+        .about("execute a local-only generic crash report")
+        .args(&build_shared_args())
+}
+
 #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
 pub struct CrashReport {
     target_exe: PathBuf,
diff --git a/src/agent/onefuzz-task/src/local/generic_generator.rs b/src/agent/onefuzz-task/src/local/generic_generator.rs
index ae9f6a3cc6..823ba221d6 100644
--- a/src/agent/onefuzz-task/src/local/generic_generator.rs
+++ b/src/agent/onefuzz-task/src/local/generic_generator.rs
@@ -3,14 +3,154 @@

 use std::{collections::HashMap, path::PathBuf};

-use crate::tasks::{config::CommonConfig, utils::default_bool_true};
+use crate::{
+    local::common::{
+        build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir,
+        get_synced_dirs, CmdType, SyncCountDirMonitor, UiEvent, CHECK_ASAN_LOG, CHECK_RETRY_COUNT,
+        CRASHES_DIR, DISABLE_CHECK_DEBUGGER, GENERATOR_ENV, GENERATOR_EXE, GENERATOR_OPTIONS,
+        READONLY_INPUTS, RENAME_OUTPUT, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, TARGET_TIMEOUT,
+        TOOLS_DIR,
+    },
+    tasks::{
+        config::CommonConfig,
+        fuzz::generator::{Config, GeneratorTask},
+        utils::default_bool_true,
+    },
+};
 use anyhow::Result;
 use async_trait::async_trait;
+use clap::{Arg, ArgAction, Command};
+use flume::Sender;
 use onefuzz::syncdir::SyncedDir;
 use schemars::JsonSchema;

 use super::template::{RunContext, Template};

+pub fn build_fuzz_config(
+    args: &clap::ArgMatches,
+    common: CommonConfig,
+    event_sender: Option<Sender<UiEvent>>,
+) -> Result<Config> {
+    let crashes = get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args)?
+        .monitor_count(&event_sender)?;
+    let target_exe = get_cmd_exe(CmdType::Target, args)?.into();
+    let target_options = get_cmd_arg(CmdType::Target, args);
+    let target_env = get_cmd_env(CmdType::Target, args)?;
+
+    let generator_exe = get_cmd_exe(CmdType::Generator, args)?;
+    let generator_options = get_cmd_arg(CmdType::Generator, args);
+    let generator_env = get_cmd_env(CmdType::Generator, args)?;
+    let readonly_inputs = get_synced_dirs(READONLY_INPUTS, common.job_id, common.task_id, args)?
+        .into_iter()
+        .map(|sd| sd.monitor_count(&event_sender))
+        .collect::<Result<Vec<_>>>()?;
+
+    let rename_output = args.get_flag(RENAME_OUTPUT);
+    let check_asan_log = args.get_flag(CHECK_ASAN_LOG);
+    let check_debugger = !args.get_flag(DISABLE_CHECK_DEBUGGER);
+
+    let check_retry_count = args
+        .get_one::<u64>(CHECK_RETRY_COUNT)
+        .copied()
+        .expect("has a default");
+
+    let target_timeout = Some(
+        args.get_one::<u64>(TARGET_TIMEOUT)
+            .copied()
+            .expect("has a default"),
+    );
+
+    let tools = get_synced_dir(TOOLS_DIR, common.job_id, common.task_id, args)
+        .ok()
+        .monitor_count(&event_sender)?;
+
+    let ensemble_sync_delay = None;
+
+    let config = Config {
+        generator_exe,
+        generator_env,
+        generator_options,
+        readonly_inputs,
+        crashes,
+        tools,
+        target_exe,
+        target_env,
+        target_options,
+        target_timeout,
+        check_asan_log,
+        check_debugger,
+        check_retry_count,
+        rename_output,
+        ensemble_sync_delay,
+        common,
+    };
+
+    Ok(config)
+}
+
+pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
+    let context = build_local_context(args, true, event_sender.clone()).await?;
+    let config = build_fuzz_config(args, context.common_config.clone(), event_sender)?;
+    GeneratorTask::new(config).run().await
+}
+
+pub fn build_shared_args() -> Vec<Arg> {
+    vec![
+        Arg::new(TARGET_EXE).long(TARGET_EXE).required(true),
+        Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..),
+        Arg::new(TARGET_OPTIONS)
+            .default_value("{input}")
+            .long(TARGET_OPTIONS)
+            .value_delimiter(' ')
+            .help("Use a quoted string with space separation to denote multiple arguments"),
+        Arg::new(GENERATOR_EXE)
+            .long(GENERATOR_EXE)
+            .default_value("radamsa")
+            .required(true),
+        Arg::new(GENERATOR_ENV).long(GENERATOR_ENV).num_args(0..),
+        Arg::new(GENERATOR_OPTIONS)
+            .long(GENERATOR_OPTIONS)
+            .value_delimiter(' ')
+            .default_value("-H sha256 -o {generated_inputs}/input-%h.%s -n 100 -r {input_corpus}")
+            .help("Use a quoted string with space separation to denote multiple arguments"),
+        Arg::new(CRASHES_DIR)
+            .required(true)
+            .long(CRASHES_DIR)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(READONLY_INPUTS)
+            .required(true)
+            .num_args(1..)
+            .value_parser(value_parser!(PathBuf))
+            .long(READONLY_INPUTS),
+        Arg::new(TOOLS_DIR)
+            .long(TOOLS_DIR)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(CHECK_RETRY_COUNT)
+            .long(CHECK_RETRY_COUNT)
+            .value_parser(value_parser!(u64))
+            .default_value("0"),
+        Arg::new(CHECK_ASAN_LOG)
+            .action(ArgAction::SetTrue)
+            .long(CHECK_ASAN_LOG),
+        Arg::new(RENAME_OUTPUT)
+            .action(ArgAction::SetTrue)
+            .long(RENAME_OUTPUT),
+        Arg::new(TARGET_TIMEOUT)
+            .long(TARGET_TIMEOUT)
+            .value_parser(value_parser!(u64))
+            .default_value("30"),
+        Arg::new(DISABLE_CHECK_DEBUGGER)
+            .action(ArgAction::SetTrue)
+            .long(DISABLE_CHECK_DEBUGGER),
+    ]
+}
+
+pub fn args(name: &'static str) -> Command {
+    Command::new(name)
+        .about("execute a local-only generator fuzzing task")
+        .args(&build_shared_args())
+}
+
 #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
 pub struct Generator {
     generator_exe: String,
diff --git a/src/agent/onefuzz-task/src/local/libfuzzer.rs b/src/agent/onefuzz-task/src/local/libfuzzer.rs
index 433636be1c..56dff7dbe3 100644
--- a/src/agent/onefuzz-task/src/local/libfuzzer.rs
+++ b/src/agent/onefuzz-task/src/local/libfuzzer.rs
@@ -1,19 +1,168 @@
 // Copyright (c) Microsoft Corporation.
 // Licensed under the MIT License.

-use crate::tasks::{
-    config::CommonConfig,
-    fuzz::libfuzzer::{common::default_workers, generic::LibFuzzerFuzzTask},
-    utils::default_bool_true,
+#[cfg(any(target_os = "linux", target_os = "windows"))]
+use crate::{
+    local::{common::COVERAGE_DIR, coverage, coverage::build_shared_args as build_coverage_args},
+    tasks::coverage::generic::CoverageTask,
+};
+use crate::{
+    local::{
+        common::{
+            build_local_context, wait_for_dir, DirectoryMonitorQueue, UiEvent, ANALYZER_EXE,
+            REGRESSION_REPORTS_DIR, UNIQUE_REPORTS_DIR,
+        },
+        generic_analysis::{build_analysis_config, build_shared_args as build_analysis_args},
+        libfuzzer_crash_report::{build_report_config, build_shared_args as build_crash_args},
+        libfuzzer_fuzz::{build_fuzz_config, build_shared_args as build_fuzz_args},
+        libfuzzer_regression::{
+            build_regression_config, build_shared_args as build_regression_args,
+        },
+    },
+    tasks::{
+        analysis::generic::run as run_analysis,
+        config::CommonConfig,
+        fuzz::libfuzzer::{common::default_workers, generic::LibFuzzerFuzzTask},
+        regression::libfuzzer::LibFuzzerRegressionTask,
+        report::libfuzzer_report::ReportTask,
+        utils::default_bool_true,
+    },
 };
 use anyhow::Result;
 use async_trait::async_trait;
-use onefuzz::syncdir::SyncedDir;
+use clap::Command;
+use flume::Sender;
+use onefuzz::{syncdir::SyncedDir, utils::try_wait_all_join_handles};
 use schemars::JsonSchema;
-use std::{collections::HashMap, path::PathBuf};
+use std::{
+    collections::{HashMap, HashSet},
+    path::PathBuf,
+};
+use tokio::task::spawn;
+use uuid::Uuid;

 use super::template::{RunContext, Template};

+pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
+    let context = build_local_context(args, true, event_sender.clone()).await?;
+    let fuzz_config = build_fuzz_config(args, context.common_config.clone(), event_sender.clone())?;
+    let crash_dir = fuzz_config
+        .crashes
+        .remote_url()?
+ .as_file_path() + .expect("invalid crash dir remote location"); + + let fuzzer = LibFuzzerFuzzTask::new(fuzz_config)?; + let mut task_handles = vec![]; + + let fuzz_task = spawn(async move { fuzzer.run().await }); + + wait_for_dir(&crash_dir).await?; + + task_handles.push(fuzz_task); + + if args.contains_id(UNIQUE_REPORTS_DIR) { + let crash_report_input_monitor = + DirectoryMonitorQueue::start_monitoring(crash_dir.clone()).await?; + + let report_config = build_report_config( + args, + Some(crash_report_input_monitor.queue_client), + CommonConfig { + task_id: Uuid::new_v4(), + ..context.common_config.clone() + }, + event_sender.clone(), + )?; + + let mut report = ReportTask::new(report_config); + let report_task = spawn(async move { report.managed_run().await }); + + task_handles.push(report_task); + task_handles.push(crash_report_input_monitor.handle); + } + + #[cfg(any(target_os = "linux", target_os = "windows"))] + if args.contains_id(COVERAGE_DIR) { + let coverage_input_monitor = + DirectoryMonitorQueue::start_monitoring(crash_dir.clone()).await?; + let coverage_config = coverage::build_coverage_config( + args, + true, + Some(coverage_input_monitor.queue_client), + CommonConfig { + task_id: Uuid::new_v4(), + ..context.common_config.clone() + }, + event_sender.clone(), + )?; + + let mut coverage = CoverageTask::new(coverage_config); + let coverage_task = spawn(async move { coverage.run().await }); + + task_handles.push(coverage_task); + task_handles.push(coverage_input_monitor.handle); + } + + if args.contains_id(ANALYZER_EXE) { + let analysis_input_monitor = DirectoryMonitorQueue::start_monitoring(crash_dir).await?; + let analysis_config = build_analysis_config( + args, + Some(analysis_input_monitor.queue_client), + CommonConfig { + task_id: Uuid::new_v4(), + ..context.common_config.clone() + }, + event_sender.clone(), + )?; + let analysis_task = spawn(async move { run_analysis(analysis_config).await }); + + task_handles.push(analysis_task); + task_handles.push(analysis_input_monitor.handle); + } + + if args.contains_id(REGRESSION_REPORTS_DIR) { + let regression_config = build_regression_config( + args, + CommonConfig { + task_id: Uuid::new_v4(), + ..context.common_config.clone() + }, + event_sender, + )?; + let regression = LibFuzzerRegressionTask::new(regression_config); + let regression_task = spawn(async move { regression.run().await }); + task_handles.push(regression_task); + } + + try_wait_all_join_handles(task_handles).await?; + + Ok(()) +} + +pub fn args(name: &'static str) -> Command { + let mut app = Command::new(name).about("run a local libfuzzer & crash reporting task"); + + let mut used = HashSet::new(); + + for args in &[ + build_fuzz_args(), + build_crash_args(), + build_analysis_args(false), + #[cfg(any(target_os = "linux", target_os = "windows"))] + build_coverage_args(true), + build_regression_args(false), + ] { + for arg in args { + if used.insert(arg.get_id()) { + app = app.arg(arg); + } + } + } + + app +} + #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct LibFuzzer { inputs: PathBuf, diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs b/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs index 04ba4f9225..c1ab283575 100644 --- a/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs +++ b/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs @@ -3,13 +3,139 @@ use std::{collections::HashMap, path::PathBuf}; -use crate::tasks::{config::CommonConfig, utils::default_bool_true}; +use crate::{ + local::common::{ 
+        build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir, CmdType,
+        SyncCountDirMonitor, UiEvent, CHECK_FUZZER_HELP, CHECK_RETRY_COUNT, CRASHES_DIR,
+        DISABLE_CHECK_QUEUE, NO_REPRO_DIR, REPORTS_DIR, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS,
+        TARGET_TIMEOUT, UNIQUE_REPORTS_DIR,
+    },
+    tasks::{
+        config::CommonConfig,
+        report::libfuzzer_report::{Config, ReportTask},
+        utils::default_bool_true,
+    },
+};
 use anyhow::Result;
 use async_trait::async_trait;
+use clap::{Arg, ArgAction, Command};
+use flume::Sender;
 use futures::future::OptionFuture;
 use schemars::JsonSchema;
+use storage_queue::QueueClient;
 
 use super::template::{RunContext, Template};
+
+pub fn build_report_config(
+    args: &clap::ArgMatches,
+    input_queue: Option<QueueClient>,
+    common: CommonConfig,
+    event_sender: Option<Sender<UiEvent>>,
+) -> Result<Config> {
+    let target_exe = get_cmd_exe(CmdType::Target, args)?.into();
+    let target_env = get_cmd_env(CmdType::Target, args)?;
+    let target_options = get_cmd_arg(CmdType::Target, args);
+
+    let crashes = get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args)
+        .ok()
+        .monitor_count(&event_sender)?;
+    let reports = get_synced_dir(REPORTS_DIR, common.job_id, common.task_id, args)
+        .ok()
+        .monitor_count(&event_sender)?;
+
+    let no_repro = get_synced_dir(NO_REPRO_DIR, common.job_id, common.task_id, args)
+        .ok()
+        .monitor_count(&event_sender)?;
+
+    let unique_reports = get_synced_dir(UNIQUE_REPORTS_DIR, common.job_id, common.task_id, args)
+        .ok()
+        .monitor_count(&event_sender)?;
+
+    let target_timeout = args.get_one::<u64>(TARGET_TIMEOUT).copied();
+
+    let check_retry_count = args
+        .get_one::<u64>(CHECK_RETRY_COUNT)
+        .copied()
+        .expect("has a default");
+
+    let check_queue = !args.get_flag(DISABLE_CHECK_QUEUE);
+
+    let check_fuzzer_help = args.get_flag(CHECK_FUZZER_HELP);
+
+    let crashes = if input_queue.is_none() { crashes } else { None };
+
+    let config = Config {
+        target_exe,
+        target_env,
+        target_options,
+        target_timeout,
+        check_retry_count,
+        check_fuzzer_help,
+        minimized_stack_depth: None,
+        input_queue,
+        check_queue,
+        crashes,
+        reports,
+        no_repro,
+        unique_reports,
+        common,
+    };
+
+    Ok(config)
+}
+
+pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
+    let context = build_local_context(args, true, event_sender.clone()).await?;
+    let config = build_report_config(args, None, context.common_config.clone(), event_sender)?;
+    ReportTask::new(config).managed_run().await
+}
+
+pub fn build_shared_args() -> Vec<Arg> {
+    vec![
+        Arg::new(TARGET_EXE).long(TARGET_EXE).required(true),
+        Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..),
+        Arg::new(TARGET_OPTIONS)
+            .long(TARGET_OPTIONS)
+            .value_delimiter(' ')
+            .help("Use a quoted string with space separation to denote multiple arguments"),
+        Arg::new(CRASHES_DIR)
+            .long(CRASHES_DIR)
+            .required(true)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(REPORTS_DIR)
+            .long(REPORTS_DIR)
+            .required(false)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(NO_REPRO_DIR)
+            .long(NO_REPRO_DIR)
+            .required(false)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(UNIQUE_REPORTS_DIR)
+            .long(UNIQUE_REPORTS_DIR)
+            .required(true)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(TARGET_TIMEOUT)
+            .value_parser(value_parser!(u64))
+            .long(TARGET_TIMEOUT),
+        Arg::new(CHECK_RETRY_COUNT)
+            .long(CHECK_RETRY_COUNT)
+            .value_parser(value_parser!(u64))
+            .default_value("0"),
+        Arg::new(DISABLE_CHECK_QUEUE)
+            .action(ArgAction::SetTrue)
+            .long(DISABLE_CHECK_QUEUE),
+        Arg::new(CHECK_FUZZER_HELP)
+            .action(ArgAction::SetTrue)
+            .long(CHECK_FUZZER_HELP),
+    ]
+}
+
+pub fn args(name: &'static str) -> Command {
+    Command::new(name)
+        .about("execute a local-only libfuzzer crash report task")
+        .args(&build_shared_args())
+}
+
 #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
 pub struct LibfuzzerCrashReport {
     target_exe: PathBuf,
diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs b/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs
index 4b3e4ce58f..69c9df820b 100644
--- a/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs
+++ b/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs
@@ -3,15 +3,97 @@
 
 use std::{collections::HashMap, path::PathBuf};
 
-use crate::tasks::{config::CommonConfig, utils::default_bool_true};
+use crate::{
+    local::common::{
+        build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir,
+        get_synced_dirs, CmdType, SyncCountDirMonitor, UiEvent, ANALYSIS_INPUTS,
+        ANALYSIS_UNIQUE_INPUTS, CHECK_FUZZER_HELP, INPUTS_DIR, PRESERVE_EXISTING_OUTPUTS,
+        TARGET_ENV, TARGET_EXE, TARGET_OPTIONS,
+    },
+    tasks::{
+        config::CommonConfig,
+        merge::libfuzzer_merge::{spawn, Config},
+        utils::default_bool_true,
+    },
+};
 use anyhow::Result;
 use async_trait::async_trait;
+use clap::{Arg, ArgAction, Command};
+use flume::Sender;
 use futures::future::OptionFuture;
 use onefuzz::syncdir::SyncedDir;
 use schemars::JsonSchema;
+use storage_queue::QueueClient;
 
 use super::template::{RunContext, Template};
 
+pub fn build_merge_config(
+    args: &clap::ArgMatches,
+    input_queue: Option<QueueClient>,
+    common: CommonConfig,
+    event_sender: Option<Sender<UiEvent>>,
+) -> Result<Config> {
+    let target_exe = get_cmd_exe(CmdType::Target, args)?.into();
+    let target_env = get_cmd_env(CmdType::Target, args)?;
+    let target_options = get_cmd_arg(CmdType::Target, args);
+    let check_fuzzer_help = args.get_flag(CHECK_FUZZER_HELP);
+    let inputs = get_synced_dirs(ANALYSIS_INPUTS, common.job_id, common.task_id, args)?
+        .into_iter()
+        .map(|sd| sd.monitor_count(&event_sender))
+        .collect::<Result<Vec<_>>>()?;
+    let unique_inputs =
+        get_synced_dir(ANALYSIS_UNIQUE_INPUTS, common.job_id, common.task_id, args)?
+            .monitor_count(&event_sender)?;
+    let preserve_existing_outputs = args
+        .get_one::<bool>(PRESERVE_EXISTING_OUTPUTS)
+        .copied()
+        .unwrap_or_default();
+
+    let config = Config {
+        target_exe,
+        target_env,
+        target_options,
+        input_queue,
+        inputs,
+        unique_inputs,
+        preserve_existing_outputs,
+        check_fuzzer_help,
+        common,
+    };
+
+    Ok(config)
+}
+
+pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
+    let context = build_local_context(args, true, event_sender.clone()).await?;
+    let config = build_merge_config(args, None, context.common_config.clone(), event_sender)?;
+    spawn(config).await
+}
+
+pub fn build_shared_args() -> Vec<Arg> {
+    vec![
+        Arg::new(TARGET_EXE).long(TARGET_EXE).required(true),
+        Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..),
+        Arg::new(TARGET_OPTIONS)
+            .long(TARGET_OPTIONS)
+            .value_delimiter(' ')
+            .help("Use a quoted string with space separation to denote multiple arguments"),
+        Arg::new(CHECK_FUZZER_HELP)
+            .action(ArgAction::SetTrue)
+            .long(CHECK_FUZZER_HELP),
+        Arg::new(INPUTS_DIR)
+            .long(INPUTS_DIR)
+            .value_parser(value_parser!(PathBuf))
+            .num_args(0..),
+    ]
+}
+
+pub fn args(name: &'static str) -> Command {
+    Command::new(name)
+        .about("execute a local-only libfuzzer merge task")
+        .args(&build_shared_args())
+}
+
 #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
 pub struct LibfuzzerMerge {
     target_exe: PathBuf,
diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs b/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs
index 3fbb9f0bd6..501d2385e2 100644
--- a/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs
+++ b/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs
@@ -3,13 +3,145 @@
 
 use std::{collections::HashMap, path::PathBuf};
 
-use crate::tasks::{config::CommonConfig, utils::default_bool_true};
+use crate::{
+    local::common::{
+        build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir, CmdType,
+        SyncCountDirMonitor, UiEvent, CHECK_FUZZER_HELP, CHECK_RETRY_COUNT, COVERAGE_DIR,
+        CRASHES_DIR, NO_REPRO_DIR, REGRESSION_REPORTS_DIR, REPORTS_DIR, TARGET_ENV, TARGET_EXE,
+        TARGET_OPTIONS, TARGET_TIMEOUT, UNIQUE_REPORTS_DIR,
+    },
+    tasks::{
+        config::CommonConfig,
+        regression::libfuzzer::{Config, LibFuzzerRegressionTask},
+        utils::default_bool_true,
+    },
+};
 use anyhow::Result;
 use async_trait::async_trait;
+use clap::{Arg, ArgAction, Command};
+use flume::Sender;
 use schemars::JsonSchema;
 
 use super::template::{RunContext, Template};
 
+const REPORT_NAMES: &str = "report_names";
+
+pub fn build_regression_config(
+    args: &clap::ArgMatches,
+    common: CommonConfig,
+    event_sender: Option<Sender<UiEvent>>,
+) -> Result<Config> {
+    let target_exe = get_cmd_exe(CmdType::Target, args)?.into();
+    let target_env = get_cmd_env(CmdType::Target, args)?;
+    let target_options = get_cmd_arg(CmdType::Target, args);
+    let target_timeout = args.get_one::<u64>(TARGET_TIMEOUT).copied();
+    let crashes = get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args)?
+        .monitor_count(&event_sender)?;
+    let regression_reports =
+        get_synced_dir(REGRESSION_REPORTS_DIR, common.job_id, common.task_id, args)?
+            .monitor_count(&event_sender)?;
+    let check_retry_count = args
+        .get_one::<u64>(CHECK_RETRY_COUNT)
+        .copied()
+        .expect("has a default value");
+
+    let reports = get_synced_dir(REPORTS_DIR, common.job_id, common.task_id, args)
+        .ok()
+        .monitor_count(&event_sender)?;
+    let no_repro = get_synced_dir(NO_REPRO_DIR, common.job_id, common.task_id, args)
+        .ok()
+        .monitor_count(&event_sender)?;
+    let unique_reports = get_synced_dir(UNIQUE_REPORTS_DIR, common.job_id, common.task_id, args)
+        .ok()
+        .monitor_count(&event_sender)?;
+
+    let report_list: Option<Vec<String>> = args
+        .get_many::<String>(REPORT_NAMES)
+        .map(|x| x.cloned().collect());
+
+    let check_fuzzer_help = args.get_flag(CHECK_FUZZER_HELP);
+
+    let config = Config {
+        target_exe,
+        target_env,
+        target_options,
+        target_timeout,
+        check_fuzzer_help,
+        check_retry_count,
+        crashes,
+        regression_reports,
+        reports,
+        no_repro,
+        unique_reports,
+        readonly_inputs: None,
+        report_list,
+        minimized_stack_depth: None,
+        common,
+    };
+    Ok(config)
+}
+
+pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
+    let context = build_local_context(args, true, event_sender.clone()).await?;
+    let config = build_regression_config(args, context.common_config.clone(), event_sender)?;
+    LibFuzzerRegressionTask::new(config).run().await
+}
+
+pub fn build_shared_args(local_job: bool) -> Vec<Arg> {
+    let mut args = vec![
+        Arg::new(TARGET_EXE).long(TARGET_EXE).required(true),
+        Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..),
+        Arg::new(TARGET_OPTIONS)
+            .long(TARGET_OPTIONS)
+            .value_delimiter(' ')
+            .help("Use a quoted string with space separation to denote multiple arguments"),
+        Arg::new(COVERAGE_DIR)
+            .required(!local_job)
+            .long(COVERAGE_DIR)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(CHECK_FUZZER_HELP)
+            .action(ArgAction::SetTrue)
+            .long(CHECK_FUZZER_HELP),
+        Arg::new(TARGET_TIMEOUT)
+            .long(TARGET_TIMEOUT)
+            .value_parser(value_parser!(u64)),
+        Arg::new(CRASHES_DIR)
+            .long(CRASHES_DIR)
+            .required(true)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(REGRESSION_REPORTS_DIR)
+            .long(REGRESSION_REPORTS_DIR)
+            .required(local_job)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(REPORTS_DIR)
+            .long(REPORTS_DIR)
+            .required(false)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(NO_REPRO_DIR)
+            .long(NO_REPRO_DIR)
+            .required(false)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(UNIQUE_REPORTS_DIR)
+            .long(UNIQUE_REPORTS_DIR)
+            .value_parser(value_parser!(PathBuf))
+            .required(true),
+        Arg::new(CHECK_RETRY_COUNT)
+            .long(CHECK_RETRY_COUNT)
+            .value_parser(value_parser!(u64))
+            .default_value("0"),
+    ];
+    if local_job {
+        args.push(Arg::new(REPORT_NAMES).long(REPORT_NAMES).num_args(0..))
+    }
+    args
+}
+
+pub fn args(name: &'static str) -> Command {
+    Command::new(name)
+        .about("execute a local-only libfuzzer regression task")
+        .args(&build_shared_args(true))
+}
+
 #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
 pub struct LibfuzzerRegression {
     target_exe: PathBuf,
diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs b/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs
index 5bef2347f7..9c6f16094e 100644
--- a/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs
+++ b/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs
@@ -1,14 +1,97 @@
 // Copyright (c) Microsoft Corporation.
 // Licensed under the MIT License.
 
+use crate::{
+    local::common::{
+        build_local_context, get_cmd_arg, get_cmd_env, CmdType, UiEvent, CHECK_RETRY_COUNT,
+        TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, TARGET_TIMEOUT,
+    },
+    tasks::report::libfuzzer_report::{test_input, TestInputArgs},
+};
 use anyhow::Result;
 use async_trait::async_trait;
+use clap::{Arg, Command};
+use flume::Sender;
 use onefuzz::machine_id::MachineIdentity;
 use schemars::JsonSchema;
 use std::{collections::HashMap, path::PathBuf};
 
 use super::template::{RunContext, Template};
 
+pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
+    let context = build_local_context(args, true, event_sender).await?;
+
+    let target_exe = args
+        .get_one::<PathBuf>(TARGET_EXE)
+        .expect("marked as required");
+    let target_env = get_cmd_env(CmdType::Target, args)?;
+    let target_options = get_cmd_arg(CmdType::Target, args);
+    let input = args
+        .get_one::<PathBuf>("input")
+        .expect("marked as required");
+    let target_timeout = args.get_one::<u64>(TARGET_TIMEOUT).copied();
+    let check_retry_count = args
+        .get_one::<u64>(CHECK_RETRY_COUNT)
+        .copied()
+        .expect("has a default value");
+
+    let extra_setup_dir = context.common_config.extra_setup_dir.as_deref();
+    let extra_output_dir = context
+        .common_config
+        .extra_output
+        .as_ref()
+        .map(|x| x.local_path.as_path());
+
+    let config = TestInputArgs {
+        target_exe: target_exe.as_path(),
+        target_env: &target_env,
+        target_options: &target_options,
+        input_url: None,
+        input: input.as_path(),
+        job_id: context.common_config.job_id,
+        task_id: context.common_config.task_id,
+        target_timeout,
+        check_retry_count,
+        setup_dir: &context.common_config.setup_dir,
+        extra_setup_dir,
+        extra_output_dir,
+        minimized_stack_depth: None,
+        machine_identity: context.common_config.machine_identity,
+    };
+
+    let result = test_input(config).await?;
+    println!("{}", serde_json::to_string_pretty(&result)?);
+    Ok(())
+}
+
+pub fn build_shared_args() -> Vec<Arg> {
+    vec![
+        Arg::new(TARGET_EXE).required(true),
+        Arg::new("input")
+            .required(true)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..),
+        Arg::new(TARGET_OPTIONS)
+            .default_value("{input}")
+            .long(TARGET_OPTIONS)
+            .value_delimiter(' ')
+            .help("Use a quoted string with space separation to denote multiple arguments"),
+        Arg::new(TARGET_TIMEOUT)
+            .long(TARGET_TIMEOUT)
+            .value_parser(value_parser!(u64)),
+        Arg::new(CHECK_RETRY_COUNT)
+            .long(CHECK_RETRY_COUNT)
+            .value_parser(value_parser!(u64))
+            .default_value("0"),
+    ]
+}
+
+pub fn args(name: &'static str) -> Command {
+    Command::new(name)
+        .about("test a libfuzzer application with a specific input")
+        .args(&build_shared_args())
+}
+
 #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
 pub struct LibfuzzerTestInput {
     input: PathBuf,
diff --git a/src/agent/onefuzz-task/src/local/mod.rs b/src/agent/onefuzz-task/src/local/mod.rs
index 385ff8ffcd..03d394bcdb 100644
--- a/src/agent/onefuzz-task/src/local/mod.rs
+++ b/src/agent/onefuzz-task/src/local/mod.rs
@@ -14,6 +14,7 @@ pub mod libfuzzer_fuzz;
 pub mod libfuzzer_merge;
 pub mod libfuzzer_regression;
 pub mod libfuzzer_test_input;
+pub mod radamsa;
 pub mod template;
 pub mod test_input;
 pub mod tui;
diff --git a/src/agent/onefuzz-task/src/local/radamsa.rs b/src/agent/onefuzz-task/src/local/radamsa.rs
new file mode 100644
index 0000000000..4d84de027a
--- /dev/null
+++ b/src/agent/onefuzz-task/src/local/radamsa.rs
@@ -0,0 +1,78 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+
+use crate::{
+    local::{
+        common::{build_local_context, DirectoryMonitorQueue, UiEvent},
+        generic_crash_report::{build_report_config, build_shared_args as build_crash_args},
+        generic_generator::{build_fuzz_config, build_shared_args as build_fuzz_args},
+    },
+    tasks::{config::CommonConfig, fuzz::generator::GeneratorTask, report::generic::ReportTask},
+};
+use anyhow::{Context, Result};
+use clap::Command;
+use flume::Sender;
+use onefuzz::utils::try_wait_all_join_handles;
+use std::collections::HashSet;
+use tokio::task::spawn;
+use uuid::Uuid;
+
+pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
+    let context = build_local_context(args, true, event_sender.clone()).await?;
+    let fuzz_config = build_fuzz_config(args, context.common_config.clone(), event_sender.clone())?;
+    let crash_dir = fuzz_config
+        .crashes
+        .remote_url()?
+        .as_file_path()
+        .ok_or_else(|| format_err!("invalid crash directory"))?;
+
+    tokio::fs::create_dir_all(&crash_dir)
+        .await
+        .with_context(|| {
+            format!(
+                "unable to create crashes directory: {}",
+                crash_dir.display()
+            )
+        })?;
+
+    let fuzzer = GeneratorTask::new(fuzz_config);
+    let fuzz_task = spawn(async move { fuzzer.run().await });
+
+    let crash_report_input_monitor = DirectoryMonitorQueue::start_monitoring(crash_dir)
+        .await
+        .context("directory monitor failed")?;
+    let report_config = build_report_config(
+        args,
+        Some(crash_report_input_monitor.queue_client),
+        CommonConfig {
+            task_id: Uuid::new_v4(),
+            ..context.common_config.clone()
+        },
+        event_sender,
+    )?;
+    let report_task = spawn(async move { ReportTask::new(report_config).managed_run().await });
+
+    try_wait_all_join_handles(vec![
+        fuzz_task,
+        report_task,
+        crash_report_input_monitor.handle,
+    ])
+    .await?;
+
+    Ok(())
+}
+
+pub fn args(name: &'static str) -> Command {
+    let mut app = Command::new(name).about("run a local generator & crash reporting job");
+
+    let mut used = HashSet::new();
+    for args in &[build_fuzz_args(), build_crash_args()] {
+        for arg in args {
+            if used.insert(arg.get_id()) {
+                app = app.arg(arg);
+            }
+        }
+    }
+
+    app
+}
diff --git a/src/agent/onefuzz-task/src/local/schema.json b/src/agent/onefuzz-task/src/local/schema.json
index e5b00f6e17..0a1f128e67 100644
--- a/src/agent/onefuzz-task/src/local/schema.json
+++ b/src/agent/onefuzz-task/src/local/schema.json
@@ -126,6 +126,7 @@
         "analyzer_options",
         "target_exe",
         "target_options",
+        "tools",
         "type"
       ],
       "properties": {
@@ -181,10 +182,7 @@
           }
         },
         "tools": {
-          "type": [
-            "string",
-            "null"
-          ]
+          "type": "string"
         },
         "type": {
           "type": "string",
@@ -895,4 +893,4 @@
     ]
   }
 }
-}
\ No newline at end of file
+}
diff --git a/src/agent/onefuzz-task/src/local/template.rs b/src/agent/onefuzz-task/src/local/template.rs
index 73ae6e5e48..b2e0c425ff 100644
--- a/src/agent/onefuzz-task/src/local/template.rs
+++ b/src/agent/onefuzz-task/src/local/template.rs
@@ -196,7 +196,6 @@ pub async fn launch(
             job_id: Uuid::new_v4(),
             instance_id: Uuid::new_v4(),
             heartbeat_queue: None,
-            job_result_queue: None,
             instance_telemetry_key: None,
             microsoft_telemetry_key: None,
             logs: None,
@@ -242,10 +241,12 @@ mod test {
             .expect("Couldn't find checked-in schema.json")
             .replace("\r\n", "\n");
 
-        if schema_str.replace('\n', "") != checked_in_schema.replace('\n', "") {
-            std::fs::write("src/local/new.schema.json", schema_str)
-                .expect("The schemas did not match but failed to write new schema to file.");
-            panic!("The checked-in local fuzzing schema did not match the generated schema. The generated schema can be found at src/local/new.schema.json");
-        }
+        println!("{}", schema_str);
+
+        assert_eq!(
+            schema_str.replace('\n', ""),
+            checked_in_schema.replace('\n', ""),
+            "The checked-in local fuzzing schema did not match the generated schema."
+        );
     }
 }
diff --git a/src/agent/onefuzz-task/src/local/test_input.rs b/src/agent/onefuzz-task/src/local/test_input.rs
index b8027a7f41..4077bd08f8 100644
--- a/src/agent/onefuzz-task/src/local/test_input.rs
+++ b/src/agent/onefuzz-task/src/local/test_input.rs
@@ -1,8 +1,18 @@
 // Copyright (c) Microsoft Corporation.
 // Licensed under the MIT License.
 
+use crate::{
+    local::common::{
+        build_local_context, get_cmd_arg, get_cmd_env, CmdType, UiEvent, CHECK_ASAN_LOG,
+        CHECK_RETRY_COUNT, DISABLE_CHECK_DEBUGGER, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS,
+        TARGET_TIMEOUT,
+    },
+    tasks::report::generic::{test_input, TestInputArgs},
+};
 use anyhow::Result;
 use async_trait::async_trait;
+use clap::{Arg, ArgAction, Command};
+use flume::Sender;
 use onefuzz::machine_id::MachineIdentity;
 use schemars::JsonSchema;
 use std::{collections::HashMap, path::PathBuf};
@@ -10,6 +20,82 @@ use uuid::Uuid;
 
 use super::template::{RunContext, Template};
 
+pub async fn run(args: &clap::ArgMatches, event_sender: Option<Sender<UiEvent>>) -> Result<()> {
+    let context = build_local_context(args, false, event_sender).await?;
+
+    let target_exe = args
+        .get_one::<PathBuf>(TARGET_EXE)
+        .expect("is marked required");
+    let target_env = get_cmd_env(CmdType::Target, args)?;
+    let target_options = get_cmd_arg(CmdType::Target, args);
+    let input = args
+        .get_one::<PathBuf>("input")
+        .expect("is marked required");
+    let target_timeout = args.get_one::<u64>(TARGET_TIMEOUT).copied();
+    let check_retry_count = args
+        .get_one::<u64>(CHECK_RETRY_COUNT)
+        .copied()
+        .expect("has default value");
+    let check_asan_log = args.get_flag(CHECK_ASAN_LOG);
+    let check_debugger = !args.get_flag(DISABLE_CHECK_DEBUGGER);
+
+    let config = TestInputArgs {
+        target_exe: target_exe.as_path(),
+        target_env: &target_env,
+        target_options: &target_options,
+        input_url: None,
+        input: input.as_path(),
+        job_id: context.common_config.job_id,
+        task_id: context.common_config.task_id,
+        target_timeout,
+        check_retry_count,
+        setup_dir: &context.common_config.setup_dir,
+        extra_setup_dir: context.common_config.extra_setup_dir.as_deref(),
+        minimized_stack_depth: None,
+        check_asan_log,
+        check_debugger,
+        machine_identity: context.common_config.machine_identity.clone(),
+    };
+
+    let result = test_input(config).await?;
+    println!("{}", serde_json::to_string_pretty(&result)?);
+    Ok(())
+}
+
+pub fn build_shared_args() -> Vec<Arg> {
+    vec![
+        Arg::new(TARGET_EXE).required(true),
+        Arg::new("input")
+            .required(true)
+            .value_parser(value_parser!(PathBuf)),
+        Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..),
+        Arg::new(TARGET_OPTIONS)
+            .default_value("{input}")
+            .long(TARGET_OPTIONS)
+            .value_delimiter(' ')
+            .help("Use a quoted string with space separation to denote multiple arguments"),
+        Arg::new(TARGET_TIMEOUT)
+            .long(TARGET_TIMEOUT)
+            .value_parser(value_parser!(u64)),
+        Arg::new(CHECK_RETRY_COUNT)
+            .long(CHECK_RETRY_COUNT)
+            .value_parser(value_parser!(u64))
+            .default_value("0"),
+        Arg::new(CHECK_ASAN_LOG)
+            .action(ArgAction::SetTrue)
+            .long(CHECK_ASAN_LOG),
+        Arg::new(DISABLE_CHECK_DEBUGGER)
+            .action(ArgAction::SetTrue)
+            .long("disable_check_debugger"),
+    ]
+}
+
+pub fn args(name: &'static str) -> Command {
+    Command::new(name)
+        .about("test an application with a specific input")
+        .args(&build_shared_args())
+}
+
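Each local command added in this patch follows the same clap wiring: `build_shared_args()` returns the module's `Vec<Arg>`, `run()` consumes the parsed matches, and `args(name)` assembles the `Command`. The combined jobs (`libfuzzer.rs`, `radamsa.rs`) merge several of these lists while keeping only the first occurrence of each argument ID. A minimal self-contained sketch of that merge pattern (assuming clap 4.x; `merged_command` and the sample arguments are illustrative, not taken from the patch):

```rust
// Illustrative sketch only -- not part of the patch. Merges several shared
// argument sets while dropping duplicate IDs, mirroring the `args()` builders
// in libfuzzer.rs and radamsa.rs above. Assumes clap 4.x.
use std::collections::HashSet;

use clap::{Arg, Command};

fn merged_command(name: &'static str, arg_sets: Vec<Vec<Arg>>) -> Command {
    let mut app = Command::new(name);
    let mut used = HashSet::new();
    for args in arg_sets {
        for arg in args {
            // The first definition of an ID wins; later duplicates are skipped.
            if used.insert(arg.get_id().clone()) {
                app = app.arg(arg);
            }
        }
    }
    app
}

fn main() {
    let fuzz_args = vec![Arg::new("target_exe").long("target_exe").required(true)];
    let report_args = vec![
        Arg::new("target_exe").long("target_exe"), // duplicate: skipped
        Arg::new("crashes_dir").long("crashes_dir"),
    ];
    let cmd = merged_command("local-job", vec![fuzz_args, report_args]);
    for arg in cmd.get_arguments() {
        println!("{}", arg.get_id()); // target_exe, crashes_dir
    }
}
```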
 #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)]
 pub struct TestInput {
     input: PathBuf,
diff --git a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs
index 05c6c3d169..3ba068a614 100644
--- a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs
+++ b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs
@@ -65,8 +65,6 @@ pub async fn run(config: Config) -> Result<()> {
         tools.init_pull().await?;
     }
 
-    let job_result_client = config.common.init_job_result().await?;
-
     // the tempdir is always created, however, the reports_path and
     // reports_monitor_future are only created if we have one of the three
     // report SyncedDir. The idea is that the option for where to write reports
@@ -90,7 +88,6 @@ pub async fn run(config: Config) -> Result<()> {
                 &config.unique_reports,
                 &config.reports,
                 &config.no_repro,
-                &job_result_client,
             );
             (
                 Some(reports_dir.path().to_path_buf()),
@@ -174,7 +171,7 @@ async fn poll_inputs(
             }
             message.delete().await?;
         } else {
-            debug!("no new candidate inputs found, sleeping");
+            warn!("no new candidate inputs found, sleeping");
             delay_with_jitter(EMPTY_QUEUE_DELAY).await;
         }
     }
diff --git a/src/agent/onefuzz-task/src/tasks/config.rs b/src/agent/onefuzz-task/src/tasks/config.rs
index e29e0fd60d..0848379d73 100644
--- a/src/agent/onefuzz-task/src/tasks/config.rs
+++ b/src/agent/onefuzz-task/src/tasks/config.rs
@@ -14,7 +14,6 @@ use onefuzz::{
     machine_id::MachineIdentity,
     syncdir::{SyncOperation, SyncedDir},
 };
-use onefuzz_result::job_result::{init_job_result, TaskJobResultClient};
 use onefuzz_telemetry::{
     self as telemetry, Event::task_start, EventData, InstanceTelemetryKey, MicrosoftTelemetryKey,
     Role,
@@ -51,8 +50,6 @@ pub struct CommonConfig {
 
     pub heartbeat_queue: Option<Url>,
 
-    pub job_result_queue: Option<Url>,
-
     pub instance_telemetry_key: Option<InstanceTelemetryKey>,
 
     pub microsoft_telemetry_key: Option<MicrosoftTelemetryKey>,
@@ -106,23 +103,6 @@ impl CommonConfig {
             None => Ok(None),
         }
     }
-
-    pub async fn init_job_result(&self) -> Result<Option<TaskJobResultClient>> {
-        match &self.job_result_queue {
-            Some(url) => {
-                let result = init_job_result(
-                    url.clone(),
-                    self.task_id,
-                    self.job_id,
-                    self.machine_identity.machine_id,
-                    self.machine_identity.machine_name.clone(),
-                )
-                .await?;
-                Ok(Some(result))
-            }
-            None => Ok(None),
-        }
-    }
 }
 
 #[derive(Debug, Deserialize)]
diff --git a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs
index 4fde9efb31..b112cfefbe 100644
--- a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs
+++ b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs
@@ -26,8 +26,6 @@ use onefuzz_file_format::coverage::{
     binary::{v1::BinaryCoverageJson as BinaryCoverageJsonV1, BinaryCoverageJson},
     source::{v1::SourceCoverageJson as SourceCoverageJsonV1, SourceCoverageJson},
 };
-use onefuzz_result::job_result::JobResultData;
-use onefuzz_result::job_result::{JobResultSender, TaskJobResultClient};
 use onefuzz_telemetry::{event, warn, Event::coverage_data, Event::coverage_failed, EventData};
 use storage_queue::{Message, QueueClient};
 use tokio::fs;
@@ -116,7 +114,7 @@ impl CoverageTask {
         let allowlist = self.load_target_allowlist().await?;
 
         let heartbeat = self.config.common.init_heartbeat(None).await?;
-        let job_result = self.config.common.init_job_result().await?;
+
         let mut seen_inputs = false;
 
         let target_exe_path =
@@ -131,7 +129,6 @@ impl CoverageTask {
             coverage,
             allowlist,
             heartbeat,
-            job_result,
             target_exe.to_string(),
         )?;
 
@@ -222,7 +219,6 @@ struct TaskContext<'a> {
     module_allowlist: AllowList,
     source_allowlist: Arc<AllowList>,
     heartbeat: Option<TaskHeartbeatClient>,
-    job_result: Option<TaskJobResultClient>,
     cache: Arc<DebugInfoCache>,
 }
 
@@ -232,7 +228,6 @@ impl<'a> TaskContext<'a> {
         coverage: BinaryCoverage,
         allowlist: TargetAllowList,
         heartbeat: Option<TaskHeartbeatClient>,
-        job_result: Option<TaskJobResultClient>,
         target_exe: String,
     ) -> Result<Self> {
         let cache = DebugInfoCache::new(allowlist.source_files.clone());
@@ -252,7 +247,6 @@ impl<'a> TaskContext<'a> {
             module_allowlist: allowlist.modules,
             source_allowlist: Arc::new(allowlist.source_files),
             heartbeat,
-            job_result,
             cache: Arc::new(cache),
         })
     }
@@ -461,16 +455,7 @@ impl<'a> TaskContext<'a> {
         let s = CoverageStats::new(&coverage);
         event!(coverage_data; Covered = s.covered, Features = s.features, Rate = s.rate);
         metric!(coverage_data; 1.0; Covered = s.covered, Features = s.features, Rate = s.rate);
-        self.job_result
-            .send_direct(
-                JobResultData::CoverageData,
-                HashMap::from([
-                    ("covered".to_string(), s.covered as f64),
-                    ("features".to_string(), s.features as f64),
-                    ("rate".to_string(), s.rate),
-                ]),
-            )
-            .await;
+
         Ok(())
     }
 
diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs
index bd7511cac2..d9116a1ed2 100644
--- a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs
+++ b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs
@@ -73,7 +73,6 @@ impl GeneratorTask {
         }
 
         let hb_client = self.config.common.init_heartbeat(None).await?;
-        let jr_client = self.config.common.init_job_result().await?;
 
         for dir in &self.config.readonly_inputs {
             dir.init_pull().await?;
@@ -85,10 +84,7 @@ impl GeneratorTask {
             self.config.ensemble_sync_delay,
         );
 
-        let crash_dir_monitor = self
-            .config
-            .crashes
-            .monitor_results(new_result, false, &jr_client);
+        let crash_dir_monitor = self.config.crashes.monitor_results(new_result, false);
 
         let fuzzer = self.fuzzing_loop(hb_client);
 
@@ -302,7 +298,6 @@ mod tests {
             task_id: Default::default(),
             instance_id: Default::default(),
             heartbeat_queue: Default::default(),
-            job_result_queue: Default::default(),
            instance_telemetry_key: Default::default(),
             microsoft_telemetry_key: Default::default(),
             logs: Default::default(),
diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs b/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs
index bfd9f3f5cc..4f8c67ae8e 100644
--- a/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs
+++ b/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs
@@ -1,11 +1,7 @@
 // Copyright (c) Microsoft Corporation.
 // Licensed under the MIT License.
 
-use crate::tasks::{
-    config::CommonConfig,
-    heartbeat::{HeartbeatSender, TaskHeartbeatClient},
-    utils::default_bool_true,
-};
+use crate::tasks::{config::CommonConfig, heartbeat::HeartbeatSender, utils::default_bool_true};
 use anyhow::{Context, Result};
 use arraydeque::{ArrayDeque, Wrapping};
 use async_trait::async_trait;
@@ -16,7 +12,6 @@ use onefuzz::{
     process::ExitStatus,
     syncdir::{continuous_sync, SyncOperation::Pull, SyncedDir},
 };
-use onefuzz_result::job_result::{JobResultData, JobResultSender, TaskJobResultClient};
 use onefuzz_telemetry::{
     Event::{new_coverage, new_crashdump, new_result, runtime_stats},
     EventData,
@@ -131,31 +126,21 @@ where
         self.verify().await?;
 
         let hb_client = self.config.common.init_heartbeat(None).await?;
-        let jr_client = self.config.common.init_job_result().await?;
 
         // To be scheduled.
         let resync = self.continuous_sync_inputs();
-
-        let new_inputs = self
-            .config
-            .inputs
-            .monitor_results(new_coverage, true, &jr_client);
-        let new_crashes = self
-            .config
-            .crashes
-            .monitor_results(new_result, true, &jr_client);
+        let new_inputs = self.config.inputs.monitor_results(new_coverage, true);
+        let new_crashes = self.config.crashes.monitor_results(new_result, true);
 
         let new_crashdumps = async {
             if let Some(crashdumps) = &self.config.crashdumps {
-                crashdumps
-                    .monitor_results(new_crashdump, true, &jr_client)
-                    .await
+                crashdumps.monitor_results(new_crashdump, true).await
             } else {
                 Ok(())
             }
         };
 
         let (stats_sender, stats_receiver) = mpsc::unbounded_channel();
-        let report_stats = report_runtime_stats(stats_receiver, &hb_client, &jr_client);
+        let report_stats = report_runtime_stats(stats_receiver, hb_client);
         let fuzzers = self.run_fuzzers(Some(&stats_sender));
         futures::try_join!(
             resync,
@@ -198,7 +183,7 @@ where
             .inputs
             .local_path
             .parent()
-            .ok_or_else(|| anyhow!("invalid input path"))?;
+            .ok_or_else(|| anyhow!("Invalid input path"))?;
         let temp_path = task_dir.join(".temp");
         tokio::fs::create_dir_all(&temp_path).await?;
         let temp_dir = tempdir_in(temp_path)?;
@@ -516,7 +501,7 @@ impl TotalStats {
         self.execs_sec = self.worker_stats.values().map(|x| x.execs_sec).sum();
     }
 
-    async fn report(&self, jr_client: &Option<TaskJobResultClient>) {
+    fn report(&self) {
         event!(
             runtime_stats;
             EventData::Count = self.count,
@@ -528,17 +513,6 @@ impl TotalStats {
             EventData::Count = self.count,
             EventData::ExecsSecond = self.execs_sec
         );
-        if let Some(jr_client) = jr_client {
-            let _ = jr_client
-                .send_direct(
-                    JobResultData::RuntimeStats,
-                    HashMap::from([
-                        ("total_count".to_string(), self.count as f64),
-                        ("execs_sec".to_string(), self.execs_sec),
-                    ]),
-                )
-                .await;
-        }
     }
 }
 
@@ -568,8 +542,7 @@ impl Timer {
 // are approximating nearest-neighbor interpolation on the runtime stats time series.
 async fn report_runtime_stats(
     mut stats_channel: mpsc::UnboundedReceiver<RuntimeStats>,
-    heartbeat_client: &Option<TaskHeartbeatClient>,
-    jr_client: &Option<TaskJobResultClient>,
+    heartbeat_client: impl HeartbeatSender,
 ) -> Result<()> {
     // Cache the last-reported stats for a given worker.
// @@ -578,7 +551,7 @@ async fn report_runtime_stats( let mut total = TotalStats::default(); // report all zeros to start - total.report(jr_client).await; + total.report(); let timer = Timer::new(RUNTIME_STATS_PERIOD); @@ -587,10 +560,10 @@ async fn report_runtime_stats( Some(stats) = stats_channel.recv() => { heartbeat_client.alive(); total.update(stats); - total.report(jr_client).await + total.report() } _ = timer.wait() => { - total.report(jr_client).await + total.report() } } } diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs index 3f00e20b8d..de1e1106ba 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs @@ -79,10 +79,7 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> { remote_path: config.crashes.remote_path.clone(), }; crashes.init().await?; - - let jr_client = config.common.init_job_result().await?; - - let monitor_crashes = crashes.monitor_results(new_result, false, &jr_client); + let monitor_crashes = crashes.monitor_results(new_result, false); // setup crashdumps let (crashdump_dir, monitor_crashdumps) = { @@ -98,12 +95,9 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> { }; let monitor_dir = crashdump_dir.clone(); - let monitor_jr_client = config.common.init_job_result().await?; let monitor_crashdumps = async move { if let Some(crashdumps) = monitor_dir { - crashdumps - .monitor_results(new_crashdump, false, &monitor_jr_client) - .await + crashdumps.monitor_results(new_crashdump, false).await } else { Ok(()) } @@ -135,13 +129,11 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> { if let Some(no_repro) = &config.no_repro { no_repro.init().await?; } - let monitor_reports_future = monitor_reports( reports_dir.path(), &config.unique_reports, &config.reports, &config.no_repro, - &jr_client, ); let inputs = SyncedDir { @@ -164,7 +156,7 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> { delay_with_jitter(delay).await; } } - let monitor_inputs = inputs.monitor_results(new_coverage, false, &jr_client); + let monitor_inputs = inputs.monitor_results(new_coverage, false); let inputs_sync_cancellation = CancellationToken::new(); // never actually cancelled let inputs_sync_task = inputs.continuous_sync(Pull, config.ensemble_sync_delay, &inputs_sync_cancellation); @@ -452,7 +444,6 @@ mod tests { task_id: Default::default(), instance_id: Default::default(), heartbeat_queue: Default::default(), - job_result_queue: Default::default(), instance_telemetry_key: Default::default(), microsoft_telemetry_key: Default::default(), logs: Default::default(), diff --git a/src/agent/onefuzz-task/src/tasks/heartbeat.rs b/src/agent/onefuzz-task/src/tasks/heartbeat.rs index e13b661909..515fa39d0c 100644 --- a/src/agent/onefuzz-task/src/tasks/heartbeat.rs +++ b/src/agent/onefuzz-task/src/tasks/heartbeat.rs @@ -1,8 +1,8 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. 
+use crate::onefuzz::heartbeat::HeartbeatClient;
 use anyhow::Result;
-use onefuzz::heartbeat::HeartbeatClient;
 use reqwest::Url;
 use serde::{self, Deserialize, Serialize};
 use std::time::Duration;
diff --git a/src/agent/onefuzz-task/src/tasks/merge/generic.rs b/src/agent/onefuzz-task/src/tasks/merge/generic.rs
index 3b6a2094d8..4f2e8234a8 100644
--- a/src/agent/onefuzz-task/src/tasks/merge/generic.rs
+++ b/src/agent/onefuzz-task/src/tasks/merge/generic.rs
@@ -83,7 +83,7 @@ pub async fn spawn(config: &Config) -> Result<()> {
                 }
             }
         } else {
-            debug!("no new candidate inputs found, sleeping");
+            warn!("no new candidate inputs found, sleeping");
             delay_with_jitter(EMPTY_QUEUE_DELAY).await;
         };
     }
diff --git a/src/agent/onefuzz-task/src/tasks/merge/libfuzzer_merge.rs b/src/agent/onefuzz-task/src/tasks/merge/libfuzzer_merge.rs
index 2d53bc8c07..1c334b3f18 100644
--- a/src/agent/onefuzz-task/src/tasks/merge/libfuzzer_merge.rs
+++ b/src/agent/onefuzz-task/src/tasks/merge/libfuzzer_merge.rs
@@ -120,7 +120,7 @@ async fn process_message(config: &Config, input_queue: QueueClient) -> Result<()>
         }
         Ok(())
     } else {
-        debug!("no new candidate inputs found, sleeping");
+        warn!("no new candidate inputs found, sleeping");
         delay_with_jitter(EMPTY_QUEUE_DELAY).await;
         Ok(())
     }
diff --git a/src/agent/onefuzz-task/src/tasks/regression/common.rs b/src/agent/onefuzz-task/src/tasks/regression/common.rs
index b61a97df4c..60023cfa6e 100644
--- a/src/agent/onefuzz-task/src/tasks/regression/common.rs
+++ b/src/agent/onefuzz-task/src/tasks/regression/common.rs
@@ -2,14 +2,12 @@
 // Licensed under the MIT License.
 
 use crate::tasks::{
-    config::CommonConfig,
     heartbeat::{HeartbeatSender, TaskHeartbeatClient},
     report::crash_report::{parse_report_file, CrashTestResult, RegressionReport},
 };
 use anyhow::{Context, Result};
 use async_trait::async_trait;
 use onefuzz::syncdir::SyncedDir;
-use onefuzz_result::job_result::TaskJobResultClient;
 use reqwest::Url;
 use std::path::PathBuf;
 
@@ -26,7 +24,7 @@ pub trait RegressionHandler {
 
 /// Runs the regression task
 pub async fn run(
-    common_config: &CommonConfig,
+    heartbeat_client: Option<TaskHeartbeatClient>,
     regression_reports: &SyncedDir,
     crashes: &SyncedDir,
     report_dirs: &[&SyncedDir],
@@ -37,9 +35,6 @@ pub async fn run(
     info!("starting regression task");
     regression_reports.init().await?;
 
-    let heartbeat_client = common_config.init_heartbeat(None).await?;
-    let job_result_client = common_config.init_job_result().await?;
-
     handle_crash_reports(
         handler,
         crashes,
@@ -47,7 +42,6 @@ pub async fn run(
         report_list,
         regression_reports,
         &heartbeat_client,
-        &job_result_client,
     )
     .await
     .context("handling crash reports")?;
@@ -58,7 +52,6 @@ pub async fn run(
         readonly_inputs,
         regression_reports,
         &heartbeat_client,
-        &job_result_client,
     )
     .await
     .context("handling inputs")?;
@@ -78,7 +71,6 @@ pub async fn handle_inputs(
     readonly_inputs: &SyncedDir,
     regression_reports: &SyncedDir,
     heartbeat_client: &Option<TaskHeartbeatClient>,
-    job_result_client: &Option<TaskJobResultClient>,
 ) -> Result<()> {
     readonly_inputs.init_pull().await?;
     let mut input_files = tokio::fs::read_dir(&readonly_inputs.local_path).await?;
@@ -103,7 +95,7 @@ pub async fn handle_inputs(
             crash_test_result,
             original_crash_test_result: None,
         }
-        .save(None, regression_reports, job_result_client)
+        .save(None, regression_reports)
         .await?
     }
 
@@ -117,7 +109,6 @@ pub async fn handle_crash_reports(
     report_list: &Option<Vec<String>>,
     regression_reports: &SyncedDir,
     heartbeat_client: &Option<TaskHeartbeatClient>,
-    job_result_client: &Option<TaskJobResultClient>,
 ) -> Result<()> {
     // without crash report containers, skip this method
     if report_dirs.is_empty() {
@@ -167,7 +158,7 @@ pub async fn handle_crash_reports(
             crash_test_result,
             original_crash_test_result: Some(original_crash_test_result),
         }
-        .save(Some(file_name), regression_reports, job_result_client)
+        .save(Some(file_name), regression_reports)
         .await?
     }
 }
diff --git a/src/agent/onefuzz-task/src/tasks/regression/generic.rs b/src/agent/onefuzz-task/src/tasks/regression/generic.rs
index 8570208d59..640e80db9a 100644
--- a/src/agent/onefuzz-task/src/tasks/regression/generic.rs
+++ b/src/agent/onefuzz-task/src/tasks/regression/generic.rs
@@ -89,6 +89,7 @@ impl GenericRegressionTask {
 
     pub async fn run(&self) -> Result<()> {
         info!("Starting generic regression task");
+        let heartbeat_client = self.config.common.init_heartbeat(None).await?;
 
         let mut report_dirs = vec![];
         for dir in vec![
@@ -102,7 +103,7 @@ impl GenericRegressionTask {
             report_dirs.push(dir);
         }
         common::run(
-            &self.config.common,
+            heartbeat_client,
            &self.config.regression_reports,
             &self.config.crashes,
             &report_dirs,
diff --git a/src/agent/onefuzz-task/src/tasks/regression/libfuzzer.rs b/src/agent/onefuzz-task/src/tasks/regression/libfuzzer.rs
index e65f46bb64..06dd7c00d9 100644
--- a/src/agent/onefuzz-task/src/tasks/regression/libfuzzer.rs
+++ b/src/agent/onefuzz-task/src/tasks/regression/libfuzzer.rs
@@ -103,8 +103,9 @@ impl LibFuzzerRegressionTask {
             report_dirs.push(dir);
         }
 
+        let heartbeat_client = self.config.common.init_heartbeat(None).await?;
         common::run(
-            &self.config.common,
+            heartbeat_client,
             &self.config.regression_reports,
             &self.config.crashes,
             &report_dirs,
diff --git a/src/agent/onefuzz-task/src/tasks/report/crash_report.rs b/src/agent/onefuzz-task/src/tasks/report/crash_report.rs
index 290b98ccde..23171bc432 100644
--- a/src/agent/onefuzz-task/src/tasks/report/crash_report.rs
+++ b/src/agent/onefuzz-task/src/tasks/report/crash_report.rs
@@ -3,7 +3,6 @@
 
 use anyhow::{Context, Result};
 use onefuzz::{blob::BlobUrl, monitor::DirectoryMonitor, syncdir::SyncedDir};
-use onefuzz_result::job_result::{JobResultData, JobResultSender, TaskJobResultClient};
 use onefuzz_telemetry::{
     Event::{
         new_report, new_unable_to_reproduce, new_unique_report, regression_report,
@@ -13,7 +12,6 @@ use onefuzz_telemetry::{
 };
 use serde::{Deserialize, Serialize};
 use stacktrace_parser::CrashLog;
-use std::collections::HashMap;
 use std::path::{Path, PathBuf};
 use uuid::Uuid;
 
@@ -113,7 +111,6 @@ impl RegressionReport {
         self,
         report_name: Option<String>,
         regression_reports: &SyncedDir,
-        jr_client: &Option<TaskJobResultClient>,
     ) -> Result<()> {
         let (event, name) = match &self.crash_test_result {
             CrashTestResult::CrashReport(report) => {
@@ -129,15 +126,6 @@ impl RegressionReport {
         if upload_or_save_local(&self, &name, regression_reports).await? {
             event!(event; EventData::Path = name.clone());
             metric!(event; 1.0; EventData::Path = name.clone());
-
-            if let Some(jr_client) = jr_client {
-                let _ = jr_client
-                    .send_direct(
-                        JobResultData::NewRegressionReport,
-                        HashMap::from([("count".to_string(), 1.0)]),
-                    )
-                    .await;
-            }
         }
         Ok(())
     }
@@ -161,7 +149,6 @@ impl CrashTestResult {
         unique_reports: &Option<SyncedDir>,
         reports: &Option<SyncedDir>,
         no_repro: &Option<SyncedDir>,
-        jr_client: &Option<TaskJobResultClient>,
     ) -> Result<()> {
         match self {
             Self::CrashReport(report) => {
@@ -171,15 +158,6 @@ impl CrashTestResult {
                 if upload_or_save_local(&report, &name, unique_reports).await? {
                     event!(new_unique_report; EventData::Path = report.unique_blob_name());
                     metric!(new_unique_report; 1.0; EventData::Path = report.unique_blob_name());
-
-                    if let Some(jr_client) = jr_client {
-                        let _ = jr_client
-                            .send_direct(
-                                JobResultData::NewUniqueReport,
-                                HashMap::from([("count".to_string(), 1.0)]),
-                            )
-                            .await;
-                    }
                 }
             }
 
@@ -188,15 +166,6 @@ impl CrashTestResult {
                 if upload_or_save_local(&report, &name, reports).await? {
                     event!(new_report; EventData::Path = report.blob_name());
                     metric!(new_report; 1.0; EventData::Path = report.blob_name());
-
-                    if let Some(jr_client) = jr_client {
-                        let _ = jr_client
-                            .send_direct(
-                                JobResultData::NewReport,
-                                HashMap::from([("count".to_string(), 1.0)]),
-                            )
-                            .await;
-                    }
                 }
             }
         }
@@ -207,15 +176,6 @@ impl CrashTestResult {
                 if upload_or_save_local(&report, &name, no_repro).await? {
                     event!(new_unable_to_reproduce; EventData::Path = report.blob_name());
                     metric!(new_unable_to_reproduce; 1.0; EventData::Path = report.blob_name());
-
-                    if let Some(jr_client) = jr_client {
-                        let _ = jr_client
-                            .send_direct(
-                                JobResultData::NoReproCrashingInput,
-                                HashMap::from([("count".to_string(), 1.0)]),
-                            )
-                            .await;
-                    }
                 }
             }
         }
@@ -364,7 +324,6 @@ pub async fn monitor_reports(
     unique_reports: &Option<SyncedDir>,
     reports: &Option<SyncedDir>,
     no_crash: &Option<SyncedDir>,
-    jr_client: &Option<TaskJobResultClient>,
 ) -> Result<()> {
     if unique_reports.is_none() && reports.is_none() && no_crash.is_none() {
         debug!("no report directories configured");
@@ -375,9 +334,7 @@ pub async fn monitor_reports(
 
     while let Some(file) = monitor.next_file().await? {
         let result = parse_report_file(file).await?;
-        result
-            .save(unique_reports, reports, no_crash, jr_client)
-            .await?;
+        result.save(unique_reports, reports, no_crash).await?;
     }
 
     Ok(())
diff --git a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs
index b8659845de..9b626a7d89 100644
--- a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs
+++ b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs
@@ -8,25 +8,25 @@ use std::{
     sync::Arc,
 };
 
-use crate::tasks::report::crash_report::*;
-use crate::tasks::report::dotnet::common::collect_exception_info;
-use crate::tasks::{
-    config::CommonConfig,
-    generic::input_poller::*,
-    heartbeat::{HeartbeatSender, TaskHeartbeatClient},
-    utils::{default_bool_true, try_resolve_setup_relative_path},
-};
 use anyhow::{Context, Result};
 use async_trait::async_trait;
 use onefuzz::expand::Expand;
 use onefuzz::fs::set_executable;
 use onefuzz::{blob::BlobUrl, sha256, syncdir::SyncedDir};
-use onefuzz_result::job_result::TaskJobResultClient;
 use reqwest::Url;
 use serde::Deserialize;
 use storage_queue::{Message, QueueClient};
 use tokio::fs;
 
+use crate::tasks::report::crash_report::*;
+use crate::tasks::report::dotnet::common::collect_exception_info;
+use crate::tasks::{
+    config::CommonConfig,
+    generic::input_poller::*,
+    heartbeat::{HeartbeatSender, TaskHeartbeatClient},
+    utils::{default_bool_true, try_resolve_setup_relative_path},
+};
+
 const DOTNET_DUMP_TOOL_NAME: &str = "dotnet-dump";
 
 #[derive(Debug, Deserialize)]
@@ -114,18 +114,15 @@ impl DotnetCrashReportTask {
 pub struct AsanProcessor {
     config: Arc<Config>,
     heartbeat_client: Option<TaskHeartbeatClient>,
-    job_result_client: Option<TaskJobResultClient>,
 }
 
 impl AsanProcessor {
     pub async fn new(config: Arc<Config>) -> Result<Self> {
         let heartbeat_client = config.common.init_heartbeat(None).await?;
-        let job_result_client = config.common.init_job_result().await?;
 
         Ok(Self {
             config,
             heartbeat_client,
-            job_result_client,
         })
     }
 
@@ -263,7 +260,6 @@ impl Processor for AsanProcessor {
             &self.config.unique_reports,
             &self.config.reports,
             &self.config.no_repro,
-            &self.job_result_client,
         )
         .await;
 
diff --git a/src/agent/onefuzz-task/src/tasks/report/generic.rs b/src/agent/onefuzz-task/src/tasks/report/generic.rs
index 8ad259f0a5..9088f98acc 100644
--- a/src/agent/onefuzz-task/src/tasks/report/generic.rs
+++ b/src/agent/onefuzz-task/src/tasks/report/generic.rs
@@ -13,7 +13,6 @@ use async_trait::async_trait;
 use onefuzz::{
     blob::BlobUrl, input_tester::Tester, machine_id::MachineIdentity, sha256, syncdir::SyncedDir,
 };
-use onefuzz_result::job_result::TaskJobResultClient;
 use reqwest::Url;
 use serde::Deserialize;
 use std::{
@@ -74,9 +73,7 @@ impl ReportTask {
     pub async fn managed_run(&mut self) -> Result<()> {
         info!("Starting generic crash report task");
         let heartbeat_client = self.config.common.init_heartbeat(None).await?;
-        let job_result_client = self.config.common.init_job_result().await?;
-        let mut processor =
-            GenericReportProcessor::new(&self.config, heartbeat_client, job_result_client);
+        let mut processor = GenericReportProcessor::new(&self.config, heartbeat_client);
 
         #[allow(clippy::manual_flatten)]
         for entry in [
@@ -186,19 +183,13 @@ pub async fn test_input(args: TestInputArgs<'_>) -> Result<CrashTestResult> {
 pub struct GenericReportProcessor<'a> {
     config: &'a Config,
     heartbeat_client: Option<TaskHeartbeatClient>,
-    job_result_client: Option<TaskJobResultClient>,
 }
 
 impl<'a> GenericReportProcessor<'a> {
-    pub fn new(
-        config: &'a Config,
-        heartbeat_client: Option<TaskHeartbeatClient>,
-        job_result_client: Option<TaskJobResultClient>,
-    ) -> Self {
+    pub fn new(config: &'a Config, heartbeat_client: Option<TaskHeartbeatClient>) -> Self {
         Self {
             config,
             heartbeat_client,
-            job_result_client,
         }
     }
 
@@ -248,7 +239,6 @@ impl<'a> Processor for GenericReportProcessor<'a> {
             &self.config.unique_reports,
             &self.config.reports,
             &self.config.no_repro,
-            &self.job_result_client,
         )
         .await
         .context("saving report failed")
diff --git a/src/agent/onefuzz-task/src/tasks/report/libfuzzer_report.rs b/src/agent/onefuzz-task/src/tasks/report/libfuzzer_report.rs
index 587ed2e3dc..f18f638fa3 100644
--- a/src/agent/onefuzz-task/src/tasks/report/libfuzzer_report.rs
+++ b/src/agent/onefuzz-task/src/tasks/report/libfuzzer_report.rs
@@ -13,7 +13,6 @@ use async_trait::async_trait;
 use onefuzz::{
     blob::BlobUrl, libfuzzer::LibFuzzer, machine_id::MachineIdentity, sha256, syncdir::SyncedDir,
 };
-use onefuzz_result::job_result::TaskJobResultClient;
 use reqwest::Url;
 use serde::Deserialize;
 use std::{
@@ -197,18 +196,15 @@ pub async fn test_input(args: TestInputArgs<'_>) -> Result<CrashTestResult> {
 pub struct AsanProcessor {
     config: Arc<Config>,
     heartbeat_client: Option<TaskHeartbeatClient>,
-    job_result_client: Option<TaskJobResultClient>,
 }
 
 impl AsanProcessor {
     pub async fn new(config: Arc<Config>) -> Result<Self> {
         let heartbeat_client = config.common.init_heartbeat(None).await?;
-        let job_result_client = config.common.init_job_result().await?;
 
         Ok(Self {
             config,
             heartbeat_client,
-            job_result_client,
         })
     }
 
@@ -261,7 +257,6 @@ impl Processor for AsanProcessor {
             &self.config.unique_reports,
             &self.config.reports,
             &self.config.no_repro,
-            &self.job_result_client,
         )
         .await
     }
diff --git a/src/agent/onefuzz/Cargo.toml b/src/agent/onefuzz/Cargo.toml
index 1f3c27985c..c096c8ddfc 100644
--- a/src/agent/onefuzz/Cargo.toml
+++ b/src/agent/onefuzz/Cargo.toml
@@ -44,7 +44,6 @@ tempfile = "3.7.0"
 process_control = "4.0"
 reqwest-retry = { path = "../reqwest-retry" }
 onefuzz-telemetry = { path = "../onefuzz-telemetry" }
-onefuzz-result = { path = "../onefuzz-result" }
 stacktrace-parser = { path = "../stacktrace-parser" }
 backoff = { version = "0.4", features = ["tokio"] }
 
diff --git a/src/agent/onefuzz/src/blob/url.rs b/src/agent/onefuzz/src/blob/url.rs
index 134b59dea0..f55ffbb23a 100644
--- a/src/agent/onefuzz/src/blob/url.rs
+++ b/src/agent/onefuzz/src/blob/url.rs
@@ -192,15 +192,10 @@ impl BlobContainerUrl {
     }
 
     pub fn as_path(&self, prefix: impl AsRef<Path>) -> Result<PathBuf> {
-        match (self.account(), self.container()) {
-            (Some(account), Some(container)) => {
-                let mut path = PathBuf::new();
-                path.push(account);
-                path.push(container);
-                Ok(prefix.as_ref().join(path))
-            }
-            _ => bail!("Invalid container Url"),
-        }
+        let dir = self
+            .account()
+            .ok_or_else(|| anyhow!("Invalid container Url"))?;
+        Ok(prefix.as_ref().join(dir))
     }
 }
 
@@ -531,14 +526,4 @@ mod tests {
             "id:000000,sig:06,src:000000,op:havoc,rep:128"
         );
     }
-
-    #[test]
-    fn test_as_path() -> Result<()> {
-        let root = PathBuf::from(r"/onefuzz");
-        let url = BlobContainerUrl::parse("https://myaccount.blob.core.windows.net/mycontainer")?;
-        let path = url.as_path(root)?;
-        assert_eq!(PathBuf::from(r"/onefuzz/myaccount/mycontainer"), path);
-
-        Ok(())
-    }
 }
diff --git a/src/agent/onefuzz/src/syncdir.rs b/src/agent/onefuzz/src/syncdir.rs
index 2e73b7a694..0252099561 100644
--- a/src/agent/onefuzz/src/syncdir.rs
+++ b/src/agent/onefuzz/src/syncdir.rs
@@ -11,12 +11,10 @@ use crate::{
 };
 use anyhow::{Context, Result};
 use dunce::canonicalize;
-use onefuzz_result::job_result::{JobResultData, JobResultSender, TaskJobResultClient};
 use onefuzz_telemetry::{Event, EventData};
 use reqwest::{StatusCode, Url};
 use reqwest_retry::{RetryCheck, SendRetry, DEFAULT_RETRY_PERIOD, MAX_RETRY_ATTEMPTS};
 use serde::{Deserialize, Serialize};
-use std::collections::HashMap;
 use std::{env::current_dir, path::PathBuf, str, time::Duration};
 use tokio::{fs, select};
 use tokio_util::sync::CancellationToken;
@@ -243,7 +241,6 @@ impl SyncedDir {
         url: BlobContainerUrl,
         event: Event,
         ignore_dotfiles: bool,
-        jr_client: &Option<TaskJobResultClient>,
     ) -> Result<()> {
         debug!("monitoring {}", path.display());
 
@@ -268,39 +265,9 @@ impl SyncedDir {
                 if ignore_dotfiles && file_name_event_str.starts_with('.') {
                     continue;
                 }
+
                 event!(event.clone(); EventData::Path = file_name_event_str);
                 metric!(event.clone(); 1.0; EventData::Path = file_name_str_metric_str);
-                if let Some(jr_client) = jr_client {
-                    match event {
-                        Event::new_result => {
-                            jr_client
-                                .send_direct(
-                                    JobResultData::NewCrashingInput,
-                                    HashMap::from([("count".to_string(), 1.0)]),
-                                )
-                                .await;
-                        }
-                        Event::new_coverage => {
-                            jr_client
-                                .send_direct(
-                                    JobResultData::CoverageData,
-                                    HashMap::from([("count".to_string(), 1.0)]),
-                                )
-                                .await;
-                        }
-                        Event::new_crashdump => {
-                            jr_client
-                                .send_direct(
-                                    JobResultData::NewCrashDump,
-                                    HashMap::from([("count".to_string(), 1.0)]),
-                                )
-                                .await;
-                        }
-                        _ => {
-                            warn!("Unhandled job result!");
-                        }
-                    }
-                }
                 let destination = path.join(file_name);
                 if let Err(err) = fs::copy(&item, &destination).await {
                     let error_message = format!(
@@ -338,29 +305,6 @@ impl SyncedDir {
                 event!(event.clone(); EventData::Path = file_name_event_str);
                 metric!(event.clone(); 1.0; EventData::Path = file_name_str_metric_str);
 
-                if let Some(jr_client) = jr_client {
-                    match event {
-                        Event::new_result => {
-                            jr_client
-                                .send_direct(
-                                    JobResultData::NewCrashingInput,
-                                    HashMap::from([("count".to_string(), 1.0)]),
-                                )
-                                .await;
-                        }
-                        Event::new_coverage => {
-                            jr_client
-                                .send_direct(
-                                    JobResultData::CoverageData,
-                                    HashMap::from([("count".to_string(), 1.0)]),
-                                )
-                                .await;
-                        }
-                        _ => {
-                            warn!("Unhandled job result!");
-                        }
-                    }
-                }
                 if let Err(err) = uploader.upload(item.clone()).await {
                     let error_message = format!(
                         "Couldn't upload file. path:{} dir:{} err:{:?}",
@@ -392,12 +336,7 @@ impl SyncedDir {
     /// The intent of this is to support use cases where we usually want a directory
     /// to be initialized, but a user-supplied binary, (such as AFL) logically owns
     /// a directory, and may reset it.
-    pub async fn monitor_results(
-        &self,
-        event: Event,
-        ignore_dotfiles: bool,
-        job_result_client: &Option<TaskJobResultClient>,
-    ) -> Result<()> {
+    pub async fn monitor_results(&self, event: Event, ignore_dotfiles: bool) -> Result<()> {
         if let Some(url) = self.remote_path.clone() {
             loop {
                 debug!("waiting to monitor {}", self.local_path.display());
@@ -416,7 +355,6 @@ impl SyncedDir {
                     url.clone(),
                     event.clone(),
                     ignore_dotfiles,
-                    job_result_client,
                 )
                 .await?;
             }
diff --git a/src/deployment/bicep-templates/storageAccounts.bicep b/src/deployment/bicep-templates/storageAccounts.bicep
index 27f2da21d8..6a96cea6a0 100644
--- a/src/deployment/bicep-templates/storageAccounts.bicep
+++ b/src/deployment/bicep-templates/storageAccounts.bicep
@@ -33,7 +33,7 @@ var storageAccountFuncQueuesParams = [
   'update-queue'
   'webhooks'
   'signalr-events'
-  'job-result'
+  'custom-metrics'
 ]
 var fileChangesQueueIndex = 0
diff --git a/src/integration-tests/integration-test.py b/src/integration-tests/integration-test.py
index 15ffcfb9fe..057404ceff 100755
--- a/src/integration-tests/integration-test.py
+++ b/src/integration-tests/integration-test.py
@@ -88,7 +88,6 @@ class Integration(BaseModel):
     target_method: Optional[str]
     setup_dir: Optional[str]
     target_env: Optional[Dict[str, str]]
-    pool: PoolName
 
 
 TARGETS: Dict[str, Integration] = {
@@ -98,7 +97,6 @@ class Integration(BaseModel):
         target_exe="fuzz.exe",
         inputs="seeds",
         wait_for_files={ContainerType.unique_reports: 1},
-        pool="linux",
     ),
     "linux-libfuzzer": Integration(
         template=TemplateType.libfuzzer,
@@ -126,7 +124,6 @@ class Integration(BaseModel):
             "--only_asan_failures",
             "--write_test_file={extra_output_dir}/test.txt",
        ],
-        pool="linux",
     ),
     "linux-libfuzzer-with-options": Integration(
         template=TemplateType.libfuzzer,
@@ -140,7 +137,6 @@ class Integration(BaseModel):
         },
         reboot_after_setup=True,
         fuzzing_target_options=["-runs=10000000"],
-        pool="linux",
     ),
     "linux-libfuzzer-dlopen": Integration(
         template=TemplateType.libfuzzer,
@@ -154,7 +150,6 @@ class Integration(BaseModel):
         },
         reboot_after_setup=True,
         use_setup=True,
-        pool="linux",
     ),
     "linux-libfuzzer-linked-library": Integration(
         template=TemplateType.libfuzzer,
@@ -168,7 +163,6 @@ class Integration(BaseModel):
         },
         reboot_after_setup=True,
         use_setup=True,
-        pool="linux",
     ),
     "linux-libfuzzer-dotnet": Integration(
         template=TemplateType.libfuzzer_dotnet,
@@ -186,7 +180,6 @@ class Integration(BaseModel):
             ContainerType.unique_reports: 1,
         },
         test_repro=False,
-        pool="linux",
     ),
     "linux-libfuzzer-aarch64-crosscompile": Integration(
         template=TemplateType.libfuzzer_qemu_user,
@@ -196,7 +189,6 @@ class Integration(BaseModel):
         use_setup=True,
         wait_for_files={ContainerType.inputs: 2, ContainerType.crashes: 1},
         test_repro=False,
-        pool="linux",
     ),
     "linux-libfuzzer-rust": Integration(
         template=TemplateType.libfuzzer,
@@ -204,7 +196,6 @@ class Integration(BaseModel):
         target_exe="fuzz_target_1",
         wait_for_files={ContainerType.unique_reports: 1, ContainerType.coverage: 1},
         fuzzing_target_options=["--test:{extra_setup_dir}"],
-        pool="linux",
     ),
     "linux-trivial-crash": Integration(
         template=TemplateType.radamsa,
@@ -213,7 +204,6 @@ class Integration(BaseModel):
         inputs="seeds",
         wait_for_files={ContainerType.unique_reports: 1},
         inject_fake_regression=True,
-        pool="linux",
     ),
     "linux-trivial-crash-asan": Integration(
         template=TemplateType.radamsa,
@@ -223,28 +213,6 @@ class Integration(BaseModel):
         wait_for_files={ContainerType.unique_reports: 1},
         check_asan_log=True,
         disable_check_debugger=True,
-        pool="linux",
-    ),
-    # TODO: Don't install OMS
extension on linux anymore - # TODO: Figure out why non mariner work is being scheduled to the mariner pool - "mariner-libfuzzer": Integration( - template=TemplateType.libfuzzer, - os=OS.linux, - target_exe="fuzz.exe", - inputs="seeds", - wait_for_files={ - ContainerType.unique_reports: 1, - ContainerType.coverage: 1, - ContainerType.inputs: 2, - ContainerType.extra_output: 1, - }, - reboot_after_setup=True, - inject_fake_regression=True, - fuzzing_target_options=[ - "--test:{extra_setup_dir}", - "--write_test_file={extra_output_dir}/test.txt", - ], - pool=PoolName("mariner") ), "windows-libfuzzer": Integration( template=TemplateType.libfuzzer, @@ -266,7 +234,6 @@ class Integration(BaseModel): "--only_asan_failures", "--write_test_file={extra_output_dir}/test.txt", ], - pool="windows", ), "windows-libfuzzer-linked-library": Integration( template=TemplateType.libfuzzer, @@ -279,7 +246,6 @@ class Integration(BaseModel): ContainerType.coverage: 1, }, use_setup=True, - pool="windows", ), "windows-libfuzzer-load-library": Integration( template=TemplateType.libfuzzer, @@ -292,7 +258,6 @@ class Integration(BaseModel): ContainerType.coverage: 1, }, use_setup=True, - pool="windows", ), "windows-libfuzzer-dotnet": Integration( template=TemplateType.libfuzzer_dotnet, @@ -310,7 +275,6 @@ class Integration(BaseModel): ContainerType.unique_reports: 1, }, test_repro=False, - pool="windows", ), "windows-trivial-crash": Integration( template=TemplateType.radamsa, @@ -319,7 +283,6 @@ class Integration(BaseModel): inputs="seeds", wait_for_files={ContainerType.unique_reports: 1}, inject_fake_regression=True, - pool="windows", ), } @@ -388,7 +351,7 @@ def try_info_get(data: Any) -> None: self.inject_log(self.start_log_marker) for entry in os_list: - name = self.build_pool_name(entry.name) + name = PoolName(f"testpool-{entry.name}-{self.test_id}") self.logger.info("creating pool: %s:%s", entry.name, name) self.of.pools.create(name, entry) self.logger.info("creating scaleset for pool: %s", name) @@ -396,15 +359,6 @@ def try_info_get(data: Any) -> None: name, pool_size, region=region, initial_size=pool_size ) - name = self.build_pool_name("mariner") - self.logger.info("creating pool: %s:%s", "mariner", name) - self.of.pools.create(name, OS.linux) - self.logger.info("creating scaleset for pool: %s", name) - self.of.scalesets.create( - name, pool_size, region=region, initial_size=pool_size, image="MicrosoftCBLMariner:cbl-mariner:cbl-mariner-2-gen2:latest" - ) - - class UnmanagedPool: def __init__( self, @@ -606,9 +560,12 @@ def launch( ) -> List[UUID]: """Launch all of the fuzzing templates""" - pool = None + pools: Dict[OS, Pool] = {} if unmanaged_pool is not None: - pool = unmanaged_pool.pool_name + pools[unmanaged_pool.the_os] = self.of.pools.get(unmanaged_pool.pool_name) + else: + for pool in self.of.pools.list(): + pools[pool.os] = pool job_ids = [] @@ -619,8 +576,8 @@ def launch( if config.os not in os_list: continue - if pool is None: - pool = self.build_pool_name(config.pool) + if config.os not in pools.keys(): + raise Exception(f"No pool for target: {target} ,os: {config.os}") self.logger.info("launching: %s", target) @@ -644,9 +601,8 @@ def launch( setup = Directory(os.path.join(setup, config.nested_setup_dir)) job: Optional[Job] = None - job = self.build_job( - duration, pool, target, config, setup, target_exe, inputs + duration, pools, target, config, setup, target_exe, inputs ) if config.inject_fake_regression and job is not None: @@ -662,7 +618,7 @@ def launch( def build_job( self, duration: int, - 
pool: PoolName, + pools: Dict[OS, Pool], target: str, config: Integration, setup: Optional[Directory], @@ -678,7 +634,7 @@ def build_job( self.project, target, BUILD, - pool, + pools[config.os].name, target_exe=target_exe, inputs=inputs, setup_dir=setup, @@ -703,7 +659,7 @@ def build_job( self.project, target, BUILD, - pool, + pools[config.os].name, target_dll=File(config.target_exe), inputs=inputs, setup_dir=setup, @@ -719,7 +675,7 @@ def build_job( self.project, target, BUILD, - pool, + pools[config.os].name, inputs=inputs, target_exe=target_exe, duration=duration, @@ -732,7 +688,7 @@ def build_job( self.project, target, BUILD, - pool_name=pool, + pool_name=pools[config.os].name, target_exe=target_exe, inputs=inputs, setup_dir=setup, @@ -747,7 +703,7 @@ def build_job( self.project, target, BUILD, - pool_name=pool, + pool_name=pools[config.os].name, target_exe=target_exe, inputs=inputs, setup_dir=setup, @@ -1277,9 +1233,6 @@ def check_logs_for_errors(self) -> None: if seen_errors: raise Exception("logs included errors") - - def build_pool_name(self, os_type: str) -> PoolName: - return PoolName(f"testpool-{os_type}-{self.test_id}") class Run(Command): diff --git a/src/runtime-tools/linux/setup.sh b/src/runtime-tools/linux/setup.sh old mode 100644 new mode 100755 index 794e827f4d..f6859003b4 --- a/src/runtime-tools/linux/setup.sh +++ b/src/runtime-tools/linux/setup.sh @@ -18,14 +18,6 @@ export DOTNET_CLI_HOME="$DOTNET_ROOT" export ONEFUZZ_ROOT=/onefuzz export LLVM_SYMBOLIZER_PATH=/onefuzz/bin/llvm-symbolizer -# `logger` won't work on mariner unless we install this package first -if type yum > /dev/null 2> /dev/null; then - until yum install -y util-linux sudo; do - echo "yum failed. sleep 10s, then retrying" - sleep 10 - done -fi - logger "onefuzz: making directories" sudo mkdir -p /onefuzz/downloaded sudo chown -R $(whoami) /onefuzz @@ -142,53 +134,31 @@ if type apt > /dev/null 2> /dev/null; then sudo ln -f -s $(which llvm-symbolizer-12) $LLVM_SYMBOLIZER_PATH fi - # Needed to install dotnet + # Install dotnet until sudo apt install -y curl libicu-dev; do logger "apt failed, sleeping 10s then retrying" sleep 10 done -elif type yum > /dev/null 2> /dev/null; then - until yum install -y gdb gdb-gdbserver libunwind awk ca-certificates tar yum-utils shadow-utils cronie procps; do - echo "yum failed. sleep 10s, then retrying" - sleep 10 - done - - # Install updated Microsoft Open Management Infrastructure - github.com/microsoft/omi - yum-config-manager --add-repo=https://packages.microsoft.com/config/rhel/8/prod.repo 2>&1 | logger -s -i -t 'onefuzz-OMI-add-MS-repo' - yum install -y omi 2>&1 | logger -s -i -t 'onefuzz-OMI-install' + logger "downloading dotnet install" + curl --retry 10 -sSL https://dot.net/v1/dotnet-install.sh -o dotnet-install.sh 2>&1 | logger -s -i -t 'onefuzz-curl-dotnet-install' + chmod +x dotnet-install.sh - if ! [ -f ${LLVM_SYMBOLIZER_PATH} ]; then - until yum install -y llvm-12.0.1; do - echo "yum failed, sleeping 10s then retrying" - sleep 10 - done - - # If specifying symbolizer, exe name must be a "known symbolizer". - # Using `llvm-symbolizer` works for clang 8 .. 12. 
- sudo ln -f -s $(which llvm-symbolizer-12) $LLVM_SYMBOLIZER_PATH - fi + for version in "${DOTNET_VERSIONS[@]}"; do + logger "running dotnet install $version" + /bin/bash ./dotnet-install.sh --channel "$version" --install-dir "$DOTNET_ROOT" 2>&1 | logger -s -i -t 'onefuzz-dotnet-setup' + done + rm dotnet-install.sh + + logger "install dotnet tools" + pushd "$DOTNET_ROOT" + ls -lah 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' + "$DOTNET_ROOT"/dotnet tool install dotnet-dump --version 6.0.351802 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' + "$DOTNET_ROOT"/dotnet tool install dotnet-coverage --version 17.5 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' + "$DOTNET_ROOT"/dotnet tool install dotnet-sos --version 6.0.351802 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' + popd fi -# Install dotnet -logger "downloading dotnet install" -curl --retry 10 -sSL https://dot.net/v1/dotnet-install.sh -o dotnet-install.sh 2>&1 | logger -s -i -t 'onefuzz-curl-dotnet-install' -chmod +x dotnet-install.sh - -for version in "${DOTNET_VERSIONS[@]}"; do - logger "running dotnet install $version" - /bin/bash ./dotnet-install.sh --channel "$version" --install-dir "$DOTNET_ROOT" 2>&1 | logger -s -i -t 'onefuzz-dotnet-setup' -done -rm dotnet-install.sh - -logger "install dotnet tools" -pushd "$DOTNET_ROOT" -ls -lah 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' -"$DOTNET_ROOT"/dotnet tool install dotnet-dump --version 6.0.351802 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' -"$DOTNET_ROOT"/dotnet tool install dotnet-coverage --version 17.5 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' -"$DOTNET_ROOT"/dotnet tool install dotnet-sos --version 6.0.351802 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' -popd - if [ -v DOCKER_BUILD ]; then echo "building for docker" elif [ -d /etc/systemd/system ]; then From c8986aaa91838a8d701cf0e1099be6a103b8b736 Mon Sep 17 00:00:00 2001 From: Adam <103067949+AdamL-Microsoft@users.noreply.github.com> Date: Wed, 30 Aug 2023 13:53:49 -0700 Subject: [PATCH 02/88] Revert "Release 8.7.1 (hotfix) (#3459)" (#3468) This reverts commit c69deed50e81cc1805f6f82ebb10513a211cbbe2. 
--- .devcontainer/devcontainer.json | 3 +- .github/workflows/ci.yml | 2 + CHANGELOG.md | 6 - CURRENT_VERSION | 2 +- .../ApiService/Functions/QueueJobResult.cs | 60 +++++++ .../ApiService/OneFuzzTypes/Model.cs | 45 +++++ src/ApiService/ApiService/Program.cs | 1 + .../ApiService/onefuzzlib/Config.cs | 1 + .../ApiService/onefuzzlib/Extension.cs | 44 ++--- .../onefuzzlib/JobResultOperations.cs | 121 +++++++++++++ .../ApiService/onefuzzlib/OnefuzzContext.cs | 2 + .../IntegrationTests/Fakes/TestContext.cs | 3 + src/agent/Cargo.lock | 16 ++ src/agent/Cargo.toml | 1 + src/agent/onefuzz-agent/src/config.rs | 12 ++ src/agent/onefuzz-agent/src/log_uploader.rs | 29 ---- src/agent/onefuzz-agent/src/work.rs | 5 +- src/agent/onefuzz-result/Cargo.toml | 18 ++ src/agent/onefuzz-result/src/job_result.rs | 129 ++++++++++++++ src/agent/onefuzz-result/src/lib.rs | 4 + src/agent/onefuzz-task/Cargo.toml | 1 + src/agent/onefuzz-task/src/local/cmd.rs | 42 +---- src/agent/onefuzz-task/src/local/common.rs | 26 +-- .../example_templates/libfuzzer_basic.yml | 34 ++-- .../src/local/generic_analysis.rs | 137 +-------------- .../src/local/generic_crash_report.rs | 138 +-------------- .../src/local/generic_generator.rs | 142 +-------------- src/agent/onefuzz-task/src/local/libfuzzer.rs | 161 +----------------- .../src/local/libfuzzer_crash_report.rs | 128 +------------- .../onefuzz-task/src/local/libfuzzer_merge.rs | 84 +-------- .../src/local/libfuzzer_regression.rs | 134 +-------------- .../src/local/libfuzzer_test_input.rs | 83 --------- src/agent/onefuzz-task/src/local/mod.rs | 1 - src/agent/onefuzz-task/src/local/radamsa.rs | 78 --------- src/agent/onefuzz-task/src/local/schema.json | 8 +- src/agent/onefuzz-task/src/local/template.rs | 13 +- .../onefuzz-task/src/local/test_input.rs | 86 ---------- .../src/tasks/analysis/generic.rs | 5 +- src/agent/onefuzz-task/src/tasks/config.rs | 20 +++ .../src/tasks/coverage/generic.rs | 19 ++- .../onefuzz-task/src/tasks/fuzz/generator.rs | 7 +- .../src/tasks/fuzz/libfuzzer/common.rs | 49 ++++-- .../onefuzz-task/src/tasks/fuzz/supervisor.rs | 15 +- src/agent/onefuzz-task/src/tasks/heartbeat.rs | 2 +- .../onefuzz-task/src/tasks/merge/generic.rs | 2 +- .../src/tasks/merge/libfuzzer_merge.rs | 2 +- .../src/tasks/regression/common.rs | 15 +- .../src/tasks/regression/generic.rs | 3 +- .../src/tasks/regression/libfuzzer.rs | 3 +- .../src/tasks/report/crash_report.rs | 45 ++++- .../src/tasks/report/dotnet/generic.rs | 22 ++- .../onefuzz-task/src/tasks/report/generic.rs | 14 +- .../src/tasks/report/libfuzzer_report.rs | 5 + src/agent/onefuzz/Cargo.toml | 1 + src/agent/onefuzz/src/blob/url.rs | 23 ++- src/agent/onefuzz/src/syncdir.rs | 66 ++++++- .../bicep-templates/storageAccounts.bicep | 2 +- src/integration-tests/integration-test.py | 77 +++++++-- src/runtime-tools/linux/setup.sh | 64 +++++-- 59 files changed, 872 insertions(+), 1389 deletions(-) create mode 100644 src/ApiService/ApiService/Functions/QueueJobResult.cs create mode 100644 src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs create mode 100644 src/agent/onefuzz-result/Cargo.toml create mode 100644 src/agent/onefuzz-result/src/job_result.rs create mode 100644 src/agent/onefuzz-result/src/lib.rs delete mode 100644 src/agent/onefuzz-task/src/local/radamsa.rs mode change 100755 => 100644 src/runtime-tools/linux/setup.sh diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 4059b3d7c1..d3fcf050ed 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -13,6 
+13,7 @@
       "**/target/**": true
     },
     "lldb.executable": "/usr/bin/lldb",
+    "dotnet.server.useOmnisharp": true,
     "omnisharp.enableEditorConfigSupport": true,
     "omnisharp.enableRoslynAnalyzers": true,
     "python.defaultInterpreterPath": "/workspaces/onefuzz/src/venv/bin/python",
@@ -48,4 +49,4 @@
     "features": {
         "ghcr.io/devcontainers/features/azure-cli:1": {}
     }
-}
+}
\ No newline at end of file
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 12824fd182..2dd85d7c92 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -542,9 +542,11 @@ jobs:
 
         mkdir -p artifacts/linux-libfuzzer
         mkdir -p artifacts/linux-libfuzzer-with-options
+        mkdir -p artifacts/mariner-libfuzzer
         (cd libfuzzer ; make )
         cp -r libfuzzer/fuzz.exe libfuzzer/seeds artifacts/linux-libfuzzer
         cp -r libfuzzer/fuzz.exe libfuzzer/seeds artifacts/linux-libfuzzer-with-options
+        cp -r libfuzzer/fuzz.exe libfuzzer/seeds artifacts/mariner-libfuzzer
 
         mkdir -p artifacts/linux-libfuzzer-regression
         (cd libfuzzer-regression ; make )
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8d46ea2a0e..be4779ad77 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,12 +7,6 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
-## 8.7.1
-
-### Fixed
-
-* Service: Removed deprecated Azure retention policy setting that was causing scaleset deployment errors [#3452](https://github.com/microsoft/onefuzz/pull/3452)
-
 ## 8.7.0
 
 ### Added
diff --git a/CURRENT_VERSION b/CURRENT_VERSION
index efeecbe2c5..c0bcaebe8f 100644
--- a/CURRENT_VERSION
+++ b/CURRENT_VERSION
@@ -1 +1 @@
-8.7.1
\ No newline at end of file
+8.7.0
\ No newline at end of file
diff --git a/src/ApiService/ApiService/Functions/QueueJobResult.cs b/src/ApiService/ApiService/Functions/QueueJobResult.cs
new file mode 100644
index 0000000000..d781a4d1e1
--- /dev/null
+++ b/src/ApiService/ApiService/Functions/QueueJobResult.cs
@@ -0,0 +1,60 @@
+using System.Text.Json;
+using Microsoft.Azure.Functions.Worker;
+using Microsoft.Extensions.Logging;
+using Microsoft.OneFuzz.Service.OneFuzzLib.Orm;
+namespace Microsoft.OneFuzz.Service.Functions;
+
+
+public class QueueJobResult {
+    private readonly ILogger<QueueJobResult> _log;
+    private readonly IOnefuzzContext _context;
+
+    public QueueJobResult(ILogger<QueueJobResult> logTracer, IOnefuzzContext context) {
+        _log = logTracer;
+        _context = context;
+    }
+
+    [Function("QueueJobResult")]
+    public async Async.Task Run([QueueTrigger("job-result", Connection = "AzureWebJobsStorage")] string msg) {
+
+        var _tasks = _context.TaskOperations;
+        var _jobs = _context.JobOperations;
+
+        _log.LogInformation("job result: {msg}", msg);
+        var jr = JsonSerializer.Deserialize<TaskJobResultEntry>(msg, EntityConverter.GetJsonSerializerOptions()).EnsureNotNull($"wrong data {msg}");
+
+        var task = await _tasks.GetByTaskId(jr.TaskId);
+        if (task == null) {
+            _log.LogWarning("invalid {TaskId}", jr.TaskId);
+            return;
+        }
+
+        var job = await _jobs.Get(task.JobId);
+        if (job == null) {
+            _log.LogWarning("invalid {JobId}", task.JobId);
+            return;
+        }
+
+        JobResultData? data = jr.Data;
+        if (data == null) {
+            _log.LogWarning($"job result data is empty, throwing out: {jr}");
+            return;
+        }
+
+        var jobResultType = data.Type;
+        _log.LogInformation($"job result data type: {jobResultType}");
+
+        Dictionary<string, double> value;
+        if (jr.Value.Count > 0) {
+            value = jr.Value;
+        } else {
+            _log.LogWarning($"job result value is empty, throwing out: {jr}");
+            return;
+        }
+
+        var jobResult = await _context.JobResultOperations.CreateOrUpdate(job.JobId, jobResultType, value);
+        if (!jobResult.IsOk) {
+            _log.LogError("failed to create or update with job result {JobId}", job.JobId);
+        }
+    }
+}
diff --git a/src/ApiService/ApiService/OneFuzzTypes/Model.cs b/src/ApiService/ApiService/OneFuzzTypes/Model.cs
index e430c1448c..b839f52ddc 100644
--- a/src/ApiService/ApiService/OneFuzzTypes/Model.cs
+++ b/src/ApiService/ApiService/OneFuzzTypes/Model.cs
@@ -33,6 +33,19 @@ public enum HeartbeatType {
     TaskAlive,
 }
 
+[SkipRename]
+public enum JobResultType {
+    NewCrashingInput,
+    NoReproCrashingInput,
+    NewReport,
+    NewUniqueReport,
+    NewRegressionReport,
+    NewCoverage,
+    NewCrashDump,
+    CoverageData,
+    RuntimeStats,
+}
+
 public record HeartbeatData(HeartbeatType Type);
 
 public record TaskHeartbeatEntry(
@@ -41,6 +54,16 @@ public record TaskHeartbeatEntry(
     Guid MachineId,
     HeartbeatData[] Data);
 
+public record JobResultData(JobResultType Type);
+
+public record TaskJobResultEntry(
+    Guid TaskId,
+    Guid? JobId,
+    Guid MachineId,
+    JobResultData Data,
+    Dictionary<string, double> Value
+    );
+
 public record NodeHeartbeatEntry(Guid NodeId, HeartbeatData[] Data);
 
 public record NodeCommandStopIfFree();
@@ -892,6 +915,27 @@ public record SecretAddress<T>(Uri Url) : ISecret<T> {
 public record SecretData<T>(ISecret<T> Secret) {
 }
 
+public record JobResult(
+    [PartitionKey][RowKey] Guid JobId,
+    string Project,
+    string Name,
+    double NewCrashingInput = 0,
+    double NoReproCrashingInput = 0,
+    double NewReport = 0,
+    double NewUniqueReport = 0,
+    double NewRegressionReport = 0,
+    double NewCrashDump = 0,
+    double InstructionsCovered = 0,
+    double TotalInstructions = 0,
+    double CoverageRate = 0,
+    double IterationCount = 0
+) : EntityBase() {
+    public JobResult(Guid JobId, string Project, string Name) : this(
+        JobId: JobId,
+        Project: Project,
+        Name: Name, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) { }
+}
+
 public record JobConfig(
     string Project,
     string Name,
@@ -1056,6 +1100,7 @@ public record TaskUnitConfig(
     string? InstanceTelemetryKey,
     string? MicrosoftTelemetryKey,
     Uri HeartbeatQueue,
+    Uri JobResultQueue,
     Dictionary<string, string> Tags
 ) {
     public Uri? inputQueue { get; set; }
diff --git a/src/ApiService/ApiService/Program.cs b/src/ApiService/ApiService/Program.cs
index f425c00809..d5ee30b45e 100644
--- a/src/ApiService/ApiService/Program.cs
+++ b/src/ApiService/ApiService/Program.cs
@@ -118,6 +118,7 @@ public static async Async.Task Main() {
         .AddScoped()
         .AddScoped()
         .AddScoped()
+        .AddScoped<IJobResultOperations, JobResultOperations>()
         .AddScoped()
         .AddScoped()
         .AddScoped()
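A note for reviewers tracing the restored flow: the QueueJobResult function above deserializes a TaskJobResultEntry from the job-result queue and hands it to JobResultOperations. The sketch below is a minimal, self-contained rendering of that message handoff using plain System.Text.Json; the PascalCase keys, the string enum encoding via JsonStringEnumConverter, and the GUID values are assumptions standing in for whatever EntityConverter.GetJsonSerializerOptions() actually configures in the service.

using System;
using System.Collections.Generic;
using System.Text.Json;
using System.Text.Json.Serialization;

// Abridged local mirrors of the records this patch restores in Model.cs.
public enum JobResultType { NewCrashingInput, NewReport, NewUniqueReport, NewRegressionReport, NewCrashDump, CoverageData, RuntimeStats }
public record JobResultData(JobResultType Type);
public record TaskJobResultEntry(Guid TaskId, Guid? JobId, Guid MachineId, JobResultData Data, Dictionary<string, double> Value);

public static class JobResultMessageDemo {
    public static void Main() {
        // A message shaped like the one the agent enqueues (all values are placeholders).
        const string msg = @"{
            ""TaskId"": ""6f286fd5-4d58-4e0b-9c1f-5f1d2a3b4c5d"",
            ""JobId"": ""1c9d7a2e-0b3f-4a5d-8e6f-7a8b9c0d1e2f"",
            ""MachineId"": ""0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d"",
            ""Data"": { ""Type"": ""NewCrashingInput"" },
            ""Value"": { ""count"": 1 }
        }";
        var options = new JsonSerializerOptions { Converters = { new JsonStringEnumConverter() } };
        var entry = JsonSerializer.Deserialize<TaskJobResultEntry>(msg, options)
            ?? throw new InvalidOperationException($"wrong data {msg}");
        // The function would now look up the task and job, then call CreateOrUpdate.
        Console.WriteLine($"task {entry.TaskId}: {entry.Data.Type} count={entry.Value["count"]}");
    }
}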
diff --git a/src/ApiService/ApiService/onefuzzlib/Config.cs b/src/ApiService/ApiService/onefuzzlib/Config.cs
index 71af317348..872cedbc01 100644
--- a/src/ApiService/ApiService/onefuzzlib/Config.cs
+++ b/src/ApiService/ApiService/onefuzzlib/Config.cs
@@ -71,6 +71,7 @@ private static BlobContainerSasPermissions ConvertPermissions(ContainerPermission
             InstanceTelemetryKey: _serviceConfig.ApplicationInsightsInstrumentationKey,
             MicrosoftTelemetryKey: _serviceConfig.OneFuzzTelemetry,
             HeartbeatQueue: await _queue.GetQueueSas("task-heartbeat", StorageType.Config, QueueSasPermissions.Add) ?? throw new Exception("unable to get heartbeat queue sas"),
+            JobResultQueue: await _queue.GetQueueSas("job-result", StorageType.Config, QueueSasPermissions.Add) ?? throw new Exception("unable to get job result queue sas"),
             Tags: task.Config.Tags ?? new Dictionary<string, string>()
         );
 
diff --git a/src/ApiService/ApiService/onefuzzlib/Extension.cs b/src/ApiService/ApiService/onefuzzlib/Extension.cs
index 7995026eca..fbf62dd343 100644
--- a/src/ApiService/ApiService/onefuzzlib/Extension.cs
+++ b/src/ApiService/ApiService/onefuzzlib/Extension.cs
@@ -36,7 +36,9 @@ public async Async.Task<IList<VMExtensionWrapper>> GenericExtensions(AzureLocation
     var extensions = new List<VMExtensionWrapper>();
 
     var instanceConfig = await _context.ConfigOperations.Fetch();
-    extensions.Add(await MonitorExtension(region, vmOs));
+    if (vmOs == Os.Windows) {
+        extensions.Add(await MonitorExtension(region));
+    }
 
     var depenency = DependencyExtension(region, vmOs);
     if (depenency is not null) {
@@ -329,37 +331,21 @@ public async Async.Task<VMExtensionWrapper> AgentConfig(AzureLocation region, Os
         throw new NotSupportedException($"unsupported OS: {vmOs}");
     }
 
-    public async Async.Task<VMExtensionWrapper> MonitorExtension(AzureLocation region, Os vmOs) {
+    public async Async.Task<VMExtensionWrapper> MonitorExtension(AzureLocation region) {
         var settings = await _context.LogAnalytics.GetMonitorSettings();
         var extensionSettings = JsonSerializer.Serialize(new { WorkspaceId = settings.Id }, _extensionSerializerOptions);
         var protectedExtensionSettings = JsonSerializer.Serialize(new { WorkspaceKey = settings.Key }, _extensionSerializerOptions);
-        if (vmOs == Os.Windows) {
-            return new VMExtensionWrapper {
-                Location = region,
-                Name = "OMSExtension",
-                TypePropertiesType = "MicrosoftMonitoringAgent",
-                Publisher = "Microsoft.EnterpriseCloud.Monitoring",
-                TypeHandlerVersion = "1.0",
-                AutoUpgradeMinorVersion = true,
-                Settings = new BinaryData(extensionSettings),
-                ProtectedSettings = new BinaryData(protectedExtensionSettings),
-                EnableAutomaticUpgrade = false
-            };
-        } else if (vmOs == Os.Linux) {
-            return new VMExtensionWrapper {
-                Location = region,
-                Name = "OmsAgentForLinux",
-                TypePropertiesType = "OmsAgentForLinux",
-                Publisher = "Microsoft.EnterpriseCloud.Monitoring",
-                TypeHandlerVersion = "1.0",
-                AutoUpgradeMinorVersion = true,
-                Settings = new BinaryData(extensionSettings),
-                ProtectedSettings = new BinaryData(protectedExtensionSettings),
-                EnableAutomaticUpgrade = false
-            };
-        } else {
-            throw new NotSupportedException($"unsupported os: {vmOs}");
-        }
+        return new VMExtensionWrapper {
+            Location = region,
+            Name = "OMSExtension",
+            TypePropertiesType = "MicrosoftMonitoringAgent",
+            Publisher = "Microsoft.EnterpriseCloud.Monitoring",
+            TypeHandlerVersion = "1.0",
+            AutoUpgradeMinorVersion = true,
+            Settings = new BinaryData(extensionSettings),
+            ProtectedSettings = new BinaryData(protectedExtensionSettings),
+            EnableAutomaticUpgrade = false
+        };
     }
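Worth calling out on the Config.cs change above: the JobResultQueue URL baked into each TaskUnitConfig is not the raw queue URL but a SAS URL scoped to Add, so an agent can enqueue results without being able to peek at or delete other messages. GetQueueSas is OneFuzz's own helper; the following is only a rough equivalent using the public Azure.Storage.Queues SDK, with the account name, key, and one-day lifetime as placeholder assumptions.

using System;
using Azure.Storage;
using Azure.Storage.Queues;
using Azure.Storage.Sas;

public static class QueueSasSketch {
    // Roughly what GetQueueSas("job-result", ..., QueueSasPermissions.Add) hands back.
    public static Uri AddOnlyQueueSas() {
        // Placeholder credentials; the service resolves these from its storage config.
        var credential = new StorageSharedKeyCredential("accountname", "<base64 account key>");
        var queue = new QueueClient(
            new Uri("https://accountname.queue.core.windows.net/job-result"),
            credential);
        // Add-only: holders of this URL can enqueue, nothing else.
        return queue.GenerateSasUri(QueueSasPermissions.Add, DateTimeOffset.UtcNow.AddDays(1));
    }
}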
diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs
new file mode 100644
index 0000000000..1166cf91d4
--- /dev/null
+++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs
@@ -0,0 +1,121 @@
+using ApiService.OneFuzzLib.Orm;
+using Microsoft.Extensions.Logging;
+using Polly;
+namespace Microsoft.OneFuzz.Service;
+
+public interface IJobResultOperations : IOrm<JobResult> {
+
+    Async.Task<JobResult?> GetJobResult(Guid jobId);
+    Async.Task<OneFuzzResultVoid> CreateOrUpdate(Guid jobId, JobResultType resultType, Dictionary<string, double> resultValue);
+
+}
+public class JobResultOperations : Orm<JobResult>, IJobResultOperations {
+
+    public JobResultOperations(ILogger<JobResultOperations> log, IOnefuzzContext context)
+        : base(log, context) {
+    }
+
+    public async Async.Task<JobResult?> GetJobResult(Guid jobId) {
+        return await SearchByPartitionKeys(new[] { jobId.ToString() }).SingleOrDefaultAsync();
+    }
+
+    private JobResult UpdateResult(JobResult result, JobResultType type, Dictionary<string, double> resultValue) {
+
+        var newResult = result;
+        double newValue;
+        switch (type) {
+            case JobResultType.NewCrashingInput:
+                newValue = result.NewCrashingInput + resultValue["count"];
+                newResult = result with { NewCrashingInput = newValue };
+                break;
+            case JobResultType.NewReport:
+                newValue = result.NewReport + resultValue["count"];
+                newResult = result with { NewReport = newValue };
+                break;
+            case JobResultType.NewUniqueReport:
+                newValue = result.NewUniqueReport + resultValue["count"];
+                newResult = result with { NewUniqueReport = newValue };
+                break;
+            case JobResultType.NewRegressionReport:
+                newValue = result.NewRegressionReport + resultValue["count"];
+                newResult = result with { NewRegressionReport = newValue };
+                break;
+            case JobResultType.NewCrashDump:
+                newValue = result.NewCrashDump + resultValue["count"];
+                newResult = result with { NewCrashDump = newValue };
+                break;
+            case JobResultType.CoverageData:
+                double newCovered = resultValue["covered"];
+                double newTotalCovered = resultValue["features"];
+                double newCoverageRate = resultValue["rate"];
+                newResult = result with { InstructionsCovered = newCovered, TotalInstructions = newTotalCovered, CoverageRate = newCoverageRate };
+                break;
+            case JobResultType.RuntimeStats:
+                double newTotalIterations = resultValue["total_count"];
+                newResult = result with { IterationCount = newTotalIterations };
+                break;
+            default:
+                _logTracer.LogWarning($"Invalid Field {type}.");
+                break;
+        }
+        _logTracer.LogInformation($"Attempting to log new result: {newResult}");
+        return newResult;
+    }
+
+    private async Async.Task<bool> TryUpdate(Job job, JobResultType resultType, Dictionary<string, double> resultValue) {
+        var jobId = job.JobId;
+
+        var jobResult = await GetJobResult(jobId);
+
+        if (jobResult == null) {
+            _logTracer.LogInformation("Creating new JobResult for Job {JobId}", jobId);
+
+            var entry = new JobResult(JobId: jobId, Project: job.Config.Project, Name: job.Config.Name);
+
+            jobResult = UpdateResult(entry, resultType, resultValue);
+
+            var r = await Insert(jobResult);
+            if (!r.IsOk) {
+                throw new InvalidOperationException($"failed to insert job result {jobResult.JobId}");
+            }
+            _logTracer.LogInformation("created job result {JobId}", jobResult.JobId);
+        } else {
+            _logTracer.LogInformation("Updating existing JobResult entry for Job {JobId}", jobId);
+
+            jobResult = UpdateResult(jobResult, resultType, resultValue);
+
+            var r = await Update(jobResult);
+            if (!r.IsOk) {
+                throw new InvalidOperationException($"failed to update job result {jobResult.JobId}");
+            }
+            _logTracer.LogInformation("updated job result {JobId}", jobResult.JobId);
+        }
+
+        return true;
+    }
+
+    public async Async.Task<OneFuzzResultVoid> CreateOrUpdate(Guid jobId, JobResultType resultType, Dictionary<string, double> resultValue) {
+
+        var job = await _context.JobOperations.Get(jobId);
+        if (job == null) {
+            return OneFuzzResultVoid.Error(ErrorCode.INVALID_REQUEST, "invalid job");
+        }
+
+        var success = false;
+        try {
+            _logTracer.LogInformation("attempt to update job result {JobId}", job.JobId);
+            var policy = Policy.Handle<InvalidOperationException>().WaitAndRetryAsync(50, _ => new TimeSpan(0, 0, 5));
+            await policy.ExecuteAsync(async () => {
+                success = await TryUpdate(job, resultType, resultValue);
+                _logTracer.LogInformation("attempt {success}", success);
+            });
+            return OneFuzzResultVoid.Ok;
+        } catch (Exception e) {
+            return OneFuzzResultVoid.Error(ErrorCode.UNABLE_TO_UPDATE, new string[] {
+                $"Unexpected failure when attempting to update job result for {job.JobId}",
+                $"Exception: {e}"
+            });
+        }
+    }
+}
+
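The retry choreography in CreateOrUpdate above deserves a second look: TryUpdate signals a lost optimistic-concurrency race by throwing InvalidOperationException (a failed ETag-checked Insert or Update), and the Polly policy retries it up to 50 times with a 5-second delay before giving up. A stripped-down, runnable sketch of the same pattern follows; the in-memory TryUpdate stand-in and the shortened delay are assumptions for demonstration only.

using System;
using System.Threading.Tasks;
using Polly;

public static class RetrySketch {
    private static int _attempts;

    // Stand-in for TryUpdate: fails twice, as if losing a concurrent-update race.
    private static Task<bool> TryUpdate() {
        if (++_attempts < 3) {
            throw new InvalidOperationException("failed to update job result");
        }
        return Task.FromResult(true);
    }

    public static async Task Main() {
        // Same shape as the policy above: retry the exception TryUpdate throws.
        // (50 ms here instead of 5 s purely to keep the demo fast.)
        var policy = Policy
            .Handle<InvalidOperationException>()
            .WaitAndRetryAsync(50, _ => TimeSpan.FromMilliseconds(50));

        var success = false;
        await policy.ExecuteAsync(async () => { success = await TryUpdate(); });
        Console.WriteLine($"updated after {_attempts} attempts: {success}");
    }
}

One design note: because each attempt re-reads the row via GetJobResult before reapplying the delta, a retried message is applied at most once even when earlier attempts conflicted.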
diff --git a/src/ApiService/ApiService/onefuzzlib/OnefuzzContext.cs b/src/ApiService/ApiService/onefuzzlib/OnefuzzContext.cs
index d877bfddbb..03c6322663 100644
--- a/src/ApiService/ApiService/onefuzzlib/OnefuzzContext.cs
+++ b/src/ApiService/ApiService/onefuzzlib/OnefuzzContext.cs
@@ -19,6 +19,7 @@ public interface IOnefuzzContext {
     IExtensions Extensions { get; }
     IIpOperations IpOperations { get; }
     IJobOperations JobOperations { get; }
+    IJobResultOperations JobResultOperations { get; }
     ILogAnalytics LogAnalytics { get; }
     INodeMessageOperations NodeMessageOperations { get; }
     INodeOperations NodeOperations { get; }
@@ -83,6 +84,7 @@ public OnefuzzContext(IServiceProvider serviceProvider) {
     public IVmOperations VmOperations => _serviceProvider.GetRequiredService<IVmOperations>();
     public ISecretsOperations SecretsOperations => _serviceProvider.GetRequiredService<ISecretsOperations>();
     public IJobOperations JobOperations => _serviceProvider.GetRequiredService<IJobOperations>();
+    public IJobResultOperations JobResultOperations => _serviceProvider.GetRequiredService<IJobResultOperations>();
     public IScheduler Scheduler => _serviceProvider.GetRequiredService<IScheduler>();
     public IConfig Config => _serviceProvider.GetRequiredService<IConfig>();
     public ILogAnalytics LogAnalytics => _serviceProvider.GetRequiredService<ILogAnalytics>();
diff --git a/src/ApiService/IntegrationTests/Fakes/TestContext.cs b/src/ApiService/IntegrationTests/Fakes/TestContext.cs
index c46ff5fce7..66d121e746 100644
--- a/src/ApiService/IntegrationTests/Fakes/TestContext.cs
+++ b/src/ApiService/IntegrationTests/Fakes/TestContext.cs
@@ -32,6 +32,7 @@ public TestContext(IHttpClientFactory httpClientFactory, OneFuzzLoggerProvider provider
         TaskOperations = new TaskOperations(provider.CreateLogger<TaskOperations>(), Cache, this);
         NodeOperations = new NodeOperations(provider.CreateLogger<NodeOperations>(), this);
         JobOperations = new JobOperations(provider.CreateLogger<JobOperations>(), this);
+        JobResultOperations = new JobResultOperations(provider.CreateLogger<JobResultOperations>(), this);
         NodeTasksOperations = new NodeTasksOperations(provider.CreateLogger<NodeTasksOperations>(), this);
         TaskEventOperations = new TaskEventOperations(provider.CreateLogger<TaskEventOperations>(), this);
         NodeMessageOperations = new NodeMessageOperations(provider.CreateLogger<NodeMessageOperations>(), this);
@@ -57,6 +58,7 @@ public Async.Task InsertAll(params EntityBase[] objs)
             Node n => NodeOperations.Insert(n),
             Pool p => PoolOperations.Insert(p),
             Job j => JobOperations.Insert(j),
+            JobResult jr => JobResultOperations.Insert(jr),
             Repro r => ReproOperations.Insert(r),
             Scaleset ss => ScalesetOperations.Insert(ss),
             NodeTasks nt => NodeTasksOperations.Insert(nt),
@@ -84,6 +86,7 @@ public Async.Task InsertAll(params EntityBase[] objs)
     public ITaskOperations TaskOperations { get; }
 
     public IJobOperations JobOperations { get; }
+    public IJobResultOperations JobResultOperations { get; }
     public INodeOperations NodeOperations { get; }
     public INodeTasksOperations NodeTasksOperations { get; }
     public ITaskEventOperations TaskEventOperations { get; }
diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock
index a1d86e7d25..254684be97 100644
--- a/src/agent/Cargo.lock
+++ b/src/agent/Cargo.lock
@@ -2123,6 +2123,7 @@ dependencies = [
 "log",
 "nix",
 "notify",
+ "onefuzz-result",
 "onefuzz-telemetry",
 "pete",
 "pretty_assertions",
@@ -2197,6 +2198,20 @@ dependencies = [
 "serde_json",
]

+[[package]]
+name = "onefuzz-result"
+version = "0.2.0"
+dependencies = [
+ "anyhow",
+ "async-trait",
+ "log",
+ "onefuzz-telemetry",
+ "reqwest",
+ "serde",
+ "storage-queue",
+ "uuid",
+]
+
[[package]]
name = "onefuzz-task"
version = "0.2.0"
@@ -2226,6 +2241,7 @@ dependencies = [
 "num_cpus",
 "onefuzz",
 "onefuzz-file-format",
+ "onefuzz-result",
 "onefuzz-telemetry",
 "path-absolutize",
 "pretty_assertions",
diff --git a/src/agent/Cargo.toml b/src/agent/Cargo.toml
index 2f4cea41a4..ce01ae880c 100644
--- a/src/agent/Cargo.toml
+++ b/src/agent/Cargo.toml
@@ -10,6 +10,7 @@ members = [
     "onefuzz",
     "onefuzz-task",
     "onefuzz-agent",
+    "onefuzz-result",
    "onefuzz-file-format",
    "onefuzz-telemetry",
    "reqwest-retry",
diff --git a/src/agent/onefuzz-agent/src/config.rs b/src/agent/onefuzz-agent/src/config.rs
index 87edfb2c1b..fc623e72af 100644
--- a/src/agent/onefuzz-agent/src/config.rs
+++ b/src/agent/onefuzz-agent/src/config.rs
@@ -34,6 +34,8 @@ pub struct StaticConfig {
 
     pub heartbeat_queue: Option<Url>,
 
+    pub job_result_queue: Option<Url>,
+
     pub instance_id: Uuid,
 
     #[serde(default = "default_as_true")]
@@ -71,6 +73,8 @@ struct RawStaticConfig {
 
     pub heartbeat_queue: Option<Url>,
 
+    pub job_result_queue: Option<Url>,
+
     pub instance_id: Uuid,
 
     #[serde(default = "default_as_true")]
@@ -117,6 +121,7 @@ impl StaticConfig {
             microsoft_telemetry_key: config.microsoft_telemetry_key,
             instance_telemetry_key: config.instance_telemetry_key,
             heartbeat_queue: config.heartbeat_queue,
+            job_result_queue: config.job_result_queue,
             instance_id: config.instance_id,
             managed: config.managed,
             machine_identity,
@@ -152,6 +157,12 @@ impl StaticConfig {
             None
         };
 
+        let job_result_queue = if let Ok(key) = std::env::var("ONEFUZZ_JOB_RESULT") {
+            Some(Url::parse(&key)?)
+        } else {
+            None
+        };
+
         let instance_telemetry_key =
             if let Ok(key) = std::env::var("ONEFUZZ_INSTANCE_TELEMETRY_KEY") {
                 Some(InstanceTelemetryKey::new(Uuid::parse_str(&key)?))
@@ -183,6 +194,7 @@ impl StaticConfig {
             instance_telemetry_key,
             microsoft_telemetry_key,
             heartbeat_queue,
+            job_result_queue,
             instance_id,
             managed: !is_unmanaged,
             machine_identity,
diff --git a/src/agent/onefuzz-agent/src/log_uploader.rs b/src/agent/onefuzz-agent/src/log_uploader.rs
index 6bccc0bef2..d424013421 100644
--- a/src/agent/onefuzz-agent/src/log_uploader.rs
+++ b/src/agent/onefuzz-agent/src/log_uploader.rs
@@ -210,32 +210,3 @@ async fn sync_file(
     blob_client.append_block(Body::from(f)).await?;
     Ok(len)
 }
-
-#[cfg(test)]
-mod tests {
-    use std::io::Seek;
-
-    use anyhow::Result;
-    use tokio::io::{AsyncReadExt, AsyncSeekExt};
-
-    #[allow(clippy::unused_io_amount)]
-    #[tokio::test]
-    #[ignore]
-
-    async fn test_seek_behavior() -> Result<()> {
-        let path = "C:\\temp\\test.ps1";
-        let mut std_file = std::fs::File::open(path)?;
-        std_file.seek(std::io::SeekFrom::Start(3))?;
-
-        let mut tokio_file = tokio::fs::File::from_std(std_file);
-
-        let buf = &mut [0u8; 5];
-        tokio_file.read(buf).await?;
-        println!("******** buf {:?}", buf);
-        tokio_file.seek(std::io::SeekFrom::Start(0)).await?;
-        tokio_file.read(buf).await?;
-        println!("******** buf {:?}", buf);
-
-        Ok(())
-    }
-}
diff --git a/src/agent/onefuzz-agent/src/work.rs b/src/agent/onefuzz-agent/src/work.rs
index b55d1d86a1..d0222744a7 100644
--- a/src/agent/onefuzz-agent/src/work.rs
+++ b/src/agent/onefuzz-agent/src/work.rs
@@ -91,7 +91,10 @@ impl WorkSet {
 
     pub fn setup_dir(&self) -> Result<PathBuf> {
         let root = self.get_root_folder()?;
-        self.setup_url.as_path(root)
+        // Putting the setup container at the root for backward compatibility.
+        // The path of the setup folder can be used as part of the deduplication logic in the bug filing service.
+        let setup_root = root.parent().ok_or_else(|| anyhow!("Invalid root"))?;
+        self.setup_url.as_path(setup_root)
     }
 
     pub fn extra_setup_dir(&self) -> Result<Option<PathBuf>> {
diff --git a/src/agent/onefuzz-result/Cargo.toml b/src/agent/onefuzz-result/Cargo.toml
new file mode 100644
index 0000000000..7c7de6615c
--- /dev/null
+++ b/src/agent/onefuzz-result/Cargo.toml
@@ -0,0 +1,18 @@
+[package]
+name = "onefuzz-result"
+version = "0.2.0"
+authors = ["fuzzing@microsoft.com"]
+edition = "2021"
+publish = false
+license = "MIT"
+
+[dependencies]
+anyhow = { version = "1.0", features = ["backtrace"] }
+async-trait = "0.1"
+reqwest = "0.11"
+serde = "1.0"
+storage-queue = { path = "../storage-queue" }
+uuid = { version = "1.4", features = ["serde", "v4"] }
+onefuzz-telemetry = { path = "../onefuzz-telemetry" }
+log = "0.4"
+
diff --git a/src/agent/onefuzz-result/src/job_result.rs b/src/agent/onefuzz-result/src/job_result.rs
new file mode 100644
index 0000000000..b305eca2cb
--- /dev/null
+++ b/src/agent/onefuzz-result/src/job_result.rs
@@ -0,0 +1,129 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+
+use anyhow::Result;
+use async_trait::async_trait;
+use onefuzz_telemetry::warn;
+use reqwest::Url;
+use serde::{self, Deserialize, Serialize};
+use std::collections::HashMap;
+use std::sync::Arc;
+use storage_queue::QueueClient;
+use uuid::Uuid;
+
+#[derive(Debug, Deserialize, Serialize, Hash, Eq, PartialEq, Clone)]
+#[serde(tag = "type")]
+pub enum JobResultData {
+    NewCrashingInput,
+    NoReproCrashingInput,
+    NewReport,
+    NewUniqueReport,
+    NewRegressionReport,
+    NewCoverage,
+    NewCrashDump,
+    CoverageData,
+    RuntimeStats,
+}
+
+#[derive(Debug, Deserialize, Serialize, Clone)]
+struct JobResult {
+    task_id: Uuid,
+    job_id: Uuid,
+    machine_id: Uuid,
+    machine_name: String,
+    data: JobResultData,
+    value: HashMap<String, f64>,
+}
+
+#[derive(Clone)]
+pub struct TaskContext {
+    task_id: Uuid,
+    job_id: Uuid,
+    machine_id: Uuid,
+    machine_name: String,
+}
+
+pub struct JobResultContext<TaskContext> {
+    pub state: TaskContext,
+    pub queue_client: QueueClient,
+}
+
+pub struct JobResultClient<TaskContext> {
+    pub context: Arc<JobResultContext<TaskContext>>,
+}
+
+impl<TaskContext> JobResultClient<TaskContext> {
+    pub fn init_job_result(
+        context: TaskContext,
+        queue_url: Url,
+    ) -> Result<JobResultClient<TaskContext>>
+    where
+        TaskContext: Send + Sync + 'static,
+    {
+        let context = Arc::new(JobResultContext {
+            state: context,
+            queue_client: QueueClient::new(queue_url)?,
+        });
+
+        Ok(JobResultClient { context })
+    }
+}
+
+pub type TaskJobResultClient = JobResultClient<TaskContext>;
+
+pub async fn init_job_result(
+    queue_url: Url,
+    task_id: Uuid,
+    job_id: Uuid,
+    machine_id: Uuid,
+    machine_name: String,
+) -> Result<TaskJobResultClient> {
+    let hb = JobResultClient::init_job_result(
+        TaskContext {
+            task_id,
+            job_id,
+            machine_id,
+            machine_name,
+        },
+        queue_url,
+    )?;
+    Ok(hb)
+}
+
+#[async_trait]
+pub trait JobResultSender {
+    async fn send_direct(&self, data: JobResultData, value: HashMap<String, f64>);
+}
+
+#[async_trait]
+impl JobResultSender for TaskJobResultClient {
+    async fn send_direct(&self, data: JobResultData, value: HashMap<String, f64>) {
+        let task_id = self.context.state.task_id;
+        let job_id = self.context.state.job_id;
+        let machine_id = self.context.state.machine_id;
+        let machine_name = self.context.state.machine_name.clone();
+
+        let _ = self
+            .context
+            .queue_client
+            .enqueue(JobResult {
+                task_id,
+                job_id,
+                machine_id,
+                machine_name,
+                data,
+                value,
+            })
+            .await;
+    }
+}
+
+#[async_trait]
+impl JobResultSender for Option<TaskJobResultClient> {
async fn send_direct(&self, data: JobResultData, value: HashMap) { + match self { + Some(client) => client.send_direct(data, value).await, + None => warn!("Failed to send Job Result message data from agent."), + } + } +} diff --git a/src/agent/onefuzz-result/src/lib.rs b/src/agent/onefuzz-result/src/lib.rs new file mode 100644 index 0000000000..dae666ca9a --- /dev/null +++ b/src/agent/onefuzz-result/src/lib.rs @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT License. + +pub mod job_result; diff --git a/src/agent/onefuzz-task/Cargo.toml b/src/agent/onefuzz-task/Cargo.toml index 0ad2f9aa4f..4e0bd381b0 100644 --- a/src/agent/onefuzz-task/Cargo.toml +++ b/src/agent/onefuzz-task/Cargo.toml @@ -39,6 +39,7 @@ serde_json = "1.0" serde_yaml = "0.9.21" onefuzz = { path = "../onefuzz" } onefuzz-telemetry = { path = "../onefuzz-telemetry" } +onefuzz-result = { path = "../onefuzz-result" } path-absolutize = "3.1" reqwest-retry = { path = "../reqwest-retry" } strum = "0.25" diff --git a/src/agent/onefuzz-task/src/local/cmd.rs b/src/agent/onefuzz-task/src/local/cmd.rs index 80fd51a96b..eabefb71ee 100644 --- a/src/agent/onefuzz-task/src/local/cmd.rs +++ b/src/agent/onefuzz-task/src/local/cmd.rs @@ -3,11 +3,7 @@ #[cfg(any(target_os = "linux", target_os = "windows"))] use crate::local::coverage; -use crate::local::{ - common::add_common_config, generic_analysis, generic_crash_report, generic_generator, - libfuzzer, libfuzzer_crash_report, libfuzzer_fuzz, libfuzzer_merge, libfuzzer_regression, - libfuzzer_test_input, radamsa, test_input, tui::TerminalUi, -}; +use crate::local::{common::add_common_config, libfuzzer_fuzz, tui::TerminalUi}; use anyhow::{Context, Result}; use clap::{Arg, ArgAction, Command}; use std::time::Duration; @@ -21,19 +17,9 @@ use super::template; #[derive(Debug, PartialEq, Eq, EnumString, IntoStaticStr, EnumIter)] #[strum(serialize_all = "kebab-case")] enum Commands { - Radamsa, #[cfg(any(target_os = "linux", target_os = "windows"))] Coverage, LibfuzzerFuzz, - LibfuzzerMerge, - LibfuzzerCrashReport, - LibfuzzerTestInput, - LibfuzzerRegression, - Libfuzzer, - CrashReport, - Generator, - Analysis, - TestInput, Template, } @@ -68,23 +54,7 @@ pub async fn run(args: clap::ArgMatches) -> Result<()> { match command { #[cfg(any(target_os = "linux", target_os = "windows"))] Commands::Coverage => coverage::run(&sub_args, event_sender).await, - Commands::Radamsa => radamsa::run(&sub_args, event_sender).await, - Commands::LibfuzzerCrashReport => { - libfuzzer_crash_report::run(&sub_args, event_sender).await - } Commands::LibfuzzerFuzz => libfuzzer_fuzz::run(&sub_args, event_sender).await, - Commands::LibfuzzerMerge => libfuzzer_merge::run(&sub_args, event_sender).await, - Commands::LibfuzzerTestInput => { - libfuzzer_test_input::run(&sub_args, event_sender).await - } - Commands::LibfuzzerRegression => { - libfuzzer_regression::run(&sub_args, event_sender).await - } - Commands::Libfuzzer => libfuzzer::run(&sub_args, event_sender).await, - Commands::CrashReport => generic_crash_report::run(&sub_args, event_sender).await, - Commands::Generator => generic_generator::run(&sub_args, event_sender).await, - Commands::Analysis => generic_analysis::run(&sub_args, event_sender).await, - Commands::TestInput => test_input::run(&sub_args, event_sender).await, Commands::Template => { let config = sub_args .get_one::("config") @@ -140,17 +110,7 @@ pub fn args(name: &'static str) -> Command { let app = match subcommand { #[cfg(any(target_os = "linux", target_os = "windows"))] 
Commands::Coverage => coverage::args(subcommand.into()), - Commands::Radamsa => radamsa::args(subcommand.into()), - Commands::LibfuzzerCrashReport => libfuzzer_crash_report::args(subcommand.into()), Commands::LibfuzzerFuzz => libfuzzer_fuzz::args(subcommand.into()), - Commands::LibfuzzerMerge => libfuzzer_merge::args(subcommand.into()), - Commands::LibfuzzerTestInput => libfuzzer_test_input::args(subcommand.into()), - Commands::LibfuzzerRegression => libfuzzer_regression::args(subcommand.into()), - Commands::Libfuzzer => libfuzzer::args(subcommand.into()), - Commands::CrashReport => generic_crash_report::args(subcommand.into()), - Commands::Generator => generic_generator::args(subcommand.into()), - Commands::Analysis => generic_analysis::args(subcommand.into()), - Commands::TestInput => test_input::args(subcommand.into()), Commands::Template => Command::new("template") .about("uses the template to generate a run") .args(vec![Arg::new("config") diff --git a/src/agent/onefuzz-task/src/local/common.rs b/src/agent/onefuzz-task/src/local/common.rs index f8d7949e80..17940d799f 100644 --- a/src/agent/onefuzz-task/src/local/common.rs +++ b/src/agent/onefuzz-task/src/local/common.rs @@ -26,20 +26,10 @@ pub const INPUTS_DIR: &str = "inputs_dir"; pub const CRASHES_DIR: &str = "crashes_dir"; pub const CRASHDUMPS_DIR: &str = "crashdumps_dir"; pub const TARGET_WORKERS: &str = "target_workers"; -pub const REPORTS_DIR: &str = "reports_dir"; -pub const NO_REPRO_DIR: &str = "no_repro_dir"; pub const TARGET_TIMEOUT: &str = "target_timeout"; -pub const CHECK_RETRY_COUNT: &str = "check_retry_count"; -pub const DISABLE_CHECK_QUEUE: &str = "disable_check_queue"; -pub const UNIQUE_REPORTS_DIR: &str = "unique_reports_dir"; pub const COVERAGE_DIR: &str = "coverage_dir"; pub const READONLY_INPUTS: &str = "readonly_inputs_dir"; -pub const CHECK_ASAN_LOG: &str = "check_asan_log"; -pub const TOOLS_DIR: &str = "tools_dir"; -pub const RENAME_OUTPUT: &str = "rename_output"; pub const CHECK_FUZZER_HELP: &str = "check_fuzzer_help"; -pub const DISABLE_CHECK_DEBUGGER: &str = "disable_check_debugger"; -pub const REGRESSION_REPORTS_DIR: &str = "regression_reports_dir"; pub const TARGET_EXE: &str = "target_exe"; pub const TARGET_ENV: &str = "target_env"; @@ -47,17 +37,6 @@ pub const TARGET_OPTIONS: &str = "target_options"; // pub const SUPERVISOR_EXE: &str = "supervisor_exe"; // pub const SUPERVISOR_ENV: &str = "supervisor_env"; // pub const SUPERVISOR_OPTIONS: &str = "supervisor_options"; -pub const GENERATOR_EXE: &str = "generator_exe"; -pub const GENERATOR_ENV: &str = "generator_env"; -pub const GENERATOR_OPTIONS: &str = "generator_options"; - -pub const ANALYZER_EXE: &str = "analyzer_exe"; -pub const ANALYZER_OPTIONS: &str = "analyzer_options"; -pub const ANALYZER_ENV: &str = "analyzer_env"; -pub const ANALYSIS_DIR: &str = "analysis_dir"; -pub const ANALYSIS_INPUTS: &str = "analysis_inputs"; -pub const ANALYSIS_UNIQUE_INPUTS: &str = "analysis_unique_inputs"; -pub const PRESERVE_EXISTING_OUTPUTS: &str = "preserve_existing_outputs"; pub const CREATE_JOB_DIR: &str = "create_job_dir"; @@ -66,7 +45,6 @@ const WAIT_FOR_DIR_DELAY: Duration = Duration::from_secs(1); pub enum CmdType { Target, - Generator, // Supervisor, } @@ -90,7 +68,6 @@ pub fn get_cmd_exe(cmd_type: CmdType, args: &clap::ArgMatches) -> Result let name = match cmd_type { CmdType::Target => TARGET_EXE, // CmdType::Supervisor => SUPERVISOR_EXE, - CmdType::Generator => GENERATOR_EXE, }; args.get_one::(name) @@ -102,7 +79,6 @@ pub fn get_cmd_arg(cmd_type: CmdType, 
args: &clap::ArgMatches) -> Vec { let name = match cmd_type { CmdType::Target => TARGET_OPTIONS, // CmdType::Supervisor => SUPERVISOR_OPTIONS, - CmdType::Generator => GENERATOR_OPTIONS, }; args.get_many::(name) @@ -115,7 +91,6 @@ pub fn get_cmd_env(cmd_type: CmdType, args: &clap::ArgMatches) -> Result TARGET_ENV, // CmdType::Supervisor => SUPERVISOR_ENV, - CmdType::Generator => GENERATOR_ENV, }; get_hash_map(args, env_name) } @@ -265,6 +240,7 @@ pub async fn build_local_context( }, instance_telemetry_key: None, heartbeat_queue: None, + job_result_queue: None, microsoft_telemetry_key: None, logs: None, min_available_memory_mb: 0, diff --git a/src/agent/onefuzz-task/src/local/example_templates/libfuzzer_basic.yml b/src/agent/onefuzz-task/src/local/example_templates/libfuzzer_basic.yml index 7210893809..aba02c7991 100644 --- a/src/agent/onefuzz-task/src/local/example_templates/libfuzzer_basic.yml +++ b/src/agent/onefuzz-task/src/local/example_templates/libfuzzer_basic.yml @@ -5,28 +5,31 @@ # 2. Install llvm and export LLVM_SYMBOLIZER_PATH like we do in setup.sh +required_args: &required_args + target_exe: "REPLACE_ME" # The path to your target + inputs: &inputs "REPLACE_ME" # A folder containining your inputs + crashes: &crashes "REPLACE_ME" # The folder where you want the crashing inputs to be output + crashdumps: "REPLACE_ME" # The folder where you want the crash dumps to be output + coverage: "REPLACE_ME" # The folder where you want the code coverage to be output + regression_reports: "REPLACE_ME" # The folder where you want the regression reports to be output + target_args: &target_args + <<: *required_args target_env: {} - target_exe: "C:\\temp\\onefuzz\\integration\\windows-libfuzzer\\fuzz.exe" target_options: [] -inputs: &inputs "C:\\temp\\onefuzz\\integration\\windows-libfuzzer\\seeds" - tasks: - type: LibFuzzer <<: *target_args - inputs: *inputs - crashes: &crash "./crashes" readonly_inputs: [] check_fuzzer_help: true - - type: "Report" + - type: LibfuzzerRegression <<: *target_args - input_queue: *crash - crashes: *crash - reports: "./reports" - unique_reports: "./unique_reports" - no_repro: "./no_repro" + + - type: "LibfuzzerCrashReport" + <<: *target_args + input_queue: *crashes check_fuzzer_help: true - type: "Coverage" @@ -35,4 +38,11 @@ tasks: - "{input}" input_queue: *inputs readonly_inputs: [*inputs] - coverage: "./coverage" + + # The analysis task is optional in the libfuzzer_basic template + # - type: Analysis + # <<: *target_args + # analysis: "REPLACE_ME" # The folder where you want the analysis results to be output + # analyzer_exe: "REPLACE_ME" + # analyzer_options: [] + # analyzer_env: {} diff --git a/src/agent/onefuzz-task/src/local/generic_analysis.rs b/src/agent/onefuzz-task/src/local/generic_analysis.rs index 3d3e2fafc8..429e7b0e3b 100644 --- a/src/agent/onefuzz-task/src/local/generic_analysis.rs +++ b/src/agent/onefuzz-task/src/local/generic_analysis.rs @@ -3,139 +3,13 @@ use std::{collections::HashMap, path::PathBuf}; -use crate::{ - local::common::{ - build_local_context, get_cmd_arg, get_cmd_exe, get_hash_map, get_synced_dir, CmdType, - SyncCountDirMonitor, UiEvent, ANALYSIS_DIR, ANALYZER_ENV, ANALYZER_EXE, ANALYZER_OPTIONS, - CRASHES_DIR, NO_REPRO_DIR, REPORTS_DIR, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, TOOLS_DIR, - UNIQUE_REPORTS_DIR, - }, - tasks::{ - analysis::generic::{run as run_analysis, Config}, - config::CommonConfig, - }, -}; +use crate::tasks::config::CommonConfig; use anyhow::Result; use async_trait::async_trait; -use clap::{Arg, Command}; -use 
flume::Sender; use schemars::JsonSchema; -use storage_queue::QueueClient; use super::template::{RunContext, Template}; -pub fn build_analysis_config( - args: &clap::ArgMatches, - input_queue: Option, - common: CommonConfig, - event_sender: Option>, -) -> Result { - let target_exe = get_cmd_exe(CmdType::Target, args)?.into(); - let target_options = get_cmd_arg(CmdType::Target, args); - - let analyzer_exe = args - .get_one::(ANALYZER_EXE) - .cloned() - .ok_or_else(|| format_err!("expected {ANALYZER_EXE}"))?; - - let analyzer_options = args - .get_many::(ANALYZER_OPTIONS) - .unwrap_or_default() - .map(|x| x.to_string()) - .collect(); - - let analyzer_env = get_hash_map(args, ANALYZER_ENV)?; - let analysis = get_synced_dir(ANALYSIS_DIR, common.job_id, common.task_id, args)? - .monitor_count(&event_sender)?; - let tools = get_synced_dir(TOOLS_DIR, common.job_id, common.task_id, args)?; - let crashes = if input_queue.is_none() { - get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)? - } else { - None - }; - let reports = get_synced_dir(REPORTS_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)?; - let no_repro = get_synced_dir(NO_REPRO_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)?; - let unique_reports = get_synced_dir(UNIQUE_REPORTS_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)?; - - let config = Config { - analyzer_exe, - analyzer_options, - analyzer_env, - target_exe, - target_options, - input_queue, - crashes, - analysis, - tools: Some(tools), - reports, - unique_reports, - no_repro, - common, - }; - - Ok(config) -} - -pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { - let context = build_local_context(args, true, event_sender.clone()).await?; - let config = build_analysis_config(args, None, context.common_config.clone(), event_sender)?; - run_analysis(config).await -} - -pub fn build_shared_args(required_task: bool) -> Vec { - vec![ - Arg::new(TARGET_EXE).long(TARGET_EXE).required(true), - Arg::new(TARGET_ENV) - .long(TARGET_ENV) - .requires(TARGET_EXE) - .num_args(0..), - Arg::new(TARGET_OPTIONS) - .long(TARGET_OPTIONS) - .default_value("{input}") - .value_delimiter(' ') - .help("Use a quoted string with space separation to denote multiple arguments"), - Arg::new(CRASHES_DIR) - .long(CRASHES_DIR) - .value_parser(value_parser!(PathBuf)), - Arg::new(ANALYZER_OPTIONS) - .long(ANALYZER_OPTIONS) - .requires(ANALYZER_EXE) - .value_delimiter(' ') - .help("Use a quoted string with space separation to denote multiple arguments"), - Arg::new(ANALYZER_ENV) - .long(ANALYZER_ENV) - .requires(ANALYZER_EXE) - .num_args(0..), - Arg::new(TOOLS_DIR) - .long(TOOLS_DIR) - .value_parser(value_parser!(PathBuf)), - Arg::new(ANALYZER_EXE) - .long(ANALYZER_EXE) - .requires(ANALYSIS_DIR) - .requires(CRASHES_DIR) - .required(required_task), - Arg::new(ANALYSIS_DIR) - .long(ANALYSIS_DIR) - .requires(ANALYZER_EXE) - .requires(CRASHES_DIR) - .required(required_task), - ] -} - -pub fn args(name: &'static str) -> Command { - Command::new(name) - .about("execute a local-only generic analysis") - .args(&build_shared_args(true)) -} - #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct Analysis { analyzer_exe: String, @@ -146,7 +20,7 @@ pub struct Analysis { input_queue: Option, crashes: Option, analysis: PathBuf, - tools: PathBuf, + tools: Option, reports: Option, unique_reports: Option, no_repro: Option, @@ -175,9 
+49,10 @@ impl Template for Analysis { .and_then(|path| context.to_monitored_sync_dir("crashes", path).ok()), analysis: context.to_monitored_sync_dir("analysis", self.analysis.clone())?, - tools: context - .to_monitored_sync_dir("tools", self.tools.clone()) - .ok(), + tools: self + .tools + .as_ref() + .and_then(|path| context.to_monitored_sync_dir("tools", path).ok()), reports: self .reports diff --git a/src/agent/onefuzz-task/src/local/generic_crash_report.rs b/src/agent/onefuzz-task/src/local/generic_crash_report.rs index 6b0e2fccad..347a8cac76 100644 --- a/src/agent/onefuzz-task/src/local/generic_crash_report.rs +++ b/src/agent/onefuzz-task/src/local/generic_crash_report.rs @@ -3,150 +3,14 @@ use std::{collections::HashMap, path::PathBuf}; -use crate::{ - local::common::{ - build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir, CmdType, - SyncCountDirMonitor, UiEvent, CHECK_ASAN_LOG, CHECK_RETRY_COUNT, CRASHES_DIR, - DISABLE_CHECK_DEBUGGER, DISABLE_CHECK_QUEUE, NO_REPRO_DIR, REPORTS_DIR, TARGET_ENV, - TARGET_EXE, TARGET_OPTIONS, TARGET_TIMEOUT, UNIQUE_REPORTS_DIR, - }, - tasks::{ - config::CommonConfig, - report::generic::{Config, ReportTask}, - utils::default_bool_true, - }, -}; +use crate::tasks::{config::CommonConfig, utils::default_bool_true}; use anyhow::Result; use async_trait::async_trait; -use clap::{Arg, ArgAction, Command}; -use flume::Sender; use futures::future::OptionFuture; use schemars::JsonSchema; -use storage_queue::QueueClient; use super::template::{RunContext, Template}; -pub fn build_report_config( - args: &clap::ArgMatches, - input_queue: Option, - common: CommonConfig, - event_sender: Option>, -) -> Result { - let target_exe = get_cmd_exe(CmdType::Target, args)?.into(); - let target_env = get_cmd_env(CmdType::Target, args)?; - let target_options = get_cmd_arg(CmdType::Target, args); - - let crashes = Some(get_synced_dir( - CRASHES_DIR, - common.job_id, - common.task_id, - args, - )?) - .monitor_count(&event_sender)?; - let reports = get_synced_dir(REPORTS_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)?; - let no_repro = get_synced_dir(NO_REPRO_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)?; - - let unique_reports = Some(get_synced_dir( - UNIQUE_REPORTS_DIR, - common.job_id, - common.task_id, - args, - )?) 
- .monitor_count(&event_sender)?; - - let target_timeout = args.get_one::(TARGET_TIMEOUT).copied(); - - let check_retry_count = args - .get_one::(CHECK_RETRY_COUNT) - .copied() - .expect("has a default"); - - let check_queue = !args.get_flag(DISABLE_CHECK_QUEUE); - let check_asan_log = args.get_flag(CHECK_ASAN_LOG); - let check_debugger = !args.get_flag(DISABLE_CHECK_DEBUGGER); - - let config = Config { - target_exe, - target_env, - target_options, - target_timeout, - check_asan_log, - check_debugger, - check_retry_count, - check_queue, - crashes, - minimized_stack_depth: None, - input_queue, - no_repro, - reports, - unique_reports, - common, - }; - - Ok(config) -} - -pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { - let context = build_local_context(args, true, event_sender.clone()).await?; - let config = build_report_config(args, None, context.common_config.clone(), event_sender)?; - ReportTask::new(config).managed_run().await -} - -pub fn build_shared_args() -> Vec { - vec![ - Arg::new(TARGET_EXE).long(TARGET_EXE).required(true), - Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..), - Arg::new(TARGET_OPTIONS) - .default_value("{input}") - .long(TARGET_OPTIONS) - .value_delimiter(' ') - .help("Use a quoted string with space separation to denote multiple arguments"), - Arg::new(CRASHES_DIR) - .long(CRASHES_DIR) - .required(true) - .value_parser(value_parser!(PathBuf)), - Arg::new(REPORTS_DIR) - .long(REPORTS_DIR) - .required(false) - .value_parser(value_parser!(PathBuf)), - Arg::new(NO_REPRO_DIR) - .long(NO_REPRO_DIR) - .required(false) - .value_parser(value_parser!(PathBuf)), - Arg::new(UNIQUE_REPORTS_DIR) - .long(UNIQUE_REPORTS_DIR) - .value_parser(value_parser!(PathBuf)) - .required(true), - Arg::new(TARGET_TIMEOUT) - .long(TARGET_TIMEOUT) - .value_parser(value_parser!(u64)) - .default_value("30"), - Arg::new(CHECK_RETRY_COUNT) - .long(CHECK_RETRY_COUNT) - .value_parser(value_parser!(u64)) - .default_value("0"), - Arg::new(DISABLE_CHECK_QUEUE) - .action(ArgAction::SetTrue) - .long(DISABLE_CHECK_QUEUE), - Arg::new(CHECK_ASAN_LOG) - .action(ArgAction::SetTrue) - .long(CHECK_ASAN_LOG), - Arg::new(DISABLE_CHECK_DEBUGGER) - .action(ArgAction::SetTrue) - .long(DISABLE_CHECK_DEBUGGER), - ] -} - -pub fn args(name: &'static str) -> Command { - Command::new(name) - .about("execute a local-only generic crash report") - .args(&build_shared_args()) -} - #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct CrashReport { target_exe: PathBuf, diff --git a/src/agent/onefuzz-task/src/local/generic_generator.rs b/src/agent/onefuzz-task/src/local/generic_generator.rs index 823ba221d6..ae9f6a3cc6 100644 --- a/src/agent/onefuzz-task/src/local/generic_generator.rs +++ b/src/agent/onefuzz-task/src/local/generic_generator.rs @@ -3,154 +3,14 @@ use std::{collections::HashMap, path::PathBuf}; -use crate::{ - local::common::{ - build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir, - get_synced_dirs, CmdType, SyncCountDirMonitor, UiEvent, CHECK_ASAN_LOG, CHECK_RETRY_COUNT, - CRASHES_DIR, DISABLE_CHECK_DEBUGGER, GENERATOR_ENV, GENERATOR_EXE, GENERATOR_OPTIONS, - READONLY_INPUTS, RENAME_OUTPUT, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, TARGET_TIMEOUT, - TOOLS_DIR, - }, - tasks::{ - config::CommonConfig, - fuzz::generator::{Config, GeneratorTask}, - utils::default_bool_true, - }, -}; +use crate::tasks::{config::CommonConfig, utils::default_bool_true}; use anyhow::Result; use async_trait::async_trait; -use clap::{Arg, ArgAction, Command}; -use 
flume::Sender; use onefuzz::syncdir::SyncedDir; use schemars::JsonSchema; use super::template::{RunContext, Template}; -pub fn build_fuzz_config( - args: &clap::ArgMatches, - common: CommonConfig, - event_sender: Option>, -) -> Result { - let crashes = get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args)? - .monitor_count(&event_sender)?; - let target_exe = get_cmd_exe(CmdType::Target, args)?.into(); - let target_options = get_cmd_arg(CmdType::Target, args); - let target_env = get_cmd_env(CmdType::Target, args)?; - - let generator_exe = get_cmd_exe(CmdType::Generator, args)?; - let generator_options = get_cmd_arg(CmdType::Generator, args); - let generator_env = get_cmd_env(CmdType::Generator, args)?; - let readonly_inputs = get_synced_dirs(READONLY_INPUTS, common.job_id, common.task_id, args)? - .into_iter() - .map(|sd| sd.monitor_count(&event_sender)) - .collect::>>()?; - - let rename_output = args.get_flag(RENAME_OUTPUT); - let check_asan_log = args.get_flag(CHECK_ASAN_LOG); - let check_debugger = !args.get_flag(DISABLE_CHECK_DEBUGGER); - - let check_retry_count = args - .get_one::(CHECK_RETRY_COUNT) - .copied() - .expect("has a default"); - - let target_timeout = Some( - args.get_one::(TARGET_TIMEOUT) - .copied() - .expect("has a default"), - ); - - let tools = get_synced_dir(TOOLS_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)?; - - let ensemble_sync_delay = None; - - let config = Config { - generator_exe, - generator_env, - generator_options, - readonly_inputs, - crashes, - tools, - target_exe, - target_env, - target_options, - target_timeout, - check_asan_log, - check_debugger, - check_retry_count, - rename_output, - ensemble_sync_delay, - common, - }; - - Ok(config) -} - -pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { - let context = build_local_context(args, true, event_sender.clone()).await?; - let config = build_fuzz_config(args, context.common_config.clone(), event_sender)?; - GeneratorTask::new(config).run().await -} - -pub fn build_shared_args() -> Vec { - vec![ - Arg::new(TARGET_EXE).long(TARGET_EXE).required(true), - Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..), - Arg::new(TARGET_OPTIONS) - .default_value("{input}") - .long(TARGET_OPTIONS) - .value_delimiter(' ') - .help("Use a quoted string with space separation to denote multiple arguments"), - Arg::new(GENERATOR_EXE) - .long(GENERATOR_EXE) - .default_value("radamsa") - .required(true), - Arg::new(GENERATOR_ENV).long(GENERATOR_ENV).num_args(0..), - Arg::new(GENERATOR_OPTIONS) - .long(GENERATOR_OPTIONS) - .value_delimiter(' ') - .default_value("-H sha256 -o {generated_inputs}/input-%h.%s -n 100 -r {input_corpus}") - .help("Use a quoted string with space separation to denote multiple arguments"), - Arg::new(CRASHES_DIR) - .required(true) - .long(CRASHES_DIR) - .value_parser(value_parser!(PathBuf)), - Arg::new(READONLY_INPUTS) - .required(true) - .num_args(1..) 
- .value_parser(value_parser!(PathBuf)) - .long(READONLY_INPUTS), - Arg::new(TOOLS_DIR) - .long(TOOLS_DIR) - .value_parser(value_parser!(PathBuf)), - Arg::new(CHECK_RETRY_COUNT) - .long(CHECK_RETRY_COUNT) - .value_parser(value_parser!(u64)) - .default_value("0"), - Arg::new(CHECK_ASAN_LOG) - .action(ArgAction::SetTrue) - .long(CHECK_ASAN_LOG), - Arg::new(RENAME_OUTPUT) - .action(ArgAction::SetTrue) - .long(RENAME_OUTPUT), - Arg::new(TARGET_TIMEOUT) - .long(TARGET_TIMEOUT) - .value_parser(value_parser!(u64)) - .default_value("30"), - Arg::new(DISABLE_CHECK_DEBUGGER) - .action(ArgAction::SetTrue) - .long(DISABLE_CHECK_DEBUGGER), - ] -} - -pub fn args(name: &'static str) -> Command { - Command::new(name) - .about("execute a local-only generator fuzzing task") - .args(&build_shared_args()) -} - #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct Generator { generator_exe: String, diff --git a/src/agent/onefuzz-task/src/local/libfuzzer.rs b/src/agent/onefuzz-task/src/local/libfuzzer.rs index 56dff7dbe3..433636be1c 100644 --- a/src/agent/onefuzz-task/src/local/libfuzzer.rs +++ b/src/agent/onefuzz-task/src/local/libfuzzer.rs @@ -1,168 +1,19 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. -#[cfg(any(target_os = "linux", target_os = "windows"))] -use crate::{ - local::{common::COVERAGE_DIR, coverage, coverage::build_shared_args as build_coverage_args}, - tasks::coverage::generic::CoverageTask, -}; -use crate::{ - local::{ - common::{ - build_local_context, wait_for_dir, DirectoryMonitorQueue, UiEvent, ANALYZER_EXE, - REGRESSION_REPORTS_DIR, UNIQUE_REPORTS_DIR, - }, - generic_analysis::{build_analysis_config, build_shared_args as build_analysis_args}, - libfuzzer_crash_report::{build_report_config, build_shared_args as build_crash_args}, - libfuzzer_fuzz::{build_fuzz_config, build_shared_args as build_fuzz_args}, - libfuzzer_regression::{ - build_regression_config, build_shared_args as build_regression_args, - }, - }, - tasks::{ - analysis::generic::run as run_analysis, - config::CommonConfig, - fuzz::libfuzzer::{common::default_workers, generic::LibFuzzerFuzzTask}, - regression::libfuzzer::LibFuzzerRegressionTask, - report::libfuzzer_report::ReportTask, - utils::default_bool_true, - }, +use crate::tasks::{ + config::CommonConfig, + fuzz::libfuzzer::{common::default_workers, generic::LibFuzzerFuzzTask}, + utils::default_bool_true, }; use anyhow::Result; use async_trait::async_trait; -use clap::Command; -use flume::Sender; -use onefuzz::{syncdir::SyncedDir, utils::try_wait_all_join_handles}; +use onefuzz::syncdir::SyncedDir; use schemars::JsonSchema; -use std::{ - collections::{HashMap, HashSet}, - path::PathBuf, -}; -use tokio::task::spawn; -use uuid::Uuid; +use std::{collections::HashMap, path::PathBuf}; use super::template::{RunContext, Template}; -pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { - let context = build_local_context(args, true, event_sender.clone()).await?; - let fuzz_config = build_fuzz_config(args, context.common_config.clone(), event_sender.clone())?; - let crash_dir = fuzz_config - .crashes - .remote_url()? 
- .as_file_path() - .expect("invalid crash dir remote location"); - - let fuzzer = LibFuzzerFuzzTask::new(fuzz_config)?; - let mut task_handles = vec![]; - - let fuzz_task = spawn(async move { fuzzer.run().await }); - - wait_for_dir(&crash_dir).await?; - - task_handles.push(fuzz_task); - - if args.contains_id(UNIQUE_REPORTS_DIR) { - let crash_report_input_monitor = - DirectoryMonitorQueue::start_monitoring(crash_dir.clone()).await?; - - let report_config = build_report_config( - args, - Some(crash_report_input_monitor.queue_client), - CommonConfig { - task_id: Uuid::new_v4(), - ..context.common_config.clone() - }, - event_sender.clone(), - )?; - - let mut report = ReportTask::new(report_config); - let report_task = spawn(async move { report.managed_run().await }); - - task_handles.push(report_task); - task_handles.push(crash_report_input_monitor.handle); - } - - #[cfg(any(target_os = "linux", target_os = "windows"))] - if args.contains_id(COVERAGE_DIR) { - let coverage_input_monitor = - DirectoryMonitorQueue::start_monitoring(crash_dir.clone()).await?; - let coverage_config = coverage::build_coverage_config( - args, - true, - Some(coverage_input_monitor.queue_client), - CommonConfig { - task_id: Uuid::new_v4(), - ..context.common_config.clone() - }, - event_sender.clone(), - )?; - - let mut coverage = CoverageTask::new(coverage_config); - let coverage_task = spawn(async move { coverage.run().await }); - - task_handles.push(coverage_task); - task_handles.push(coverage_input_monitor.handle); - } - - if args.contains_id(ANALYZER_EXE) { - let analysis_input_monitor = DirectoryMonitorQueue::start_monitoring(crash_dir).await?; - let analysis_config = build_analysis_config( - args, - Some(analysis_input_monitor.queue_client), - CommonConfig { - task_id: Uuid::new_v4(), - ..context.common_config.clone() - }, - event_sender.clone(), - )?; - let analysis_task = spawn(async move { run_analysis(analysis_config).await }); - - task_handles.push(analysis_task); - task_handles.push(analysis_input_monitor.handle); - } - - if args.contains_id(REGRESSION_REPORTS_DIR) { - let regression_config = build_regression_config( - args, - CommonConfig { - task_id: Uuid::new_v4(), - ..context.common_config.clone() - }, - event_sender, - )?; - let regression = LibFuzzerRegressionTask::new(regression_config); - let regression_task = spawn(async move { regression.run().await }); - task_handles.push(regression_task); - } - - try_wait_all_join_handles(task_handles).await?; - - Ok(()) -} - -pub fn args(name: &'static str) -> Command { - let mut app = Command::new(name).about("run a local libfuzzer & crash reporting task"); - - let mut used = HashSet::new(); - - for args in &[ - build_fuzz_args(), - build_crash_args(), - build_analysis_args(false), - #[cfg(any(target_os = "linux", target_os = "windows"))] - build_coverage_args(true), - build_regression_args(false), - ] { - for arg in args { - if used.insert(arg.get_id()) { - app = app.arg(arg); - } - } - } - - app -} - #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct LibFuzzer { inputs: PathBuf, diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs b/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs index c1ab283575..04ba4f9225 100644 --- a/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs +++ b/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs @@ -3,139 +3,13 @@ use std::{collections::HashMap, path::PathBuf}; -use crate::{ - local::common::{ - build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, 
get_synced_dir, CmdType, - SyncCountDirMonitor, UiEvent, CHECK_FUZZER_HELP, CHECK_RETRY_COUNT, CRASHES_DIR, - DISABLE_CHECK_QUEUE, NO_REPRO_DIR, REPORTS_DIR, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, - TARGET_TIMEOUT, UNIQUE_REPORTS_DIR, - }, - tasks::{ - config::CommonConfig, - report::libfuzzer_report::{Config, ReportTask}, - utils::default_bool_true, - }, -}; +use crate::tasks::{config::CommonConfig, utils::default_bool_true}; use anyhow::Result; use async_trait::async_trait; -use clap::{Arg, ArgAction, Command}; -use flume::Sender; use futures::future::OptionFuture; use schemars::JsonSchema; -use storage_queue::QueueClient; use super::template::{RunContext, Template}; - -pub fn build_report_config( - args: &clap::ArgMatches, - input_queue: Option, - common: CommonConfig, - event_sender: Option>, -) -> Result { - let target_exe = get_cmd_exe(CmdType::Target, args)?.into(); - let target_env = get_cmd_env(CmdType::Target, args)?; - let target_options = get_cmd_arg(CmdType::Target, args); - - let crashes = get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)?; - let reports = get_synced_dir(REPORTS_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)?; - - let no_repro = get_synced_dir(NO_REPRO_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)?; - - let unique_reports = get_synced_dir(UNIQUE_REPORTS_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)?; - - let target_timeout = args.get_one::(TARGET_TIMEOUT).copied(); - - let check_retry_count = args - .get_one::(CHECK_RETRY_COUNT) - .copied() - .expect("has a default"); - - let check_queue = !args.get_flag(DISABLE_CHECK_QUEUE); - - let check_fuzzer_help = args.get_flag(CHECK_FUZZER_HELP); - - let crashes = if input_queue.is_none() { crashes } else { None }; - - let config = Config { - target_exe, - target_env, - target_options, - target_timeout, - check_retry_count, - check_fuzzer_help, - minimized_stack_depth: None, - input_queue, - check_queue, - crashes, - reports, - no_repro, - unique_reports, - common, - }; - - Ok(config) -} - -pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { - let context = build_local_context(args, true, event_sender.clone()).await?; - let config = build_report_config(args, None, context.common_config.clone(), event_sender)?; - ReportTask::new(config).managed_run().await -} - -pub fn build_shared_args() -> Vec { - vec![ - Arg::new(TARGET_EXE).long(TARGET_EXE).required(true), - Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..), - Arg::new(TARGET_OPTIONS) - .long(TARGET_OPTIONS) - .value_delimiter(' ') - .help("Use a quoted string with space separation to denote multiple arguments"), - Arg::new(CRASHES_DIR) - .long(CRASHES_DIR) - .required(true) - .value_parser(value_parser!(PathBuf)), - Arg::new(REPORTS_DIR) - .long(REPORTS_DIR) - .required(false) - .value_parser(value_parser!(PathBuf)), - Arg::new(NO_REPRO_DIR) - .long(NO_REPRO_DIR) - .required(false) - .value_parser(value_parser!(PathBuf)), - Arg::new(UNIQUE_REPORTS_DIR) - .long(UNIQUE_REPORTS_DIR) - .required(true) - .value_parser(value_parser!(PathBuf)), - Arg::new(TARGET_TIMEOUT) - .value_parser(value_parser!(u64)) - .long(TARGET_TIMEOUT), - Arg::new(CHECK_RETRY_COUNT) - .long(CHECK_RETRY_COUNT) - .value_parser(value_parser!(u64)) - .default_value("0"), - Arg::new(DISABLE_CHECK_QUEUE) - .action(ArgAction::SetTrue) - .long(DISABLE_CHECK_QUEUE), - Arg::new(CHECK_FUZZER_HELP) - 
.action(ArgAction::SetTrue) - .long(CHECK_FUZZER_HELP), - ] -} - -pub fn args(name: &'static str) -> Command { - Command::new(name) - .about("execute a local-only libfuzzer crash report task") - .args(&build_shared_args()) -} - #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct LibfuzzerCrashReport { target_exe: PathBuf, diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs b/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs index 69c9df820b..4b3e4ce58f 100644 --- a/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs +++ b/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs @@ -3,97 +3,15 @@ use std::{collections::HashMap, path::PathBuf}; -use crate::{ - local::common::{ - build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir, - get_synced_dirs, CmdType, SyncCountDirMonitor, UiEvent, ANALYSIS_INPUTS, - ANALYSIS_UNIQUE_INPUTS, CHECK_FUZZER_HELP, INPUTS_DIR, PRESERVE_EXISTING_OUTPUTS, - TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, - }, - tasks::{ - config::CommonConfig, - merge::libfuzzer_merge::{spawn, Config}, - utils::default_bool_true, - }, -}; +use crate::tasks::{config::CommonConfig, utils::default_bool_true}; use anyhow::Result; use async_trait::async_trait; -use clap::{Arg, ArgAction, Command}; -use flume::Sender; use futures::future::OptionFuture; use onefuzz::syncdir::SyncedDir; use schemars::JsonSchema; -use storage_queue::QueueClient; use super::template::{RunContext, Template}; -pub fn build_merge_config( - args: &clap::ArgMatches, - input_queue: Option, - common: CommonConfig, - event_sender: Option>, -) -> Result { - let target_exe = get_cmd_exe(CmdType::Target, args)?.into(); - let target_env = get_cmd_env(CmdType::Target, args)?; - let target_options = get_cmd_arg(CmdType::Target, args); - let check_fuzzer_help = args.get_flag(CHECK_FUZZER_HELP); - let inputs = get_synced_dirs(ANALYSIS_INPUTS, common.job_id, common.task_id, args)? - .into_iter() - .map(|sd| sd.monitor_count(&event_sender)) - .collect::>>()?; - let unique_inputs = - get_synced_dir(ANALYSIS_UNIQUE_INPUTS, common.job_id, common.task_id, args)? 
- .monitor_count(&event_sender)?; - let preserve_existing_outputs = args - .get_one::(PRESERVE_EXISTING_OUTPUTS) - .copied() - .unwrap_or_default(); - - let config = Config { - target_exe, - target_env, - target_options, - input_queue, - inputs, - unique_inputs, - preserve_existing_outputs, - check_fuzzer_help, - common, - }; - - Ok(config) -} - -pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { - let context = build_local_context(args, true, event_sender.clone()).await?; - let config = build_merge_config(args, None, context.common_config.clone(), event_sender)?; - spawn(config).await -} - -pub fn build_shared_args() -> Vec { - vec![ - Arg::new(TARGET_EXE).long(TARGET_EXE).required(true), - Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..), - Arg::new(TARGET_OPTIONS) - .long(TARGET_OPTIONS) - .value_delimiter(' ') - .help("Use a quoted string with space separation to denote multiple arguments"), - Arg::new(CHECK_FUZZER_HELP) - .action(ArgAction::SetTrue) - .long(CHECK_FUZZER_HELP), - Arg::new(INPUTS_DIR) - .long(INPUTS_DIR) - .value_parser(value_parser!(PathBuf)) - .num_args(0..), - ] -} - -pub fn args(name: &'static str) -> Command { - Command::new(name) - .about("execute a local-only libfuzzer crash report task") - .args(&build_shared_args()) -} - #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct LibfuzzerMerge { target_exe: PathBuf, diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs b/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs index 501d2385e2..3fbb9f0bd6 100644 --- a/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs +++ b/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs @@ -3,145 +3,13 @@ use std::{collections::HashMap, path::PathBuf}; -use crate::{ - local::common::{ - build_local_context, get_cmd_arg, get_cmd_env, get_cmd_exe, get_synced_dir, CmdType, - SyncCountDirMonitor, UiEvent, CHECK_FUZZER_HELP, CHECK_RETRY_COUNT, COVERAGE_DIR, - CRASHES_DIR, NO_REPRO_DIR, REGRESSION_REPORTS_DIR, REPORTS_DIR, TARGET_ENV, TARGET_EXE, - TARGET_OPTIONS, TARGET_TIMEOUT, UNIQUE_REPORTS_DIR, - }, - tasks::{ - config::CommonConfig, - regression::libfuzzer::{Config, LibFuzzerRegressionTask}, - utils::default_bool_true, - }, -}; +use crate::tasks::{config::CommonConfig, utils::default_bool_true}; use anyhow::Result; use async_trait::async_trait; -use clap::{Arg, ArgAction, Command}; -use flume::Sender; use schemars::JsonSchema; use super::template::{RunContext, Template}; -const REPORT_NAMES: &str = "report_names"; - -pub fn build_regression_config( - args: &clap::ArgMatches, - common: CommonConfig, - event_sender: Option>, -) -> Result { - let target_exe = get_cmd_exe(CmdType::Target, args)?.into(); - let target_env = get_cmd_env(CmdType::Target, args)?; - let target_options = get_cmd_arg(CmdType::Target, args); - let target_timeout = args.get_one::(TARGET_TIMEOUT).copied(); - let crashes = get_synced_dir(CRASHES_DIR, common.job_id, common.task_id, args)? - .monitor_count(&event_sender)?; - let regression_reports = - get_synced_dir(REGRESSION_REPORTS_DIR, common.job_id, common.task_id, args)? 
- .monitor_count(&event_sender)?; - let check_retry_count = args - .get_one::(CHECK_RETRY_COUNT) - .copied() - .expect("has a default value"); - - let reports = get_synced_dir(REPORTS_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)?; - let no_repro = get_synced_dir(NO_REPRO_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)?; - let unique_reports = get_synced_dir(UNIQUE_REPORTS_DIR, common.job_id, common.task_id, args) - .ok() - .monitor_count(&event_sender)?; - - let report_list: Option> = args - .get_many::(REPORT_NAMES) - .map(|x| x.cloned().collect()); - - let check_fuzzer_help = args.get_flag(CHECK_FUZZER_HELP); - - let config = Config { - target_exe, - target_env, - target_options, - target_timeout, - check_fuzzer_help, - check_retry_count, - crashes, - regression_reports, - reports, - no_repro, - unique_reports, - readonly_inputs: None, - report_list, - minimized_stack_depth: None, - common, - }; - Ok(config) -} - -pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { - let context = build_local_context(args, true, event_sender.clone()).await?; - let config = build_regression_config(args, context.common_config.clone(), event_sender)?; - LibFuzzerRegressionTask::new(config).run().await -} - -pub fn build_shared_args(local_job: bool) -> Vec { - let mut args = vec![ - Arg::new(TARGET_EXE).long(TARGET_EXE).required(true), - Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..), - Arg::new(TARGET_OPTIONS) - .long(TARGET_OPTIONS) - .value_delimiter(' ') - .help("Use a quoted string with space separation to denote multiple arguments"), - Arg::new(COVERAGE_DIR) - .required(!local_job) - .long(COVERAGE_DIR) - .value_parser(value_parser!(PathBuf)), - Arg::new(CHECK_FUZZER_HELP) - .action(ArgAction::SetTrue) - .long(CHECK_FUZZER_HELP), - Arg::new(TARGET_TIMEOUT) - .long(TARGET_TIMEOUT) - .value_parser(value_parser!(u64)), - Arg::new(CRASHES_DIR) - .long(CRASHES_DIR) - .required(true) - .value_parser(value_parser!(PathBuf)), - Arg::new(REGRESSION_REPORTS_DIR) - .long(REGRESSION_REPORTS_DIR) - .required(local_job) - .value_parser(value_parser!(PathBuf)), - Arg::new(REPORTS_DIR) - .long(REPORTS_DIR) - .required(false) - .value_parser(value_parser!(PathBuf)), - Arg::new(NO_REPRO_DIR) - .long(NO_REPRO_DIR) - .required(false) - .value_parser(value_parser!(PathBuf)), - Arg::new(UNIQUE_REPORTS_DIR) - .long(UNIQUE_REPORTS_DIR) - .value_parser(value_parser!(PathBuf)) - .required(true), - Arg::new(CHECK_RETRY_COUNT) - .long(CHECK_RETRY_COUNT) - .value_parser(value_parser!(u64)) - .default_value("0"), - ]; - if local_job { - args.push(Arg::new(REPORT_NAMES).long(REPORT_NAMES).num_args(0..)) - } - args -} - -pub fn args(name: &'static str) -> Command { - Command::new(name) - .about("execute a local-only libfuzzer regression task") - .args(&build_shared_args(true)) -} - #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct LibfuzzerRegression { target_exe: PathBuf, diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs b/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs index 9c6f16094e..5bef2347f7 100644 --- a/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs +++ b/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs @@ -1,97 +1,14 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. 
-use crate::{ - local::common::{ - build_local_context, get_cmd_arg, get_cmd_env, CmdType, UiEvent, CHECK_RETRY_COUNT, - TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, TARGET_TIMEOUT, - }, - tasks::report::libfuzzer_report::{test_input, TestInputArgs}, -}; use anyhow::Result; use async_trait::async_trait; -use clap::{Arg, Command}; -use flume::Sender; use onefuzz::machine_id::MachineIdentity; use schemars::JsonSchema; use std::{collections::HashMap, path::PathBuf}; use super::template::{RunContext, Template}; -pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { - let context = build_local_context(args, true, event_sender).await?; - - let target_exe = args - .get_one::(TARGET_EXE) - .expect("marked as required"); - let target_env = get_cmd_env(CmdType::Target, args)?; - let target_options = get_cmd_arg(CmdType::Target, args); - let input = args - .get_one::("input") - .expect("marked as required"); - let target_timeout = args.get_one::(TARGET_TIMEOUT).copied(); - let check_retry_count = args - .get_one::(CHECK_RETRY_COUNT) - .copied() - .expect("has a default value"); - - let extra_setup_dir = context.common_config.extra_setup_dir.as_deref(); - let extra_output_dir = context - .common_config - .extra_output - .as_ref() - .map(|x| x.local_path.as_path()); - - let config = TestInputArgs { - target_exe: target_exe.as_path(), - target_env: &target_env, - target_options: &target_options, - input_url: None, - input: input.as_path(), - job_id: context.common_config.job_id, - task_id: context.common_config.task_id, - target_timeout, - check_retry_count, - setup_dir: &context.common_config.setup_dir, - extra_setup_dir, - extra_output_dir, - minimized_stack_depth: None, - machine_identity: context.common_config.machine_identity, - }; - - let result = test_input(config).await?; - println!("{}", serde_json::to_string_pretty(&result)?); - Ok(()) -} - -pub fn build_shared_args() -> Vec { - vec![ - Arg::new(TARGET_EXE).required(true), - Arg::new("input") - .required(true) - .value_parser(value_parser!(PathBuf)), - Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..), - Arg::new(TARGET_OPTIONS) - .default_value("{input}") - .long(TARGET_OPTIONS) - .value_delimiter(' ') - .help("Use a quoted string with space separation to denote multiple arguments"), - Arg::new(TARGET_TIMEOUT) - .long(TARGET_TIMEOUT) - .value_parser(value_parser!(u64)), - Arg::new(CHECK_RETRY_COUNT) - .long(CHECK_RETRY_COUNT) - .value_parser(value_parser!(u64)) - .default_value("0"), - ] -} - -pub fn args(name: &'static str) -> Command { - Command::new(name) - .about("test a libfuzzer application with a specific input") - .args(&build_shared_args()) -} - #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct LibfuzzerTestInput { input: PathBuf, diff --git a/src/agent/onefuzz-task/src/local/mod.rs b/src/agent/onefuzz-task/src/local/mod.rs index 03d394bcdb..385ff8ffcd 100644 --- a/src/agent/onefuzz-task/src/local/mod.rs +++ b/src/agent/onefuzz-task/src/local/mod.rs @@ -14,7 +14,6 @@ pub mod libfuzzer_fuzz; pub mod libfuzzer_merge; pub mod libfuzzer_regression; pub mod libfuzzer_test_input; -pub mod radamsa; pub mod template; pub mod test_input; pub mod tui; diff --git a/src/agent/onefuzz-task/src/local/radamsa.rs b/src/agent/onefuzz-task/src/local/radamsa.rs deleted file mode 100644 index 4d84de027a..0000000000 --- a/src/agent/onefuzz-task/src/local/radamsa.rs +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT License. 
- -use crate::{ - local::{ - common::{build_local_context, DirectoryMonitorQueue, UiEvent}, - generic_crash_report::{build_report_config, build_shared_args as build_crash_args}, - generic_generator::{build_fuzz_config, build_shared_args as build_fuzz_args}, - }, - tasks::{config::CommonConfig, fuzz::generator::GeneratorTask, report::generic::ReportTask}, -}; -use anyhow::{Context, Result}; -use clap::Command; -use flume::Sender; -use onefuzz::utils::try_wait_all_join_handles; -use std::collections::HashSet; -use tokio::task::spawn; -use uuid::Uuid; - -pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { - let context = build_local_context(args, true, event_sender.clone()).await?; - let fuzz_config = build_fuzz_config(args, context.common_config.clone(), event_sender.clone())?; - let crash_dir = fuzz_config - .crashes - .remote_url()? - .as_file_path() - .ok_or_else(|| format_err!("invalid crash directory"))?; - - tokio::fs::create_dir_all(&crash_dir) - .await - .with_context(|| { - format!( - "unable to create crashes directory: {}", - crash_dir.display() - ) - })?; - - let fuzzer = GeneratorTask::new(fuzz_config); - let fuzz_task = spawn(async move { fuzzer.run().await }); - - let crash_report_input_monitor = DirectoryMonitorQueue::start_monitoring(crash_dir) - .await - .context("directory monitor failed")?; - let report_config = build_report_config( - args, - Some(crash_report_input_monitor.queue_client), - CommonConfig { - task_id: Uuid::new_v4(), - ..context.common_config.clone() - }, - event_sender, - )?; - let report_task = spawn(async move { ReportTask::new(report_config).managed_run().await }); - - try_wait_all_join_handles(vec![ - fuzz_task, - report_task, - crash_report_input_monitor.handle, - ]) - .await?; - - Ok(()) -} - -pub fn args(name: &'static str) -> Command { - let mut app = Command::new(name).about("run a local generator & crash reporting job"); - - let mut used = HashSet::new(); - for args in &[build_fuzz_args(), build_crash_args()] { - for arg in args { - if used.insert(arg.get_id()) { - app = app.arg(arg); - } - } - } - - app -} diff --git a/src/agent/onefuzz-task/src/local/schema.json b/src/agent/onefuzz-task/src/local/schema.json index 0a1f128e67..e5b00f6e17 100644 --- a/src/agent/onefuzz-task/src/local/schema.json +++ b/src/agent/onefuzz-task/src/local/schema.json @@ -126,7 +126,6 @@ "analyzer_options", "target_exe", "target_options", - "tools", "type" ], "properties": { @@ -182,7 +181,10 @@ } }, "tools": { - "type": "string" + "type": [ + "string", + "null" + ] }, "type": { "type": "string", @@ -893,4 +895,4 @@ ] } } -} +} \ No newline at end of file diff --git a/src/agent/onefuzz-task/src/local/template.rs b/src/agent/onefuzz-task/src/local/template.rs index b2e0c425ff..73ae6e5e48 100644 --- a/src/agent/onefuzz-task/src/local/template.rs +++ b/src/agent/onefuzz-task/src/local/template.rs @@ -196,6 +196,7 @@ pub async fn launch( job_id: Uuid::new_v4(), instance_id: Uuid::new_v4(), heartbeat_queue: None, + job_result_queue: None, instance_telemetry_key: None, microsoft_telemetry_key: None, logs: None, @@ -241,12 +242,10 @@ mod test { .expect("Couldn't find checked-in schema.json") .replace("\r\n", "\n"); - println!("{}", schema_str); - - assert_eq!( - schema_str.replace('\n', ""), - checked_in_schema.replace('\n', ""), - "The checked-in local fuzzing schema did not match the generated schema." 
- ); + if schema_str.replace('\n', "") != checked_in_schema.replace('\n', "") { + std::fs::write("src/local/new.schema.json", schema_str) + .expect("The schemas did not match but failed to write new schema to file."); + panic!("The checked-in local fuzzing schema did not match the generated schema. The generated schema can be found at src/local/new.schema.json"); + } } } diff --git a/src/agent/onefuzz-task/src/local/test_input.rs b/src/agent/onefuzz-task/src/local/test_input.rs index 4077bd08f8..b8027a7f41 100644 --- a/src/agent/onefuzz-task/src/local/test_input.rs +++ b/src/agent/onefuzz-task/src/local/test_input.rs @@ -1,18 +1,8 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. -use crate::{ - local::common::{ - build_local_context, get_cmd_arg, get_cmd_env, CmdType, UiEvent, CHECK_ASAN_LOG, - CHECK_RETRY_COUNT, DISABLE_CHECK_DEBUGGER, TARGET_ENV, TARGET_EXE, TARGET_OPTIONS, - TARGET_TIMEOUT, - }, - tasks::report::generic::{test_input, TestInputArgs}, -}; use anyhow::Result; use async_trait::async_trait; -use clap::{Arg, ArgAction, Command}; -use flume::Sender; use onefuzz::machine_id::MachineIdentity; use schemars::JsonSchema; use std::{collections::HashMap, path::PathBuf}; @@ -20,82 +10,6 @@ use uuid::Uuid; use super::template::{RunContext, Template}; -pub async fn run(args: &clap::ArgMatches, event_sender: Option>) -> Result<()> { - let context = build_local_context(args, false, event_sender).await?; - - let target_exe = args - .get_one::(TARGET_EXE) - .expect("is marked required"); - let target_env = get_cmd_env(CmdType::Target, args)?; - let target_options = get_cmd_arg(CmdType::Target, args); - let input = args - .get_one::("input") - .expect("is marked required"); - let target_timeout = args.get_one::(TARGET_TIMEOUT).copied(); - let check_retry_count = args - .get_one::(CHECK_RETRY_COUNT) - .copied() - .expect("has default value"); - let check_asan_log = args.get_flag(CHECK_ASAN_LOG); - let check_debugger = !args.get_flag(DISABLE_CHECK_DEBUGGER); - - let config = TestInputArgs { - target_exe: target_exe.as_path(), - target_env: &target_env, - target_options: &target_options, - input_url: None, - input: input.as_path(), - job_id: context.common_config.job_id, - task_id: context.common_config.task_id, - target_timeout, - check_retry_count, - setup_dir: &context.common_config.setup_dir, - extra_setup_dir: context.common_config.extra_setup_dir.as_deref(), - minimized_stack_depth: None, - check_asan_log, - check_debugger, - machine_identity: context.common_config.machine_identity.clone(), - }; - - let result = test_input(config).await?; - println!("{}", serde_json::to_string_pretty(&result)?); - Ok(()) -} - -pub fn build_shared_args() -> Vec { - vec![ - Arg::new(TARGET_EXE).required(true), - Arg::new("input") - .required(true) - .value_parser(value_parser!(PathBuf)), - Arg::new(TARGET_ENV).long(TARGET_ENV).num_args(0..), - Arg::new(TARGET_OPTIONS) - .default_value("{input}") - .long(TARGET_OPTIONS) - .value_delimiter(' ') - .help("Use a quoted string with space separation to denote multiple arguments"), - Arg::new(TARGET_TIMEOUT) - .long(TARGET_TIMEOUT) - .value_parser(value_parser!(u64)), - Arg::new(CHECK_RETRY_COUNT) - .long(CHECK_RETRY_COUNT) - .value_parser(value_parser!(u64)) - .default_value("0"), - Arg::new(CHECK_ASAN_LOG) - .action(ArgAction::SetTrue) - .long(CHECK_ASAN_LOG), - Arg::new(DISABLE_CHECK_DEBUGGER) - .action(ArgAction::SetTrue) - .long("disable_check_debugger"), - ] -} - -pub fn args(name: &'static str) -> Command { - Command::new(name) - 
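The template.rs test change above replaces a hard-to-read assert_eq dump with a snapshot-style check: regenerate the schema, and on mismatch write the fresh copy to src/local/new.schema.json so it can be diffed against, or promoted over, the checked-in schema.json. The same pattern as a reusable helper (a sketch, not part of this patch):

use std::path::Path;

// Compare two serialized snapshots, ignoring newlines the same way the test
// does, and persist the regenerated copy on mismatch for easy inspection.
fn assert_snapshot(generated: &str, checked_in: &str, out: &Path) {
    if generated.replace('\n', "") != checked_in.replace('\n', "") {
        std::fs::write(out, generated)
            .expect("snapshot mismatch, and writing the new snapshot also failed");
        panic!("snapshot drift detected; review {}", out.display());
    }
}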
.about("test an application with a specific input") - .args(&build_shared_args()) -} - #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct TestInput { input: PathBuf, diff --git a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs index 3ba068a614..05c6c3d169 100644 --- a/src/agent/onefuzz-task/src/tasks/analysis/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/analysis/generic.rs @@ -65,6 +65,8 @@ pub async fn run(config: Config) -> Result<()> { tools.init_pull().await?; } + let job_result_client = config.common.init_job_result().await?; + // the tempdir is always created, however, the reports_path and // reports_monitor_future are only created if we have one of the three // report SyncedDir. The idea is that the option for where to write reports @@ -88,6 +90,7 @@ pub async fn run(config: Config) -> Result<()> { &config.unique_reports, &config.reports, &config.no_repro, + &job_result_client, ); ( Some(reports_dir.path().to_path_buf()), @@ -171,7 +174,7 @@ async fn poll_inputs( } message.delete().await?; } else { - warn!("no new candidate inputs found, sleeping"); + debug!("no new candidate inputs found, sleeping"); delay_with_jitter(EMPTY_QUEUE_DELAY).await; } } diff --git a/src/agent/onefuzz-task/src/tasks/config.rs b/src/agent/onefuzz-task/src/tasks/config.rs index 0848379d73..e29e0fd60d 100644 --- a/src/agent/onefuzz-task/src/tasks/config.rs +++ b/src/agent/onefuzz-task/src/tasks/config.rs @@ -14,6 +14,7 @@ use onefuzz::{ machine_id::MachineIdentity, syncdir::{SyncOperation, SyncedDir}, }; +use onefuzz_result::job_result::{init_job_result, TaskJobResultClient}; use onefuzz_telemetry::{ self as telemetry, Event::task_start, EventData, InstanceTelemetryKey, MicrosoftTelemetryKey, Role, @@ -50,6 +51,8 @@ pub struct CommonConfig { pub heartbeat_queue: Option, + pub job_result_queue: Option, + pub instance_telemetry_key: Option, pub microsoft_telemetry_key: Option, @@ -103,6 +106,23 @@ impl CommonConfig { None => Ok(None), } } + + pub async fn init_job_result(&self) -> Result> { + match &self.job_result_queue { + Some(url) => { + let result = init_job_result( + url.clone(), + self.task_id, + self.job_id, + self.machine_identity.machine_id, + self.machine_identity.machine_name.clone(), + ) + .await?; + Ok(Some(result)) + } + None => Ok(None), + } + } } #[derive(Debug, Deserialize)] diff --git a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs index b112cfefbe..4fde9efb31 100644 --- a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs @@ -26,6 +26,8 @@ use onefuzz_file_format::coverage::{ binary::{v1::BinaryCoverageJson as BinaryCoverageJsonV1, BinaryCoverageJson}, source::{v1::SourceCoverageJson as SourceCoverageJsonV1, SourceCoverageJson}, }; +use onefuzz_result::job_result::JobResultData; +use onefuzz_result::job_result::{JobResultSender, TaskJobResultClient}; use onefuzz_telemetry::{event, warn, Event::coverage_data, Event::coverage_failed, EventData}; use storage_queue::{Message, QueueClient}; use tokio::fs; @@ -114,7 +116,7 @@ impl CoverageTask { let allowlist = self.load_target_allowlist().await?; let heartbeat = self.config.common.init_heartbeat(None).await?; - + let job_result = self.config.common.init_job_result().await?; let mut seen_inputs = false; let target_exe_path = @@ -129,6 +131,7 @@ impl CoverageTask { coverage, allowlist, heartbeat, + job_result, target_exe.to_string(), )?; @@ 
-219,6 +222,7 @@ struct TaskContext<'a> { module_allowlist: AllowList, source_allowlist: Arc, heartbeat: Option, + job_result: Option, cache: Arc, } @@ -228,6 +232,7 @@ impl<'a> TaskContext<'a> { coverage: BinaryCoverage, allowlist: TargetAllowList, heartbeat: Option, + job_result: Option, target_exe: String, ) -> Result { let cache = DebugInfoCache::new(allowlist.source_files.clone()); @@ -247,6 +252,7 @@ impl<'a> TaskContext<'a> { module_allowlist: allowlist.modules, source_allowlist: Arc::new(allowlist.source_files), heartbeat, + job_result, cache: Arc::new(cache), }) } @@ -455,7 +461,16 @@ impl<'a> TaskContext<'a> { let s = CoverageStats::new(&coverage); event!(coverage_data; Covered = s.covered, Features = s.features, Rate = s.rate); metric!(coverage_data; 1.0; Covered = s.covered, Features = s.features, Rate = s.rate); - + self.job_result + .send_direct( + JobResultData::CoverageData, + HashMap::from([ + ("covered".to_string(), s.covered as f64), + ("features".to_string(), s.features as f64), + ("rate".to_string(), s.rate), + ]), + ) + .await; Ok(()) } diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs index d9116a1ed2..bd7511cac2 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/generator.rs @@ -73,6 +73,7 @@ impl GeneratorTask { } let hb_client = self.config.common.init_heartbeat(None).await?; + let jr_client = self.config.common.init_job_result().await?; for dir in &self.config.readonly_inputs { dir.init_pull().await?; @@ -84,7 +85,10 @@ impl GeneratorTask { self.config.ensemble_sync_delay, ); - let crash_dir_monitor = self.config.crashes.monitor_results(new_result, false); + let crash_dir_monitor = self + .config + .crashes + .monitor_results(new_result, false, &jr_client); let fuzzer = self.fuzzing_loop(hb_client); @@ -298,6 +302,7 @@ mod tests { task_id: Default::default(), instance_id: Default::default(), heartbeat_queue: Default::default(), + job_result_queue: Default::default(), instance_telemetry_key: Default::default(), microsoft_telemetry_key: Default::default(), logs: Default::default(), diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs b/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs index 4f8c67ae8e..bfd9f3f5cc 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs @@ -1,7 +1,11 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. -use crate::tasks::{config::CommonConfig, heartbeat::HeartbeatSender, utils::default_bool_true}; +use crate::tasks::{ + config::CommonConfig, + heartbeat::{HeartbeatSender, TaskHeartbeatClient}, + utils::default_bool_true, +}; use anyhow::{Context, Result}; use arraydeque::{ArrayDeque, Wrapping}; use async_trait::async_trait; @@ -12,6 +16,7 @@ use onefuzz::{ process::ExitStatus, syncdir::{continuous_sync, SyncOperation::Pull, SyncedDir}, }; +use onefuzz_result::job_result::{JobResultData, JobResultSender, TaskJobResultClient}; use onefuzz_telemetry::{ Event::{new_coverage, new_crashdump, new_result, runtime_stats}, EventData, @@ -126,21 +131,31 @@ where self.verify().await?; let hb_client = self.config.common.init_heartbeat(None).await?; + let jr_client = self.config.common.init_job_result().await?; // To be scheduled. 
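CommonConfig::init_job_result, added in config.rs above, is the single entry point for the new job-result plumbing, and the coverage and generator hunks show its intended use: initialize once at task start, keep the Option, and report best-effort alongside the existing event!/metric! telemetry. The calling pattern, as a sketch that assumes the onefuzz-task crate context (CommonConfig, JobResultData, and send_direct are items introduced by this patch series, not a public API):

use std::collections::HashMap;

async fn report_once(common: &CommonConfig) -> anyhow::Result<()> {
    // Returns Ok(None) when no job_result_queue URL is configured, so local
    // runs and older deployments skip reporting entirely.
    let jr_client = common.init_job_result().await?;
    if let Some(jr) = &jr_client {
        // Payloads are flat String -> f64 maps mirroring the metric fields;
        // failures are deliberately discarded, matching the call sites above.
        let _ = jr
            .send_direct(
                JobResultData::CoverageData,
                HashMap::from([("rate".to_string(), 0.5)]),
            )
            .await;
    }
    Ok(())
}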
let resync = self.continuous_sync_inputs(); - let new_inputs = self.config.inputs.monitor_results(new_coverage, true); - let new_crashes = self.config.crashes.monitor_results(new_result, true); + + let new_inputs = self + .config + .inputs + .monitor_results(new_coverage, true, &jr_client); + let new_crashes = self + .config + .crashes + .monitor_results(new_result, true, &jr_client); let new_crashdumps = async { if let Some(crashdumps) = &self.config.crashdumps { - crashdumps.monitor_results(new_crashdump, true).await + crashdumps + .monitor_results(new_crashdump, true, &jr_client) + .await } else { Ok(()) } }; let (stats_sender, stats_receiver) = mpsc::unbounded_channel(); - let report_stats = report_runtime_stats(stats_receiver, hb_client); + let report_stats = report_runtime_stats(stats_receiver, &hb_client, &jr_client); let fuzzers = self.run_fuzzers(Some(&stats_sender)); futures::try_join!( resync, @@ -183,7 +198,7 @@ where .inputs .local_path .parent() - .ok_or_else(|| anyhow!("Invalid input path"))?; + .ok_or_else(|| anyhow!("invalid input path"))?; let temp_path = task_dir.join(".temp"); tokio::fs::create_dir_all(&temp_path).await?; let temp_dir = tempdir_in(temp_path)?; @@ -501,7 +516,7 @@ impl TotalStats { self.execs_sec = self.worker_stats.values().map(|x| x.execs_sec).sum(); } - fn report(&self) { + async fn report(&self, jr_client: &Option) { event!( runtime_stats; EventData::Count = self.count, @@ -513,6 +528,17 @@ impl TotalStats { EventData::Count = self.count, EventData::ExecsSecond = self.execs_sec ); + if let Some(jr_client) = jr_client { + let _ = jr_client + .send_direct( + JobResultData::RuntimeStats, + HashMap::from([ + ("total_count".to_string(), self.count as f64), + ("execs_sec".to_string(), self.execs_sec), + ]), + ) + .await; + } } } @@ -542,7 +568,8 @@ impl Timer { // are approximating nearest-neighbor interpolation on the runtime stats time series. async fn report_runtime_stats( mut stats_channel: mpsc::UnboundedReceiver, - heartbeat_client: impl HeartbeatSender, + heartbeat_client: &Option, + jr_client: &Option, ) -> Result<()> { // Cache the last-reported stats for a given worker. 
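One detail of the libfuzzer common.rs hunk above is easy to miss: the input, crash, and crashdump monitors and the stats reporter all borrow the same Option<TaskJobResultClient> by reference. That compiles because the futures are never spawned; they are polled together in one futures::try_join! in the same scope, so non-'static borrows of jr_client are fine. Schematic shape, not compilable standalone:

let jr_client = self.config.common.init_job_result().await?;
futures::try_join!(
    self.config.inputs.monitor_results(new_coverage, true, &jr_client),
    self.config.crashes.monitor_results(new_result, true, &jr_client),
    report_runtime_stats(stats_receiver, &hb_client, &jr_client),
)?;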
// @@ -551,7 +578,7 @@ async fn report_runtime_stats( let mut total = TotalStats::default(); // report all zeros to start - total.report(); + total.report(jr_client).await; let timer = Timer::new(RUNTIME_STATS_PERIOD); @@ -560,10 +587,10 @@ async fn report_runtime_stats( Some(stats) = stats_channel.recv() => { heartbeat_client.alive(); total.update(stats); - total.report() + total.report(jr_client).await } _ = timer.wait() => { - total.report() + total.report(jr_client).await } } } diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs index de1e1106ba..3f00e20b8d 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/supervisor.rs @@ -79,7 +79,10 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> { remote_path: config.crashes.remote_path.clone(), }; crashes.init().await?; - let monitor_crashes = crashes.monitor_results(new_result, false); + + let jr_client = config.common.init_job_result().await?; + + let monitor_crashes = crashes.monitor_results(new_result, false, &jr_client); // setup crashdumps let (crashdump_dir, monitor_crashdumps) = { @@ -95,9 +98,12 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> { }; let monitor_dir = crashdump_dir.clone(); + let monitor_jr_client = config.common.init_job_result().await?; let monitor_crashdumps = async move { if let Some(crashdumps) = monitor_dir { - crashdumps.monitor_results(new_crashdump, false).await + crashdumps + .monitor_results(new_crashdump, false, &monitor_jr_client) + .await } else { Ok(()) } @@ -129,11 +135,13 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> { if let Some(no_repro) = &config.no_repro { no_repro.init().await?; } + let monitor_reports_future = monitor_reports( reports_dir.path(), &config.unique_reports, &config.reports, &config.no_repro, + &jr_client, ); let inputs = SyncedDir { @@ -156,7 +164,7 @@ pub async fn spawn(config: SupervisorConfig) -> Result<(), Error> { delay_with_jitter(delay).await; } } - let monitor_inputs = inputs.monitor_results(new_coverage, false); + let monitor_inputs = inputs.monitor_results(new_coverage, false, &jr_client); let inputs_sync_cancellation = CancellationToken::new(); // never actually cancelled let inputs_sync_task = inputs.continuous_sync(Pull, config.ensemble_sync_delay, &inputs_sync_cancellation); @@ -444,6 +452,7 @@ mod tests { task_id: Default::default(), instance_id: Default::default(), heartbeat_queue: Default::default(), + job_result_queue: Default::default(), instance_telemetry_key: Default::default(), microsoft_telemetry_key: Default::default(), logs: Default::default(), diff --git a/src/agent/onefuzz-task/src/tasks/heartbeat.rs b/src/agent/onefuzz-task/src/tasks/heartbeat.rs index 515fa39d0c..e13b661909 100644 --- a/src/agent/onefuzz-task/src/tasks/heartbeat.rs +++ b/src/agent/onefuzz-task/src/tasks/heartbeat.rs @@ -1,8 +1,8 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. 
-use crate::onefuzz::heartbeat::HeartbeatClient; use anyhow::Result; +use onefuzz::heartbeat::HeartbeatClient; use reqwest::Url; use serde::{self, Deserialize, Serialize}; use std::time::Duration; diff --git a/src/agent/onefuzz-task/src/tasks/merge/generic.rs b/src/agent/onefuzz-task/src/tasks/merge/generic.rs index 4f2e8234a8..3b6a2094d8 100644 --- a/src/agent/onefuzz-task/src/tasks/merge/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/merge/generic.rs @@ -83,7 +83,7 @@ pub async fn spawn(config: &Config) -> Result<()> { } } } else { - warn!("no new candidate inputs found, sleeping"); + debug!("no new candidate inputs found, sleeping"); delay_with_jitter(EMPTY_QUEUE_DELAY).await; }; } diff --git a/src/agent/onefuzz-task/src/tasks/merge/libfuzzer_merge.rs b/src/agent/onefuzz-task/src/tasks/merge/libfuzzer_merge.rs index 1c334b3f18..2d53bc8c07 100644 --- a/src/agent/onefuzz-task/src/tasks/merge/libfuzzer_merge.rs +++ b/src/agent/onefuzz-task/src/tasks/merge/libfuzzer_merge.rs @@ -120,7 +120,7 @@ async fn process_message(config: &Config, input_queue: QueueClient) -> Result<() } Ok(()) } else { - warn!("no new candidate inputs found, sleeping"); + debug!("no new candidate inputs found, sleeping"); delay_with_jitter(EMPTY_QUEUE_DELAY).await; Ok(()) } diff --git a/src/agent/onefuzz-task/src/tasks/regression/common.rs b/src/agent/onefuzz-task/src/tasks/regression/common.rs index 60023cfa6e..b61a97df4c 100644 --- a/src/agent/onefuzz-task/src/tasks/regression/common.rs +++ b/src/agent/onefuzz-task/src/tasks/regression/common.rs @@ -2,12 +2,14 @@ // Licensed under the MIT License. use crate::tasks::{ + config::CommonConfig, heartbeat::{HeartbeatSender, TaskHeartbeatClient}, report::crash_report::{parse_report_file, CrashTestResult, RegressionReport}, }; use anyhow::{Context, Result}; use async_trait::async_trait; use onefuzz::syncdir::SyncedDir; +use onefuzz_result::job_result::TaskJobResultClient; use reqwest::Url; use std::path::PathBuf; @@ -24,7 +26,7 @@ pub trait RegressionHandler { /// Runs the regression task pub async fn run( - heartbeat_client: Option, + common_config: &CommonConfig, regression_reports: &SyncedDir, crashes: &SyncedDir, report_dirs: &[&SyncedDir], @@ -35,6 +37,9 @@ pub async fn run( info!("starting regression task"); regression_reports.init().await?; + let heartbeat_client = common_config.init_heartbeat(None).await?; + let job_result_client = common_config.init_job_result().await?; + handle_crash_reports( handler, crashes, @@ -42,6 +47,7 @@ pub async fn run( report_list, regression_reports, &heartbeat_client, + &job_result_client, ) .await .context("handling crash reports")?; @@ -52,6 +58,7 @@ pub async fn run( readonly_inputs, regression_reports, &heartbeat_client, + &job_result_client, ) .await .context("handling inputs")?; @@ -71,6 +78,7 @@ pub async fn handle_inputs( readonly_inputs: &SyncedDir, regression_reports: &SyncedDir, heartbeat_client: &Option, + job_result_client: &Option, ) -> Result<()> { readonly_inputs.init_pull().await?; let mut input_files = tokio::fs::read_dir(&readonly_inputs.local_path).await?; @@ -95,7 +103,7 @@ pub async fn handle_inputs( crash_test_result, original_crash_test_result: None, } - .save(None, regression_reports) + .save(None, regression_reports, job_result_client) .await? 
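Three polling loops in this patch (analysis/generic.rs, merge/generic.rs, and merge/libfuzzer_merge.rs) downgrade the empty-queue message from warn to debug: an empty input queue is the idle steady state of these tasks, not an anomaly, so it was generating warning noise on every poll. The loop shape, schematically (names taken from the hunks; next_message is an elided stand-in for the queue API):

loop {
    if let Some(message) = next_message().await? {
        // ... process the input, then message.delete().await? ...
    } else {
        debug!("no new candidate inputs found, sleeping");
        delay_with_jitter(EMPTY_QUEUE_DELAY).await;
    }
}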
} @@ -109,6 +117,7 @@ pub async fn handle_crash_reports( report_list: &Option>, regression_reports: &SyncedDir, heartbeat_client: &Option, + job_result_client: &Option, ) -> Result<()> { // without crash report containers, skip this method if report_dirs.is_empty() { @@ -158,7 +167,7 @@ pub async fn handle_crash_reports( crash_test_result, original_crash_test_result: Some(original_crash_test_result), } - .save(Some(file_name), regression_reports) + .save(Some(file_name), regression_reports, job_result_client) .await? } } diff --git a/src/agent/onefuzz-task/src/tasks/regression/generic.rs b/src/agent/onefuzz-task/src/tasks/regression/generic.rs index 640e80db9a..8570208d59 100644 --- a/src/agent/onefuzz-task/src/tasks/regression/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/regression/generic.rs @@ -89,7 +89,6 @@ impl GenericRegressionTask { pub async fn run(&self) -> Result<()> { info!("Starting generic regression task"); - let heartbeat_client = self.config.common.init_heartbeat(None).await?; let mut report_dirs = vec![]; for dir in vec![ @@ -103,7 +102,7 @@ impl GenericRegressionTask { report_dirs.push(dir); } common::run( - heartbeat_client, + &self.config.common, &self.config.regression_reports, &self.config.crashes, &report_dirs, diff --git a/src/agent/onefuzz-task/src/tasks/regression/libfuzzer.rs b/src/agent/onefuzz-task/src/tasks/regression/libfuzzer.rs index 06dd7c00d9..e65f46bb64 100644 --- a/src/agent/onefuzz-task/src/tasks/regression/libfuzzer.rs +++ b/src/agent/onefuzz-task/src/tasks/regression/libfuzzer.rs @@ -103,9 +103,8 @@ impl LibFuzzerRegressionTask { report_dirs.push(dir); } - let heartbeat_client = self.config.common.init_heartbeat(None).await?; common::run( - heartbeat_client, + &self.config.common, &self.config.regression_reports, &self.config.crashes, &report_dirs, diff --git a/src/agent/onefuzz-task/src/tasks/report/crash_report.rs b/src/agent/onefuzz-task/src/tasks/report/crash_report.rs index 23171bc432..290b98ccde 100644 --- a/src/agent/onefuzz-task/src/tasks/report/crash_report.rs +++ b/src/agent/onefuzz-task/src/tasks/report/crash_report.rs @@ -3,6 +3,7 @@ use anyhow::{Context, Result}; use onefuzz::{blob::BlobUrl, monitor::DirectoryMonitor, syncdir::SyncedDir}; +use onefuzz_result::job_result::{JobResultData, JobResultSender, TaskJobResultClient}; use onefuzz_telemetry::{ Event::{ new_report, new_unable_to_reproduce, new_unique_report, regression_report, @@ -12,6 +13,7 @@ use onefuzz_telemetry::{ }; use serde::{Deserialize, Serialize}; use stacktrace_parser::CrashLog; +use std::collections::HashMap; use std::path::{Path, PathBuf}; use uuid::Uuid; @@ -111,6 +113,7 @@ impl RegressionReport { self, report_name: Option, regression_reports: &SyncedDir, + jr_client: &Option, ) -> Result<()> { let (event, name) = match &self.crash_test_result { CrashTestResult::CrashReport(report) => { @@ -126,6 +129,15 @@ impl RegressionReport { if upload_or_save_local(&self, &name, regression_reports).await? 
{ event!(event; EventData::Path = name.clone()); metric!(event; 1.0; EventData::Path = name.clone()); + + if let Some(jr_client) = jr_client { + let _ = jr_client + .send_direct( + JobResultData::NewRegressionReport, + HashMap::from([("count".to_string(), 1.0)]), + ) + .await; + } } Ok(()) } @@ -149,6 +161,7 @@ impl CrashTestResult { unique_reports: &Option, reports: &Option, no_repro: &Option, + jr_client: &Option, ) -> Result<()> { match self { Self::CrashReport(report) => { @@ -158,6 +171,15 @@ impl CrashTestResult { if upload_or_save_local(&report, &name, unique_reports).await? { event!(new_unique_report; EventData::Path = report.unique_blob_name()); metric!(new_unique_report; 1.0; EventData::Path = report.unique_blob_name()); + + if let Some(jr_client) = jr_client { + let _ = jr_client + .send_direct( + JobResultData::NewUniqueReport, + HashMap::from([("count".to_string(), 1.0)]), + ) + .await; + } } } @@ -166,6 +188,15 @@ impl CrashTestResult { if upload_or_save_local(&report, &name, reports).await? { event!(new_report; EventData::Path = report.blob_name()); metric!(new_report; 1.0; EventData::Path = report.blob_name()); + + if let Some(jr_client) = jr_client { + let _ = jr_client + .send_direct( + JobResultData::NewReport, + HashMap::from([("count".to_string(), 1.0)]), + ) + .await; + } } } } @@ -176,6 +207,15 @@ impl CrashTestResult { if upload_or_save_local(&report, &name, no_repro).await? { event!(new_unable_to_reproduce; EventData::Path = report.blob_name()); metric!(new_unable_to_reproduce; 1.0; EventData::Path = report.blob_name()); + + if let Some(jr_client) = jr_client { + let _ = jr_client + .send_direct( + JobResultData::NoReproCrashingInput, + HashMap::from([("count".to_string(), 1.0)]), + ) + .await; + } } } } @@ -324,6 +364,7 @@ pub async fn monitor_reports( unique_reports: &Option, reports: &Option, no_crash: &Option, + jr_client: &Option, ) -> Result<()> { if unique_reports.is_none() && reports.is_none() && no_crash.is_none() { debug!("no report directories configured"); @@ -334,7 +375,9 @@ pub async fn monitor_reports( while let Some(file) = monitor.next_file().await? 
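The crash_report.rs additions above fan every newly saved artifact out to one job-result datum with a count of 1.0: a saved regression report emits NewRegressionReport, a report saved to unique_reports emits NewUniqueReport, one saved to reports emits NewReport, and a no-repro result emits NoReproCrashingInput. The four inlined blocks are identical apart from the variant; a hypothetical helper (not in this patch) that would collapse them:

use std::collections::HashMap;

async fn emit_saved(jr: &Option<TaskJobResultClient>, data: JobResultData) {
    if let Some(jr) = jr {
        // Best-effort, like the inlined call sites: the result is dropped.
        let _ = jr
            .send_direct(data, HashMap::from([("count".to_string(), 1.0)]))
            .await;
    }
}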
{ let result = parse_report_file(file).await?; - result.save(unique_reports, reports, no_crash).await?; + result + .save(unique_reports, reports, no_crash, jr_client) + .await?; } Ok(()) diff --git a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs index 9b626a7d89..b8659845de 100644 --- a/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/report/dotnet/generic.rs @@ -8,25 +8,25 @@ use std::{ sync::Arc, }; +use crate::tasks::report::crash_report::*; +use crate::tasks::report::dotnet::common::collect_exception_info; +use crate::tasks::{ + config::CommonConfig, + generic::input_poller::*, + heartbeat::{HeartbeatSender, TaskHeartbeatClient}, + utils::{default_bool_true, try_resolve_setup_relative_path}, +}; use anyhow::{Context, Result}; use async_trait::async_trait; use onefuzz::expand::Expand; use onefuzz::fs::set_executable; use onefuzz::{blob::BlobUrl, sha256, syncdir::SyncedDir}; +use onefuzz_result::job_result::TaskJobResultClient; use reqwest::Url; use serde::Deserialize; use storage_queue::{Message, QueueClient}; use tokio::fs; -use crate::tasks::report::crash_report::*; -use crate::tasks::report::dotnet::common::collect_exception_info; -use crate::tasks::{ - config::CommonConfig, - generic::input_poller::*, - heartbeat::{HeartbeatSender, TaskHeartbeatClient}, - utils::{default_bool_true, try_resolve_setup_relative_path}, -}; - const DOTNET_DUMP_TOOL_NAME: &str = "dotnet-dump"; #[derive(Debug, Deserialize)] @@ -114,15 +114,18 @@ impl DotnetCrashReportTask { pub struct AsanProcessor { config: Arc, heartbeat_client: Option, + job_result_client: Option, } impl AsanProcessor { pub async fn new(config: Arc) -> Result { let heartbeat_client = config.common.init_heartbeat(None).await?; + let job_result_client = config.common.init_job_result().await?; Ok(Self { config, heartbeat_client, + job_result_client, }) } @@ -260,6 +263,7 @@ impl Processor for AsanProcessor { &self.config.unique_reports, &self.config.reports, &self.config.no_repro, + &self.job_result_client, ) .await; diff --git a/src/agent/onefuzz-task/src/tasks/report/generic.rs b/src/agent/onefuzz-task/src/tasks/report/generic.rs index 9088f98acc..8ad259f0a5 100644 --- a/src/agent/onefuzz-task/src/tasks/report/generic.rs +++ b/src/agent/onefuzz-task/src/tasks/report/generic.rs @@ -13,6 +13,7 @@ use async_trait::async_trait; use onefuzz::{ blob::BlobUrl, input_tester::Tester, machine_id::MachineIdentity, sha256, syncdir::SyncedDir, }; +use onefuzz_result::job_result::TaskJobResultClient; use reqwest::Url; use serde::Deserialize; use std::{ @@ -73,7 +74,9 @@ impl ReportTask { pub async fn managed_run(&mut self) -> Result<()> { info!("Starting generic crash report task"); let heartbeat_client = self.config.common.init_heartbeat(None).await?; - let mut processor = GenericReportProcessor::new(&self.config, heartbeat_client); + let job_result_client = self.config.common.init_job_result().await?; + let mut processor = + GenericReportProcessor::new(&self.config, heartbeat_client, job_result_client); #[allow(clippy::manual_flatten)] for entry in [ @@ -183,13 +186,19 @@ pub async fn test_input(args: TestInputArgs<'_>) -> Result { pub struct GenericReportProcessor<'a> { config: &'a Config, heartbeat_client: Option, + job_result_client: Option, } impl<'a> GenericReportProcessor<'a> { - pub fn new(config: &'a Config, heartbeat_client: Option) -> Self { + pub fn new( + config: &'a Config, + heartbeat_client: Option, + 
job_result_client: Option, + ) -> Self { Self { config, heartbeat_client, + job_result_client, } } @@ -239,6 +248,7 @@ impl<'a> Processor for GenericReportProcessor<'a> { &self.config.unique_reports, &self.config.reports, &self.config.no_repro, + &self.job_result_client, ) .await .context("saving report failed") diff --git a/src/agent/onefuzz-task/src/tasks/report/libfuzzer_report.rs b/src/agent/onefuzz-task/src/tasks/report/libfuzzer_report.rs index f18f638fa3..587ed2e3dc 100644 --- a/src/agent/onefuzz-task/src/tasks/report/libfuzzer_report.rs +++ b/src/agent/onefuzz-task/src/tasks/report/libfuzzer_report.rs @@ -13,6 +13,7 @@ use async_trait::async_trait; use onefuzz::{ blob::BlobUrl, libfuzzer::LibFuzzer, machine_id::MachineIdentity, sha256, syncdir::SyncedDir, }; +use onefuzz_result::job_result::TaskJobResultClient; use reqwest::Url; use serde::Deserialize; use std::{ @@ -196,15 +197,18 @@ pub async fn test_input(args: TestInputArgs<'_>) -> Result { pub struct AsanProcessor { config: Arc, heartbeat_client: Option, + job_result_client: Option, } impl AsanProcessor { pub async fn new(config: Arc) -> Result { let heartbeat_client = config.common.init_heartbeat(None).await?; + let job_result_client = config.common.init_job_result().await?; Ok(Self { config, heartbeat_client, + job_result_client, }) } @@ -257,6 +261,7 @@ impl Processor for AsanProcessor { &self.config.unique_reports, &self.config.reports, &self.config.no_repro, + &self.job_result_client, ) .await } diff --git a/src/agent/onefuzz/Cargo.toml b/src/agent/onefuzz/Cargo.toml index c096c8ddfc..1f3c27985c 100644 --- a/src/agent/onefuzz/Cargo.toml +++ b/src/agent/onefuzz/Cargo.toml @@ -44,6 +44,7 @@ tempfile = "3.7.0" process_control = "4.0" reqwest-retry = { path = "../reqwest-retry" } onefuzz-telemetry = { path = "../onefuzz-telemetry" } +onefuzz-result = { path = "../onefuzz-result" } stacktrace-parser = { path = "../stacktrace-parser" } backoff = { version = "0.4", features = ["tokio"] } diff --git a/src/agent/onefuzz/src/blob/url.rs b/src/agent/onefuzz/src/blob/url.rs index f55ffbb23a..134b59dea0 100644 --- a/src/agent/onefuzz/src/blob/url.rs +++ b/src/agent/onefuzz/src/blob/url.rs @@ -192,10 +192,15 @@ impl BlobContainerUrl { } pub fn as_path(&self, prefix: impl AsRef) -> Result { - let dir = self - .account() - .ok_or_else(|| anyhow!("Invalid container Url"))?; - Ok(prefix.as_ref().join(dir)) + match (self.account(), self.container()) { + (Some(account), Some(container)) => { + let mut path = PathBuf::new(); + path.push(account); + path.push(container); + Ok(prefix.as_ref().join(path)) + } + _ => bail!("Invalid container Url"), + } } } @@ -526,4 +531,14 @@ mod tests { "id:000000,sig:06,src:000000,op:havoc,rep:128" ); } + + #[test] + fn test_as_path() -> Result<()> { + let root = PathBuf::from(r"/onefuzz"); + let url = BlobContainerUrl::parse("https://myaccount.blob.core.windows.net/mycontainer")?; + let path = url.as_path(root)?; + assert_eq!(PathBuf::from(r"/onefuzz/myaccount/mycontainer"), path); + + Ok(()) + } } diff --git a/src/agent/onefuzz/src/syncdir.rs b/src/agent/onefuzz/src/syncdir.rs index 0252099561..2e73b7a694 100644 --- a/src/agent/onefuzz/src/syncdir.rs +++ b/src/agent/onefuzz/src/syncdir.rs @@ -11,10 +11,12 @@ use crate::{ }; use anyhow::{Context, Result}; use dunce::canonicalize; +use onefuzz_result::job_result::{JobResultData, JobResultSender, TaskJobResultClient}; use onefuzz_telemetry::{Event, EventData}; use reqwest::{StatusCode, Url}; use reqwest_retry::{RetryCheck, SendRetry, DEFAULT_RETRY_PERIOD, 
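The blob/url.rs change above is behavior-correcting rather than cosmetic: the old as_path keyed the local directory on the storage account alone, so two SyncedDirs pointing at different containers in the same account collapsed onto the same local path. An illustrative check of the new behavior (modeled on the test added above; this exact assertion is not in the patch):

use std::path::PathBuf;

fn main() -> anyhow::Result<()> {
    let crashes = BlobContainerUrl::parse("https://myaccount.blob.core.windows.net/crashes")?;
    let reports = BlobContainerUrl::parse("https://myaccount.blob.core.windows.net/reports")?;
    let root = PathBuf::from("/onefuzz");

    // New behavior: prefix/account/container, so the two stay distinct.
    assert_eq!(crashes.as_path(&root)?, PathBuf::from("/onefuzz/myaccount/crashes"));
    assert_ne!(crashes.as_path(&root)?, reports.as_path(&root)?);
    // The old implementation returned /onefuzz/myaccount for both.
    Ok(())
}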
MAX_RETRY_ATTEMPTS}; use serde::{Deserialize, Serialize}; +use std::collections::HashMap; use std::{env::current_dir, path::PathBuf, str, time::Duration}; use tokio::{fs, select}; use tokio_util::sync::CancellationToken; @@ -241,6 +243,7 @@ impl SyncedDir { url: BlobContainerUrl, event: Event, ignore_dotfiles: bool, + jr_client: &Option, ) -> Result<()> { debug!("monitoring {}", path.display()); @@ -265,9 +268,39 @@ impl SyncedDir { if ignore_dotfiles && file_name_event_str.starts_with('.') { continue; } - event!(event.clone(); EventData::Path = file_name_event_str); metric!(event.clone(); 1.0; EventData::Path = file_name_str_metric_str); + if let Some(jr_client) = jr_client { + match event { + Event::new_result => { + jr_client + .send_direct( + JobResultData::NewCrashingInput, + HashMap::from([("count".to_string(), 1.0)]), + ) + .await; + } + Event::new_coverage => { + jr_client + .send_direct( + JobResultData::CoverageData, + HashMap::from([("count".to_string(), 1.0)]), + ) + .await; + } + Event::new_crashdump => { + jr_client + .send_direct( + JobResultData::NewCrashDump, + HashMap::from([("count".to_string(), 1.0)]), + ) + .await; + } + _ => { + warn!("Unhandled job result!"); + } + } + } let destination = path.join(file_name); if let Err(err) = fs::copy(&item, &destination).await { let error_message = format!( @@ -305,6 +338,29 @@ impl SyncedDir { event!(event.clone(); EventData::Path = file_name_event_str); metric!(event.clone(); 1.0; EventData::Path = file_name_str_metric_str); + if let Some(jr_client) = jr_client { + match event { + Event::new_result => { + jr_client + .send_direct( + JobResultData::NewCrashingInput, + HashMap::from([("count".to_string(), 1.0)]), + ) + .await; + } + Event::new_coverage => { + jr_client + .send_direct( + JobResultData::CoverageData, + HashMap::from([("count".to_string(), 1.0)]), + ) + .await; + } + _ => { + warn!("Unhandled job result!"); + } + } + } if let Err(err) = uploader.upload(item.clone()).await { let error_message = format!( "Couldn't upload file. path:{} dir:{} err:{:?}", @@ -336,7 +392,12 @@ impl SyncedDir { /// The intent of this is to support use cases where we usually want a directory /// to be initialized, but a user-supplied binary, (such as AFL) logically owns /// a directory, and may reset it. 
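In syncdir.rs above, the event-to-datum match is duplicated between the copy branch and the upload branch, and the two copies differ: the upload branch has no arm for Event::new_crashdump, so a crashdump synced to a remote container falls through to the "Unhandled job result!" warning. If that asymmetry is unintended, a shared helper like this hypothetical one (not in the patch) would keep the branches consistent:

fn job_result_for(event: &Event) -> Option<JobResultData> {
    match event {
        Event::new_result => Some(JobResultData::NewCrashingInput),
        Event::new_coverage => Some(JobResultData::CoverageData),
        Event::new_crashdump => Some(JobResultData::NewCrashDump),
        _ => None,
    }
}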
- pub async fn monitor_results(&self, event: Event, ignore_dotfiles: bool) -> Result<()> { + pub async fn monitor_results( + &self, + event: Event, + ignore_dotfiles: bool, + job_result_client: &Option, + ) -> Result<()> { if let Some(url) = self.remote_path.clone() { loop { debug!("waiting to monitor {}", self.local_path.display()); @@ -355,6 +416,7 @@ impl SyncedDir { url.clone(), event.clone(), ignore_dotfiles, + job_result_client, ) .await?; } diff --git a/src/deployment/bicep-templates/storageAccounts.bicep b/src/deployment/bicep-templates/storageAccounts.bicep index 6a96cea6a0..27f2da21d8 100644 --- a/src/deployment/bicep-templates/storageAccounts.bicep +++ b/src/deployment/bicep-templates/storageAccounts.bicep @@ -33,7 +33,7 @@ var storageAccountFuncQueuesParams = [ 'update-queue' 'webhooks' 'signalr-events' - 'custom-metrics' + 'job-result' ] var fileChangesQueueIndex = 0 diff --git a/src/integration-tests/integration-test.py b/src/integration-tests/integration-test.py index 057404ceff..15ffcfb9fe 100755 --- a/src/integration-tests/integration-test.py +++ b/src/integration-tests/integration-test.py @@ -88,6 +88,7 @@ class Integration(BaseModel): target_method: Optional[str] setup_dir: Optional[str] target_env: Optional[Dict[str, str]] + pool: PoolName TARGETS: Dict[str, Integration] = { @@ -97,6 +98,7 @@ class Integration(BaseModel): target_exe="fuzz.exe", inputs="seeds", wait_for_files={ContainerType.unique_reports: 1}, + pool="linux", ), "linux-libfuzzer": Integration( template=TemplateType.libfuzzer, @@ -124,6 +126,7 @@ class Integration(BaseModel): "--only_asan_failures", "--write_test_file={extra_output_dir}/test.txt", ], + pool="linux", ), "linux-libfuzzer-with-options": Integration( template=TemplateType.libfuzzer, @@ -137,6 +140,7 @@ class Integration(BaseModel): }, reboot_after_setup=True, fuzzing_target_options=["-runs=10000000"], + pool="linux", ), "linux-libfuzzer-dlopen": Integration( template=TemplateType.libfuzzer, @@ -150,6 +154,7 @@ class Integration(BaseModel): }, reboot_after_setup=True, use_setup=True, + pool="linux", ), "linux-libfuzzer-linked-library": Integration( template=TemplateType.libfuzzer, @@ -163,6 +168,7 @@ class Integration(BaseModel): }, reboot_after_setup=True, use_setup=True, + pool="linux", ), "linux-libfuzzer-dotnet": Integration( template=TemplateType.libfuzzer_dotnet, @@ -180,6 +186,7 @@ class Integration(BaseModel): ContainerType.unique_reports: 1, }, test_repro=False, + pool="linux", ), "linux-libfuzzer-aarch64-crosscompile": Integration( template=TemplateType.libfuzzer_qemu_user, @@ -189,6 +196,7 @@ class Integration(BaseModel): use_setup=True, wait_for_files={ContainerType.inputs: 2, ContainerType.crashes: 1}, test_repro=False, + pool="linux", ), "linux-libfuzzer-rust": Integration( template=TemplateType.libfuzzer, @@ -196,6 +204,7 @@ class Integration(BaseModel): target_exe="fuzz_target_1", wait_for_files={ContainerType.unique_reports: 1, ContainerType.coverage: 1}, fuzzing_target_options=["--test:{extra_setup_dir}"], + pool="linux", ), "linux-trivial-crash": Integration( template=TemplateType.radamsa, @@ -204,6 +213,7 @@ class Integration(BaseModel): inputs="seeds", wait_for_files={ContainerType.unique_reports: 1}, inject_fake_regression=True, + pool="linux", ), "linux-trivial-crash-asan": Integration( template=TemplateType.radamsa, @@ -213,6 +223,28 @@ class Integration(BaseModel): wait_for_files={ContainerType.unique_reports: 1}, check_asan_log=True, disable_check_debugger=True, + pool="linux", + ), + # TODO: Don't install OMS 
extension on linux anymore + # TODO: Figure out why non mariner work is being scheduled to the mariner pool + "mariner-libfuzzer": Integration( + template=TemplateType.libfuzzer, + os=OS.linux, + target_exe="fuzz.exe", + inputs="seeds", + wait_for_files={ + ContainerType.unique_reports: 1, + ContainerType.coverage: 1, + ContainerType.inputs: 2, + ContainerType.extra_output: 1, + }, + reboot_after_setup=True, + inject_fake_regression=True, + fuzzing_target_options=[ + "--test:{extra_setup_dir}", + "--write_test_file={extra_output_dir}/test.txt", + ], + pool=PoolName("mariner") ), "windows-libfuzzer": Integration( template=TemplateType.libfuzzer, @@ -234,6 +266,7 @@ class Integration(BaseModel): "--only_asan_failures", "--write_test_file={extra_output_dir}/test.txt", ], + pool="windows", ), "windows-libfuzzer-linked-library": Integration( template=TemplateType.libfuzzer, @@ -246,6 +279,7 @@ class Integration(BaseModel): ContainerType.coverage: 1, }, use_setup=True, + pool="windows", ), "windows-libfuzzer-load-library": Integration( template=TemplateType.libfuzzer, @@ -258,6 +292,7 @@ class Integration(BaseModel): ContainerType.coverage: 1, }, use_setup=True, + pool="windows", ), "windows-libfuzzer-dotnet": Integration( template=TemplateType.libfuzzer_dotnet, @@ -275,6 +310,7 @@ class Integration(BaseModel): ContainerType.unique_reports: 1, }, test_repro=False, + pool="windows", ), "windows-trivial-crash": Integration( template=TemplateType.radamsa, @@ -283,6 +319,7 @@ class Integration(BaseModel): inputs="seeds", wait_for_files={ContainerType.unique_reports: 1}, inject_fake_regression=True, + pool="windows", ), } @@ -351,7 +388,7 @@ def try_info_get(data: Any) -> None: self.inject_log(self.start_log_marker) for entry in os_list: - name = PoolName(f"testpool-{entry.name}-{self.test_id}") + name = self.build_pool_name(entry.name) self.logger.info("creating pool: %s:%s", entry.name, name) self.of.pools.create(name, entry) self.logger.info("creating scaleset for pool: %s", name) @@ -359,6 +396,15 @@ def try_info_get(data: Any) -> None: name, pool_size, region=region, initial_size=pool_size ) + name = self.build_pool_name("mariner") + self.logger.info("creating pool: %s:%s", "mariner", name) + self.of.pools.create(name, OS.linux) + self.logger.info("creating scaleset for pool: %s", name) + self.of.scalesets.create( + name, pool_size, region=region, initial_size=pool_size, image="MicrosoftCBLMariner:cbl-mariner:cbl-mariner-2-gen2:latest" + ) + + class UnmanagedPool: def __init__( self, @@ -560,12 +606,9 @@ def launch( ) -> List[UUID]: """Launch all of the fuzzing templates""" - pools: Dict[OS, Pool] = {} + pool = None if unmanaged_pool is not None: - pools[unmanaged_pool.the_os] = self.of.pools.get(unmanaged_pool.pool_name) - else: - for pool in self.of.pools.list(): - pools[pool.os] = pool + pool = unmanaged_pool.pool_name job_ids = [] @@ -576,8 +619,8 @@ def launch( if config.os not in os_list: continue - if config.os not in pools.keys(): - raise Exception(f"No pool for target: {target} ,os: {config.os}") + if pool is None: + pool = self.build_pool_name(config.pool) self.logger.info("launching: %s", target) @@ -601,8 +644,9 @@ def launch( setup = Directory(os.path.join(setup, config.nested_setup_dir)) job: Optional[Job] = None + job = self.build_job( - duration, pools, target, config, setup, target_exe, inputs + duration, pool, target, config, setup, target_exe, inputs ) if config.inject_fake_regression and job is not None: @@ -618,7 +662,7 @@ def launch( def build_job( self, duration: int, - 
pools: Dict[OS, Pool], + pool: PoolName, target: str, config: Integration, setup: Optional[Directory], @@ -634,7 +678,7 @@ def build_job( self.project, target, BUILD, - pools[config.os].name, + pool, target_exe=target_exe, inputs=inputs, setup_dir=setup, @@ -659,7 +703,7 @@ def build_job( self.project, target, BUILD, - pools[config.os].name, + pool, target_dll=File(config.target_exe), inputs=inputs, setup_dir=setup, @@ -675,7 +719,7 @@ def build_job( self.project, target, BUILD, - pools[config.os].name, + pool, inputs=inputs, target_exe=target_exe, duration=duration, @@ -688,7 +732,7 @@ def build_job( self.project, target, BUILD, - pool_name=pools[config.os].name, + pool_name=pool, target_exe=target_exe, inputs=inputs, setup_dir=setup, @@ -703,7 +747,7 @@ def build_job( self.project, target, BUILD, - pool_name=pools[config.os].name, + pool_name=pool, target_exe=target_exe, inputs=inputs, setup_dir=setup, @@ -1233,6 +1277,9 @@ def check_logs_for_errors(self) -> None: if seen_errors: raise Exception("logs included errors") + + def build_pool_name(self, os_type: str) -> PoolName: + return PoolName(f"testpool-{os_type}-{self.test_id}") class Run(Command): diff --git a/src/runtime-tools/linux/setup.sh b/src/runtime-tools/linux/setup.sh old mode 100755 new mode 100644 index f6859003b4..794e827f4d --- a/src/runtime-tools/linux/setup.sh +++ b/src/runtime-tools/linux/setup.sh @@ -18,6 +18,14 @@ export DOTNET_CLI_HOME="$DOTNET_ROOT" export ONEFUZZ_ROOT=/onefuzz export LLVM_SYMBOLIZER_PATH=/onefuzz/bin/llvm-symbolizer +# `logger` won't work on mariner unless we install this package first +if type yum > /dev/null 2> /dev/null; then + until yum install -y util-linux sudo; do + echo "yum failed. sleep 10s, then retrying" + sleep 10 + done +fi + logger "onefuzz: making directories" sudo mkdir -p /onefuzz/downloaded sudo chown -R $(whoami) /onefuzz @@ -134,31 +142,53 @@ if type apt > /dev/null 2> /dev/null; then sudo ln -f -s $(which llvm-symbolizer-12) $LLVM_SYMBOLIZER_PATH fi - # Install dotnet + # Needed to install dotnet until sudo apt install -y curl libicu-dev; do logger "apt failed, sleeping 10s then retrying" sleep 10 done +elif type yum > /dev/null 2> /dev/null; then + until yum install -y gdb gdb-gdbserver libunwind awk ca-certificates tar yum-utils shadow-utils cronie procps; do + echo "yum failed. 
sleep 10s, then retrying" + sleep 10 + done + + # Install updated Microsoft Open Management Infrastructure - github.com/microsoft/omi + yum-config-manager --add-repo=https://packages.microsoft.com/config/rhel/8/prod.repo 2>&1 | logger -s -i -t 'onefuzz-OMI-add-MS-repo' + yum install -y omi 2>&1 | logger -s -i -t 'onefuzz-OMI-install' - logger "downloading dotnet install" - curl --retry 10 -sSL https://dot.net/v1/dotnet-install.sh -o dotnet-install.sh 2>&1 | logger -s -i -t 'onefuzz-curl-dotnet-install' - chmod +x dotnet-install.sh - for version in "${DOTNET_VERSIONS[@]}"; do - logger "running dotnet install $version" - /bin/bash ./dotnet-install.sh --channel "$version" --install-dir "$DOTNET_ROOT" 2>&1 | logger -s -i -t 'onefuzz-dotnet-setup' - done - rm dotnet-install.sh - - logger "install dotnet tools" - pushd "$DOTNET_ROOT" - ls -lah 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' - "$DOTNET_ROOT"/dotnet tool install dotnet-dump --version 6.0.351802 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' - "$DOTNET_ROOT"/dotnet tool install dotnet-coverage --version 17.5 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' - "$DOTNET_ROOT"/dotnet tool install dotnet-sos --version 6.0.351802 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' - popd + if ! [ -f ${LLVM_SYMBOLIZER_PATH} ]; then + until yum install -y llvm-12.0.1; do + echo "yum failed, sleeping 10s then retrying" + sleep 10 + done + + # If specifying symbolizer, exe name must be a "known symbolizer". + # Using `llvm-symbolizer` works for clang 8 .. 12. + sudo ln -f -s $(which llvm-symbolizer-12) $LLVM_SYMBOLIZER_PATH + fi fi +# Install dotnet +logger "downloading dotnet install" +curl --retry 10 -sSL https://dot.net/v1/dotnet-install.sh -o dotnet-install.sh 2>&1 | logger -s -i -t 'onefuzz-curl-dotnet-install' +chmod +x dotnet-install.sh + +for version in "${DOTNET_VERSIONS[@]}"; do + logger "running dotnet install $version" + /bin/bash ./dotnet-install.sh --channel "$version" --install-dir "$DOTNET_ROOT" 2>&1 | logger -s -i -t 'onefuzz-dotnet-setup' +done +rm dotnet-install.sh + +logger "install dotnet tools" +pushd "$DOTNET_ROOT" +ls -lah 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' +"$DOTNET_ROOT"/dotnet tool install dotnet-dump --version 6.0.351802 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' +"$DOTNET_ROOT"/dotnet tool install dotnet-coverage --version 17.5 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' +"$DOTNET_ROOT"/dotnet tool install dotnet-sos --version 6.0.351802 --tool-path /onefuzz/tools 2>&1 | logger -s -i -t 'onefuzz-dotnet-tools' +popd + if [ -v DOCKER_BUILD ]; then echo "building for docker" elif [ -d /etc/systemd/system ]; then From 7b404025aa8f5e371721c141b13c2f63a7dfec94 Mon Sep 17 00:00:00 2001 From: Adam <103067949+AdamL-Microsoft@users.noreply.github.com> Date: Wed, 30 Aug 2023 15:18:51 -0700 Subject: [PATCH 03/88] Redo 8.7.1 (#3469) * Redo-8.7.1-hotfix --------- Co-authored-by: Cheick Keita --- CHANGELOG.md | 6 ++++++ CURRENT_VERSION | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index be4779ad77..8d46ea2a0e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,12 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+## 8.7.1 + +### Fixed + +* Service: Removed deprecated Azure retention policy setting that was causing scaleset deployment errors [#3452](https://github.com/microsoft/onefuzz/pull/3452) + ## 8.7.0 ### Added diff --git a/CURRENT_VERSION b/CURRENT_VERSION index c0bcaebe8f..efeecbe2c5 100644 --- a/CURRENT_VERSION +++ b/CURRENT_VERSION @@ -1 +1 @@ -8.7.0 \ No newline at end of file +8.7.1 \ No newline at end of file From d99960323de7336e0463f92babb80ebc841fa627 Mon Sep 17 00:00:00 2001 From: Kanan B <32438208+kananb@users.noreply.github.com> Date: Wed, 30 Aug 2023 16:40:42 -0700 Subject: [PATCH 04/88] Support custom ado fields that mark work items as duplicate (#3467) * Add field to ado config for checking duplicate work items * Make duplicate fields nullable and add it to python models * Update broken tests * Update docs to include new ado_duplicate_fields property --- .../ado-work-items.json | 4 ++++ docs/notifications/ado.md | 7 +++++++ src/ApiService/ApiService/OneFuzzTypes/Model.cs | 4 +++- .../ApiService/onefuzzlib/notifications/Ado.cs | 13 +++++++++---- .../JinjaToScribanMigrationTests.cs | 2 ++ src/ApiService/Tests/OrmModelsTest.cs | 2 ++ src/pytypes/onefuzztypes/models.py | 1 + 7 files changed, 28 insertions(+), 5 deletions(-) diff --git a/contrib/onefuzz-job-azure-devops-pipeline/ado-work-items.json b/contrib/onefuzz-job-azure-devops-pipeline/ado-work-items.json index eb89fc019d..034d97cf15 100644 --- a/contrib/onefuzz-job-azure-devops-pipeline/ado-work-items.json +++ b/contrib/onefuzz-job-azure-devops-pipeline/ado-work-items.json @@ -13,6 +13,10 @@ "System.AreaPath": "OneFuzz-Ado-Integration", "System.Title": "{{report.task_id}}" }, + "ado_duplicate_fields": { + "System.Reason": "My custom value that means a work item is a duplicate", + "Custom.Work.Item.Field": "My custom value that means a work item is a duplicate" + }, "on_duplicate": { "increment": [], "comment": "DUP {{report.input_sha256}}
<br>Repro Command:<br>{{ repro_cmd }}
", diff --git a/docs/notifications/ado.md b/docs/notifications/ado.md index 131986afba..09dd5b9072 100644 --- a/docs/notifications/ado.md +++ b/docs/notifications/ado.md @@ -51,6 +51,13 @@ clickable, make it a link. "System.Title": "{{ report.crash_site }} - {{ report.executable }}", "Microsoft.VSTS.TCM.ReproSteps": "This is my call stack:
<ul>{{ for item in report.call_stack }}<li>{{ item }}</li>{{ end }}</ul>
" }, + "ado_duplicate_fields": { + "System.Reason": "My custom value that means a work item is a duplicate", + "Custom.Work.Item.Field": "My custom value that means a work item is a duplicate" + // note: the fields and values below are checked by default and don't need to be specified + // "System.Reason": "Duplicate" + // "Microsoft.VSTS.Common.ResolvedReason": "Duplicate" + }, "comment": "This is my comment. {{ report.input_sha256 }} {{ input_url }}
<br>{{ repro_cmd }}
", "unique_fields": ["System.Title", "System.AreaPath"], "on_duplicate": { diff --git a/src/ApiService/ApiService/OneFuzzTypes/Model.cs b/src/ApiService/ApiService/OneFuzzTypes/Model.cs index b839f52ddc..424669899a 100644 --- a/src/ApiService/ApiService/OneFuzzTypes/Model.cs +++ b/src/ApiService/ApiService/OneFuzzTypes/Model.cs @@ -689,6 +689,7 @@ public record AdoTemplate( List UniqueFields, Dictionary AdoFields, ADODuplicateTemplate OnDuplicate, + Dictionary? AdoDuplicateFields = null, string? Comment = null ) : NotificationTemplate { public async Task Validate() { @@ -704,8 +705,9 @@ public record RenderedAdoTemplate( List UniqueFields, Dictionary AdoFields, ADODuplicateTemplate OnDuplicate, + Dictionary? AdoDuplicateFields = null, string? Comment = null - ) : AdoTemplate(BaseUrl, AuthToken, Project, Type, UniqueFields, AdoFields, OnDuplicate, Comment); + ) : AdoTemplate(BaseUrl, AuthToken, Project, Type, UniqueFields, AdoFields, OnDuplicate, AdoDuplicateFields, Comment); public record TeamsTemplate(SecretData Url) : NotificationTemplate { public Task Validate() { diff --git a/src/ApiService/ApiService/onefuzzlib/notifications/Ado.cs b/src/ApiService/ApiService/onefuzzlib/notifications/Ado.cs index e05bb9bc24..98b857c9bc 100644 --- a/src/ApiService/ApiService/onefuzzlib/notifications/Ado.cs +++ b/src/ApiService/ApiService/onefuzzlib/notifications/Ado.cs @@ -239,7 +239,7 @@ private static async Async.Task ProcessNotification(IOnefuzzContext context, Con var renderedConfig = RenderAdoTemplate(logTracer, renderer, config, instanceUrl); var ado = new AdoConnector(renderedConfig, project!, client, instanceUrl, logTracer, await GetValidFields(client, project)); - await ado.Process(notificationInfo); + await ado.Process(notificationInfo, config.AdoDuplicateFields); } public static RenderedAdoTemplate RenderAdoTemplate(ILogger logTracer, Renderer renderer, AdoTemplate original, Uri instanceUrl) { @@ -291,6 +291,7 @@ public static RenderedAdoTemplate RenderAdoTemplate(ILogger logTracer, Renderer original.UniqueFields, adoFields, onDuplicate, + original.AdoDuplicateFields, original.Comment != null ? Render(renderer, original.Comment, instanceUrl, logTracer) : null ); } @@ -525,7 +526,7 @@ private async Async.Task CreateNew() { return (taskType, document); } - public async Async.Task Process(IList<(string, string)> notificationInfo) { + public async Async.Task Process(IList<(string, string)> notificationInfo, Dictionary? duplicateFields) { var updated = false; WorkItem? oldestWorkItem = null; await foreach (var workItem in ExistingWorkItems(notificationInfo)) { @@ -535,7 +536,7 @@ public async Async.Task Process(IList<(string, string)> notificationInfo) { _logTracer.AddTags(new List<(string, string)> { ("MatchingWorkItemIds", $"{workItem.Id}") }); _logTracer.LogInformation("Found matching work item"); } - if (IsADODuplicateWorkItem(workItem)) { + if (IsADODuplicateWorkItem(workItem, duplicateFields)) { continue; } @@ -575,13 +576,17 @@ public async Async.Task Process(IList<(string, string)> notificationInfo) { } } - private static bool IsADODuplicateWorkItem(WorkItem wi) { + private static bool IsADODuplicateWorkItem(WorkItem wi, Dictionary? 
duplicateFields) { // A work item could have System.State == Resolve && System.Reason == Duplicate // OR it could have System.State == Closed && System.Reason == Duplicate // I haven't found any other combinations where System.Reason could be duplicate but just to be safe // we're explicitly _not_ checking the state of the work item to determine if it's duplicate return wi.Fields.ContainsKey("System.Reason") && string.Equals(wi.Fields["System.Reason"].ToString(), "Duplicate", StringComparison.OrdinalIgnoreCase) || wi.Fields.ContainsKey("Microsoft.VSTS.Common.ResolvedReason") && string.Equals(wi.Fields["Microsoft.VSTS.Common.ResolvedReason"].ToString(), "Duplicate", StringComparison.OrdinalIgnoreCase) + || duplicateFields?.Any(fieldPair => { + var (field, value) = fieldPair; + return wi.Fields.ContainsKey(field) && string.Equals(wi.Fields[field].ToString(), value, StringComparison.OrdinalIgnoreCase); + }) == true // Alternatively, the work item can also specify a 'relation' to another work item. // This is typically used to create parent/child relationships between work items but can also // Be used to mark duplicates so we should check this as well. diff --git a/src/ApiService/IntegrationTests/JinjaToScribanMigrationTests.cs b/src/ApiService/IntegrationTests/JinjaToScribanMigrationTests.cs index 0ae3b11cb5..4033a05369 100644 --- a/src/ApiService/IntegrationTests/JinjaToScribanMigrationTests.cs +++ b/src/ApiService/IntegrationTests/JinjaToScribanMigrationTests.cs @@ -111,6 +111,7 @@ public async Async.Task OptionalFieldsAreSupported() { }, "{{ if org }} blah {{ end }}" ), + null, "{{ if org }} blah {{ end }}" ); @@ -137,6 +138,7 @@ public async Async.Task All_ADO_Fields_Are_Migrated() { }, "{% if org %} comment {% endif %}" ), + null, "{% if org %} comment {% endif %}" ); diff --git a/src/ApiService/Tests/OrmModelsTest.cs b/src/ApiService/Tests/OrmModelsTest.cs index 1aa7d2d163..956d0c30c5 100644 --- a/src/ApiService/Tests/OrmModelsTest.cs +++ b/src/ApiService/Tests/OrmModelsTest.cs @@ -232,6 +232,7 @@ from authToken in Arb.Generate>() from str in Arb.Generate() from fields in Arb.Generate>() from adoFields in Arb.Generate>() + from adoDuplicateFields in Arb.Generate>() from dupeTemplate in Arb.Generate() select new AdoTemplate( baseUrl, @@ -241,6 +242,7 @@ from dupeTemplate in Arb.Generate() fields, adoFields, dupeTemplate, + adoDuplicateFields, str.Get)); public static Arbitrary ArbTeamsTemplate() diff --git a/src/pytypes/onefuzztypes/models.py b/src/pytypes/onefuzztypes/models.py index a5f8139e97..c888621600 100644 --- a/src/pytypes/onefuzztypes/models.py +++ b/src/pytypes/onefuzztypes/models.py @@ -273,6 +273,7 @@ class ADOTemplate(BaseModel): unique_fields: List[str] comment: Optional[str] ado_fields: Dict[str, str] + ado_duplicate_fields: Optional[Dict[str, str]] on_duplicate: ADODuplicateTemplate # validator needed to convert auth_token to SecretData From b2435b1aea7e653f8e90142904c504a61ffcca1e Mon Sep 17 00:00:00 2001 From: Marc Greisen Date: Thu, 31 Aug 2023 14:12:19 -0700 Subject: [PATCH 05/88] Update readme with archive message (#3408) Co-authored-by: Adam <103067949+AdamL-Microsoft@users.noreply.github.com> --- README.md | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/README.md b/README.md index 010148dd3a..486dae6c15 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,20 @@ # OneFuzz +# IMPORTANT NOTICE + +**_Since September 2020 when OneFuzz was first open sourced, we’ve been on a journey to create a best-in-class orchestrator for running fuzzers, driving 
security and quality into our products._** + + +**_Initially launched by a small group in MSR, OneFuzz has now become a significant internal platform within Microsoft. As such, we are regretfully archiving the project to focus our attention on becoming a more deeply integrated service within the company. Unfortunately, we aren’t a large enough team to live in both the open-source world and the internal Microsoft world with its own unique set of requirements._** + +**_Our current plan is to archive the project in the next few months. That means we’ll still be making updates for a little while. Of course, even after it’s archived, you’ll still be able to fork it and make the changes you need. Once we’ve decided on a specific date for archiving, we’ll update this readme._** + +**_Thanks for taking the journey with us._** + +**_The OneFuzz team._** + +--- + [![Onefuzz build status](https://github.com/microsoft/onefuzz/workflows/Build/badge.svg?branch=main)](https://github.com/microsoft/onefuzz/actions/workflows/ci.yml?query=branch%3Amain) ## A self-hosted Fuzzing-As-A-Service platform From b913074d529160a57815b25361c4288270f2f97d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 31 Aug 2023 22:54:54 +0000 Subject: [PATCH 06/88] Bump tokio from 1.30.0 to 1.32.0 in /src/proxy-manager (#3425) Bumps [tokio](https://github.com/tokio-rs/tokio) from 1.30.0 to 1.32.0. - [Release notes](https://github.com/tokio-rs/tokio/releases) - [Commits](https://github.com/tokio-rs/tokio/compare/tokio-1.30.0...tokio-1.32.0) --- updated-dependencies: - dependency-name: tokio dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- src/proxy-manager/Cargo.lock | 4 ++-- src/proxy-manager/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/proxy-manager/Cargo.lock b/src/proxy-manager/Cargo.lock index ca4813995e..848ea32156 100644 --- a/src/proxy-manager/Cargo.lock +++ b/src/proxy-manager/Cargo.lock @@ -1531,9 +1531,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.30.0" +version = "1.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d3ce25f50619af8b0aec2eb23deebe84249e19e2ddd393a6e16e3300a6dadfd" +checksum = "17ed6077ed6cd6c74735e21f37eb16dc3935f96878b1fe961074089cc80893f9" dependencies = [ "backtrace", "bytes", diff --git a/src/proxy-manager/Cargo.toml b/src/proxy-manager/Cargo.toml index c783e8d3aa..3a377a5f4d 100644 --- a/src/proxy-manager/Cargo.toml +++ b/src/proxy-manager/Cargo.toml @@ -20,7 +20,7 @@ serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" storage-queue = { path = "../agent/storage-queue" } thiserror = "1.0" -tokio = { version = "1.29", features = [ +tokio = { version = "1.32", features = [ "macros", "rt-multi-thread", "fs", From 14ab36ed5ffeb81588a8522c9469f59973eb14ef Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 31 Aug 2023 23:33:16 +0000 Subject: [PATCH 07/88] Bump tokio from 1.30.0 to 1.32.0 in /src/agent (#3424) Bumps [tokio](https://github.com/tokio-rs/tokio) from 1.30.0 to 1.32.0. 
- [Release notes](https://github.com/tokio-rs/tokio/releases) - [Commits](https://github.com/tokio-rs/tokio/compare/tokio-1.30.0...tokio-1.32.0) --- updated-dependencies: - dependency-name: tokio dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- src/agent/Cargo.lock | 4 ++-- src/agent/onefuzz-agent/Cargo.toml | 2 +- src/agent/onefuzz-task/Cargo.toml | 2 +- src/agent/onefuzz-telemetry/Cargo.toml | 2 +- src/agent/onefuzz/Cargo.toml | 2 +- src/agent/reqwest-retry/Cargo.toml | 2 +- src/agent/storage-queue/Cargo.toml | 2 +- 7 files changed, 8 insertions(+), 8 deletions(-) diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock index 254684be97..10cb7a7531 100644 --- a/src/agent/Cargo.lock +++ b/src/agent/Cargo.lock @@ -3555,9 +3555,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.30.0" +version = "1.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d3ce25f50619af8b0aec2eb23deebe84249e19e2ddd393a6e16e3300a6dadfd" +checksum = "17ed6077ed6cd6c74735e21f37eb16dc3935f96878b1fe961074089cc80893f9" dependencies = [ "backtrace", "bytes", diff --git a/src/agent/onefuzz-agent/Cargo.toml b/src/agent/onefuzz-agent/Cargo.toml index 5ce8669766..90f44147c1 100644 --- a/src/agent/onefuzz-agent/Cargo.toml +++ b/src/agent/onefuzz-agent/Cargo.toml @@ -22,7 +22,7 @@ reqwest = { version = "0.11", features = [ serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" storage-queue = { path = "../storage-queue" } -tokio = { version = "1.29", features = ["full"] } +tokio = { version = "1.32", features = ["full"] } url = { version = "2.4", features = ["serde"] } uuid = { version = "1.4", features = ["serde", "v4"] } clap = { version = "4", features = ["derive", "cargo"] } diff --git a/src/agent/onefuzz-task/Cargo.toml b/src/agent/onefuzz-task/Cargo.toml index 4e0bd381b0..cc64929cc6 100644 --- a/src/agent/onefuzz-task/Cargo.toml +++ b/src/agent/onefuzz-task/Cargo.toml @@ -48,7 +48,7 @@ stacktrace-parser = { path = "../stacktrace-parser" } storage-queue = { path = "../storage-queue" } tempfile = "3.7.0" thiserror = "1.0" -tokio = { version = "1.29", features = ["full"] } +tokio = { version = "1.32", features = ["full"] } tokio-util = { version = "0.7", features = ["full"] } tokio-stream = "0.1" tui = { package = "ratatui", version = "0.22.0", default-features = false, features = [ diff --git a/src/agent/onefuzz-telemetry/Cargo.toml b/src/agent/onefuzz-telemetry/Cargo.toml index 23574a013f..8f91478b1f 100644 --- a/src/agent/onefuzz-telemetry/Cargo.toml +++ b/src/agent/onefuzz-telemetry/Cargo.toml @@ -15,5 +15,5 @@ chrono = { version = "0.4", default-features = false, features = [ lazy_static = "1.4" log = "0.4" serde = { version = "1.0", features = ["derive"] } -tokio = { version = "1.29", features = ["full"] } +tokio = { version = "1.32", features = ["full"] } uuid = { version = "1.4", features = ["serde", "v4"] } diff --git a/src/agent/onefuzz/Cargo.toml b/src/agent/onefuzz/Cargo.toml index 1f3c27985c..440a018591 100644 --- a/src/agent/onefuzz/Cargo.toml +++ b/src/agent/onefuzz/Cargo.toml @@ -31,7 +31,7 @@ serde = "1.0" serde_json = "1.0" rand = "0.8" serde_derive = "1.0" -tokio = { version = "1.29", features = ["full"] } +tokio = { version = "1.32", features = ["full"] } tokio-stream = { version = "0.1", features = ["fs", "time", "tokio-util"] } 
tokio-util = { version = "0.7", features = ["full"] } uuid = { version = "1.4", features = ["serde", "v4"] } diff --git a/src/agent/reqwest-retry/Cargo.toml b/src/agent/reqwest-retry/Cargo.toml index d7d12ff4e8..5ddfbbe419 100644 --- a/src/agent/reqwest-retry/Cargo.toml +++ b/src/agent/reqwest-retry/Cargo.toml @@ -19,5 +19,5 @@ reqwest = { version = "0.11", features = [ thiserror = "1.0" [dev-dependencies] -tokio = { version = "1.29", features = ["macros"] } +tokio = { version = "1.32", features = ["macros"] } wiremock = "0.5" diff --git a/src/agent/storage-queue/Cargo.toml b/src/agent/storage-queue/Cargo.toml index d5c1c09d08..381a761c74 100644 --- a/src/agent/storage-queue/Cargo.toml +++ b/src/agent/storage-queue/Cargo.toml @@ -26,6 +26,6 @@ serde = { version = "1.0", features = ["derive"] } serde_derive = "1.0" serde_json = "1.0" bincode = "1.3" -tokio = { version = "1.29", features = ["full"] } +tokio = { version = "1.32", features = ["full"] } queue-file = "1.4" uuid = { version = "1.4", features = ["serde", "v4"] } From f141050bbbf7ddc6f42881cb82ded38e2924fdb3 Mon Sep 17 00:00:00 2001 From: Kanan B <32438208+kananb@users.noreply.github.com> Date: Fri, 1 Sep 2023 09:32:39 -0700 Subject: [PATCH 08/88] Remove unnecessary method argument (#3473) --- src/ApiService/ApiService/onefuzzlib/notifications/Ado.cs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/ApiService/ApiService/onefuzzlib/notifications/Ado.cs b/src/ApiService/ApiService/onefuzzlib/notifications/Ado.cs index 98b857c9bc..3780bc1b2b 100644 --- a/src/ApiService/ApiService/onefuzzlib/notifications/Ado.cs +++ b/src/ApiService/ApiService/onefuzzlib/notifications/Ado.cs @@ -239,7 +239,7 @@ private static async Async.Task ProcessNotification(IOnefuzzContext context, Con var renderedConfig = RenderAdoTemplate(logTracer, renderer, config, instanceUrl); var ado = new AdoConnector(renderedConfig, project!, client, instanceUrl, logTracer, await GetValidFields(client, project)); - await ado.Process(notificationInfo, config.AdoDuplicateFields); + await ado.Process(notificationInfo); } public static RenderedAdoTemplate RenderAdoTemplate(ILogger logTracer, Renderer renderer, AdoTemplate original, Uri instanceUrl) { @@ -526,7 +526,7 @@ private async Async.Task CreateNew() { return (taskType, document); } - public async Async.Task Process(IList<(string, string)> notificationInfo, Dictionary? duplicateFields) { + public async Async.Task Process(IList<(string, string)> notificationInfo) { var updated = false; WorkItem? oldestWorkItem = null; await foreach (var workItem in ExistingWorkItems(notificationInfo)) { @@ -536,7 +536,7 @@ public async Async.Task Process(IList<(string, string)> notificationInfo, Dictio _logTracer.AddTags(new List<(string, string)> { ("MatchingWorkItemIds", $"{workItem.Id}") }); _logTracer.LogInformation("Found matching work item"); } - if (IsADODuplicateWorkItem(workItem, duplicateFields)) { + if (IsADODuplicateWorkItem(workItem, _config.AdoDuplicateFields)) { continue; } From d4319d209f5cf511b28ef627eec7e66263e9bd9b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Sep 2023 23:55:08 +0000 Subject: [PATCH 09/88] Bump elsa from 1.8.1 to 1.9.0 in /src/agent (#3411) Bumps [elsa](https://github.com/manishearth/elsa) from 1.8.1 to 1.9.0. - [Commits](https://github.com/manishearth/elsa/compare/v1.8.1...v1.9.0) --- updated-dependencies: - dependency-name: elsa dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- src/agent/Cargo.lock | 4 ++-- src/agent/debuggable-module/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock index 10cb7a7531..5a3d59d060 100644 --- a/src/agent/Cargo.lock +++ b/src/agent/Cargo.lock @@ -883,9 +883,9 @@ dependencies = [ [[package]] name = "elsa" -version = "1.8.1" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e0aca8dce8856e420195bd13b6a64de3334235ccc9214e824b86b12bf26283" +checksum = "714f766f3556b44e7e4776ad133fcc3445a489517c25c704ace411bb14790194" dependencies = [ "stable_deref_trait", ] diff --git a/src/agent/debuggable-module/Cargo.toml b/src/agent/debuggable-module/Cargo.toml index 1cd11dfd30..ee464961f7 100644 --- a/src/agent/debuggable-module/Cargo.toml +++ b/src/agent/debuggable-module/Cargo.toml @@ -6,7 +6,7 @@ license = "MIT" [dependencies] anyhow = "1.0" -elsa = "1.8.1" +elsa = "1.9.0" gimli = "0.27.2" goblin = "0.6" iced-x86 = "1.20" From 93b16ec54858bbf14a08516bbe1f9b6c30fed92f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 5 Sep 2023 00:16:15 +0000 Subject: [PATCH 10/88] Bump tempfile from 3.7.1 to 3.8.0 in /src/agent (#3437) Bumps [tempfile](https://github.com/Stebalien/tempfile) from 3.7.1 to 3.8.0. - [Changelog](https://github.com/Stebalien/tempfile/blob/master/CHANGELOG.md) - [Commits](https://github.com/Stebalien/tempfile/compare/v3.7.1...v3.8.0) --- updated-dependencies: - dependency-name: tempfile dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- src/agent/Cargo.lock | 4 ++-- src/agent/coverage/Cargo.toml | 2 +- src/agent/onefuzz-task/Cargo.toml | 2 +- src/agent/onefuzz/Cargo.toml | 2 +- src/agent/win-util/Cargo.toml | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock index 5a3d59d060..b74d4055a7 100644 --- a/src/agent/Cargo.lock +++ b/src/agent/Cargo.lock @@ -3468,9 +3468,9 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.7.1" +version = "3.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc02fddf48964c42031a0b3fe0428320ecf3a73c401040fc0096f97794310651" +checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" dependencies = [ "cfg-if 1.0.0", "fastrand 2.0.0", diff --git a/src/agent/coverage/Cargo.toml b/src/agent/coverage/Cargo.toml index 29e67523d9..cca6dc2e4b 100644 --- a/src/agent/coverage/Cargo.toml +++ b/src/agent/coverage/Cargo.toml @@ -38,5 +38,5 @@ pretty_assertions = "1.4.0" insta = { version = "1.31.0", features = ["glob"] } coverage = { path = "../coverage" } cc = "1.0" -tempfile = "3.7.0" +tempfile = "3.8.0" dunce = "1.0" diff --git a/src/agent/onefuzz-task/Cargo.toml b/src/agent/onefuzz-task/Cargo.toml index cc64929cc6..4b7b4da730 100644 --- a/src/agent/onefuzz-task/Cargo.toml +++ b/src/agent/onefuzz-task/Cargo.toml @@ -46,7 +46,7 @@ strum = "0.25" strum_macros = "0.25" stacktrace-parser = { path = "../stacktrace-parser" } storage-queue = { path = "../storage-queue" } -tempfile = "3.7.0" +tempfile = "3.8.0" thiserror = "1.0" tokio = { version = "1.32", features = ["full"] } tokio-util = { version = "0.7", features = ["full"] } diff --git 
a/src/agent/onefuzz/Cargo.toml b/src/agent/onefuzz/Cargo.toml index 440a018591..b5b7837d8e 100644 --- a/src/agent/onefuzz/Cargo.toml +++ b/src/agent/onefuzz/Cargo.toml @@ -40,7 +40,7 @@ url-escape = "0.1.0" storage-queue = { path = "../storage-queue" } strum = "0.25" strum_macros = "0.25" -tempfile = "3.7.0" +tempfile = "3.8.0" process_control = "4.0" reqwest-retry = { path = "../reqwest-retry" } onefuzz-telemetry = { path = "../onefuzz-telemetry" } diff --git a/src/agent/win-util/Cargo.toml b/src/agent/win-util/Cargo.toml index 1edaa3fc58..2c4f1065bf 100644 --- a/src/agent/win-util/Cargo.toml +++ b/src/agent/win-util/Cargo.toml @@ -33,4 +33,4 @@ features = [ ] [dev-dependencies] -tempfile = "3.7.0" +tempfile = "3.8.0" From 7f7ab370f3b1acd188e23c6e917f3555be0c7464 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 5 Sep 2023 00:34:45 +0000 Subject: [PATCH 11/88] Bump tempfile from 3.7.1 to 3.8.0 in /src/proxy-manager (#3436) Bumps [tempfile](https://github.com/Stebalien/tempfile) from 3.7.1 to 3.8.0. - [Changelog](https://github.com/Stebalien/tempfile/blob/master/CHANGELOG.md) - [Commits](https://github.com/Stebalien/tempfile/compare/v3.7.1...v3.8.0) --- updated-dependencies: - dependency-name: tempfile dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- src/proxy-manager/Cargo.lock | 4 ++-- src/proxy-manager/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/proxy-manager/Cargo.lock b/src/proxy-manager/Cargo.lock index 848ea32156..4eae33643a 100644 --- a/src/proxy-manager/Cargo.lock +++ b/src/proxy-manager/Cargo.lock @@ -1474,9 +1474,9 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.7.1" +version = "3.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc02fddf48964c42031a0b3fe0428320ecf3a73c401040fc0096f97794310651" +checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" dependencies = [ "cfg-if", "fastrand", diff --git a/src/proxy-manager/Cargo.toml b/src/proxy-manager/Cargo.toml index 3a377a5f4d..b2258e994b 100644 --- a/src/proxy-manager/Cargo.toml +++ b/src/proxy-manager/Cargo.toml @@ -31,4 +31,4 @@ reqwest-retry = { path = "../agent/reqwest-retry" } onefuzz-telemetry = { path = "../agent/onefuzz-telemetry" } uuid = { version = "1.4", features = ["serde"] } log = "0.4" -tempfile = "3.7.0" +tempfile = "3.8.0" From b2e6a07ac0cd16780c878430d2235bdce0045be8 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Tue, 5 Sep 2023 11:20:52 -0700 Subject: [PATCH 12/88] Updating requirements.txt to accept >= onefuzztypes. (#3477) * Updating requirements.txt to accept >= onefuzztypes. * Trying to loosen restriction. 
--- src/ci/set-versions.sh | 2 +- src/ci/unset-versions.sh | 2 +- src/cli/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/ci/set-versions.sh b/src/ci/set-versions.sh index 34c30ea37c..174b77f8ea 100755 --- a/src/ci/set-versions.sh +++ b/src/ci/set-versions.sh @@ -14,4 +14,4 @@ SET_VERSIONS="src/pytypes/onefuzztypes/__version__.py src/cli/onefuzz/__version_ SET_REQS="src/cli/requirements.txt" sed -i "s/0.0.0/${VERSION}/" ${SET_VERSIONS} -sed -i "s/onefuzztypes==0.0.0/onefuzztypes==${VERSION}/" ${SET_REQS} +sed -i "s/onefuzztypes~=0.0.0/onefuzztypes==${VERSION}/" ${SET_REQS} diff --git a/src/ci/unset-versions.sh b/src/ci/unset-versions.sh index 95cbfac3d5..9312e6b4d8 100755 --- a/src/ci/unset-versions.sh +++ b/src/ci/unset-versions.sh @@ -12,4 +12,4 @@ SET_VERSIONS="src/pytypes/onefuzztypes/__version__.py src/cli/onefuzz/__version_ SET_REQS="src/cli/requirements.txt" sed -i 's/__version__ = .*/__version__ = "0.0.0"/' ${SET_VERSIONS} -sed -i "s/onefuzztypes==.*/onefuzztypes==0.0.0/" ${SET_REQS} +sed -i "s/onefuzztypes==.*/onefuzztypes~=0.0.0/" ${SET_REQS} diff --git a/src/cli/requirements.txt b/src/cli/requirements.txt index d9977fe03e..475f86acc4 100644 --- a/src/cli/requirements.txt +++ b/src/cli/requirements.txt @@ -21,4 +21,4 @@ opentelemetry-instrumentation-requests==0.37b0 # install rsa version >=4.7 to fix CVE-2020-25658 rsa>=4.7 # onefuzztypes version is set during build -onefuzztypes==0.0.0 +onefuzztypes~=0.0.0 From aa9c9ea1779eba98087acca2380b2f0763a2d402 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 5 Sep 2023 20:19:44 +0000 Subject: [PATCH 13/88] Bump notify from 6.0.1 to 6.1.1 in /src/agent (#3435) Bumps [notify](https://github.com/notify-rs/notify) from 6.0.1 to 6.1.1. - [Release notes](https://github.com/notify-rs/notify/releases) - [Changelog](https://github.com/notify-rs/notify/blob/main/CHANGELOG.md) - [Commits](https://github.com/notify-rs/notify/compare/notify-6.0.1...notify-6.1.1) --- updated-dependencies: - dependency-name: notify dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- src/agent/Cargo.lock | 9 +++++---- src/agent/onefuzz/Cargo.toml | 2 +- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock index b74d4055a7..88aef03b88 100644 --- a/src/agent/Cargo.lock +++ b/src/agent/Cargo.lock @@ -2035,18 +2035,19 @@ dependencies = [ [[package]] name = "notify" -version = "6.0.1" +version = "6.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5738a2795d57ea20abec2d6d76c6081186709c0024187cd5977265eda6598b51" +checksum = "6205bd8bb1e454ad2e27422015fb5e4f2bcc7e08fa8f27058670d208324a4d2d" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.3.3", "filetime", "inotify", "kqueue", "libc", + "log", "mio 0.8.8", "walkdir", - "windows-sys 0.45.0", + "windows-sys 0.48.0", ] [[package]] diff --git a/src/agent/onefuzz/Cargo.toml b/src/agent/onefuzz/Cargo.toml index b5b7837d8e..8c09477891 100644 --- a/src/agent/onefuzz/Cargo.toml +++ b/src/agent/onefuzz/Cargo.toml @@ -18,7 +18,7 @@ futures-util = "0.3" hex = "0.4" lazy_static = "1.4" log = "0.4" -notify = { version = "6.0.1", default-features = false } +notify = { version = "6.1.1", default-features = false } regex = "1.9.1" reqwest = { version = "0.11", features = [ "json", From 74475cc3fa9995f82db0e879c31b1a07b8034cf9 Mon Sep 17 00:00:00 2001 From: George Pollard Date: Wed, 6 Sep 2023 08:44:48 +1200 Subject: [PATCH 14/88] Bump azure_* crates (#3478) --- src/agent/Cargo.lock | 30 ++++++++++-------------------- src/agent/onefuzz-agent/Cargo.toml | 6 +++--- src/agent/onefuzz-task/Cargo.toml | 6 +++--- 3 files changed, 16 insertions(+), 26 deletions(-) diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock index 88aef03b88..1885f8b76d 100644 --- a/src/agent/Cargo.lock +++ b/src/agent/Cargo.lock @@ -200,9 +200,9 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "azure_core" -version = "0.13.0" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b0f0eea648347e40f5f7f7e6bfea4553bcefad0fbf52044ea339e5ce3aba61" +checksum = "2331555a3618a32516c6172a63e9fec4af0edb43c6fcfeb5303a0716fc34498b" dependencies = [ "async-trait", "base64 0.21.2", @@ -214,7 +214,7 @@ dependencies = [ "log", "paste", "pin-project", - "quick-xml 0.29.0", + "quick-xml", "rand 0.8.5", "reqwest", "rustc_version", @@ -227,9 +227,9 @@ dependencies = [ [[package]] name = "azure_storage" -version = "0.13.0" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32d9cfa13ed9acb51cd663e04f343bd550a92b455add96c90de387a9a6bc4dbc" +checksum = "16565073e533053f4e29e6b139de2af758e984108a1cddbb1a432387e7f4474d" dependencies = [ "RustyXML", "async-trait", @@ -250,9 +250,9 @@ dependencies = [ [[package]] name = "azure_storage_blobs" -version = "0.13.1" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57cb0fe58af32a3fb49e560613cb1e4937f9f13161a2c1caf1bba0224435f2af" +checksum = "0900e63940d1ba51039efda3d8cf658157a1c75449081a6e18069d2588809329" dependencies = [ "RustyXML", "azure_core", @@ -510,7 +510,7 @@ name = "cobertura" version = "0.1.0" dependencies = [ "anyhow", - "quick-xml 0.30.0", + "quick-xml", ] [[package]] @@ -2194,7 +2194,7 @@ dependencies = [ "coverage", "debuggable-module", "pretty_assertions", - "quick-xml 0.30.0", + "quick-xml", "serde", "serde_json", ] @@ -2588,16 +2588,6 @@ 
dependencies = [ "snafu", ] -[[package]] -name = "quick-xml" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81b9228215d82c7b61490fec1de287136b5de6f5700f6e58ea9ad61a7964ca51" -dependencies = [ - "memchr", - "serde", -] - [[package]] name = "quick-xml" version = "0.30.0" @@ -3279,7 +3269,7 @@ dependencies = [ "flume", "num_cpus", "queue-file", - "quick-xml 0.30.0", + "quick-xml", "regex", "reqwest", "reqwest-retry", diff --git a/src/agent/onefuzz-agent/Cargo.toml b/src/agent/onefuzz-agent/Cargo.toml index 90f44147c1..bc73d37973 100644 --- a/src/agent/onefuzz-agent/Cargo.toml +++ b/src/agent/onefuzz-agent/Cargo.toml @@ -31,13 +31,13 @@ onefuzz-telemetry = { path = "../onefuzz-telemetry" } backtrace = "0.3" ipc-channel = { git = "https://github.com/servo/ipc-channel", rev = "7f432aa" } dynamic-library = { path = "../dynamic-library" } -azure_core = { version = "0.13", default-features = false, features = [ +azure_core = { version = "0.14", default-features = false, features = [ "enable_reqwest", ] } -azure_storage = { version = "0.13", default-features = false, features = [ +azure_storage = { version = "0.14", default-features = false, features = [ "enable_reqwest", ] } -azure_storage_blobs = { version = "0.13", default-features = false, features = [ +azure_storage_blobs = { version = "0.14", default-features = false, features = [ "enable_reqwest", ] } diff --git a/src/agent/onefuzz-task/Cargo.toml b/src/agent/onefuzz-task/Cargo.toml index 4b7b4da730..def8d8eab2 100644 --- a/src/agent/onefuzz-task/Cargo.toml +++ b/src/agent/onefuzz-task/Cargo.toml @@ -62,13 +62,13 @@ chrono = { version = "0.4", default-features = false, features = [ ] } ipc-channel = { git = "https://github.com/servo/ipc-channel", rev = "7f432aa" } -azure_core = { version = "0.13", default-features = false, features = [ +azure_core = { version = "0.14", default-features = false, features = [ "enable_reqwest", ] } -azure_storage = { version = "0.13", default-features = false, features = [ +azure_storage = { version = "0.14", default-features = false, features = [ "enable_reqwest", ] } -azure_storage_blobs = { version = "0.13", default-features = false, features = [ +azure_storage_blobs = { version = "0.14", default-features = false, features = [ "enable_reqwest", ] } From 64699ed2917d64ec87813af41fa7eb464e96eace Mon Sep 17 00:00:00 2001 From: Adam <103067949+AdamL-Microsoft@users.noreply.github.com> Date: Wed, 6 Sep 2023 09:11:09 -0700 Subject: [PATCH 15/88] Release 8.8.0 (#3466) * Release 8.8.0 --- CHANGELOG.md | 20 ++++++++++++++++++++ CURRENT_VERSION | 2 +- 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8d46ea2a0e..f02721fa44 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,26 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## 8.8.0 + +### Added + +* Agent: Added Mariner Linux support for agent VMs [#3306](https://github.com/microsoft/onefuzz/pull/3306) +* Service: Added support for custom ado fields that mark work items as duplicate [#3467](https://github.com/microsoft/onefuzz/pull/3467) +* Service: Permanently store OneFuzz job result data - # crashing input, # regression crashing input, etc. 
- in Azure storage [#3380](https://github.com/microsoft/onefuzz/pull/3380), [#3439](https://github.com/microsoft/onefuzz/pull/3439) +* Service: Added validation for Iteration/AreaPath on notifications when a job is submitted with a notification config and for `onefuzz debug notification test_template` [#3386](https://github.com/microsoft/onefuzz/pull/3386) + +### Changed + +* Agent: Updated libfuzzer-fuzz basic template to include required args and make it match cli [#3429](https://github.com/microsoft/onefuzz/pull/3429) +* Agent: Downgraded some debug logs from warn to debug [#3450](https://github.com/microsoft/onefuzz/pull/3450) +* CLI: Removed CLI commands from the local fuzzing tasks as they can now be described via yaml template [#3428](https://github.com/microsoft/onefuzz/pull/3428) +* Service: AutoScale table entries are now deleted on VMSS shutdown [#3455](https://github.com/microsoft/onefuzz/pull/3455) + +### Fixed + +* Agent: Fixed local path generation [#3432](https://github.com/microsoft/onefuzz/pull/3432), [#3460](https://github.com/microsoft/onefuzz/pull/3460) + ## 8.7.1 ### Fixed diff --git a/CURRENT_VERSION b/CURRENT_VERSION index efeecbe2c5..cfc27b4fab 100644 --- a/CURRENT_VERSION +++ b/CURRENT_VERSION @@ -1 +1 @@ -8.7.1 \ No newline at end of file +8.8.0 \ No newline at end of file From a3fb480ac57cb8cf4e61f2e8526bd2610f6a922b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 6 Sep 2023 22:04:55 +0000 Subject: [PATCH 16/88] Bump clap from 4.3.21 to 4.4.2 in /src/agent (#3484) Bumps [clap](https://github.com/clap-rs/clap) from 4.3.21 to 4.4.2. - [Release notes](https://github.com/clap-rs/clap/releases) - [Changelog](https://github.com/clap-rs/clap/blob/master/CHANGELOG.md) - [Commits](https://github.com/clap-rs/clap/compare/v4.3.21...v4.4.2) --- updated-dependencies: - dependency-name: clap dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- src/agent/Cargo.lock | 23 ++++++++++------------- src/agent/coverage/Cargo.toml | 2 +- src/agent/debuggable-module/Cargo.toml | 2 +- src/agent/dynamic-library/Cargo.toml | 2 +- src/agent/onefuzz/Cargo.toml | 2 +- 5 files changed, 14 insertions(+), 17 deletions(-) diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock index 1885f8b76d..0e750eb498 100644 --- a/src/agent/Cargo.lock +++ b/src/agent/Cargo.lock @@ -60,16 +60,15 @@ dependencies = [ [[package]] name = "anstream" -version = "0.3.2" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163" +checksum = "b1f58811cfac344940f1a400b6e6231ce35171f614f26439e80f8c1465c5cc0c" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", - "is-terminal", "utf8parse", ] @@ -99,9 +98,9 @@ dependencies = [ [[package]] name = "anstyle-wincon" -version = "1.0.2" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c677ab05e09154296dd37acecd46420c17b9713e8366facafa8fc0885167cf4c" +checksum = "58f54d10c6dfa51283a066ceab3ec1ab78d13fae00aa49243a45e4571fb79dfd" dependencies = [ "anstyle", "windows-sys 0.48.0", @@ -465,33 +464,31 @@ dependencies = [ [[package]] name = "clap" -version = "4.3.21" +version = "4.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c27cdf28c0f604ba3f512b0c9a409f8de8513e4816705deb0498b627e7c3a3fd" +checksum = "6a13b88d2c62ff462f88e4a121f17a82c1af05693a2f192b5c38d14de73c19f6" dependencies = [ "clap_builder", "clap_derive", - "once_cell", ] [[package]] name = "clap_builder" -version = "4.3.21" +version = "4.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08a9f1ab5e9f01a9b81f202e8562eb9a10de70abf9eaeac1be465c28b75aa4aa" +checksum = "2bb9faaa7c2ef94b2743a21f5a29e6f0010dff4caa69ac8e9d6cf8b6fa74da08" dependencies = [ "anstream", "anstyle", "clap_lex", - "once_cell", "strsim", ] [[package]] name = "clap_derive" -version = "4.3.12" +version = "4.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54a9bb5758fc5dfe728d1019941681eccaf0cf8a4189b692a0ee2f2ecf90a050" +checksum = "0862016ff20d69b84ef8247369fabf5c008a7417002411897d40ee1f4532b873" dependencies = [ "heck", "proc-macro2 1.0.66", diff --git a/src/agent/coverage/Cargo.toml b/src/agent/coverage/Cargo.toml index cca6dc2e4b..83a47f9a18 100644 --- a/src/agent/coverage/Cargo.toml +++ b/src/agent/coverage/Cargo.toml @@ -32,7 +32,7 @@ pete = "0.10" procfs = { version = "0.15.1", default-features = false, features = ["flate2"] } [dev-dependencies] -clap = { version = "4.3", features = ["derive"] } +clap = { version = "4.4", features = ["derive"] } env_logger = "0.10.0" pretty_assertions = "1.4.0" insta = { version = "1.31.0", features = ["glob"] } diff --git a/src/agent/debuggable-module/Cargo.toml b/src/agent/debuggable-module/Cargo.toml index ee464961f7..811de1a15b 100644 --- a/src/agent/debuggable-module/Cargo.toml +++ b/src/agent/debuggable-module/Cargo.toml @@ -21,4 +21,4 @@ symbolic = { version = "12.3", features = [ thiserror = "1.0" [dev-dependencies] -clap = { version = "4.3", features = ["derive"] } +clap = { version = "4.4", features = ["derive"] } diff --git a/src/agent/dynamic-library/Cargo.toml b/src/agent/dynamic-library/Cargo.toml index 604d221700..e8a93fb387 100644 --- 
a/src/agent/dynamic-library/Cargo.toml +++ b/src/agent/dynamic-library/Cargo.toml @@ -6,7 +6,7 @@ license = "MIT" [dependencies] anyhow = "1.0" -clap = { version = "4.3.0", features = ["derive"] } +clap = { version = "4.4.2", features = ["derive"] } lazy_static = "1.4" regex = "1.9" thiserror = "1.0" diff --git a/src/agent/onefuzz/Cargo.toml b/src/agent/onefuzz/Cargo.toml index 8c09477891..18295e357b 100644 --- a/src/agent/onefuzz/Cargo.toml +++ b/src/agent/onefuzz/Cargo.toml @@ -67,5 +67,5 @@ rstack = "0.3" proc-maps = { version = "0.3", default-features = false } [dev-dependencies] -clap = { version = "4.3.0", features = ["derive"] } +clap = { version = "4.4.2", features = ["derive"] } pretty_assertions = "1.4.0" From 59c52d61829e15764ad2ce3cec7ba2e032fd9469 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 6 Sep 2023 22:32:11 +0000 Subject: [PATCH 17/88] Bump gimli from 0.27.3 to 0.28.0 in /src/agent (#3414) Bumps [gimli](https://github.com/gimli-rs/gimli) from 0.27.3 to 0.28.0. - [Changelog](https://github.com/gimli-rs/gimli/blob/master/CHANGELOG.md) - [Commits](https://github.com/gimli-rs/gimli/compare/0.27.3...0.28.0) --- updated-dependencies: - dependency-name: gimli dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- src/agent/Cargo.lock | 30 ++++++++++++++++++++------ src/agent/debuggable-module/Cargo.toml | 2 +- 2 files changed, 24 insertions(+), 8 deletions(-) diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock index 0e750eb498..e41f220010 100644 --- a/src/agent/Cargo.lock +++ b/src/agent/Cargo.lock @@ -14,7 +14,7 @@ version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f4fa78e18c64fce05e902adecd7a5eed15a5e0a3439f7b0e169f0252214865e3" dependencies = [ - "gimli", + "gimli 0.27.3", ] [[package]] @@ -746,7 +746,7 @@ dependencies = [ "anyhow", "clap", "elsa", - "gimli", + "gimli 0.28.0", "goblin", "iced-x86", "log", @@ -965,6 +965,12 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" +[[package]] +name = "fallible-iterator" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649" + [[package]] name = "fastrand" version = "1.9.0" @@ -1254,8 +1260,18 @@ version = "0.27.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e" dependencies = [ - "fallible-iterator", - "indexmap 1.9.3", + "fallible-iterator 0.2.0", + "stable_deref_trait", +] + +[[package]] +name = "gimli" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0" +dependencies = [ + "fallible-iterator 0.3.0", + "indexmap 2.0.0", "stable_deref_trait", ] @@ -2401,7 +2417,7 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "82040a392923abe6279c00ab4aff62d5250d1c8555dc780e4b02783a7aa74863" dependencies = [ - "fallible-iterator", + "fallible-iterator 0.2.0", "scroll", "uuid", ] @@ -3356,9 +3372,9 @@ dependencies = [ "dmsort", "elementtree", "elsa", - "fallible-iterator", + "fallible-iterator 0.2.0", "flate2", - "gimli", + 
"gimli 0.27.3", "goblin", "lazy_static", "nom", diff --git a/src/agent/debuggable-module/Cargo.toml b/src/agent/debuggable-module/Cargo.toml index 811de1a15b..a227432830 100644 --- a/src/agent/debuggable-module/Cargo.toml +++ b/src/agent/debuggable-module/Cargo.toml @@ -7,7 +7,7 @@ license = "MIT" [dependencies] anyhow = "1.0" elsa = "1.9.0" -gimli = "0.27.2" +gimli = "0.28.0" goblin = "0.6" iced-x86 = "1.20" log = "0.4.17" From dd9e2663edbac890d855d29ff2c7d1b8659c39e7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 6 Sep 2023 23:00:14 +0000 Subject: [PATCH 18/88] Bump clap from 4.3.21 to 4.4.2 in /src/proxy-manager (#3474) Bumps [clap](https://github.com/clap-rs/clap) from 4.3.21 to 4.4.2. - [Release notes](https://github.com/clap-rs/clap/releases) - [Changelog](https://github.com/clap-rs/clap/blob/master/CHANGELOG.md) - [Commits](https://github.com/clap-rs/clap/compare/v4.3.21...v4.4.2) --- updated-dependencies: - dependency-name: clap dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- src/proxy-manager/Cargo.lock | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/src/proxy-manager/Cargo.lock b/src/proxy-manager/Cargo.lock index 4eae33643a..e86da294a7 100644 --- a/src/proxy-manager/Cargo.lock +++ b/src/proxy-manager/Cargo.lock @@ -43,16 +43,15 @@ dependencies = [ [[package]] name = "anstream" -version = "0.3.2" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163" +checksum = "b1f58811cfac344940f1a400b6e6231ce35171f614f26439e80f8c1465c5cc0c" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", - "is-terminal", "utf8parse", ] @@ -82,9 +81,9 @@ dependencies = [ [[package]] name = "anstyle-wincon" -version = "1.0.2" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c677ab05e09154296dd37acecd46420c17b9713e8366facafa8fc0885167cf4c" +checksum = "58f54d10c6dfa51283a066ceab3ec1ab78d13fae00aa49243a45e4571fb79dfd" dependencies = [ "anstyle", "windows-sys", @@ -236,23 +235,22 @@ dependencies = [ [[package]] name = "clap" -version = "4.3.21" +version = "4.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c27cdf28c0f604ba3f512b0c9a409f8de8513e4816705deb0498b627e7c3a3fd" +checksum = "6a13b88d2c62ff462f88e4a121f17a82c1af05693a2f192b5c38d14de73c19f6" dependencies = [ "clap_builder", ] [[package]] name = "clap_builder" -version = "4.3.21" +version = "4.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08a9f1ab5e9f01a9b81f202e8562eb9a10de70abf9eaeac1be465c28b75aa4aa" +checksum = "2bb9faaa7c2ef94b2743a21f5a29e6f0010dff4caa69ac8e9d6cf8b6fa74da08" dependencies = [ "anstream", "anstyle", "clap_lex", - "once_cell", "strsim", ] From 6e2cb14ffb201e28d349666ecc4044044a8285da Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 6 Sep 2023 23:22:43 +0000 Subject: [PATCH 19/88] Bump winreg from 0.50.0 to 0.51.0 in /src/agent (#3434) Bumps [winreg](https://github.com/gentoo90/winreg-rs) from 0.50.0 to 0.51.0. 
- [Release notes](https://github.com/gentoo90/winreg-rs/releases) - [Changelog](https://github.com/gentoo90/winreg-rs/blob/master/CHANGELOG.md) - [Commits](https://github.com/gentoo90/winreg-rs/compare/v0.50.0...v0.51.0) --- updated-dependencies: - dependency-name: winreg dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Adam <103067949+AdamL-Microsoft@users.noreply.github.com> --- src/agent/Cargo.lock | 10 +++++----- src/agent/dynamic-library/Cargo.toml | 2 +- src/agent/onefuzz/Cargo.toml | 2 +- src/agent/win-util/Cargo.toml | 2 +- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock index e41f220010..8d1689c440 100644 --- a/src/agent/Cargo.lock +++ b/src/agent/Cargo.lock @@ -860,7 +860,7 @@ dependencies = [ "regex", "thiserror", "windows", - "winreg 0.50.0", + "winreg 0.51.0", ] [[package]] @@ -2165,7 +2165,7 @@ dependencies = [ "urlparse", "uuid", "windows", - "winreg 0.50.0", + "winreg 0.51.0", ] [[package]] @@ -3954,7 +3954,7 @@ dependencies = [ "os_pipe", "tempfile", "windows", - "winreg 0.50.0", + "winreg 0.51.0", ] [[package]] @@ -4152,9 +4152,9 @@ dependencies = [ [[package]] name = "winreg" -version = "0.50.0" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +checksum = "937f3df7948156640f46aacef17a70db0de5917bda9c92b0f751f3a955b588fc" dependencies = [ "cfg-if 1.0.0", "windows-sys 0.48.0", diff --git a/src/agent/dynamic-library/Cargo.toml b/src/agent/dynamic-library/Cargo.toml index e8a93fb387..ad3cc482d2 100644 --- a/src/agent/dynamic-library/Cargo.toml +++ b/src/agent/dynamic-library/Cargo.toml @@ -13,7 +13,7 @@ thiserror = "1.0" [target.'cfg(windows)'.dependencies] debugger = { path = "../debugger" } -winreg = "0.50" +winreg = "0.51" [dependencies.windows] version = "0.48" diff --git a/src/agent/onefuzz/Cargo.toml b/src/agent/onefuzz/Cargo.toml index 18295e357b..e77fcebe69 100644 --- a/src/agent/onefuzz/Cargo.toml +++ b/src/agent/onefuzz/Cargo.toml @@ -49,7 +49,7 @@ stacktrace-parser = { path = "../stacktrace-parser" } backoff = { version = "0.4", features = ["tokio"] } [target.'cfg(target_family = "windows")'.dependencies] -winreg = "0.50" +winreg = "0.51" input-tester = { path = "../input-tester" } debugger = { path = "../debugger" } windows = { version = "0.48", features = [ diff --git a/src/agent/win-util/Cargo.toml b/src/agent/win-util/Cargo.toml index 2c4f1065bf..460ee0e197 100644 --- a/src/agent/win-util/Cargo.toml +++ b/src/agent/win-util/Cargo.toml @@ -12,7 +12,7 @@ log = "0.4" os_pipe = "1.1" [target.'cfg(windows)'.dependencies] -winreg = "0.50" +winreg = "0.51" [dependencies.windows] version = "0.48" From d2d57a8130cfafaacaed92b15901ac480491be14 Mon Sep 17 00:00:00 2001 From: Teo Voinea <58236992+tevoinea@users.noreply.github.com> Date: Thu, 7 Sep 2023 12:44:04 -0400 Subject: [PATCH 20/88] Starting integration tests (#3438) * Starting integration tests * Ready to test the test * Parametrize test * checkpoint * Test works * Run integration tests in pipeline * fmt * . * -p * Install clang * quotes not required in yaml? * Hopefully fixed windows? * Try without killondrop * lint * small test * another test * Reuse core name * Wrong step * bump tokio? 
* Try with rust * make build happy * Bump pete and small clean up * Clean up and make the test pass regularly * fix broken ci * Lower the poll timeout * Set the timeout in a nicer way * fix windows * fmt * Include and copy pdbs * Ignore if pdb is missing on linux * It takes too long for coverage to be generated * lint * Only warn on missing coverage since it's flaky * Fix windows build * Small clean up * Try lowering the poll delay * fix coverage * PR comments * . * Apparently make is missing? * Remove aggressive step skipping in CI --- .github/workflows/ci.yml | 16 +- src/agent/Cargo.lock | 4 +- src/agent/coverage/Cargo.toml | 2 +- .../coverage/src/record/linux/debugger.rs | 11 +- src/agent/onefuzz-task/Cargo.toml | 9 + src/agent/onefuzz-task/src/lib.rs | 9 + .../src/tasks/coverage/generic.rs | 7 +- .../src/tasks/fuzz/libfuzzer/common.rs | 2 +- .../tests/template_integration.rs | 212 ++++++++++++++++++ .../tests/templates/libfuzzer_basic.yml | 33 +++ src/agent/onefuzz/Cargo.toml | 2 +- src/agent/onefuzz/src/libfuzzer.rs | 22 +- src/ci/agent.sh | 2 +- 13 files changed, 310 insertions(+), 21 deletions(-) create mode 100644 src/agent/onefuzz-task/src/lib.rs create mode 100644 src/agent/onefuzz-task/tests/template_integration.rs create mode 100644 src/agent/onefuzz-task/tests/templates/libfuzzer_basic.yml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2dd85d7c92..99e9bddd32 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -79,16 +79,24 @@ jobs: key: ${{env.ACTIONS_CACHE_KEY_DATE}} # additional key for cache-busting workspaces: src/agent - name: Linux Prereqs - if: runner.os == 'Linux' && steps.cache-agent-artifacts.outputs.cache-hit != 'true' + if: runner.os == 'Linux' run: | sudo apt-get -y update - sudo apt-get -y install libssl-dev libunwind-dev build-essential pkg-config + sudo apt-get -y install libssl-dev libunwind-dev build-essential pkg-config clang + - name: Clone onefuzz-samples + run: git clone https://github.com/microsoft/onefuzz-samples + - name: Prepare for agent integration tests + shell: bash + working-directory: ./onefuzz-samples/examples/simple-libfuzzer + run: | + make + mkdir -p ../../../src/agent/onefuzz-task/tests/targets/simple + cp fuzz.exe ../../../src/agent/onefuzz-task/tests/targets/simple/fuzz.exe + cp *.pdb ../../../src/agent/onefuzz-task/tests/targets/simple/ 2>/dev/null || : - name: Install Rust Prereqs - if: steps.rust-build-cache.outputs.cache-hit != 'true' && steps.cache-agent-artifacts.outputs.cache-hit != 'true' shell: bash run: src/ci/rust-prereqs.sh - run: src/ci/agent.sh - if: steps.cache-agent-artifacts.outputs.cache-hit != 'true' shell: bash - name: Upload coverage to Codecov uses: codecov/codecov-action@v3 diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock index 8d1689c440..1394c8cd06 100644 --- a/src/agent/Cargo.lock +++ b/src/agent/Cargo.lock @@ -2450,9 +2450,9 @@ checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" [[package]] name = "pete" -version = "0.10.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "229eb6b3cb0d3d075727c614687ab08384cac3b75fa100e1e08b30d7bee39d00" +checksum = "0f09c1c1ad40df294ff8643fe88a3dc64fff3293b6bc0ed9f71aff71f7086cbd" dependencies = [ "libc", "memoffset 0.8.0", diff --git a/src/agent/coverage/Cargo.toml b/src/agent/coverage/Cargo.toml index 83a47f9a18..70a55cd07f 100644 --- a/src/agent/coverage/Cargo.toml +++ b/src/agent/coverage/Cargo.toml @@ -26,7 +26,7 @@ debugger = { path = "../debugger" 
}

 [target.'cfg(target_os = "linux")'.dependencies]
 nix = "0.26"
-pete = "0.10"
+pete = "0.12"
 # For procfs, opt out of the `chrono` feature; it pulls in an old version
 # of `time`. We do not use the methods that the `chrono` feature enables.
 procfs = { version = "0.15.1", default-features = false, features = ["flate2"] }
diff --git a/src/agent/coverage/src/record/linux/debugger.rs b/src/agent/coverage/src/record/linux/debugger.rs
index e2502e8d2e..c4512c4fbb 100644
--- a/src/agent/coverage/src/record/linux/debugger.rs
+++ b/src/agent/coverage/src/record/linux/debugger.rs
@@ -4,6 +4,7 @@
 use std::collections::BTreeMap;
 use std::io::Read;
 use std::process::{Child, Command};
+use std::time::Duration;

 use anyhow::{bail, format_err, Result};
 use debuggable_module::path::FilePath;
@@ -75,7 +76,11 @@ impl<'eh> Debugger<'eh> {
         // These calls should also be unnecessary no-ops, but we really want to avoid any dangling
         // or zombie child processes.
         let _ = child.kill();
-        let _ = child.wait();
+
+        // We don't need to call child.wait() because of the following series of events:
+        // 1. pete, our ptracing library, spawns the child process with ptrace flags
+        // 2. the rust stdlib sets SIG_IGN as the SIGCHLD handler: https://github.com/rust-lang/rust/issues/110317
+        // 3. the linux kernel automatically reaps pids when the above 2 hold: https://github.com/torvalds/linux/blob/44149752e9987a9eac5ad78e6d3a20934b5e018d/kernel/signal.c#L2089-L2110

         let output = Output {
             status,
@@ -198,8 +203,8 @@ impl DebuggerContext {
     pub fn new() -> Self {
         let breakpoints = Breakpoints::default();
         let images = None;
-        let tracer = Ptracer::new();
-
+        let mut tracer = Ptracer::new();
+        *tracer.poll_delay_mut() = Duration::from_millis(1);
         Self {
             breakpoints,
             images,
diff --git a/src/agent/onefuzz-task/Cargo.toml b/src/agent/onefuzz-task/Cargo.toml
index def8d8eab2..d5588a58e6 100644
--- a/src/agent/onefuzz-task/Cargo.toml
+++ b/src/agent/onefuzz-task/Cargo.toml
@@ -6,6 +6,14 @@
 edition = "2021"
 publish = false
 license = "MIT"

+[lib]
+path = "src/lib.rs"
+name = "onefuzz_task_lib"
+
+[[bin]]
+path = "src/main.rs"
+name = "onefuzz-task"
+
 [features]
 integration_test = []

@@ -77,3 +85,4 @@ schemars = { version = "0.8.12", features = ["uuid1"] }

 [dev-dependencies]
 pretty_assertions = "1.4"
+tempfile = "3.8"
diff --git a/src/agent/onefuzz-task/src/lib.rs b/src/agent/onefuzz-task/src/lib.rs
new file mode 100644
index 0000000000..997eea549d
--- /dev/null
+++ b/src/agent/onefuzz-task/src/lib.rs
@@ -0,0 +1,9 @@
+#[macro_use]
+extern crate anyhow;
+#[macro_use]
+extern crate clap;
+#[macro_use]
+extern crate onefuzz_telemetry;
+
+pub mod local;
+pub mod tasks;
diff --git a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs
index 4fde9efb31..eeaa861c00 100644
--- a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs
+++ b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs
@@ -161,7 +161,7 @@ impl CoverageTask {
         }

         if seen_inputs {
-            context.report_coverage_stats().await?;
+            context.report_coverage_stats().await;
             context.save_and_sync_coverage().await?;
         }

@@ -454,7 +454,7 @@ impl<'a> TaskContext<'a> {
         Ok(count)
     }

-    pub async fn report_coverage_stats(&self) -> Result<()> {
+    pub async fn report_coverage_stats(&self) {
         use EventData::*;

         let coverage = RwLock::read(&self.coverage).await;
@@ -471,7 +471,6 @@ impl<'a> TaskContext<'a> {
             ]),
         )
         .await;
-        Ok(())
     }

     pub async fn save_coverage(
@@ -565,7 +564,7 @@ impl<'a> Processor for TaskContext<'a> {
         self.heartbeat.alive();
self.record_input(input).await?; - self.report_coverage_stats().await?; + self.report_coverage_stats().await; self.save_and_sync_coverage().await?; Ok(()) diff --git a/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs b/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs index bfd9f3f5cc..32f3372958 100644 --- a/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs +++ b/src/agent/onefuzz-task/src/tasks/fuzz/libfuzzer/common.rs @@ -272,7 +272,7 @@ where info!("config is: {:?}", self.config); let fuzzer = L::from_config(&self.config).await?; - let mut running = fuzzer.fuzz(crash_dir.path(), local_inputs, &inputs).await?; + let mut running = fuzzer.fuzz(crash_dir.path(), local_inputs, &inputs)?; info!("child is: {:?}", running); diff --git a/src/agent/onefuzz-task/tests/template_integration.rs b/src/agent/onefuzz-task/tests/template_integration.rs new file mode 100644 index 0000000000..d0e68e5d02 --- /dev/null +++ b/src/agent/onefuzz-task/tests/template_integration.rs @@ -0,0 +1,212 @@ +use std::{ + collections::HashSet, + ffi::OsStr, + path::{Path, PathBuf}, +}; + +use tokio::fs; + +use anyhow::Result; +use log::info; +use onefuzz_task_lib::local::template; +use std::time::Duration; +use tokio::time::timeout; + +macro_rules! libfuzzer_tests { + ($($name:ident: $value:expr,)*) => { + $( + #[tokio::test(flavor = "multi_thread")] + #[cfg_attr(not(feature = "integration_test"), ignore)] + async fn $name() { + let _ = env_logger::builder().is_test(true).try_init(); + let (config, libfuzzer_target) = $value; + test_libfuzzer_basic_template(PathBuf::from(config), PathBuf::from(libfuzzer_target)).await; + } + )* + } +} + +// This is the format for adding other templates/targets for this macro +// $TEST_NAME: ($RELATIVE_PATH_TO_TEMPLATE, $RELATIVE_PATH_TO_TARGET), +// Make sure that you place the target binary in CI +libfuzzer_tests! 
{
+    libfuzzer_basic: ("./tests/templates/libfuzzer_basic.yml", "./tests/targets/simple/fuzz.exe"),
+}
+
+async fn test_libfuzzer_basic_template(config: PathBuf, libfuzzer_target: PathBuf) {
+    assert_exists_and_is_file(&config).await;
+    assert_exists_and_is_file(&libfuzzer_target).await;
+
+    let test_layout = create_test_directory(&config, &libfuzzer_target)
+        .await
+        .expect("Failed to create test directory layout");
+
+    info!("Executed test from: {:?}", &test_layout.root);
+    info!("Running template for 1 minute...");
+    if let Ok(template_result) = timeout(
+        Duration::from_secs(60),
+        template::launch(&test_layout.config, None),
+    )
+    .await
+    {
+        // Something went wrong when running the template, so let's print out the template to be helpful
+        info!("Printing config as it was used in the test:");
+        info!("{:?}", fs::read_to_string(&test_layout.config).await);
+        template_result.unwrap();
+    }
+
+    verify_test_layout_structure_did_not_change(&test_layout).await;
+    assert_directory_is_not_empty(&test_layout.inputs).await;
+    assert_directory_is_not_empty(&test_layout.crashes).await;
+    verify_coverage_dir(&test_layout.coverage).await;
+
+    let _ = fs::remove_dir_all(&test_layout.root).await;
+}
+
+async fn verify_test_layout_structure_did_not_change(test_layout: &TestLayout) {
+    assert_exists_and_is_dir(&test_layout.root).await;
+    assert_exists_and_is_file(&test_layout.config).await;
+    assert_exists_and_is_file(&test_layout.target_exe).await;
+    assert_exists_and_is_dir(&test_layout.crashdumps).await;
+    assert_exists_and_is_dir(&test_layout.coverage).await;
+    assert_exists_and_is_dir(&test_layout.crashes).await;
+    assert_exists_and_is_dir(&test_layout.inputs).await;
+    assert_exists_and_is_dir(&test_layout.regression_reports).await;
+}
+
+async fn verify_coverage_dir(coverage: &Path) {
+    warn_if_empty(coverage).await;
+}
+
+async fn assert_exists_and_is_dir(dir: &Path) {
+    assert!(dir.exists(), "Expected directory to exist. dir = {:?}", dir);
+    assert!(
+        dir.is_dir(),
+        "Expected path to be a directory. dir = {:?}",
+        dir
+    );
+}
+
+async fn warn_if_empty(dir: &Path) {
+    if !dir_has_entries(dir).await {
+        println!("Expected directory to not be empty: {:?}", dir);
+    }
+}
+
+async fn assert_exists_and_is_file(file: &Path) {
+    assert!(file.exists(), "Expected file to exist. file = {:?}", file);
+    assert!(
+        file.is_file(),
+        "Expected path to be a file. file = {:?}",
+        file
+    );
+}
+
+async fn dir_has_entries(dir: &Path) -> bool {
+    fs::read_dir(dir)
+        .await
+        .unwrap_or_else(|_| panic!("Failed to list files in directory. dir = {:?}", dir))
+        .next_entry()
+        .await
+        .unwrap_or_else(|_| {
+            panic!(
+                "Failed to get next file in directory listing. dir = {:?}",
+                dir
+            )
+        })
+        .is_some()
+}
+
+async fn assert_directory_is_not_empty(dir: &Path) {
+    assert!(
+        dir_has_entries(dir).await,
+        "Expected directory to not be empty. dir = {:?}",
+        dir
+    );
+}
+
+async fn create_test_directory(config: &Path, target_exe: &Path) -> Result<TestLayout> {
+    let mut test_directory = PathBuf::from(".").join(uuid::Uuid::new_v4().to_string());
+    fs::create_dir_all(&test_directory).await?;
+    test_directory = test_directory.canonicalize()?;
+
+    let mut inputs_directory = PathBuf::from(&test_directory).join("inputs");
+    fs::create_dir(&inputs_directory).await?;
+    inputs_directory = inputs_directory.canonicalize()?;
+
+    let mut crashes_directory = PathBuf::from(&test_directory).join("crashes");
+    fs::create_dir(&crashes_directory).await?;
+    crashes_directory = crashes_directory.canonicalize()?;
+
+    let mut crashdumps_directory = PathBuf::from(&test_directory).join("crashdumps");
+    fs::create_dir(&crashdumps_directory).await?;
+    crashdumps_directory = crashdumps_directory.canonicalize()?;
+
+    let mut coverage_directory = PathBuf::from(&test_directory).join("coverage");
+    fs::create_dir(&coverage_directory).await?;
+    coverage_directory = coverage_directory.canonicalize()?;
+
+    let mut regression_reports_directory =
+        PathBuf::from(&test_directory).join("regression_reports");
+    fs::create_dir(&regression_reports_directory).await?;
+    regression_reports_directory = regression_reports_directory.canonicalize()?;
+
+    let mut target_in_test = PathBuf::from(&test_directory).join("fuzz.exe");
+    fs::copy(target_exe, &target_in_test).await?;
+    target_in_test = target_in_test.canonicalize()?;
+
+    let mut interesting_extensions = HashSet::new();
+    interesting_extensions.insert(Some(OsStr::new("so")));
+    interesting_extensions.insert(Some(OsStr::new("pdb")));
+    let mut f = fs::read_dir(target_exe.parent().unwrap()).await?;
+    while let Ok(Some(f)) = f.next_entry().await {
+        if interesting_extensions.contains(&f.path().extension()) {
+            fs::copy(f.path(), PathBuf::from(&test_directory).join(f.file_name())).await?;
+        }
+    }
+
+    let mut config_data = fs::read_to_string(config).await?;
+
+    config_data = config_data
+        .replace("{TARGET_PATH}", target_in_test.to_str().unwrap())
+        .replace("{INPUTS_PATH}", inputs_directory.to_str().unwrap())
+        .replace("{CRASHES_PATH}", crashes_directory.to_str().unwrap())
+        .replace("{CRASHDUMPS_PATH}", crashdumps_directory.to_str().unwrap())
+        .replace("{COVERAGE_PATH}", coverage_directory.to_str().unwrap())
+        .replace(
+            "{REGRESSION_REPORTS_PATH}",
+            regression_reports_directory.to_str().unwrap(),
+        )
+        .replace("{TEST_DIRECTORY}", test_directory.to_str().unwrap());
+
+    let mut config_in_test =
+        PathBuf::from(&test_directory).join(config.file_name().unwrap_or_else(|| {
+            panic!("Failed to get file name for config. config = {:?}", config)
+        }));
+
+    fs::write(&config_in_test, &config_data).await?;
+    config_in_test = config_in_test.canonicalize()?;
+
+    Ok(TestLayout {
+        root: test_directory,
+        config: config_in_test,
+        target_exe: target_in_test,
+        inputs: inputs_directory,
+        crashes: crashes_directory,
+        crashdumps: crashdumps_directory,
+        coverage: coverage_directory,
+        regression_reports: regression_reports_directory,
+    })
+}
+
+#[derive(Debug)]
+struct TestLayout {
+    root: PathBuf,
+    config: PathBuf,
+    target_exe: PathBuf,
+    inputs: PathBuf,
+    crashes: PathBuf,
+    crashdumps: PathBuf,
+    coverage: PathBuf,
+    regression_reports: PathBuf,
+}
diff --git a/src/agent/onefuzz-task/tests/templates/libfuzzer_basic.yml b/src/agent/onefuzz-task/tests/templates/libfuzzer_basic.yml
new file mode 100644
index 0000000000..f6740cbc96
--- /dev/null
+++ b/src/agent/onefuzz-task/tests/templates/libfuzzer_basic.yml
@@ -0,0 +1,33 @@
+# yaml-language-server: $schema=../../src/local/schema.json
+
+required_args: &required_args
+  target_exe: '{TARGET_PATH}'
+  inputs: &inputs '{INPUTS_PATH}' # A folder containing your inputs
+  crashes: &crashes '{CRASHES_PATH}' # The folder where you want the crashing inputs to be output
+  crashdumps: '{CRASHDUMPS_PATH}' # The folder where you want the crash dumps to be output
+  coverage: '{COVERAGE_PATH}' # The folder where you want the code coverage to be output
+  regression_reports: '{REGRESSION_REPORTS_PATH}' # The folder where you want the regression reports to be output
+  target_env: {
+    'LD_LIBRARY_PATH': '{TEST_DIRECTORY}',
+  }
+  target_options: []
+  check_fuzzer_help: false
+
+tasks:
+  - type: LibFuzzer
+    <<: *required_args
+    readonly_inputs: []
+
+  - type: LibfuzzerRegression
+    <<: *required_args
+
+  - type: "LibfuzzerCrashReport"
+    <<: *required_args
+    input_queue: *crashes
+
+  - type: "Coverage"
+    <<: *required_args
+    target_options:
+      - "{input}"
+    input_queue: *inputs
+    readonly_inputs: [*inputs]
diff --git a/src/agent/onefuzz/Cargo.toml b/src/agent/onefuzz/Cargo.toml
index e77fcebe69..b00e846a5f 100644
--- a/src/agent/onefuzz/Cargo.toml
+++ b/src/agent/onefuzz/Cargo.toml
@@ -62,7 +62,7 @@ cpp_demangle = "0.4"
 nix = "0.26"

 [target.'cfg(target_os = "linux")'.dependencies]
-pete = "0.10"
+pete = "0.12"
 rstack = "0.3"
 proc-maps = { version = "0.3", default-features = false }

diff --git a/src/agent/onefuzz/src/libfuzzer.rs b/src/agent/onefuzz/src/libfuzzer.rs
index 495f401bae..00b24bf4e9 100644
--- a/src/agent/onefuzz/src/libfuzzer.rs
+++ b/src/agent/onefuzz/src/libfuzzer.rs
@@ -339,7 +339,7 @@ impl LibFuzzer {
         Ok(missing)
     }

-    pub async fn fuzz(
+    pub fn fuzz(
         &self,
         fault_dir: impl AsRef<Path>,
         corpus_dir: impl AsRef<Path>,
@@ -352,8 +352,7 @@ impl LibFuzzer {
         // specify that a new file `crash-<name>` should be written to a
         // _directory_ `<dir>`, we must ensure that the prefix includes a
         // trailing path separator.
- let artifact_prefix: OsString = - format!("-artifact_prefix={}/", fault_dir.as_ref().display()).into(); + let artifact_prefix = artifact_prefix(fault_dir.as_ref()); let mut cmd = self.build_command( Some(fault_dir.as_ref()), @@ -363,10 +362,11 @@ impl LibFuzzer { None, )?; + debug!("Running command: {:?}", &cmd); + let child = cmd .spawn() .with_context(|| format_err!("libfuzzer failed to start: {}", self.exe.display()))?; - Ok(child) } @@ -441,6 +441,20 @@ impl LibFuzzer { } } +#[cfg(target_os = "windows")] +fn artifact_prefix(fault_dir: &Path) -> OsString { + if fault_dir.is_absolute() { + format!("-artifact_prefix={}\\", fault_dir.display()).into() + } else { + format!("-artifact_prefix={}/", fault_dir.display()).into() + } +} + +#[cfg(not(target_os = "windows"))] +fn artifact_prefix(fault_dir: &Path) -> OsString { + format!("-artifact_prefix={}/", fault_dir.display()).into() +} + pub struct LibFuzzerLine { _line: String, iters: u64, diff --git a/src/ci/agent.sh b/src/ci/agent.sh index 4a49c975b3..4cca93168b 100755 --- a/src/ci/agent.sh +++ b/src/ci/agent.sh @@ -37,7 +37,7 @@ export RUST_BACKTRACE=full # Run tests and collect coverage # https://github.com/taiki-e/cargo-llvm-cov -cargo llvm-cov nextest --all-targets --features slow-tests --locked --workspace --lcov --output-path "$output_dir/lcov.info" +cargo llvm-cov nextest --all-targets --features slow-tests,integration_test --locked --workspace --lcov --output-path "$output_dir/lcov.info" # TODO: re-enable integration tests. # cargo test --release --manifest-path ./onefuzz-task/Cargo.toml --features integration_test -- --nocapture From 830b4790787b47c09445bab12f2f0ac3e823ca17 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Thu, 7 Sep 2023 15:03:03 -0700 Subject: [PATCH 21/88] Fix sed checks for CLI versioning (#3486) * Fix sed checks for CLI versioning * Fix. * Fix. * Changing build_cli * Trying greater than * Tring once more. 
* Trying major minor * trying to replace major minor * Using major minor --- src/ci/set-versions.sh | 6 +++++- src/ci/unset-versions.sh | 2 +- src/cli/requirements.txt | 2 +- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/src/ci/set-versions.sh b/src/ci/set-versions.sh index 174b77f8ea..2271a752f4 100755 --- a/src/ci/set-versions.sh +++ b/src/ci/set-versions.sh @@ -10,8 +10,12 @@ GET_VERSION=${SCRIPT_DIR}/get-version.sh VERSION=${1:-$(${GET_VERSION})} cd ${SCRIPT_DIR}/../../ +arrVer=(${VERSION//./ }) +MAJOR=${arrVer[0]} +MINOR=${arrVer[1]} + SET_VERSIONS="src/pytypes/onefuzztypes/__version__.py src/cli/onefuzz/__version__.py" SET_REQS="src/cli/requirements.txt" sed -i "s/0.0.0/${VERSION}/" ${SET_VERSIONS} -sed -i "s/onefuzztypes~=0.0.0/onefuzztypes==${VERSION}/" ${SET_REQS} +sed -i "s/onefuzztypes==0.0.0/onefuzztypes==${MAJOR}.${MINOR}.*/" ${SET_REQS} diff --git a/src/ci/unset-versions.sh b/src/ci/unset-versions.sh index 9312e6b4d8..95cbfac3d5 100755 --- a/src/ci/unset-versions.sh +++ b/src/ci/unset-versions.sh @@ -12,4 +12,4 @@ SET_VERSIONS="src/pytypes/onefuzztypes/__version__.py src/cli/onefuzz/__version_ SET_REQS="src/cli/requirements.txt" sed -i 's/__version__ = .*/__version__ = "0.0.0"/' ${SET_VERSIONS} -sed -i "s/onefuzztypes==.*/onefuzztypes~=0.0.0/" ${SET_REQS} +sed -i "s/onefuzztypes==.*/onefuzztypes==0.0.0/" ${SET_REQS} diff --git a/src/cli/requirements.txt b/src/cli/requirements.txt index 475f86acc4..d9977fe03e 100644 --- a/src/cli/requirements.txt +++ b/src/cli/requirements.txt @@ -21,4 +21,4 @@ opentelemetry-instrumentation-requests==0.37b0 # install rsa version >=4.7 to fix CVE-2020-25658 rsa>=4.7 # onefuzztypes version is set during build -onefuzztypes~=0.0.0 +onefuzztypes==0.0.0 From 896329da40c8ee9506f41cba25ca1c0de1526139 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 10 Sep 2023 21:08:49 +0000 Subject: [PATCH 22/88] Bump bytes from 1.4.0 to 1.5.0 in /src/agent (#3488) Bumps [bytes](https://github.com/tokio-rs/bytes) from 1.4.0 to 1.5.0. - [Release notes](https://github.com/tokio-rs/bytes/releases) - [Changelog](https://github.com/tokio-rs/bytes/blob/master/CHANGELOG.md) - [Commits](https://github.com/tokio-rs/bytes/compare/v1.4.0...v1.5.0) --- updated-dependencies: - dependency-name: bytes dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- src/agent/Cargo.lock | 4 ++-- src/agent/onefuzz/Cargo.toml | 2 +- src/agent/storage-queue/Cargo.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock index 1394c8cd06..6e27fbad8e 100644 --- a/src/agent/Cargo.lock +++ b/src/agent/Cargo.lock @@ -396,9 +396,9 @@ checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" [[package]] name = "bytes" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" +checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" dependencies = [ "serde", ] diff --git a/src/agent/onefuzz/Cargo.toml b/src/agent/onefuzz/Cargo.toml index b00e846a5f..55042607fa 100644 --- a/src/agent/onefuzz/Cargo.toml +++ b/src/agent/onefuzz/Cargo.toml @@ -10,7 +10,7 @@ license = "MIT" anyhow = "1.0" async-trait = "0.1" base64 = "0.21" -bytes = "1.4" +bytes = "1.5" dunce = "1.0" dynamic-library = { path = "../dynamic-library" } futures = "0.3" diff --git a/src/agent/storage-queue/Cargo.toml b/src/agent/storage-queue/Cargo.toml index 381a761c74..58034cff18 100644 --- a/src/agent/storage-queue/Cargo.toml +++ b/src/agent/storage-queue/Cargo.toml @@ -10,7 +10,7 @@ anyhow = "1.0" async-trait = "0.1" backoff = { version = "0.4", features = ["tokio"] } base64 = "0.21" -bytes = { version = "1.4", features = ["serde"] } +bytes = { version = "1.5", features = ["serde"] } derivative = "2.2" flume = "0.10" num_cpus = "1.15" From d34138dfdcb53be338ba66afe8c2a0dbf8bed619 Mon Sep 17 00:00:00 2001 From: Kanan B <32438208+kananb@users.noreply.github.com> Date: Mon, 11 Sep 2023 15:45:34 -0700 Subject: [PATCH 23/88] Improve area/iteration path validation (#3489) * Add more comprehensive checks and better error messages to area/iteration path validation * Join invalid chars with space instead of comma * Make tree path validation more testable * Add error code for invalid ADO project in config * Write unit tests for tree path validation * Format tree path unit tests * Merge escape character and control character checks and clarify error message --- .../ApiService/OneFuzzTypes/Enums.cs | 1 + .../onefuzzlib/notifications/Ado.cs | 104 ++++++++++-- src/ApiService/Tests/TreePathTests.cs | 148 ++++++++++++++++++ src/pytypes/onefuzztypes/enums.py | 1 + 4 files changed, 238 insertions(+), 16 deletions(-) create mode 100644 src/ApiService/Tests/TreePathTests.cs diff --git a/src/ApiService/ApiService/OneFuzzTypes/Enums.cs b/src/ApiService/ApiService/OneFuzzTypes/Enums.cs index b1f7225b5f..4739987e6b 100644 --- a/src/ApiService/ApiService/OneFuzzTypes/Enums.cs +++ b/src/ApiService/ApiService/OneFuzzTypes/Enums.cs @@ -49,6 +49,7 @@ public enum ErrorCode { ADO_VALIDATION_MISSING_PAT_SCOPES = 492, ADO_WORKITEM_PROCESSING_DISABLED = 494, ADO_VALIDATION_INVALID_PATH = 495, + ADO_VALIDATION_INVALID_PROJECT = 496, // NB: if you update this enum, also update enums.py } diff --git a/src/ApiService/ApiService/onefuzzlib/notifications/Ado.cs b/src/ApiService/ApiService/onefuzzlib/notifications/Ado.cs index 3780bc1b2b..b1442851ba 100644 --- a/src/ApiService/ApiService/onefuzzlib/notifications/Ado.cs +++ b/src/ApiService/ApiService/onefuzzlib/notifications/Ado.cs @@ -89,30 +89,97 @@ private static bool IsTransient(Exception e) { return errorCodes.Any(errorStr.Contains); } - private 
static async Async.Task<OneFuzzResultVoid> ValidatePath(string project, string path, TreeStructureGroup structureGroup, WorkItemTrackingHttpClient client) {
-        var pathType = (structureGroup == TreeStructureGroup.Areas) ? "Area" : "Iteration";
-        var pathParts = path.Split('\\');
-        if (!string.Equals(pathParts[0], project, StringComparison.OrdinalIgnoreCase)) {
+    public static OneFuzzResultVoid ValidateTreePath(IEnumerable<string> path, WorkItemClassificationNode? root) {
+        if (root is null) {
+            return OneFuzzResultVoid.Error(ErrorCode.ADO_VALIDATION_INVALID_PROJECT, new string[] {
+                $"Path \"{string.Join('\\', path)}\" is invalid. The specified ADO project doesn't exist.",
+                "Double check the 'project' field in your ADO config.",
+            });
+        }
+
+        string treeNodeTypeName;
+        switch (root.StructureType) {
+            case TreeNodeStructureType.Area:
+                treeNodeTypeName = "Area";
+                break;
+            case TreeNodeStructureType.Iteration:
+                treeNodeTypeName = "Iteration";
+                break;
+            default:
+                return OneFuzzResultVoid.Error(ErrorCode.ADO_VALIDATION_INVALID_PATH, new string[] {
+                    $"Path root \"{root.Name}\" is an unsupported type. Expected Area or Iteration but got {root.StructureType}.",
+                });
+        }
+
+        // Validate path based on
+        // https://learn.microsoft.com/en-us/azure/devops/organizations/settings/about-areas-iterations?view=azure-devops#naming-restrictions
+        var maxNodeLength = 255;
+        var maxDepth = 13;
+        // Invalid characters from the link above plus the escape sequences (since they have backslashes and produce confusingly formatted errors if not caught here)
+        var invalidChars = new char[] { '/', ':', '*', '?', '"', '<', '>', '|', ';', '#', '$', '{', '}', ',', '+', '=', '[', ']' };
+
+        // Ensure that none of the path parts are too long
+        var erroneous = path.FirstOrDefault(part => part.Length > maxNodeLength);
+        if (erroneous != null) {
+            return OneFuzzResultVoid.Error(ErrorCode.ADO_VALIDATION_INVALID_PATH, new string[] {
+                $"{treeNodeTypeName} Path \"{string.Join('\\', path)}\" is invalid. \"{erroneous}\" is too long. It must be no more than {maxNodeLength} characters.",
+                "Learn more about naming restrictions here: https://learn.microsoft.com/en-us/azure/devops/organizations/settings/about-areas-iterations?view=azure-devops#naming-restrictions"
+            });
+        }
+
+        // Ensure that none of the path parts contain invalid characters
+        erroneous = path.FirstOrDefault(part => invalidChars.Any(part.Contains));
+        if (erroneous != null) {
             return OneFuzzResultVoid.Error(ErrorCode.ADO_VALIDATION_INVALID_PATH, new string[] {
-                $"Path \"{path}\" is invalid. It must start with the project name, \"{project}\".",
-                $"Example: \"{project}\\{path}\".",
+                $"{treeNodeTypeName} Path \"{string.Join('\\', path)}\" is invalid. \"{erroneous}\" contains an invalid character ({string.Join(" ", invalidChars)}).",
+                "Make sure that the path is separated by backslashes (\\) and not forward slashes (/).",
+                "Learn more about naming restrictions here: https://learn.microsoft.com/en-us/azure/devops/organizations/settings/about-areas-iterations?view=azure-devops#naming-restrictions"
             });
         }

-        var current = await client.GetClassificationNodeAsync(project, structureGroup, depth: pathParts.Length - 1);
-        if (current == null) {
+        // Ensure no unicode control characters
+        erroneous = path.FirstOrDefault(part => part.Any(ch => char.IsControl(ch)));
+        if (erroneous != null) {
             return OneFuzzResultVoid.Error(ErrorCode.ADO_VALIDATION_INVALID_PATH, new string[] {
-                $"{pathType} Path \"{path}\" is invalid. \"{project}\" is not a valid project.",
+                // More about control codes and their range here: https://en.wikipedia.org/wiki/Unicode_control_characters
+                $"{treeNodeTypeName} Path \"{string.Join('\\', path)}\" is invalid. \"{erroneous}\" contains a unicode control character (\\u0000 - \\u001F or \\u007F - \\u009F).",
+                "Make sure that your path doesn't contain any escape characters (\\0 \\a \\b \\f \\n \\r \\t \\v).",
+                "Learn more about naming restrictions here: https://learn.microsoft.com/en-us/azure/devops/organizations/settings/about-areas-iterations?view=azure-devops#naming-restrictions"
             });
         }

-        foreach (var part in pathParts.Skip(1)) {
+        // Ensure that there aren't too many path parts
+        if (path.Count() > maxDepth) {
+            return OneFuzzResultVoid.Error(ErrorCode.ADO_VALIDATION_INVALID_PATH, new string[] {
+                $"{treeNodeTypeName} Path \"{string.Join('\\', path)}\" is invalid. It must be no more than {maxDepth} levels deep.",
+                "Learn more about naming restrictions here: https://learn.microsoft.com/en-us/azure/devops/organizations/settings/about-areas-iterations?view=azure-devops#naming-restrictions"
+            });
+        }
+
+
+        // Path should always start with the project name; ADO expects an absolute path
+        if (!string.Equals(path.First(), root.Name, StringComparison.OrdinalIgnoreCase)) {
+            return OneFuzzResultVoid.Error(ErrorCode.ADO_VALIDATION_INVALID_PATH, new string[] {
+                $"{treeNodeTypeName} Path \"{string.Join('\\', path)}\" is invalid. It must start with the project name, \"{root.Name}\".",
+                $"Example: \"{root.Name}\\{string.Join('\\', path)}\".",
+            });
+        }
+
+        // Validate that each part of the path is a valid child of the previous part
+        var current = root;
+        foreach (var part in path.Skip(1)) {
             var child = current.Children?.FirstOrDefault(x => string.Equals(x.Name, part, StringComparison.OrdinalIgnoreCase));
             if (child == null) {
-                return OneFuzzResultVoid.Error(ErrorCode.ADO_VALIDATION_INVALID_PATH, new string[] {
-                    $"{pathType} Path \"{path}\" is invalid. \"{part}\" is not a valid child of \"{current.Name}\".",
-                    $"Valid children of \"{current.Name}\" are: [{string.Join(',', current.Children?.Select(x => $"\"{x.Name}\"") ?? new List<string>())}].",
-                });
+                if (current.Children is null || !current.Children.Any()) {
+                    return OneFuzzResultVoid.Error(ErrorCode.ADO_VALIDATION_INVALID_PATH, new string[] {
+                        $"{treeNodeTypeName} Path \"{string.Join('\\', path)}\" is invalid. \"{current.Name}\" has no children.",
+                    });
+                } else {
+                    return OneFuzzResultVoid.Error(ErrorCode.ADO_VALIDATION_INVALID_PATH, new string[] {
+                        $"{treeNodeTypeName} Path \"{string.Join('\\', path)}\" is invalid. \"{part}\" is not a valid child of \"{current.Name}\".",
+                        $"Valid children of \"{current.Name}\" are: [{string.Join(',', current.Children?.Select(x => $"\"{x.Name}\"") ?? new List<string>())}].",
+                    });
+                }
             }

             current = child;
@@ -195,14 +262,19 @@ await policy.ExecuteAsync(async () => {
         try {
             // Validate AreaPath and IterationPath exist
+            // This also validates that the config.Project exists
             if (config.AdoFields.TryGetValue("System.AreaPath", out var areaPathString)) {
-                var validateAreaPath = await ValidatePath(config.Project, areaPathString, TreeStructureGroup.Areas, witClient);
+                var path = areaPathString.Split('\\');
+                var root = await witClient.GetClassificationNodeAsync(config.Project, TreeStructureGroup.Areas, depth: path.Length - 1);
+                var validateAreaPath = ValidateTreePath(path, root);
                 if (!validateAreaPath.IsOk) {
                     return validateAreaPath;
                 }
             }

             if (config.AdoFields.TryGetValue("System.IterationPath", out var iterationPathString)) {
-                var validateIterationPath = await ValidatePath(config.Project, iterationPathString, TreeStructureGroup.Iterations, witClient);
+                var path = iterationPathString.Split('\\');
+                var root = await witClient.GetClassificationNodeAsync(config.Project, TreeStructureGroup.Iterations, depth: path.Length - 1);
+                var validateIterationPath = ValidateTreePath(path, root);
                 if (!validateIterationPath.IsOk) {
                     return validateIterationPath;
                 }
diff --git a/src/ApiService/Tests/TreePathTests.cs b/src/ApiService/Tests/TreePathTests.cs
new file mode 100644
index 0000000000..fba818793c
--- /dev/null
+++ b/src/ApiService/Tests/TreePathTests.cs
@@ -0,0 +1,148 @@
+using System.Collections.Generic;
+using System.Linq;
+using Microsoft.OneFuzz.Service;
+using Microsoft.TeamFoundation.WorkItemTracking.WebApi.Models;
+using Xunit;
+
+namespace Tests;
+
+// This might be a good candidate for property-based testing
+// https://fscheck.github.io/FsCheck//QuickStart.html
+public class TreePathTests {
+    private static IEnumerable<string> SplitPath(string path) {
+        return path.Split('\\');
+    }
+
+    private static WorkItemClassificationNode MockTreeNode(IEnumerable<string> path, TreeNodeStructureType structureType) {
+        var root = new WorkItemClassificationNode() {
+            Name = path.First(),
+            StructureType = structureType
+        };
+
+        var current = root;
+        foreach (var segment in path.Skip(1)) {
+            var child = new WorkItemClassificationNode {
+                Name = segment
+            };
+            current.Children = new[] { child };
+            current = child;
+        }
+
+        return root;
+    }
+
+
+    [Fact]
+    public void TestValidPath() {
+        var path = SplitPath(@"project\foo\bar\baz");
+        var root = MockTreeNode(path, TreeNodeStructureType.Area);
+
+        var result = Ado.ValidateTreePath(path, root);
+
+        Assert.True(result.IsOk);
+    }
+
+    [Fact]
+    public void TestNullTreeNode() { // A null tree node indicates an invalid ADO project was used in the query
+        var path = SplitPath(@"project\foo\bar\baz");
+
+        var result = Ado.ValidateTreePath(path, null);
+
+        Assert.False(result.IsOk);
+        Assert.Equal(ErrorCode.ADO_VALIDATION_INVALID_PROJECT, result.ErrorV!.Code);
+        Assert.Contains("ADO project doesn't exist", result.ErrorV!.Errors![0]);
+    }
+
+    [Fact]
+    public void TestPathPartTooLong() {
+        var path = SplitPath(@"project\foo\barbazquxquuxcorgegraultgarplywaldofredplughxyzzythudbarbazquxquuxcorgegraultgarplywaldofredplughxyzzythudbarbazquxquuxcorgegraultgarplywaldofredplughxyzzythudbarbazquxquuxcorgegraultgarplywaldofredplughxyzzythudbarbazquxquuxcorgegraultgarplywaldofredplughxyzzythud\baz");
+        var root = MockTreeNode(path, TreeNodeStructureType.Iteration);
+
+        var result = Ado.ValidateTreePath(path, root);
+
+        Assert.False(result.IsOk);
+        Assert.Equal(ErrorCode.ADO_VALIDATION_INVALID_PATH, result.ErrorV!.Code);
+        Assert.Contains("too long",
result.ErrorV!.Errors![0]); + } + + [Theory] + [InlineData("project/foo/bar/baz")] + [InlineData("project\\foo:\\bar\\baz")] + public void TestPathContainsInvalidChar(string invalidPath) { + var path = SplitPath(invalidPath); + var treePath = SplitPath(@"project\foo\bar\baz"); + var root = MockTreeNode(treePath, TreeNodeStructureType.Area); + + var result = Ado.ValidateTreePath(path, root); + + Assert.False(result.IsOk); + Assert.Equal(ErrorCode.ADO_VALIDATION_INVALID_PATH, result.ErrorV!.Code); + Assert.Contains("invalid character", result.ErrorV!.Errors![0]); + } + + [Theory] + [InlineData("project\\foo\\ba\u0005r\\baz")] + [InlineData("project\\\nfoo\\bar\\baz")] + public void TestPathContainsUnicodeControlChar(string invalidPath) { + var path = SplitPath(invalidPath); + var treePath = SplitPath(@"project\foo\bar\baz"); + var root = MockTreeNode(treePath, TreeNodeStructureType.Area); + + var result = Ado.ValidateTreePath(path, root); + + Assert.False(result.IsOk); + Assert.Equal(ErrorCode.ADO_VALIDATION_INVALID_PATH, result.ErrorV!.Code); + Assert.Contains("unicode control character", result.ErrorV!.Errors![0]); + } + + [Fact] + public void TestPathTooDeep() { + var path = SplitPath(@"project\foo\bar\baz\qux\quux\corge\grault\garply\waldo\fred\plugh\xyzzy\thud"); + var root = MockTreeNode(path, TreeNodeStructureType.Area); + + var result = Ado.ValidateTreePath(path, root); + + Assert.False(result.IsOk); + Assert.Equal(ErrorCode.ADO_VALIDATION_INVALID_PATH, result.ErrorV!.Code); + Assert.Contains("levels deep", result.ErrorV!.Errors![0]); + } + + [Fact] + public void TestPathWithoutProjectName() { + var path = SplitPath(@"foo\bar\baz"); + var treePath = SplitPath(@"project\foo\bar\baz"); + var root = MockTreeNode(treePath, TreeNodeStructureType.Iteration); + + var result = Ado.ValidateTreePath(path, root); + + Assert.False(result.IsOk); + Assert.Equal(ErrorCode.ADO_VALIDATION_INVALID_PATH, result.ErrorV!.Code); + Assert.Contains("start with the project name", result.ErrorV!.Errors![0]); + } + + [Fact] + public void TestPathWithInvalidChild() { + var path = SplitPath(@"project\foo\baz"); + var treePath = SplitPath(@"project\foo\bar"); + var root = MockTreeNode(treePath, TreeNodeStructureType.Iteration); + + var result = Ado.ValidateTreePath(path, root); + + Assert.False(result.IsOk); + Assert.Equal(ErrorCode.ADO_VALIDATION_INVALID_PATH, result.ErrorV!.Code); + Assert.Contains("not a valid child", result.ErrorV!.Errors![0]); + } + + [Fact] + public void TestPathWithExtraChild() { + var path = SplitPath(@"project\foo\bar\baz"); + var treePath = SplitPath(@"project\foo\bar"); + var root = MockTreeNode(treePath, TreeNodeStructureType.Iteration); + + var result = Ado.ValidateTreePath(path, root); + + Assert.False(result.IsOk); + Assert.Equal(ErrorCode.ADO_VALIDATION_INVALID_PATH, result.ErrorV!.Code); + Assert.Contains("has no children", result.ErrorV!.Errors![0]); + } +} diff --git a/src/pytypes/onefuzztypes/enums.py b/src/pytypes/onefuzztypes/enums.py index 4d59945c85..e2ec81eb15 100644 --- a/src/pytypes/onefuzztypes/enums.py +++ b/src/pytypes/onefuzztypes/enums.py @@ -303,6 +303,7 @@ class ErrorCode(Enum): ADO_VALIDATION_UNEXPECTED_ERROR = 491 ADO_VALIDATION_MISSING_PAT_SCOPES = 492 ADO_VALIDATION_INVALID_PATH = 495 + ADO_VALIDATION_INVALID_PROJECT = 496 # NB: if you update this enum, also update Enums.cs From d0094769d316cf801d9e33c2248df2ebcbca93ac Mon Sep 17 00:00:00 2001 From: Teo Voinea <58236992+tevoinea@users.noreply.github.com> Date: Wed, 13 Sep 2023 17:11:02 -0400 Subject: [PATCH 
24/88] Improve handling of unexpected breakpoints (#3493) * Improve handling of unexpected breakpoints * fmt --- src/agent/coverage/src/record/windows.rs | 34 +++++++++++++----------- 1 file changed, 19 insertions(+), 15 deletions(-) diff --git a/src/agent/coverage/src/record/windows.rs b/src/agent/coverage/src/record/windows.rs index 076aa70d37..32d22b5534 100644 --- a/src/agent/coverage/src/record/windows.rs +++ b/src/agent/coverage/src/record/windows.rs @@ -4,7 +4,7 @@ use std::collections::BTreeMap; use std::path::Path; -use anyhow::{anyhow, bail, Error, Result}; +use anyhow::{anyhow, Error, Result}; use debuggable_module::debuginfo::{DebugInfo, Function}; use debuggable_module::load_module::LoadModule; use debuggable_module::loader::Loader; @@ -132,20 +132,24 @@ impl<'cache, 'data> WindowsRecorder<'cache, 'data> { return Ok(()); } - let breakpoint = self.breakpoints.remove(id); - - let Some(breakpoint) = breakpoint else { - let stack = dbg.get_current_stack()?; - bail!("stopped on dangling breakpoint, debuggee stack:\n{}", stack); - }; - - let coverage = self - .coverage - .modules - .get_mut(&breakpoint.module) - .ok_or_else(|| anyhow!("coverage not initialized for module: {}", breakpoint.module))?; - - coverage.increment(breakpoint.offset); + match self.breakpoints.remove(id) { + Some(breakpoint) => { + let coverage = self + .coverage + .modules + .get_mut(&breakpoint.module) + .ok_or_else(|| { + anyhow!("coverage not initialized for module: {}", breakpoint.module) + })?; + + coverage.increment(breakpoint.offset); + } + // ASAN can set breakpoints which we don't know about, meaning they're not in `self.breakpoints` + None => { + let stack = dbg.get_current_stack()?; + warn!("stopped on dangling breakpoint, debuggee stack:\n{}", stack); + } + } Ok(()) } From 18f2b4a997e1c7e729606f04eb15de71249b52cd Mon Sep 17 00:00:00 2001 From: George Pollard Date: Thu, 14 Sep 2023 09:51:06 +1200 Subject: [PATCH 25/88] Update azure_* crates (#3503) --- src/agent/Cargo.lock | 12 ++++++------ src/agent/onefuzz-agent/Cargo.toml | 6 +++--- src/agent/onefuzz-task/Cargo.toml | 6 +++--- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock index 6e27fbad8e..65c2967ec0 100644 --- a/src/agent/Cargo.lock +++ b/src/agent/Cargo.lock @@ -199,9 +199,9 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "azure_core" -version = "0.14.0" +version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2331555a3618a32516c6172a63e9fec4af0edb43c6fcfeb5303a0716fc34498b" +checksum = "1f20eb684aea745292c540173304383c9cba9697d1c31d307620a57d6f878fa9" dependencies = [ "async-trait", "base64 0.21.2", @@ -226,9 +226,9 @@ dependencies = [ [[package]] name = "azure_storage" -version = "0.14.0" +version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16565073e533053f4e29e6b139de2af758e984108a1cddbb1a432387e7f4474d" +checksum = "bf64f9d78e573f64e189fa7188c4e6a0f605e27740105a8d32038b3ba8c913be" dependencies = [ "RustyXML", "async-trait", @@ -249,9 +249,9 @@ dependencies = [ [[package]] name = "azure_storage_blobs" -version = "0.14.0" +version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0900e63940d1ba51039efda3d8cf658157a1c75449081a6e18069d2588809329" +checksum = "a61299a8b65b88acba1a079a0b5e8a39970a12cb53e35ada2641687edb022d5a" dependencies = [ "RustyXML", "azure_core", diff --git 
a/src/agent/onefuzz-agent/Cargo.toml b/src/agent/onefuzz-agent/Cargo.toml index bc73d37973..3e7f00a8b0 100644 --- a/src/agent/onefuzz-agent/Cargo.toml +++ b/src/agent/onefuzz-agent/Cargo.toml @@ -31,13 +31,13 @@ onefuzz-telemetry = { path = "../onefuzz-telemetry" } backtrace = "0.3" ipc-channel = { git = "https://github.com/servo/ipc-channel", rev = "7f432aa" } dynamic-library = { path = "../dynamic-library" } -azure_core = { version = "0.14", default-features = false, features = [ +azure_core = { version = "0.15", default-features = false, features = [ "enable_reqwest", ] } -azure_storage = { version = "0.14", default-features = false, features = [ +azure_storage = { version = "0.15", default-features = false, features = [ "enable_reqwest", ] } -azure_storage_blobs = { version = "0.14", default-features = false, features = [ +azure_storage_blobs = { version = "0.15", default-features = false, features = [ "enable_reqwest", ] } diff --git a/src/agent/onefuzz-task/Cargo.toml b/src/agent/onefuzz-task/Cargo.toml index d5588a58e6..4b3e8e8c43 100644 --- a/src/agent/onefuzz-task/Cargo.toml +++ b/src/agent/onefuzz-task/Cargo.toml @@ -70,13 +70,13 @@ chrono = { version = "0.4", default-features = false, features = [ ] } ipc-channel = { git = "https://github.com/servo/ipc-channel", rev = "7f432aa" } -azure_core = { version = "0.14", default-features = false, features = [ +azure_core = { version = "0.15", default-features = false, features = [ "enable_reqwest", ] } -azure_storage = { version = "0.14", default-features = false, features = [ +azure_storage = { version = "0.15", default-features = false, features = [ "enable_reqwest", ] } -azure_storage_blobs = { version = "0.14", default-features = false, features = [ +azure_storage_blobs = { version = "0.15", default-features = false, features = [ "enable_reqwest", ] } From 9ede0dee0f034a1e80f4996f575f33e46588cdc9 Mon Sep 17 00:00:00 2001 From: Teo Voinea <58236992+tevoinea@users.noreply.github.com> Date: Thu, 14 Sep 2023 12:53:27 -0400 Subject: [PATCH 26/88] Fuzz coverage recording (#3322) * Fuzz coverage recording * Update cargo.toml * Update src/agent/coverage/fuzz/fuzz_targets/fuzz_target_record_coverage.rs Co-authored-by: George Pollard * Fix fuzz --------- Co-authored-by: George Pollard --- src/agent/coverage/fuzz/.gitignore | 4 + src/agent/coverage/fuzz/Cargo.lock | 1480 +++++++++++++++++ src/agent/coverage/fuzz/Cargo.toml | 30 + .../fuzz_target_record_coverage.rs | 51 + 4 files changed, 1565 insertions(+) create mode 100644 src/agent/coverage/fuzz/.gitignore create mode 100644 src/agent/coverage/fuzz/Cargo.lock create mode 100644 src/agent/coverage/fuzz/Cargo.toml create mode 100644 src/agent/coverage/fuzz/fuzz_targets/fuzz_target_record_coverage.rs diff --git a/src/agent/coverage/fuzz/.gitignore b/src/agent/coverage/fuzz/.gitignore new file mode 100644 index 0000000000..1a45eee776 --- /dev/null +++ b/src/agent/coverage/fuzz/.gitignore @@ -0,0 +1,4 @@ +target +corpus +artifacts +coverage diff --git a/src/agent/coverage/fuzz/Cargo.lock b/src/agent/coverage/fuzz/Cargo.lock new file mode 100644 index 0000000000..6096b84473 --- /dev/null +++ b/src/agent/coverage/fuzz/Cargo.lock @@ -0,0 +1,1480 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "addr2line" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4fa78e18c64fce05e902adecd7a5eed15a5e0a3439f7b0e169f0252214865e3" +dependencies = [ + "gimli 0.27.3", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "aho-corasick" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41" +dependencies = [ + "memchr", +] + +[[package]] +name = "anyhow" +version = "1.0.72" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b13c32d80ecc7ab747b80c3784bce54ee8a7a0cc4fbda9bf4cda2cf6fe90854" +dependencies = [ + "backtrace", +] + +[[package]] +name = "arbitrary" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2d098ff73c1ca148721f37baad5ea6a465a13f9573aba8641fbbbae8164a54e" + +[[package]] +name = "arrayvec" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" + +[[package]] +name = "atexit" +version = "0.1.0" +dependencies = [ + "ctrlc", + "lazy_static", + "log", +] + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "backtrace" +version = "0.3.68" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4319208da049c43661739c5fade2ba182f09d1dc2299b32298d3a31692b17e12" +dependencies = [ + "addr2line", + "cc", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "binary-merge" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597bb81c80a54b6a4381b23faba8d7774b144c94cbd1d6fe3f1329bd776554ab" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42" + +[[package]] +name = "brownstone" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5839ee4f953e811bfdcf223f509cb2c6a3e1447959b0bff459405575bc17f22" +dependencies = [ + "arrayvec", +] + +[[package]] +name = "byteorder" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" + +[[package]] +name = "cc" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" +dependencies = [ + "jobserver", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "cobertura" +version = "0.1.0" +dependencies = [ + "anyhow", + "quick-xml", +] + +[[package]] +name = "coverage" +version = "0.1.0" 
+dependencies = [ + "anyhow", + "cobertura", + "debuggable-module", + "debugger", + "iced-x86", + "log", + "nix", + "pete", + "procfs", + "regex", + "symbolic", + "thiserror", +] + +[[package]] +name = "coverage-fuzz" +version = "0.0.0" +dependencies = [ + "coverage", + "debuggable-module", + "libfuzzer-sys", + "tempfile", +] + +[[package]] +name = "cpp_demangle" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee34052ee3d93d6d8f3e6f81d85c47921f6653a19a7b70e939e3e602d893a674" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crc32fast" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "ctrlc" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a011bbe2c35ce9c1f143b7af6f94f29a167beb4cd1d29e6740ce836f723120e" +dependencies = [ + "nix", + "windows-sys 0.48.0", +] + +[[package]] +name = "debuggable-module" +version = "0.1.0" +dependencies = [ + "anyhow", + "elsa", + "gimli 0.28.0", + "goblin 0.6.1", + "iced-x86", + "log", + "pdb", + "regex", + "symbolic", + "thiserror", +] + +[[package]] +name = "debugger" +version = "0.1.0" +dependencies = [ + "anyhow", + "fnv", + "goblin 0.6.1", + "iced-x86", + "log", + "memmap2 0.7.1", + "rand", + "serde", + "win-util", + "windows", +] + +[[package]] +name = "debugid" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef552e6f588e446098f6ba40d89ac146c8c7b64aade83c051ee00bb5d2bc18d" +dependencies = [ + "serde", + "uuid", +] + +[[package]] +name = "dmsort" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0bc8fbe9441c17c9f46f75dfe27fa1ddb6c68a461ccaed0481419219d4f10d3" + +[[package]] +name = "elementtree" +version = "1.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3efd4742acf458718a6456e0adf0b4d734d6b783e452bbf1ac36bf31f4085cb3" +dependencies = [ + "string_cache", +] + +[[package]] +name = "elsa" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "714f766f3556b44e7e4776ad133fcc3445a489517c25c704ace411bb14790194" +dependencies = [ + "stable_deref_trait", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "errno" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" +dependencies = [ + "errno-dragonfly", + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "errno-dragonfly" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "fallible-iterator" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" + +[[package]] 
+name = "fallible-iterator" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649" + +[[package]] +name = "fastrand" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764" + +[[package]] +name = "flate2" +version = "1.0.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b9429470923de8e8cbd4d2dc513535400b4b3fef0319fb5c4e1f520a7bef743" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "form_urlencoded" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "getrandom" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "gimli" +version = "0.27.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e" + +[[package]] +name = "gimli" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0" +dependencies = [ + "fallible-iterator 0.3.0", + "indexmap 2.0.0", + "stable_deref_trait", +] + +[[package]] +name = "goblin" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d6b4de4a8eb6c46a8c77e1d3be942cb9a8bf073c22374578e5ba4b08ed0ff68" +dependencies = [ + "log", + "plain", + "scroll", +] + +[[package]] +name = "goblin" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f27c1b4369c2cd341b5de549380158b105a04c331be5db9110eef7b6d2742134" +dependencies = [ + "log", + "plain", + "scroll", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "hashbrown" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" + +[[package]] +name = "hermit-abi" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "iced-x86" +version = "1.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdd366a53278429c028367e0ba22a46cab6d565a57afb959f06e92c7a69e7828" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "idna" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" 
+dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "indent_write" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cfe9645a18782869361d9c8732246be7b410ad4e919d3609ebabdac00ba12c3" + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", +] + +[[package]] +name = "indexmap" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" +dependencies = [ + "equivalent", + "hashbrown 0.14.0", +] + +[[package]] +name = "inplace-vec-builder" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf64c2edc8226891a71f127587a2861b132d2b942310843814d5001d99a1d307" +dependencies = [ + "smallvec", +] + +[[package]] +name = "io-lifetimes" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" +dependencies = [ + "hermit-abi", + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "itoa" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" + +[[package]] +name = "jobserver" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "936cfd212a0155903bcbc060e316fb6cc7cbf2e1907329391ebadc1fe0ce77c2" +dependencies = [ + "libc", +] + +[[package]] +name = "joinery" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72167d68f5fce3b8655487b8038691a3c9984ee769590f93f2a631f4ad64e4f5" + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "leb128" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" + +[[package]] +name = "libc" +version = "0.2.147" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" + +[[package]] +name = "libfuzzer-sys" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "beb09950ae85a0a94b27676cccf37da5ff13f27076aa1adbc6545dd0d0e1bd4e" +dependencies = [ + "arbitrary", + "cc", + "once_cell", +] + +[[package]] +name = "linux-raw-sys" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" + +[[package]] +name = "linux-raw-sys" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0" + +[[package]] +name = "lock_api" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4" + +[[package]] +name = "maybe-owned" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4facc753ae494aeb6e3c22f839b158aebd4f9270f55cd3c79906c45476c47ab4" + +[[package]] +name = "memchr" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" + +[[package]] +name = "memmap2" +version = "0.5.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83faa42c0a078c393f6b29d5db232d8be22776a891f8f56e5284faee4a20b327" +dependencies = [ + "libc", +] + +[[package]] +name = "memmap2" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f49388d20533534cd19360ad3d6a7dadc885944aa802ba3995040c5ec11288c6" +dependencies = [ + "libc", +] + +[[package]] +name = "memoffset" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4" +dependencies = [ + "autocfg", +] + +[[package]] +name = "memoffset" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1" +dependencies = [ + "autocfg", +] + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +dependencies = [ + "adler", +] + +[[package]] +name = "msvc-demangler" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfb67c6dd0fa9b00619c41c5700b6f92d5f418be49b45ddb9970fbd4569df3c8" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "new_debug_unreachable" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54" + +[[package]] +name = "nix" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfdda3d196821d6af13126e40375cdf7da646a96114af134d5f417a9a1dc8e1a" +dependencies = [ + "bitflags 1.3.2", + "cfg-if", + "libc", + "memoffset 0.7.1", + "pin-utils", + "static_assertions", +] + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "nom-supreme" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bd3ae6c901f1959588759ff51c95d24b491ecb9ff91aa9c2ef4acc5b1dcab27" +dependencies = [ + "brownstone", + "indent_write", + "joinery", + "memchr", + "nom", +] + +[[package]] +name = "object" +version = "0.31.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8bda667d9f2b5051b8833f59f3bf748b28ef54f850f4fcb389a252aa383866d1" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" + +[[package]] +name = "os_pipe" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ae859aa07428ca9a929b936690f8b12dc5f11dd8c6992a18ca93919f28bc177" +dependencies = [ + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "parking_lot" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-targets 0.48.1", +] + +[[package]] +name = "pdb" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82040a392923abe6279c00ab4aff62d5250d1c8555dc780e4b02783a7aa74863" +dependencies = [ + "fallible-iterator 0.2.0", + "scroll", + "uuid", +] + +[[package]] +name = "pdb-addr2line" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4e89a9f2f40b2389ba6da0814c8044bf942bece03dffa1514f84e3b525f4f9a" +dependencies = [ + "bitflags 1.3.2", + "elsa", + "maybe-owned", + "pdb", + "range-collections", + "thiserror", +] + +[[package]] +name = "percent-encoding" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" + +[[package]] +name = "pete" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f09c1c1ad40df294ff8643fe88a3dc64fff3293b6bc0ed9f71aff71f7086cbd" +dependencies = [ + "libc", + "memoffset 0.8.0", + "nix", + "thiserror", +] + +[[package]] +name = "phf_shared" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" +dependencies = [ + "siphasher", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c40d25201921e5ff0c862a505c6557ea88568a4e3ace775ab55e93f2f4f9d57" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "plain" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6" + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "precomputed-hash" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" + +[[package]] +name = "proc-macro2" +version = "1.0.66" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "procfs" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "943ca7f9f29bab5844ecd8fdb3992c5969b6622bb9609b9502fef9b4310e3f1f" +dependencies = [ + "bitflags 1.3.2", + "byteorder", + "flate2", + "hex", + "lazy_static", + "rustix 0.36.15", +] + +[[package]] +name = "quick-xml" +version = "0.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eff6510e86862b57b210fd8cbe8ed3f0d7d600b9c2863cd4549a2e033c66e956" +dependencies = [ + "memchr", +] + +[[package]] +name = "quote" +version = "1.0.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fe8a65d69dd0808184ebb5f836ab526bb259db23c657efa38711b1072ee47f0" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "range-collections" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61fdfd79629e2b44a1d34b4d227957174cb858e6b86ee45fad114edbcfc903ab" +dependencies = [ + "binary-merge", + "inplace-vec-builder", + "smallvec", +] + +[[package]] +name = "redox_syscall" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "regex" +version = "1.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2" + +[[package]] +name = "rustc-demangle" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" + +[[package]] +name = "rustix" +version = "0.36.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c37f1bd5ef1b5422177b7646cba67430579cfe2ace80f284fee876bca52ad941" +dependencies = [ + "bitflags 1.3.2", + "errno", + "io-lifetimes", + "libc", + "linux-raw-sys 0.1.4", + "windows-sys 0.45.0", +] + +[[package]] +name = "rustix" +version = "0.38.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a962918ea88d644592894bc6dc55acc6c0956488adcebbfb6e273506b7fd6e5" +dependencies = [ + "bitflags 2.3.3", + "errno", + "libc", + "linux-raw-sys 0.4.3", + "windows-sys 0.48.0", +] + +[[package]] +name 
= "ryu" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "scroll" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04c565b551bafbef4157586fa379538366e4385d42082f255bfd96e4fe8519da" +dependencies = [ + "scroll_derive", +] + +[[package]] +name = "scroll_derive" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1db149f81d46d2deba7cd3c50772474707729550221e69588478ebf9ada425ae" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde" +version = "1.0.174" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b88756493a5bd5e5395d53baa70b194b05764ab85b59e43e4b8f4e1192fa9b1" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.174" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e5c3a298c7f978e53536f95a63bdc4c4a64550582f31a0359a9afda6aede62e" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.103" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d03b412469450d4404fe8499a268edd7f8b79fecb074b0d812ad64ca21f4031b" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "siphasher" +version = "0.3.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de" + +[[package]] +name = "smallvec" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" + +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "string_cache" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" +dependencies = [ + "new_debug_unreachable", + "once_cell", + "parking_lot", + "phf_shared", + "precomputed-hash", + "serde", +] + +[[package]] +name = "symbolic" +version = "12.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3b5247a96aeefec188691938459892bffd23f1c3e9900dc08ac5248fe3bf08e" +dependencies = [ + "symbolic-common", + "symbolic-debuginfo", + "symbolic-demangle", + "symbolic-symcache", +] + +[[package]] +name = "symbolic-common" +version = "12.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e0e9bc48b3852f36a84f8d0da275d50cb3c2b88b59b9ec35fdd8b7fa239e37d" +dependencies = [ + "debugid", + "memmap2 0.5.10", + "stable_deref_trait", + "uuid", +] + +[[package]] +name = "symbolic-debuginfo" +version = "12.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7ef9a1b95a8ea7b5afb550da0d93ecc706de3ce869a9674fc3bc51fadc019feb" +dependencies = [ + "debugid", + "dmsort", + "elementtree", + "elsa", + "fallible-iterator 0.3.0", + "flate2", + "gimli 0.28.0", + "goblin 0.7.1", + "lazy_static", + "nom", + "nom-supreme", + "once_cell", + "parking_lot", + "pdb-addr2line", + "regex", + "scroll", + "serde", + "serde_json", + "smallvec", + "symbolic-common", + "symbolic-ppdb", + "thiserror", + "wasmparser", + "zip", +] + +[[package]] +name = "symbolic-demangle" +version = "12.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "691e53bdc0702aba3a5abc2cffff89346fcbd4050748883c7e2f714b33a69045" +dependencies = [ + "cc", + "cpp_demangle", + "msvc-demangler", + "rustc-demangle", + "symbolic-common", +] + +[[package]] +name = "symbolic-ppdb" +version = "12.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b95399a30236ac95fd9ce69a008b8a18e58859e9780a13bcb16fda545802f876" +dependencies = [ + "flate2", + "indexmap 1.9.3", + "serde", + "serde_json", + "symbolic-common", + "thiserror", + "uuid", + "watto", +] + +[[package]] +name = "symbolic-symcache" +version = "12.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4339f37007c0fd6d6dddaf6f04619a4a5d6308e71eabbd45c30e0af124014259" +dependencies = [ + "indexmap 2.0.0", + "symbolic-common", + "symbolic-debuginfo", + "thiserror", + "tracing", + "watto", +] + +[[package]] +name = "syn" +version = "2.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b60f673f44a8255b9c8c657daf66a596d435f2da81a555b06dc644d080ba45e0" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "tempfile" +version = "3.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5486094ee78b2e5038a6382ed7645bc084dc2ec433426ca4c3cb61e2007b8998" +dependencies = [ + "cfg-if", + "fastrand", + "redox_syscall", + "rustix 0.38.4", + "windows-sys 0.48.0", +] + +[[package]] +name = "thiserror" +version = "1.0.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "611040a08a0439f8248d1990b111c95baa9c704c805fa1f62104b39655fd7f90" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "090198534930841fab3a5d1bb637cde49e339654e606195f8d9c76eeb081dc96" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tinyvec" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tracing" +version = "0.1.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +dependencies = [ + "cfg-if", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = 
"tracing-core" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a" +dependencies = [ + "once_cell", +] + +[[package]] +name = "unicode-bidi" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" + +[[package]] +name = "unicode-ident" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c" + +[[package]] +name = "unicode-normalization" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "url" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "uuid" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasmparser" +version = "0.102.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48134de3d7598219ab9eaf6b91b15d8e50d31da76b8519fe4ecfcec2cf35104b" +dependencies = [ + "indexmap 1.9.3", + "url", +] + +[[package]] +name = "watto" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6746b5315e417144282a047ebb82260d45c92d09bf653fa9ec975e3809be942b" +dependencies = [ + "leb128", + "thiserror", +] + +[[package]] +name = "win-util" +version = "0.1.0" +dependencies = [ + "anyhow", + "atexit", + "log", + "os_pipe", + "windows", + "winreg", +] + +[[package]] +name = "windows" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" +dependencies = [ + "windows-targets 0.48.1", +] + +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.1", +] + +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + +[[package]] +name = "windows-targets" +version = "0.48.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f" +dependencies = [ + "windows_aarch64_gnullvm 0.48.0", + "windows_aarch64_msvc 0.48.0", + "windows_i686_gnu 0.48.0", + "windows_i686_msvc 0.48.0", + "windows_x86_64_gnu 0.48.0", + "windows_x86_64_gnullvm 0.48.0", + "windows_x86_64_msvc 0.48.0", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" + +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" + +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" + +[[package]] +name = "winreg" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "937f3df7948156640f46aacef17a70db0de5917bda9c92b0f751f3a955b588fc" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + +[[package]] +name = "zip" +version = 
"0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261" +dependencies = [ + "byteorder", + "crc32fast", + "crossbeam-utils", + "flate2", +] diff --git a/src/agent/coverage/fuzz/Cargo.toml b/src/agent/coverage/fuzz/Cargo.toml new file mode 100644 index 0000000000..6f23f1b7a0 --- /dev/null +++ b/src/agent/coverage/fuzz/Cargo.toml @@ -0,0 +1,30 @@ +[package] +name = "coverage-fuzz" +version = "0.0.0" +publish = false +edition = "2021" + +[package.metadata] +cargo-fuzz = true + +[dependencies] +libfuzzer-sys = "0.4" +tempfile = "3.7" +debuggable-module = { path = "../../debuggable-module" } + + +[dependencies.coverage] +path = ".." + +# Prevent this from interfering with workspaces +[workspace] +members = ["."] + +[profile.release] +debug = 1 + +[[bin]] +name = "fuzz_target_record_coverage" +path = "fuzz_targets/fuzz_target_record_coverage.rs" +test = false +doc = false diff --git a/src/agent/coverage/fuzz/fuzz_targets/fuzz_target_record_coverage.rs b/src/agent/coverage/fuzz/fuzz_targets/fuzz_target_record_coverage.rs new file mode 100644 index 0000000000..c097a1f279 --- /dev/null +++ b/src/agent/coverage/fuzz/fuzz_targets/fuzz_target_record_coverage.rs @@ -0,0 +1,51 @@ +#![no_main] + +use libfuzzer_sys::fuzz_target; +use std::env; +use std::fs; +use std::io::Write; +use std::process::Command; +use std::sync::Arc; +use std::time::Duration; + +use tempfile::NamedTempFile; + +use coverage::allowlist::AllowList; +use coverage::binary::BinaryCoverage; +use coverage::record::CoverageRecorder; + +use debuggable_module::loader::Loader; + +const INPUT_MARKER: &str = "@@"; + +fuzz_target!(|data: &[u8]| { + if data.len() == 0 { + return; + } + + // Write mutated bytes to a file + let mut file = NamedTempFile::new_in(env::current_dir().unwrap()).unwrap(); + file.write_all(data); + let path = String::from(file.path().to_str().unwrap()); + + // Make sure the file is executable + Command::new("chmod").args(["+wrx", &path]).spawn().unwrap().wait(); + file.keep().unwrap(); + + let timeout = Duration::from_secs(5); + + let allowlist = AllowList::default(); + + let _coverage = BinaryCoverage::default(); + let loader = Arc::new(Loader::new()); + + let cmd = Command::new(&path); + + let _recorded = CoverageRecorder::new(cmd) + .module_allowlist(allowlist.clone()) + .loader(loader) + .timeout(timeout) + .record(); + + fs::remove_file(path); +}); From cde6a19352905552e4936a39588508446e8bbb01 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Thu, 14 Sep 2023 11:23:22 -0700 Subject: [PATCH 27/88] Reporting coverage on task start up (#3502) * Reporting coverage on task start up * Moving metric up. 
---
 src/agent/onefuzz-task/src/tasks/coverage/generic.rs | 4 +++-
 src/agent/onefuzz/src/syncdir.rs | 4 ++--
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs
index eeaa861c00..53a45e9948 100644
--- a/src/agent/onefuzz-task/src/tasks/coverage/generic.rs
+++ b/src/agent/onefuzz-task/src/tasks/coverage/generic.rs
@@ -141,6 +141,9 @@ impl CoverageTask {
 
         context.heartbeat.alive();
 
+        info!("report initial coverage");
+        context.report_coverage_stats().await;
+
         for dir in &self.config.readonly_inputs {
             debug!("recording coverage for {}", dir.local_path.display());
 
@@ -161,7 +164,6 @@
         }
 
         if seen_inputs {
-            context.report_coverage_stats().await;
             context.save_and_sync_coverage().await?;
         }
 
diff --git a/src/agent/onefuzz/src/syncdir.rs b/src/agent/onefuzz/src/syncdir.rs
index 2e73b7a694..efd8f8e0e3 100644
--- a/src/agent/onefuzz/src/syncdir.rs
+++ b/src/agent/onefuzz/src/syncdir.rs
@@ -283,7 +283,7 @@
                 Event::new_coverage => {
                     jr_client
                         .send_direct(
-                            JobResultData::CoverageData,
+                            JobResultData::NewCoverage,
                             HashMap::from([("count".to_string(), 1.0)]),
                         )
                         .await;
@@ -351,7 +351,7 @@
                 Event::new_coverage => {
                     jr_client
                         .send_direct(
-                            JobResultData::CoverageData,
+                            JobResultData::NewCoverage,
                             HashMap::from([("count".to_string(), 1.0)]),
                         )
                         .await;

From 1fb156333635abecb95cb7bf27a43a7d061fd03b Mon Sep 17 00:00:00 2001
From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com>
Date: Thu, 14 Sep 2023 15:22:13 -0700
Subject: [PATCH 28/88] Remove feature flag from heartbeat metrics. (#3505)

---
 src/ApiService/ApiService/Functions/QueueNodeHeartbeat.cs | 5 ++---
 src/ApiService/ApiService/Functions/QueueTaskHeartbeat.cs | 5 ++---
 2 files changed, 4 insertions(+), 6 deletions(-)

diff --git a/src/ApiService/ApiService/Functions/QueueNodeHeartbeat.cs b/src/ApiService/ApiService/Functions/QueueNodeHeartbeat.cs
index ba19175938..649443cf28 100644
--- a/src/ApiService/ApiService/Functions/QueueNodeHeartbeat.cs
+++ b/src/ApiService/ApiService/Functions/QueueNodeHeartbeat.cs
@@ -41,9 +41,8 @@ public async Async.Task Run([QueueTrigger("node-heartbeat", Connection = "AzureW
 
         var nodeHeartbeatEvent = new EventNodeHeartbeat(node.MachineId, node.ScalesetId, node.PoolName, node.State);
         // TODO: do we still send event if we fail do update the table ?
         await events.SendEvent(nodeHeartbeatEvent);
-        if (await _context.FeatureManagerSnapshot.IsEnabledAsync(FeatureFlagConstants.EnableCustomMetricTelemetry)) {
-            metrics.SendMetric(1, nodeHeartbeatEvent);
-        }
+        metrics.SendMetric(1, nodeHeartbeatEvent);
+
     }
 }

diff --git a/src/ApiService/ApiService/Functions/QueueTaskHeartbeat.cs b/src/ApiService/ApiService/Functions/QueueTaskHeartbeat.cs
index 6eba20c9cf..850e77f71f 100644
--- a/src/ApiService/ApiService/Functions/QueueTaskHeartbeat.cs
+++ b/src/ApiService/ApiService/Functions/QueueTaskHeartbeat.cs
@@ -45,8 +45,7 @@ public async Async.Task Run([QueueTrigger("task-heartbeat", Connection = "AzureW
         var taskHeartBeatEvent = new EventTaskHeartbeat(newTask.JobId, newTask.TaskId, job.Config.Project, job.Config.Name, newTask.State, newTask.Config);
 
         await _events.SendEvent(taskHeartBeatEvent);
-        if (await _context.FeatureManagerSnapshot.IsEnabledAsync(FeatureFlagConstants.EnableCustomMetricTelemetry)) {
-            _metrics.SendMetric(1, taskHeartBeatEvent);
-        }
+        _metrics.SendMetric(1, taskHeartBeatEvent);
+
     }
 }

From c7a982748e13cfd1d8bf2e39dd8b36ee28980e46 Mon Sep 17 00:00:00 2001
From: Marc Greisen
Date: Fri, 15 Sep 2023 09:36:35 -0700
Subject: [PATCH 29/88] Update archive notice. (#3507)

---
 README.md | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 486dae6c15..01daa7f7f1 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,8 @@
 # OneFuzz
 
-# IMPORTANT NOTICE
+# :exclamation: IMPORTANT NOTICE :exclamation:
+
+**_August 31, 2023_**.
 
 **_Since September 2020 when OneFuzz was first open sourced, we’ve been on a journey to create a best-in-class orchestrator for running fuzzers, driving security and quality into our products._**
 
@@ -13,6 +15,10 @@
 
 **_The OneFuzz team._**
 
+---
+**_Update: September 15 2023:_**
+**_Our current target to archive the project is September 30th, 2023._**
+
 ---
 
 [![Onefuzz build status](https://github.com/microsoft/onefuzz/workflows/Build/badge.svg?branch=main)](https://github.com/microsoft/onefuzz/actions/workflows/ci.yml?query=branch%3Amain)

From 58da7b431e0b307155b508eeee5922f23b209c46 Mon Sep 17 00:00:00 2001
From: Kanan B <32438208+kananb@users.noreply.github.com>
Date: Wed, 20 Sep 2023 14:28:05 -0700
Subject: [PATCH 30/88] Add onefuzz service version to job created events (#3504)

---
 src/ApiService/ApiService/Functions/Jobs.cs | 2 +-
 src/ApiService/ApiService/OneFuzzTypes/Events.cs | 3 ++-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/src/ApiService/ApiService/Functions/Jobs.cs b/src/ApiService/ApiService/Functions/Jobs.cs
index bef61adfc2..3f8746df1f 100644
--- a/src/ApiService/ApiService/Functions/Jobs.cs
+++ b/src/ApiService/ApiService/Functions/Jobs.cs
@@ -83,7 +83,7 @@ private async Task<HttpResponseData> Post(HttpRequestData req, FunctionContext c
                 "job");
         }
 
-        await _context.Events.SendEvent(new EventJobCreated(job.JobId, job.Config, job.UserInfo));
+        await _context.Events.SendEvent(new EventJobCreated(job.JobId, job.Config, job.UserInfo, _context.ServiceConfiguration.OneFuzzVersion));
 
         return await RequestHandling.Ok(req, JobResponse.ForJob(job, taskInfo: null));
     }

diff --git a/src/ApiService/ApiService/OneFuzzTypes/Events.cs b/src/ApiService/ApiService/OneFuzzTypes/Events.cs
index d81e083db4..b06391f12f 100644
--- a/src/ApiService/ApiService/OneFuzzTypes/Events.cs
+++ b/src/ApiService/ApiService/OneFuzzTypes/Events.cs
@@ -124,7 +124,8 @@ TaskConfig Config
 public record EventJobCreated(
     Guid JobId,
     JobConfig Config,
-    StoredUserInfo? UserInfo
+    StoredUserInfo? UserInfo,
+    string OneFuzzVersion
 ) : BaseEvent();
 
 

From 60766e6d7c9f8e7398fd4c1b310294f8d944d85c Mon Sep 17 00:00:00 2001
From: Teo Voinea <58236992+tevoinea@users.noreply.github.com>
Date: Thu, 21 Sep 2023 13:31:45 -0400
Subject: [PATCH 31/88] Tevoinea/add version checking in local tasks (#3517)

* Compare task version to service version

* Swallow output when looking for appropriate name

---
 .../onefuzz-task/src/check_for_update.rs | 78 +++++++++++++++++++
 src/agent/onefuzz-task/src/main.rs | 14 +++-
 2 files changed, 90 insertions(+), 2 deletions(-)
 create mode 100644 src/agent/onefuzz-task/src/check_for_update.rs

diff --git a/src/agent/onefuzz-task/src/check_for_update.rs b/src/agent/onefuzz-task/src/check_for_update.rs
new file mode 100644
index 0000000000..51c0178158
--- /dev/null
+++ b/src/agent/onefuzz-task/src/check_for_update.rs
@@ -0,0 +1,78 @@
+use std::process::Stdio;
+
+use anyhow::Result;
+use serde_json::Value;
+
+pub fn run(onefuzz_built_version: &str) -> Result<()> {
+    // Find onefuzz cli
+    let common_names = ["onefuzz", "onefuzz.exe", "onefuzz.cmd"];
+    let mut valid_commands: Vec<_> = common_names
+        .into_iter()
+        .map(|name| {
+            (
+                name,
+                std::process::Command::new(name)
+                    .stderr(Stdio::null())
+                    .stdout(Stdio::null())
+                    .arg("-h")
+                    .spawn(),
+            )
+        })
+        .filter_map(|(name, child)| child.ok().map(|c| (name, c)))
+        .collect();
+
+    if valid_commands.is_empty() {
+        bail!(
+            "Could not find any of the following common names for the onefuzz-cli: {:?}",
+            common_names
+        );
+    }
+
+    let (name, child) = valid_commands
+        .first_mut()
+        .expect("Expected valid_commands to not be empty");
+
+    info!("Found the onefuzz cli at: {}", name);
+
+    // We just used this to check if it exists, we'll invoke it again later
+    let _ = child.kill();
+
+    // Run onefuzz info get
+    let output = std::process::Command::new(&name)
+        .args(["info", "get"])
+        .output()?;
+
+    if !output.status.success() {
+        bail!(
+            "Failed to run command `{} info get`. stderr: {:?}, stdout: {:?}",
+            name,
+            String::from_utf8(output.stderr),
+            String::from_utf8(output.stdout)
+        )
+    }
+
+    let stdout = String::from_utf8(output.stdout)?;
+    let info: Value = serde_json::from_str(&stdout)?;
+
+    if let Some(onefuzz_service_version) = info["versions"]["onefuzz"]["version"].as_str() {
+        if onefuzz_service_version == onefuzz_built_version {
+            println!("You are up to date!");
+        } else {
+            println!(
+                "Version mismatch. onefuzz-task version: {} | onefuzz service version: {}",
+                onefuzz_built_version, onefuzz_service_version
+            );
+            println!(
+                "To update, please run the following command: {} tools get .",
+                name
+            );
+            println!("Then extract the onefuzz-task binary from the appropriate OS folder");
+        }
+        return Ok(());
+    }
+
+    bail!(
+        "Failed to get onefuzz service version from cli response: {}",
+        stdout
+    )
+}
diff --git a/src/agent/onefuzz-task/src/main.rs b/src/agent/onefuzz-task/src/main.rs
index 77fd7a59ea..d230f92ff5 100644
--- a/src/agent/onefuzz-task/src/main.rs
+++ b/src/agent/onefuzz-task/src/main.rs
@@ -11,8 +11,10 @@ extern crate onefuzz;
 
 use anyhow::Result;
 use clap::{ArgMatches, Command};
+
 use std::io::{stdout, Write};
 
+mod check_for_update;
 mod local;
 mod managed;
 mod tasks;
@@ -20,12 +22,15 @@
 const LICENSE_CMD: &str = "licenses";
 const LOCAL_CMD: &str = "local";
 const MANAGED_CMD: &str = "managed";
+const CHECK_FOR_UPDATE: &str = "check_for_update";
+
+const ONEFUZZ_BUILT_VERSION: &str = env!("ONEFUZZ_VERSION");
 
 fn main() -> Result<()> {
     let built_version = format!(
         "{} onefuzz:{} git:{}",
         crate_version!(),
-        env!("ONEFUZZ_VERSION"),
+        ONEFUZZ_BUILT_VERSION,
         env!("GIT_VERSION")
     );
 
@@ -33,7 +38,11 @@ fn main() -> Result<()> {
         .version(built_version)
         .subcommand(managed::cmd::args(MANAGED_CMD))
         .subcommand(local::cmd::args(LOCAL_CMD))
-        .subcommand(Command::new(LICENSE_CMD).about("display third-party licenses"));
+        .subcommand(Command::new(LICENSE_CMD).about("display third-party licenses"))
+        .subcommand(
+            Command::new(CHECK_FOR_UPDATE)
+                .about("compares the version of onefuzz-task with the onefuzz service"),
+        );
 
     let matches = app.get_matches();
 
@@ -55,6 +64,7 @@ async fn run(args: ArgMatches) -> Result<()> {
         Some((LICENSE_CMD, _)) => licenses(),
         Some((LOCAL_CMD, sub)) => local::cmd::run(sub.to_owned()).await,
        Some((MANAGED_CMD, sub)) => managed::cmd::run(sub).await,
+        Some((CHECK_FOR_UPDATE, _)) => check_for_update::run(ONEFUZZ_BUILT_VERSION),
         _ => anyhow::bail!("No command provided. Run with 'help' to see available commands."),
     }
 }

From e3c4a409e19ffdafb9dd7997a96b0e771c168228 Mon Sep 17 00:00:00 2001
From: Teo Voinea <58236992+tevoinea@users.noreply.github.com>
Date: Thu, 21 Sep 2023 14:17:02 -0400
Subject: [PATCH 32/88] Create directories if they don't exist in the template (#3522)

* Create directories if they don't exist in the template

* fmt

---
 src/agent/onefuzz-task/src/local/template.rs | 8 ++++----
 .../onefuzz-task/tests/template_integration.rs | 16 ++++++----------
 2 files changed, 10 insertions(+), 14 deletions(-)

diff --git a/src/agent/onefuzz-task/src/local/template.rs b/src/agent/onefuzz-task/src/local/template.rs
index 73ae6e5e48..64b342744d 100644
--- a/src/agent/onefuzz-task/src/local/template.rs
+++ b/src/agent/onefuzz-task/src/local/template.rs
@@ -136,16 +136,16 @@ impl RunContext {
         name: impl AsRef<str>,
         path: impl AsRef<Path>,
     ) -> Result<SyncedDir> {
-        if !path.as_ref().exists() {
-            std::fs::create_dir_all(&path)?;
-        }
-
         self.to_sync_dir(name, path)?
.monitor_count(&self.event_sender) } pub fn to_sync_dir(&self, name: impl AsRef, path: impl AsRef) -> Result { let path = path.as_ref(); + if !path.exists() { + std::fs::create_dir_all(path)?; + } + let name = name.as_ref(); let current_dir = std::env::current_dir()?; if self.create_job_dir { diff --git a/src/agent/onefuzz-task/tests/template_integration.rs b/src/agent/onefuzz-task/tests/template_integration.rs index d0e68e5d02..bcfefd3bda 100644 --- a/src/agent/onefuzz-task/tests/template_integration.rs +++ b/src/agent/onefuzz-task/tests/template_integration.rs @@ -4,6 +4,7 @@ use std::{ path::{Path, PathBuf}, }; +use path_absolutize::Absolutize; use tokio::fs; use anyhow::Result; @@ -131,25 +132,20 @@ async fn create_test_directory(config: &Path, target_exe: &Path) -> Result Date: Wed, 27 Sep 2023 09:13:06 +1300 Subject: [PATCH 33/88] Support for retention policies on containers (#3501) - [x] ability to specify a retention period on a container, which applies to newly-created blobs - [x] specify default retention periods in templates from CLI side There's a small breaking change to the Python JobHelper class. --- .github/workflows/ci.yml | 12 +- src/ApiService/ApiService/FeatureFlags.cs | 1 + .../ApiService/Functions/QueueFileChanges.cs | 44 ++++- .../ApiService/OneFuzzTypes/Enums.cs | 1 + .../onefuzzlib/NotificationOperations.cs | 5 +- .../ApiService/onefuzzlib/RententionPolicy.cs | 24 --- .../ApiService/onefuzzlib/RetentionPolicy.cs | 43 +++++ src/cli/examples/domato.py | 21 ++- src/cli/examples/honggfuzz.py | 19 +- .../source-coverage-libfuzzer.py | 10 +- .../llvm-source-coverage/source-coverage.py | 10 +- src/cli/onefuzz/templates/__init__.py | 93 ++++++++-- src/cli/onefuzz/templates/afl.py | 22 +-- src/cli/onefuzz/templates/libfuzzer.py | 162 ++++++++++++------ src/cli/onefuzz/templates/ossfuzz.py | 8 +- src/cli/onefuzz/templates/radamsa.py | 51 ++++-- src/cli/onefuzz/templates/regression.py | 37 ++-- .../bicep-templates/feature-flags.bicep | 13 ++ src/pytypes/onefuzztypes/enums.py | 1 + 19 files changed, 393 insertions(+), 184 deletions(-) delete mode 100644 src/ApiService/ApiService/onefuzzlib/RententionPolicy.cs create mode 100644 src/ApiService/ApiService/onefuzzlib/RetentionPolicy.cs diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 99e9bddd32..5f07124dd7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -123,7 +123,7 @@ jobs: - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: - python-version: 3.7 + python-version: "3.10" - name: lint shell: bash run: src/ci/check-check-pr.sh @@ -137,7 +137,7 @@ jobs: shell: bash - uses: actions/setup-python@v4 with: - python-version: 3.7 + python-version: "3.10" - uses: actions/download-artifact@v3 with: name: artifact-onefuzztypes @@ -190,7 +190,7 @@ jobs: - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: - python-version: 3.8 + python-version: "3.10" - name: lint shell: bash run: | @@ -208,7 +208,7 @@ jobs: - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: - python-version: 3.8 + python-version: "3.10" - name: lint shell: bash run: | @@ -224,7 +224,7 @@ jobs: - run: src/ci/set-versions.sh - uses: actions/setup-python@v4 with: - python-version: 3.7 + python-version: "3.10" - run: src/ci/onefuzztypes.sh - uses: actions/upload-artifact@v3 with: @@ -481,7 +481,7 @@ jobs: path: artifacts - uses: actions/setup-python@v4 with: - python-version: 3.7 + python-version: "3.10" - name: Lint shell: bash run: | diff --git a/src/ApiService/ApiService/FeatureFlags.cs 
b/src/ApiService/ApiService/FeatureFlags.cs index aa4bc87079..e74396e882 100644 --- a/src/ApiService/ApiService/FeatureFlags.cs +++ b/src/ApiService/ApiService/FeatureFlags.cs @@ -8,4 +8,5 @@ public static class FeatureFlagConstants { public const string EnableBlobRetentionPolicy = "EnableBlobRetentionPolicy"; public const string EnableDryRunBlobRetention = "EnableDryRunBlobRetention"; public const string EnableWorkItemCreation = "EnableWorkItemCreation"; + public const string EnableContainerRetentionPolicies = "EnableContainerRetentionPolicies"; } diff --git a/src/ApiService/ApiService/Functions/QueueFileChanges.cs b/src/ApiService/ApiService/Functions/QueueFileChanges.cs index acdd3e328d..f1c4711f9d 100644 --- a/src/ApiService/ApiService/Functions/QueueFileChanges.cs +++ b/src/ApiService/ApiService/Functions/QueueFileChanges.cs @@ -1,5 +1,6 @@ using System.Text.Json; using System.Text.Json.Nodes; +using System.Threading.Tasks; using Azure.Core; using Microsoft.Azure.Functions.Worker; using Microsoft.Extensions.Logging; @@ -54,6 +55,8 @@ public async Async.Task Run( return; } + var storageAccount = new ResourceIdentifier(topicElement.GetString()!); + try { // Setting isLastRetryAttempt to false will rethrow any exceptions // With the intention that the azure functions runtime will handle requeing @@ -61,7 +64,7 @@ public async Async.Task Run( // requeuing ourselves because azure functions doesn't support retry policies // for queue based functions. - var result = await FileAdded(fileChangeEvent, isLastRetryAttempt: false); + var result = await FileAdded(storageAccount, fileChangeEvent, isLastRetryAttempt: false); if (!result.IsOk && result.ErrorV.Code == ErrorCode.ADO_WORKITEM_PROCESSING_DISABLED) { await RequeueMessage(msg, TimeSpan.FromDays(1)); } @@ -71,16 +74,47 @@ public async Async.Task Run( } } - private async Async.Task FileAdded(JsonDocument fileChangeEvent, bool isLastRetryAttempt) { + private async Async.Task FileAdded(ResourceIdentifier storageAccount, JsonDocument fileChangeEvent, bool isLastRetryAttempt) { var data = fileChangeEvent.RootElement.GetProperty("data"); var url = data.GetProperty("url").GetString()!; var parts = url.Split("/").Skip(3).ToList(); - var container = parts[0]; + var container = Container.Parse(parts[0]); var path = string.Join('/', parts.Skip(1)); - _log.LogInformation("file added : {Container} - {Path}", container, path); - return await _notificationOperations.NewFiles(Container.Parse(container), path, isLastRetryAttempt); + _log.LogInformation("file added : {Container} - {Path}", container.String, path); + + var (_, result) = await ( + ApplyRetentionPolicy(storageAccount, container, path), + _notificationOperations.NewFiles(container, path, isLastRetryAttempt)); + + return result; + } + + private async Async.Task ApplyRetentionPolicy(ResourceIdentifier storageAccount, Container container, string path) { + if (await _context.FeatureManagerSnapshot.IsEnabledAsync(FeatureFlagConstants.EnableContainerRetentionPolicies)) { + // default retention period can be applied to the container + // if one exists, we will set the expiry date on the newly-created blob, if it doesn't already have one + var account = await _storage.GetBlobServiceClientForAccount(storageAccount); + var containerClient = account.GetBlobContainerClient(container.String); + var containerProps = await containerClient.GetPropertiesAsync(); + var retentionPeriod = RetentionPolicyUtils.GetContainerRetentionPeriodFromMetadata(containerProps.Value.Metadata); + if (!retentionPeriod.IsOk) { + 
_log.LogError("invalid retention period: {Error}", retentionPeriod.ErrorV); + } else if (retentionPeriod.OkV is TimeSpan period) { + var blobClient = containerClient.GetBlobClient(path); + var tags = (await blobClient.GetTagsAsync()).Value.Tags; + var expiryDate = DateTime.UtcNow + period; + var tag = RetentionPolicyUtils.CreateExpiryDateTag(DateOnly.FromDateTime(expiryDate)); + if (tags.TryAdd(tag.Key, tag.Value)) { + _ = await blobClient.SetTagsAsync(tags); + _log.LogInformation("applied container retention policy ({Policy}) to {Path}", period, path); + return true; + } + } + } + + return false; } private async Async.Task RequeueMessage(string msg, TimeSpan? visibilityTimeout = null) { diff --git a/src/ApiService/ApiService/OneFuzzTypes/Enums.cs b/src/ApiService/ApiService/OneFuzzTypes/Enums.cs index 4739987e6b..4692debfe8 100644 --- a/src/ApiService/ApiService/OneFuzzTypes/Enums.cs +++ b/src/ApiService/ApiService/OneFuzzTypes/Enums.cs @@ -50,6 +50,7 @@ public enum ErrorCode { ADO_WORKITEM_PROCESSING_DISABLED = 494, ADO_VALIDATION_INVALID_PATH = 495, ADO_VALIDATION_INVALID_PROJECT = 496, + INVALID_RETENTION_PERIOD = 497, // NB: if you update this enum, also update enums.py } diff --git a/src/ApiService/ApiService/onefuzzlib/NotificationOperations.cs b/src/ApiService/ApiService/onefuzzlib/NotificationOperations.cs index dc133e1fba..0eca5b1e00 100644 --- a/src/ApiService/ApiService/onefuzzlib/NotificationOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/NotificationOperations.cs @@ -22,13 +22,12 @@ public NotificationOperations(ILogger log, IOnefuzzConte } public async Async.Task NewFiles(Container container, string filename, bool isLastRetryAttempt) { - var result = OneFuzzResultVoid.Ok; - // We don't want to store file added events for the events container because that causes an infinite loop if (container == WellKnownContainers.Events) { - return result; + return Result.Ok(); } + var result = OneFuzzResultVoid.Ok; var notifications = GetNotifications(container); var hasNotifications = await notifications.AnyAsync(); var reportOrRegression = await _context.Reports.GetReportOrRegression(container, filename, expectReports: hasNotifications); diff --git a/src/ApiService/ApiService/onefuzzlib/RententionPolicy.cs b/src/ApiService/ApiService/onefuzzlib/RententionPolicy.cs deleted file mode 100644 index 4052db93e1..0000000000 --- a/src/ApiService/ApiService/onefuzzlib/RententionPolicy.cs +++ /dev/null @@ -1,24 +0,0 @@ -namespace Microsoft.OneFuzz.Service; - - -public interface IRetentionPolicy { - DateOnly GetExpiryDate(); -} - -public class RetentionPolicyUtils { - public const string EXPIRY_TAG = "Expiry"; - public static KeyValuePair CreateExpiryDateTag(DateOnly expiryDate) => - new(EXPIRY_TAG, expiryDate.ToString()); - - public static DateOnly? GetExpiryDateTagFromTags(IDictionary? 
blobTags) { - if (blobTags != null && - blobTags.TryGetValue(EXPIRY_TAG, out var expiryTag) && - !string.IsNullOrWhiteSpace(expiryTag) && - DateOnly.TryParse(expiryTag, out var expiryDate)) { - return expiryDate; - } - return null; - } - - public static string CreateExpiredBlobTagFilter() => $@"""{EXPIRY_TAG}"" <= '{DateOnly.FromDateTime(DateTime.UtcNow)}'"; -} diff --git a/src/ApiService/ApiService/onefuzzlib/RetentionPolicy.cs b/src/ApiService/ApiService/onefuzzlib/RetentionPolicy.cs new file mode 100644 index 0000000000..48d81df5c7 --- /dev/null +++ b/src/ApiService/ApiService/onefuzzlib/RetentionPolicy.cs @@ -0,0 +1,43 @@ +using System.Xml; + +namespace Microsoft.OneFuzz.Service; + + +public interface IRetentionPolicy { + DateOnly GetExpiryDate(); +} + +public class RetentionPolicyUtils { + public const string EXPIRY_TAG = "Expiry"; + public static KeyValuePair CreateExpiryDateTag(DateOnly expiryDate) => + new(EXPIRY_TAG, expiryDate.ToString()); + + public static DateOnly? GetExpiryDateTagFromTags(IDictionary? blobTags) { + if (blobTags != null && + blobTags.TryGetValue(EXPIRY_TAG, out var expiryTag) && + !string.IsNullOrWhiteSpace(expiryTag) && + DateOnly.TryParse(expiryTag, out var expiryDate)) { + return expiryDate; + } + return null; + } + + public static string CreateExpiredBlobTagFilter() => $@"""{EXPIRY_TAG}"" <= '{DateOnly.FromDateTime(DateTime.UtcNow)}'"; + + // NB: this must match the value used on the CLI side + public const string CONTAINER_RETENTION_KEY = "onefuzz_retentionperiod"; + + public static OneFuzzResult GetContainerRetentionPeriodFromMetadata(IDictionary? containerMetadata) { + if (containerMetadata is not null && + containerMetadata.TryGetValue(CONTAINER_RETENTION_KEY, out var retentionString) && + !string.IsNullOrWhiteSpace(retentionString)) { + try { + return Result.Ok(XmlConvert.ToTimeSpan(retentionString)); + } catch (Exception ex) { + return Error.Create(ErrorCode.INVALID_RETENTION_PERIOD, ex.Message); + } + } + + return Result.Ok(null); + } +} diff --git a/src/cli/examples/domato.py b/src/cli/examples/domato.py index 7c2abc6301..4bdf2a297c 100755 --- a/src/cli/examples/domato.py +++ b/src/cli/examples/domato.py @@ -67,7 +67,7 @@ def upload_to_fuzzer_container(of: Onefuzz, fuzzer_name: str, fuzzer_url: str) - def upload_to_setup_container(of: Onefuzz, helper: JobHelper, setup_dir: str) -> None: - setup_sas = of.containers.get(helper.containers[ContainerType.setup]).sas_url + setup_sas = of.containers.get(helper.container_name(ContainerType.setup)).sas_url if AZCOPY_PATH is None: raise Exception("missing azcopy") command = [AZCOPY_PATH, "sync", setup_dir, setup_sas] @@ -143,13 +143,16 @@ def main() -> None: helper.create_containers() helper.setup_notifications(notification_config) upload_to_setup_container(of, helper, args.setup_dir) - add_setup_script(of, helper.containers[ContainerType.setup]) + add_setup_script(of, helper.container_name(ContainerType.setup)) containers = [ - (ContainerType.setup, helper.containers[ContainerType.setup]), - (ContainerType.crashes, helper.containers[ContainerType.crashes]), - (ContainerType.reports, helper.containers[ContainerType.reports]), - (ContainerType.unique_reports, helper.containers[ContainerType.unique_reports]), + (ContainerType.setup, helper.container_name(ContainerType.setup)), + (ContainerType.crashes, helper.container_name(ContainerType.crashes)), + (ContainerType.reports, helper.container_name(ContainerType.reports)), + ( + ContainerType.unique_reports, + helper.container_name(ContainerType.unique_reports), + 
), ] of.logger.info("Creating generic_crash_report task") @@ -164,11 +167,11 @@ def main() -> None: containers = [ (ContainerType.tools, Container(FUZZER_NAME)), - (ContainerType.setup, helper.containers[ContainerType.setup]), - (ContainerType.crashes, helper.containers[ContainerType.crashes]), + (ContainerType.setup, helper.container_name(ContainerType.setup)), + (ContainerType.crashes, helper.container_name(ContainerType.crashes)), ( ContainerType.readonly_inputs, - helper.containers[ContainerType.readonly_inputs], + helper.container_name(ContainerType.readonly_inputs), ), ] diff --git a/src/cli/examples/honggfuzz.py b/src/cli/examples/honggfuzz.py index 225b7f7510..9466716d98 100644 --- a/src/cli/examples/honggfuzz.py +++ b/src/cli/examples/honggfuzz.py @@ -88,13 +88,16 @@ def main() -> None: if args.inputs: helper.upload_inputs(args.inputs) - add_setup_script(of, helper.containers[ContainerType.setup]) + add_setup_script(of, helper.container_name(ContainerType.setup)) containers = [ - (ContainerType.setup, helper.containers[ContainerType.setup]), - (ContainerType.crashes, helper.containers[ContainerType.crashes]), - (ContainerType.reports, helper.containers[ContainerType.reports]), - (ContainerType.unique_reports, helper.containers[ContainerType.unique_reports]), + (ContainerType.setup, helper.container_name(ContainerType.setup)), + (ContainerType.crashes, helper.container_name(ContainerType.crashes)), + (ContainerType.reports, helper.container_name(ContainerType.reports)), + ( + ContainerType.unique_reports, + helper.container_name(ContainerType.unique_reports), + ), ] of.logger.info("Creating generic_crash_report task") @@ -109,11 +112,11 @@ def main() -> None: containers = [ (ContainerType.tools, Container("honggfuzz")), - (ContainerType.setup, helper.containers[ContainerType.setup]), - (ContainerType.crashes, helper.containers[ContainerType.crashes]), + (ContainerType.setup, helper.container_name(ContainerType.setup)), + (ContainerType.crashes, helper.container_name(ContainerType.crashes)), ( ContainerType.inputs, - helper.containers[ContainerType.inputs], + helper.container_name(ContainerType.inputs), ), ] diff --git a/src/cli/examples/llvm-source-coverage/source-coverage-libfuzzer.py b/src/cli/examples/llvm-source-coverage/source-coverage-libfuzzer.py index a8a6a91ac9..b8ab5f347a 100755 --- a/src/cli/examples/llvm-source-coverage/source-coverage-libfuzzer.py +++ b/src/cli/examples/llvm-source-coverage/source-coverage-libfuzzer.py @@ -74,15 +74,15 @@ def main() -> None: helper.create_containers() of.containers.files.upload_file( - helper.containers[ContainerType.tools], f"{args.tools}/source-coverage.sh" + helper.container_name(ContainerType.tools), f"{args.tools}/source-coverage.sh" ) containers = [ - (ContainerType.setup, helper.containers[ContainerType.setup]), - (ContainerType.analysis, helper.containers[ContainerType.analysis]), - (ContainerType.tools, helper.containers[ContainerType.tools]), + (ContainerType.setup, helper.container_name(ContainerType.setup)), + (ContainerType.analysis, helper.container_name(ContainerType.analysis)), + (ContainerType.tools, helper.container_name(ContainerType.tools)), # note, analysis is typically for crashes, but this is analyzing inputs - (ContainerType.crashes, helper.containers[ContainerType.inputs]), + (ContainerType.crashes, helper.container_name(ContainerType.inputs)), ] of.logger.info("Creating generic_analysis task") diff --git a/src/cli/examples/llvm-source-coverage/source-coverage.py 
b/src/cli/examples/llvm-source-coverage/source-coverage.py index 749662caba..ae903cd3b5 100755 --- a/src/cli/examples/llvm-source-coverage/source-coverage.py +++ b/src/cli/examples/llvm-source-coverage/source-coverage.py @@ -61,15 +61,15 @@ def main() -> None: helper.upload_inputs(args.inputs) of.containers.files.upload_file( - helper.containers[ContainerType.tools], f"{args.tools}/source-coverage.sh" + helper.container_name(ContainerType.tools), f"{args.tools}/source-coverage.sh" ) containers = [ - (ContainerType.setup, helper.containers[ContainerType.setup]), - (ContainerType.analysis, helper.containers[ContainerType.analysis]), - (ContainerType.tools, helper.containers[ContainerType.tools]), + (ContainerType.setup, helper.container_name(ContainerType.setup)), + (ContainerType.analysis, helper.container_name(ContainerType.analysis)), + (ContainerType.tools, helper.container_name(ContainerType.tools)), # note, analysis is typically for crashes, but this is analyzing inputs - (ContainerType.crashes, helper.containers[ContainerType.inputs]), + (ContainerType.crashes, helper.container_name(ContainerType.inputs)), ] of.logger.info("Creating generic_analysis task") diff --git a/src/cli/onefuzz/templates/__init__.py b/src/cli/onefuzz/templates/__init__.py index c7bb21d41a..a88db46303 100644 --- a/src/cli/onefuzz/templates/__init__.py +++ b/src/cli/onefuzz/templates/__init__.py @@ -6,6 +6,7 @@ import os import tempfile import zipfile +from datetime import timedelta from typing import Any, Dict, List, Optional, Tuple from uuid import uuid4 @@ -22,6 +23,30 @@ class StoppedEarly(Exception): pass +class ContainerTemplate: + def __init__( + self, + name: Container, + exists: bool, + *, + retention_period: Optional[timedelta] = None, + ): + self.name = name + self.retention_period = retention_period + # TODO: exists is not yet used/checked + self.exists = exists + + @staticmethod + def existing(name: Container) -> "ContainerTemplate": + return ContainerTemplate(name, True) + + @staticmethod + def fresh( + name: Container, *, retention_period: Optional[timedelta] = None + ) -> "ContainerTemplate": + return ContainerTemplate(name, False, retention_period=retention_period) + + class JobHelper: def __init__( self, @@ -59,7 +84,7 @@ def __init__( self.wait_for_running: bool = False self.wait_for_stopped: bool = False - self.containers: Dict[ContainerType, Container] = {} + self.containers: Dict[ContainerType, ContainerTemplate] = {} self.tags: Dict[str, str] = {"project": project, "name": name, "build": build} if job is None: self.onefuzz.versions.check() @@ -71,6 +96,20 @@ def __init__( else: self.job = job + def add_existing_container( + self, container_type: ContainerType, container: Container + ) -> None: + self.containers[container_type] = ContainerTemplate.existing(container) + + def container_name(self, container_type: ContainerType) -> Container: + return self.containers[container_type].name + + def container_names(self) -> Dict[ContainerType, Container]: + return { + container_type: container.name + for (container_type, container) in self.containers.items() + } + def define_containers(self, *types: ContainerType) -> None: """ Define default container set based on provided types @@ -79,13 +118,23 @@ def define_containers(self, *types: ContainerType) -> None: """ for container_type in types: - self.containers[container_type] = self.onefuzz.utils.build_container_name( + container_name = self.onefuzz.utils.build_container_name( container_type=container_type, project=self.project, name=self.name, 
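The point of the new ContainerTemplate is easiest to see at a call site: fresh containers are created by the helper (and may carry a retention period), while existing ones are supplied by the caller and only referenced by name. A hypothetical usage sketch, assuming an already constructed JobHelper in `helper` and using only the helpers defined above (container names are made up):

    helper.define_containers(ContainerType.crashes)  # fresh, name built by the helper
    helper.add_existing_container(ContainerType.inputs, Container("my-inputs"))

    assert not helper.containers[ContainerType.crashes].exists
    assert helper.containers[ContainerType.inputs].exists

    # task creation only needs the names, regardless of provenance
    name = helper.container_name(ContainerType.inputs)
    by_type = helper.container_names()  # Dict[ContainerType, Container]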
build=self.build, platform=self.platform, ) + self.containers[container_type] = ContainerTemplate.fresh( + container_name, + retention_period=JobHelper._default_retention_period(container_type), + ) + + @staticmethod + def _default_retention_period(container_type: ContainerType) -> Optional[timedelta]: + if container_type == ContainerType.crashdumps: + return timedelta(days=90) + return None def get_unique_container_name(self, container_type: ContainerType) -> Container: return Container( @@ -97,11 +146,17 @@ def get_unique_container_name(self, container_type: ContainerType) -> Container: ) def create_containers(self) -> None: - for container_type, container_name in self.containers.items(): - self.logger.info("using container: %s", container_name) - self.onefuzz.containers.create( - container_name, metadata={"container_type": container_type.name} - ) + for container_type, container in self.containers.items(): + self.logger.info("using container: %s", container.name) + metadata = {"container_type": container_type.name} + if container.retention_period is not None: + # format as ISO8601 period + # NB: this must match the value used on the server side + metadata[ + "onefuzz_retentionperiod" + ] = f"P{container.retention_period.days}D" + + self.onefuzz.containers.create(container.name, metadata=metadata) def delete_container(self, container_name: Container) -> None: self.onefuzz.containers.delete(container_name) @@ -112,12 +167,12 @@ def setup_notifications(self, config: Optional[NotificationConfig]) -> None: containers: List[Container] = [] if ContainerType.unique_reports in self.containers: - containers.append(self.containers[ContainerType.unique_reports]) + containers.append(self.container_name(ContainerType.unique_reports)) else: - containers.append(self.containers[ContainerType.reports]) + containers.append(self.container_name(ContainerType.reports)) if ContainerType.regression_reports in self.containers: - containers.append(self.containers[ContainerType.regression_reports]) + containers.append(self.container_name(ContainerType.regression_reports)) for container in containers: self.logger.info("creating notification config for %s", container) @@ -141,25 +196,25 @@ def upload_setup( self.logger.info("uploading setup dir `%s`" % setup_dir) self.onefuzz.containers.files.upload_dir( - self.containers[ContainerType.setup], setup_dir + self.container_name(ContainerType.setup), setup_dir ) else: self.logger.info("uploading target exe `%s`" % target_exe) self.onefuzz.containers.files.upload_file( - self.containers[ContainerType.setup], target_exe + self.container_name(ContainerType.setup), target_exe ) pdb_path = os.path.splitext(target_exe)[0] + ".pdb" if os.path.exists(pdb_path): pdb_name = os.path.basename(pdb_path) self.onefuzz.containers.files.upload_file( - self.containers[ContainerType.setup], pdb_path, pdb_name + self.container_name(ContainerType.setup), pdb_path, pdb_name ) if setup_files: for filename in setup_files: self.logger.info("uploading %s", filename) self.onefuzz.containers.files.upload_file( - self.containers[ContainerType.setup], filename + self.container_name(ContainerType.setup), filename ) def upload_inputs(self, path: Directory, read_only: bool = False) -> None: @@ -167,7 +222,9 @@ def upload_inputs(self, path: Directory, read_only: bool = False) -> None: container_type = ContainerType.inputs if read_only: container_type = ContainerType.readonly_inputs - self.onefuzz.containers.files.upload_dir(self.containers[container_type], path) + 
self.onefuzz.containers.files.upload_dir( + self.container_name(container_type), path + ) def upload_inputs_zip(self, path: File) -> None: with tempfile.TemporaryDirectory() as tmp_dir: @@ -176,7 +233,7 @@ def upload_inputs_zip(self, path: File) -> None: self.logger.info("uploading inputs from zip: `%s`" % path) self.onefuzz.containers.files.upload_dir( - self.containers[ContainerType.inputs], Directory(tmp_dir) + self.container_name(ContainerType.inputs), Directory(tmp_dir) ) @classmethod @@ -195,8 +252,8 @@ def wait_on( wait_for_files = [] self.to_monitor = { - self.containers[x]: len( - self.onefuzz.containers.files.list(self.containers[x]).files + self.container_name(x): len( + self.onefuzz.containers.files.list(self.container_name(x)).files ) for x in wait_for_files } diff --git a/src/cli/onefuzz/templates/afl.py b/src/cli/onefuzz/templates/afl.py index d3019a19cf..e4d49233dc 100644 --- a/src/cli/onefuzz/templates/afl.py +++ b/src/cli/onefuzz/templates/afl.py @@ -98,7 +98,7 @@ def basic( if existing_inputs: self.onefuzz.containers.get(existing_inputs) - helper.containers[ContainerType.inputs] = existing_inputs + helper.add_existing_container(ContainerType.inputs, existing_inputs) else: helper.define_containers(ContainerType.inputs) @@ -112,7 +112,7 @@ def basic( if ( len( self.onefuzz.containers.files.list( - helper.containers[ContainerType.inputs] + helper.containers[ContainerType.inputs].name ).files ) == 0 @@ -131,16 +131,16 @@ def basic( containers = [ (ContainerType.tools, afl_container), - (ContainerType.setup, helper.containers[ContainerType.setup]), - (ContainerType.crashes, helper.containers[ContainerType.crashes]), - (ContainerType.inputs, helper.containers[ContainerType.inputs]), + (ContainerType.setup, helper.container_name(ContainerType.setup)), + (ContainerType.crashes, helper.container_name(ContainerType.crashes)), + (ContainerType.inputs, helper.container_name(ContainerType.inputs)), ] if extra_setup_container is not None: containers.append( ( ContainerType.extra_setup, - helper.containers[ContainerType.extra_setup], + extra_setup_container, ) ) @@ -169,12 +169,12 @@ def basic( ) report_containers = [ - (ContainerType.setup, helper.containers[ContainerType.setup]), - (ContainerType.crashes, helper.containers[ContainerType.crashes]), - (ContainerType.reports, helper.containers[ContainerType.reports]), + (ContainerType.setup, helper.container_name(ContainerType.setup)), + (ContainerType.crashes, helper.container_name(ContainerType.crashes)), + (ContainerType.reports, helper.container_name(ContainerType.reports)), ( ContainerType.unique_reports, - helper.containers[ContainerType.unique_reports], + helper.container_name(ContainerType.unique_reports), ), ] @@ -182,7 +182,7 @@ def basic( report_containers.append( ( ContainerType.extra_setup, - helper.containers[ContainerType.extra_setup], + helper.container_name(ContainerType.extra_setup), ) ) diff --git a/src/cli/onefuzz/templates/libfuzzer.py b/src/cli/onefuzz/templates/libfuzzer.py index 7716cfefed..f487372121 100644 --- a/src/cli/onefuzz/templates/libfuzzer.py +++ b/src/cli/onefuzz/templates/libfuzzer.py @@ -85,7 +85,7 @@ def _create_tasks( task_env: Optional[Dict[str, str]] = None, ) -> None: target_options = target_options or [] - regression_containers = [ + regression_containers: List[Tuple[ContainerType, Container]] = [ (ContainerType.setup, containers[ContainerType.setup]), (ContainerType.crashes, containers[ContainerType.crashes]), (ContainerType.unique_reports, containers[ContainerType.unique_reports]), @@ -129,7 
+129,7 @@ def _create_tasks( task_env=task_env, ) - fuzzer_containers = [ + fuzzer_containers: List[Tuple[ContainerType, Container]] = [ (ContainerType.setup, containers[ContainerType.setup]), (ContainerType.crashes, containers[ContainerType.crashes]), (ContainerType.crashdumps, containers[ContainerType.crashdumps]), @@ -184,7 +184,7 @@ def _create_tasks( prereq_tasks = [fuzzer_task.task_id, regression_task.task_id] - coverage_containers = [ + coverage_containers: List[Tuple[ContainerType, Container]] = [ (ContainerType.setup, containers[ContainerType.setup]), (ContainerType.coverage, containers[ContainerType.coverage]), (ContainerType.readonly_inputs, containers[ContainerType.inputs]), @@ -245,7 +245,7 @@ def _create_tasks( task_env=task_env, ) - report_containers = [ + report_containers: List[Tuple[ContainerType, Container]] = [ (ContainerType.setup, containers[ContainerType.setup]), (ContainerType.crashes, containers[ContainerType.crashes]), (ContainerType.reports, containers[ContainerType.reports]), @@ -285,7 +285,7 @@ def _create_tasks( if analyzer_exe is not None: self.logger.info("creating custom analysis") - analysis_containers = [ + analysis_containers: List[Tuple[ContainerType, Container]] = [ (ContainerType.setup, containers[ContainerType.setup]), (ContainerType.analysis, containers[ContainerType.analysis]), (ContainerType.crashes, containers[ContainerType.crashes]), @@ -428,15 +428,17 @@ def basic( ) if existing_inputs: - helper.containers[ContainerType.inputs] = existing_inputs + helper.add_existing_container(ContainerType.inputs, existing_inputs) else: helper.define_containers(ContainerType.inputs) if readonly_inputs: - helper.containers[ContainerType.readonly_inputs] = readonly_inputs + helper.add_existing_container( + ContainerType.readonly_inputs, readonly_inputs + ) if crashes: - helper.containers[ContainerType.crashes] = crashes + helper.add_existing_container(ContainerType.crashes, crashes) if analyzer_exe is not None: helper.define_containers(ContainerType.analysis) @@ -465,17 +467,19 @@ def basic( else: source_allowlist_blob_name = None - containers = helper.containers - if extra_setup_container is not None: - containers[ContainerType.extra_setup] = extra_setup_container + helper.add_existing_container( + ContainerType.extra_setup, extra_setup_container + ) if extra_output_container is not None: - containers[ContainerType.extra_output] = extra_output_container + helper.add_existing_container( + ContainerType.extra_output, extra_output_container + ) self._create_tasks( job=helper.job, - containers=containers, + containers=helper.container_names(), pool_name=pool_name, target_exe=target_exe_blob_name, vm_count=vm_count, @@ -600,19 +604,35 @@ def merge( target_exe_blob_name = helper.setup_relative_blob_name(target_exe, setup_dir) merge_containers = [ - (ContainerType.setup, helper.containers[ContainerType.setup]), - ( - ContainerType.unique_inputs, - output_container or helper.containers[ContainerType.unique_inputs], - ), + (ContainerType.setup, helper.container_name(ContainerType.setup)), ] + if output_container: + merge_containers.append( + ( + ContainerType.unique_inputs, + output_container, + ) + ) + else: + merge_containers.append( + ( + ContainerType.unique_inputs, + helper.container_name(ContainerType.unique_inputs), + ) + ) + if extra_setup_container is not None: - merge_containers.append((ContainerType.extra_setup, extra_setup_container)) + merge_containers.append( + ( + ContainerType.extra_setup, + extra_setup_container, + ) + ) if inputs: 
merge_containers.append( - (ContainerType.inputs, helper.containers[ContainerType.inputs]) + (ContainerType.inputs, helper.container_name(ContainerType.inputs)) ) if existing_inputs: for existing_container in existing_inputs: @@ -735,18 +755,18 @@ def dotnet( ContainerType.no_repro, ) - containers = helper.containers - if existing_inputs: - helper.containers[ContainerType.inputs] = existing_inputs + helper.add_existing_container(ContainerType.inputs, existing_inputs) else: helper.define_containers(ContainerType.inputs) if readonly_inputs: - helper.containers[ContainerType.readonly_inputs] = readonly_inputs + helper.add_existing_container( + ContainerType.readonly_inputs, readonly_inputs + ) if crashes: - helper.containers[ContainerType.crashes] = crashes + helper.add_existing_container(ContainerType.crashes, crashes) # Assumes that `libfuzzer-dotnet` and supporting tools were uploaded upon deployment. fuzzer_tools_container = Container( @@ -754,15 +774,20 @@ def dotnet( ) fuzzer_containers = [ - (ContainerType.setup, containers[ContainerType.setup]), - (ContainerType.crashes, containers[ContainerType.crashes]), - (ContainerType.crashdumps, containers[ContainerType.crashdumps]), - (ContainerType.inputs, containers[ContainerType.inputs]), + (ContainerType.setup, helper.container_name(ContainerType.setup)), + (ContainerType.crashes, helper.container_name(ContainerType.crashes)), + (ContainerType.crashdumps, helper.container_name(ContainerType.crashdumps)), + (ContainerType.inputs, helper.container_name(ContainerType.inputs)), (ContainerType.tools, fuzzer_tools_container), ] if extra_setup_container is not None: - fuzzer_containers.append((ContainerType.extra_setup, extra_setup_container)) + fuzzer_containers.append( + ( + ContainerType.extra_setup, + extra_setup_container, + ) + ) helper.create_containers() helper.setup_notifications(notification_config) @@ -814,15 +839,21 @@ def dotnet( libfuzzer_dotnet_loader_dll = LIBFUZZER_DOTNET_LOADER_PATH coverage_containers = [ - (ContainerType.setup, containers[ContainerType.setup]), - (ContainerType.coverage, containers[ContainerType.coverage]), - (ContainerType.readonly_inputs, containers[ContainerType.inputs]), + (ContainerType.setup, helper.container_name(ContainerType.setup)), + (ContainerType.coverage, helper.container_name(ContainerType.coverage)), + ( + ContainerType.readonly_inputs, + helper.container_name(ContainerType.inputs), + ), (ContainerType.tools, fuzzer_tools_container), ] if extra_setup_container is not None: coverage_containers.append( - (ContainerType.extra_setup, extra_setup_container) + ( + ContainerType.extra_setup, + extra_setup_container, + ) ) self.logger.info("creating `dotnet_coverage` task") @@ -846,16 +877,24 @@ def dotnet( ) report_containers = [ - (ContainerType.setup, containers[ContainerType.setup]), - (ContainerType.crashes, containers[ContainerType.crashes]), - (ContainerType.reports, containers[ContainerType.reports]), - (ContainerType.unique_reports, containers[ContainerType.unique_reports]), - (ContainerType.no_repro, containers[ContainerType.no_repro]), + (ContainerType.setup, helper.container_name(ContainerType.setup)), + (ContainerType.crashes, helper.container_name(ContainerType.crashes)), + (ContainerType.reports, helper.container_name(ContainerType.reports)), + ( + ContainerType.unique_reports, + helper.container_name(ContainerType.unique_reports), + ), + (ContainerType.no_repro, helper.container_name(ContainerType.no_repro)), (ContainerType.tools, fuzzer_tools_container), ] if extra_setup_container is 
not None: - report_containers.append((ContainerType.extra_setup, extra_setup_container)) + report_containers.append( + ( + ContainerType.extra_setup, + extra_setup_container, + ) + ) self.logger.info("creating `dotnet_crash_report` task") self.onefuzz.tasks.create( @@ -972,27 +1011,37 @@ def qemu_user( if existing_inputs: self.onefuzz.containers.get(existing_inputs) # ensure it exists - helper.containers[ContainerType.inputs] = existing_inputs + helper.add_existing_container(ContainerType.inputs, existing_inputs) else: helper.define_containers(ContainerType.inputs) if crashes: self.onefuzz.containers.get(crashes) - helper.containers[ContainerType.crashes] = crashes + helper.add_existing_container(ContainerType.crashes, crashes) fuzzer_containers = [ - (ContainerType.setup, helper.containers[ContainerType.setup]), - (ContainerType.crashes, helper.containers[ContainerType.crashes]), - (ContainerType.crashdumps, helper.containers[ContainerType.crashdumps]), - (ContainerType.inputs, helper.containers[ContainerType.inputs]), + (ContainerType.setup, helper.container_name(ContainerType.setup)), + (ContainerType.crashes, helper.container_name(ContainerType.crashes)), + (ContainerType.crashdumps, helper.container_name(ContainerType.crashdumps)), + (ContainerType.inputs, helper.container_name(ContainerType.inputs)), ] if extra_setup_container is not None: - fuzzer_containers.append((ContainerType.extra_setup, extra_setup_container)) + fuzzer_containers.append( + ( + ContainerType.extra_setup, + extra_setup_container, + ) + ) if readonly_inputs is not None: self.onefuzz.containers.get(readonly_inputs) # ensure it exists - fuzzer_containers.append((ContainerType.readonly_inputs, readonly_inputs)) + fuzzer_containers.append( + ( + ContainerType.readonly_inputs, + readonly_inputs, + ) + ) helper.create_containers() @@ -1079,18 +1128,23 @@ def qemu_user( ) report_containers = [ - (ContainerType.setup, helper.containers[ContainerType.setup]), - (ContainerType.crashes, helper.containers[ContainerType.crashes]), - (ContainerType.reports, helper.containers[ContainerType.reports]), + (ContainerType.setup, helper.container_name(ContainerType.setup)), + (ContainerType.crashes, helper.container_name(ContainerType.crashes)), + (ContainerType.reports, helper.container_name(ContainerType.reports)), ( ContainerType.unique_reports, - helper.containers[ContainerType.unique_reports], + helper.container_name(ContainerType.unique_reports), ), - (ContainerType.no_repro, helper.containers[ContainerType.no_repro]), + (ContainerType.no_repro, helper.container_name(ContainerType.no_repro)), ] if extra_setup_container is not None: - report_containers.append((ContainerType.extra_setup, extra_setup_container)) + report_containers.append( + ( + ContainerType.extra_setup, + extra_setup_container, + ) + ) self.logger.info("creating libfuzzer_crash_report task") self.onefuzz.tasks.create( diff --git a/src/cli/onefuzz/templates/ossfuzz.py b/src/cli/onefuzz/templates/ossfuzz.py index fde1da0708..94024add83 100644 --- a/src/cli/onefuzz/templates/ossfuzz.py +++ b/src/cli/onefuzz/templates/ossfuzz.py @@ -215,13 +215,15 @@ def libfuzzer( ) if extra_setup_container is not None: - helper.containers[ContainerType.extra_setup] = extra_setup_container + helper.add_existing_container( + ContainerType.extra_setup, extra_setup_container + ) helper.create_containers() helper.setup_notifications(notification_config) dst_sas = self.onefuzz.containers.get( - helper.containers[ContainerType.setup] + helper.containers[ContainerType.setup].name 
).sas_url self._copy_exe(container_sas["build"], dst_sas, File(fuzzer)) self._copy_all(container_sas["base"], dst_sas) @@ -245,7 +247,7 @@ def libfuzzer( self.onefuzz.template.libfuzzer._create_tasks( job=base_helper.job, - containers=helper.containers, + containers=helper.container_names(), pool_name=pool_name, target_exe=fuzzer_blob_name, vm_count=VM_COUNT, diff --git a/src/cli/onefuzz/templates/radamsa.py b/src/cli/onefuzz/templates/radamsa.py index ea0b57fdb3..d9ec34e15f 100644 --- a/src/cli/onefuzz/templates/radamsa.py +++ b/src/cli/onefuzz/templates/radamsa.py @@ -3,7 +3,7 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. -from typing import Dict, List, Optional +from typing import Dict, List, Optional, Tuple from onefuzztypes.enums import OS, ContainerType, TaskDebugFlag, TaskType from onefuzztypes.models import Job, NotificationConfig @@ -94,7 +94,9 @@ def basic( if existing_inputs: self.onefuzz.containers.get(existing_inputs) - helper.containers[ContainerType.readonly_inputs] = existing_inputs + helper.add_existing_container( + ContainerType.readonly_inputs, existing_inputs + ) else: helper.define_containers(ContainerType.readonly_inputs) helper.create_containers() @@ -108,7 +110,7 @@ def basic( if ( len( self.onefuzz.containers.files.list( - helper.containers[ContainerType.readonly_inputs] + helper.containers[ContainerType.readonly_inputs].name ).files ) == 0 @@ -147,18 +149,23 @@ def basic( self.logger.info("creating radamsa task") - containers = [ + containers: List[Tuple[ContainerType, Container]] = [ (ContainerType.tools, tools), - (ContainerType.setup, helper.containers[ContainerType.setup]), - (ContainerType.crashes, helper.containers[ContainerType.crashes]), + (ContainerType.setup, helper.container_name(ContainerType.setup)), + (ContainerType.crashes, helper.container_name(ContainerType.crashes)), ( ContainerType.readonly_inputs, - helper.containers[ContainerType.readonly_inputs], + helper.container_name(ContainerType.readonly_inputs), ), ] if extra_setup_container is not None: - containers.append((ContainerType.extra_setup, extra_setup_container)) + containers.append( + ( + ContainerType.extra_setup, + extra_setup_container, + ) + ) fuzzer_task = self.onefuzz.tasks.create( helper.job.job_id, @@ -183,18 +190,23 @@ def basic( ) report_containers = [ - (ContainerType.setup, helper.containers[ContainerType.setup]), - (ContainerType.crashes, helper.containers[ContainerType.crashes]), - (ContainerType.reports, helper.containers[ContainerType.reports]), + (ContainerType.setup, helper.container_name(ContainerType.setup)), + (ContainerType.crashes, helper.container_name(ContainerType.crashes)), + (ContainerType.reports, helper.container_name(ContainerType.reports)), ( ContainerType.unique_reports, - helper.containers[ContainerType.unique_reports], + helper.container_name(ContainerType.unique_reports), ), - (ContainerType.no_repro, helper.containers[ContainerType.no_repro]), + (ContainerType.no_repro, helper.container_name(ContainerType.no_repro)), ] if extra_setup_container is not None: - report_containers.append((ContainerType.extra_setup, extra_setup_container)) + report_containers.append( + ( + ContainerType.extra_setup, + extra_setup_container, + ) + ) self.logger.info("creating generic_crash_report task") self.onefuzz.tasks.create( @@ -233,15 +245,18 @@ def basic( self.logger.info("creating custom analysis") analysis_containers = [ - (ContainerType.setup, helper.containers[ContainerType.setup]), + (ContainerType.setup, 
helper.container_name(ContainerType.setup)), (ContainerType.tools, tools), - (ContainerType.analysis, helper.containers[ContainerType.analysis]), - (ContainerType.crashes, helper.containers[ContainerType.crashes]), + (ContainerType.analysis, helper.container_name(ContainerType.analysis)), + (ContainerType.crashes, helper.container_name(ContainerType.crashes)), ] if extra_setup_container is not None: analysis_containers.append( - (ContainerType.extra_setup, extra_setup_container) + ( + ContainerType.extra_setup, + extra_setup_container, + ) ) self.onefuzz.tasks.create( diff --git a/src/cli/onefuzz/templates/regression.py b/src/cli/onefuzz/templates/regression.py index 00d8a10c37..0aa2550da6 100644 --- a/src/cli/onefuzz/templates/regression.py +++ b/src/cli/onefuzz/templates/regression.py @@ -12,7 +12,7 @@ from onefuzz.api import Command -from . import JobHelper +from . import ContainerTemplate, JobHelper class Regression(Command): @@ -207,31 +207,36 @@ def _create_job( ) containers = [ - (ContainerType.setup, helper.containers[ContainerType.setup]), - (ContainerType.crashes, helper.containers[ContainerType.crashes]), - (ContainerType.reports, helper.containers[ContainerType.reports]), - (ContainerType.no_repro, helper.containers[ContainerType.no_repro]), + (ContainerType.setup, helper.container_name(ContainerType.setup)), + (ContainerType.crashes, helper.container_name(ContainerType.crashes)), + (ContainerType.reports, helper.container_name(ContainerType.reports)), + (ContainerType.no_repro, helper.container_name(ContainerType.no_repro)), ( ContainerType.unique_reports, - helper.containers[ContainerType.unique_reports], + helper.container_name(ContainerType.unique_reports), ), ( ContainerType.regression_reports, - helper.containers[ContainerType.regression_reports], + helper.container_name(ContainerType.regression_reports), ), ] if extra_setup_container: - containers.append((ContainerType.extra_setup, extra_setup_container)) + containers.append( + ( + ContainerType.extra_setup, + extra_setup_container, + ) + ) if crashes: - helper.containers[ - ContainerType.readonly_inputs - ] = helper.get_unique_container_name(ContainerType.readonly_inputs) + helper.containers[ContainerType.readonly_inputs] = ContainerTemplate.fresh( + helper.get_unique_container_name(ContainerType.readonly_inputs) + ) containers.append( ( ContainerType.readonly_inputs, - helper.containers[ContainerType.readonly_inputs], + helper.container_name(ContainerType.readonly_inputs), ) ) @@ -239,7 +244,7 @@ def _create_job( if crashes: for file in crashes: self.onefuzz.containers.files.upload_file( - helper.containers[ContainerType.readonly_inputs], file + helper.container_name(ContainerType.readonly_inputs), file ) helper.setup_notifications(notification_config) @@ -276,7 +281,7 @@ def _create_job( if task.error: raise Exception("task failed: %s", task.error) - container = helper.containers[ContainerType.regression_reports] + container = helper.containers[ContainerType.regression_reports].name for filename in self.onefuzz.containers.files.list(container).files: self.logger.info("checking file: %s", filename) if self._check_regression(container, File(filename)): @@ -287,4 +292,6 @@ def _create_job( delete_input_container and ContainerType.readonly_inputs in helper.containers ): - helper.delete_container(helper.containers[ContainerType.readonly_inputs]) + helper.delete_container( + helper.containers[ContainerType.readonly_inputs].name + ) diff --git a/src/deployment/bicep-templates/feature-flags.bicep 
b/src/deployment/bicep-templates/feature-flags.bicep index a845d69a9d..46fccb0856 100644 --- a/src/deployment/bicep-templates/feature-flags.bicep +++ b/src/deployment/bicep-templates/feature-flags.bicep @@ -89,4 +89,17 @@ resource enableWorkItemCreation 'Microsoft.AppConfiguration/configurationStores/ } } +resource enableContainerRetentionPolicies 'Microsoft.AppConfiguration/configurationStores/keyValues@2021-10-01-preview' = { + parent: featureFlags + name: '.appconfig.featureflag~2FEnableContainerRetentionPolicies' + properties: { + value: string({ + id: 'EnableContainerRetentionPolicies' + description: 'Enable retention policies on containers' + enabled: true + }) + contentType: 'application/vnd.microsoft.appconfig.ff+json;charset=utf-8' + } +} + output AppConfigEndpoint string = 'https://${appConfigName}.azconfig.io' diff --git a/src/pytypes/onefuzztypes/enums.py b/src/pytypes/onefuzztypes/enums.py index e2ec81eb15..317325de0b 100644 --- a/src/pytypes/onefuzztypes/enums.py +++ b/src/pytypes/onefuzztypes/enums.py @@ -304,6 +304,7 @@ class ErrorCode(Enum): ADO_VALIDATION_MISSING_PAT_SCOPES = 492 ADO_VALIDATION_INVALID_PATH = 495 ADO_VALIDATION_INVALID_PROJECT = 496 + INVALID_RETENTION_PERIOD = 497 # NB: if you update this enum, also update Enums.cs From 7efea432ed1ab3c42b297d7b9eed031e95588d19 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 26 Sep 2023 20:36:39 +0000 Subject: [PATCH 34/88] Bump rayon from 1.7.0 to 1.8.0 in /src/agent (#3520) Bumps [rayon](https://github.com/rayon-rs/rayon) from 1.7.0 to 1.8.0. - [Changelog](https://github.com/rayon-rs/rayon/blob/master/RELEASES.md) - [Commits](https://github.com/rayon-rs/rayon/compare/rayon-core-v1.7.0...rayon-core-v1.8.0) --- updated-dependencies: - dependency-name: rayon dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- src/agent/Cargo.lock | 10 ++++------ src/agent/input-tester/Cargo.toml | 2 +- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock index 65c2967ec0..b818339137 100644 --- a/src/agent/Cargo.lock +++ b/src/agent/Cargo.lock @@ -2728,9 +2728,9 @@ dependencies = [ [[package]] name = "rayon" -version = "1.7.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b" +checksum = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1" dependencies = [ "either", "rayon-core", @@ -2738,14 +2738,12 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.11.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d" +checksum = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed" dependencies = [ - "crossbeam-channel", "crossbeam-deque", "crossbeam-utils", - "num_cpus", ] [[package]] diff --git a/src/agent/input-tester/Cargo.toml b/src/agent/input-tester/Cargo.toml index fe5ac6032c..93aac3172b 100644 --- a/src/agent/input-tester/Cargo.toml +++ b/src/agent/input-tester/Cargo.toml @@ -13,7 +13,7 @@ fnv = "1.0" hex = "0.4" log = "0.4" num_cpus = "1.15" -rayon = "1.7" +rayon = "1.8" sha2 = "0.10.2" win-util = { path = "../win-util" } From f3b7e2059c33475260d1093a3be244455a5f59b9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 26 Sep 2023 20:59:36 +0000 Subject: [PATCH 35/88] Bump insta from 1.31.0 to 1.32.0 in /src/agent (#3521) Bumps [insta](https://github.com/mitsuhiko/insta) from 1.31.0 to 1.32.0. - [Changelog](https://github.com/mitsuhiko/insta/blob/master/CHANGELOG.md) - [Commits](https://github.com/mitsuhiko/insta/compare/1.31.0...1.32.0) --- updated-dependencies: - dependency-name: insta dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
src/agent/Cargo.lock | 4 ++--
src/agent/coverage/Cargo.toml | 2 +-
src/agent/stacktrace-parser/Cargo.toml | 2 +-
3 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock
index b818339137..7a79204631 100644
--- a/src/agent/Cargo.lock
+++ b/src/agent/Cargo.lock
@@ -1603,9 +1603,9 @@ dependencies = [
[[package]]
name = "insta"
-version = "1.31.0"
+version = "1.32.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a0770b0a3d4c70567f0d58331f3088b0e4c4f56c9b8d764efe654b4a5d46de3a"
+checksum = "a3e02c584f4595792d09509a94cdb92a3cef7592b1eb2d9877ee6f527062d0ea"
dependencies = [
"console",
"globset",
diff --git a/src/agent/coverage/Cargo.toml b/src/agent/coverage/Cargo.toml
index 70a55cd07f..2a1170e3ae 100644
--- a/src/agent/coverage/Cargo.toml
+++ b/src/agent/coverage/Cargo.toml
@@ -35,7 +35,7 @@ procfs = { version = "0.15.1", default-features = false, features = ["flate2"] }
clap = { version = "4.4", features = ["derive"] }
env_logger = "0.10.0"
pretty_assertions = "1.4.0"
-insta = { version = "1.31.0", features = ["glob"] }
+insta = { version = "1.32.0", features = ["glob"] }
coverage = { path = "../coverage" }
cc = "1.0"
tempfile = "3.8.0"
diff --git a/src/agent/stacktrace-parser/Cargo.toml b/src/agent/stacktrace-parser/Cargo.toml
index 9cb92d9bcf..2428047e66 100644
--- a/src/agent/stacktrace-parser/Cargo.toml
+++ b/src/agent/stacktrace-parser/Cargo.toml
@@ -16,5 +16,5 @@ serde_json = "1.0"
libclusterfuzz = { path = "../libclusterfuzz" }
[dev-dependencies]
-insta = { version = "1.31.0", features = ["glob", "json"] }
+insta = { version = "1.32.0", features = ["glob", "json"] }
pretty_assertions = "1.4"
From d2ba170c1ddfce944397be99d4abbf372c1f1b7d Mon Sep 17 00:00:00 2001
From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com>
Date: Wed, 27 Sep 2023 13:53:29 -0700
Subject: [PATCH 36/88] Disable `repro` and `debug` VM CLI commands. (#3494)

* Disable `repro` and `debug` VM CLI commands.
* Formatting.
* More formatting.
* More formatting.
* Removing Repro check.
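For readers of the removed code below: Repro.connect leaned on the CLI's polling helper, repeatedly evaluating a check function that returns a (done, status message, value) triple until the repro VM reported an OS, an IP, and auth material. A rough sketch of that contract, assuming the real wait() in onefuzz.backend also handles display and pacing:

    import time
    from typing import Callable, Tuple, TypeVar

    T = TypeVar("T")

    def wait(check: Callable[[], Tuple[bool, str, T]], frequency: float = 1.0) -> T:
        # poll until the check reports completion, echoing its status message
        while True:
            done, message, value = check()
            if done:
                return value
            print(message)
            time.sleep(frequency)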
--- src/cli/onefuzz/api.py | 315 +--------------------- src/cli/onefuzz/debug.py | 188 ------------- src/integration-tests/integration-test.py | 25 +- 3 files changed, 9 insertions(+), 519 deletions(-) diff --git a/src/cli/onefuzz/api.py b/src/cli/onefuzz/api.py index 6968192642..852021e5f9 100644 --- a/src/cli/onefuzz/api.py +++ b/src/cli/onefuzz/api.py @@ -9,7 +9,6 @@ import pkgutil import re import subprocess # nosec -import time import uuid from enum import Enum from shutil import which @@ -35,8 +34,7 @@ from .__version__ import __version__ from .azcopy import azcopy_sync -from .backend import Backend, BackendConfig, ContainerWrapper, wait -from .ssh import build_ssh_command, ssh_connect, temp_file +from .backend import Backend, BackendConfig, ContainerWrapper UUID_EXPANSION = TypeVar("UUID_EXPANSION", UUID, str) @@ -530,316 +528,6 @@ def _download_tasks( azcopy_sync(to_download[name], outdir) -class Repro(Endpoint): - """Interact with Reproduction VMs""" - - endpoint = "repro_vms" - - def get(self, vm_id: UUID_EXPANSION) -> models.Repro: - """get information about a Reproduction VM""" - vm_id_expanded = self._disambiguate_uuid( - "vm_id", vm_id, lambda: [str(x.vm_id) for x in self.list()] - ) - - self.logger.debug("get repro vm: %s", vm_id_expanded) - return self._req_model( - "GET", models.Repro, data=requests.ReproGet(vm_id=vm_id_expanded) - ) - - def get_files( - self, - report_container: primitives.Container, - report_name: str, - include_setup: bool = False, - output_dir: primitives.Directory = primitives.Directory("."), - ) -> None: - """downloads the files necessary to locally repro the crash from a given report""" - report_bytes = self.onefuzz.containers.files.get(report_container, report_name) - report = json.loads(report_bytes) - - crash_info = { - "input_blob_container": primitives.Container(""), - "input_blob_name": "", - "job_id": "", - } - if "input_blob" in report: - crash_info["input_blob_container"] = report["input_blob"]["container"] - crash_info["input_blob_name"] = report["input_blob"]["name"] - crash_info["job_id"] = report["job_id"] - elif "crash_test_result" in report and "original_crash_test_result" in report: - if report["original_crash_test_result"]["crash_report"] is None: - self.logger.error( - "No crash report found in the original crash test result, repro files cannot be retrieved" - ) - return - elif report["crash_test_result"]["crash_report"] is None: - self.logger.info( - "No crash report found in the new crash test result, falling back on the original crash test result for job_id" - "Note: if using --include_setup, the downloaded fuzzer binaries may be out-of-date" - ) - - original_report = report["original_crash_test_result"]["crash_report"] - new_report = ( - report["crash_test_result"]["crash_report"] or original_report - ) # fallback on original_report - - crash_info["input_blob_container"] = original_report["input_blob"][ - "container" - ] - crash_info["input_blob_name"] = original_report["input_blob"]["name"] - crash_info["job_id"] = new_report["job_id"] - else: - self.logger.error( - "Encountered an unhandled report format, repro files cannot be retrieved" - ) - return - - self.logger.info( - "downloading files necessary to locally repro crash %s", - crash_info["input_blob_name"], - ) - self.onefuzz.containers.files.download( - primitives.Container(crash_info["input_blob_container"]), - crash_info["input_blob_name"], - os.path.join(output_dir, crash_info["input_blob_name"]), - ) - - if include_setup: - setup_container = list( - 
self.onefuzz.jobs.containers.list( - crash_info["job_id"], enums.ContainerType.setup - ) - )[0] - - self.onefuzz.containers.files.download_dir( - primitives.Container(setup_container), output_dir - ) - - def create( - self, container: primitives.Container, path: str, duration: int = 24 - ) -> models.Repro: - """Create a Reproduction VM from a Crash Report""" - self.logger.info( - "creating repro vm: %s %s (%d hours)", container, path, duration - ) - return self._req_model( - "POST", - models.Repro, - data=models.ReproConfig(container=container, path=path, duration=duration), - ) - - def delete(self, vm_id: UUID_EXPANSION) -> models.Repro: - """Delete a Reproduction VM""" - vm_id_expanded = self._disambiguate_uuid( - "vm_id", vm_id, lambda: [str(x.vm_id) for x in self.list()] - ) - - self.logger.debug("deleting repro vm: %s", vm_id_expanded) - return self._req_model( - "DELETE", models.Repro, data=requests.ReproGet(vm_id=vm_id_expanded) - ) - - def list(self) -> List[models.Repro]: - """List all VMs""" - self.logger.debug("listing repro vms") - return self._req_model_list("GET", models.Repro, data=requests.ReproGet()) - - def _dbg_linux( - self, repro: models.Repro, debug_command: Optional[str] - ) -> Optional[str]: - """Launch gdb with GDB script that includes 'target remote | ssh ...'""" - - if ( - repro.auth is None - or repro.ip is None - or repro.state != enums.VmState.running - ): - raise Exception("vm setup failed: %s" % repro.state) - - with build_ssh_command( - repro.ip, repro.auth.private_key, command="-T" - ) as ssh_cmd: - gdb_script = [ - "target remote | %s sudo /onefuzz/bin/repro-stdout.sh" - % " ".join(ssh_cmd) - ] - - if debug_command: - gdb_script += [debug_command, "quit"] - - with temp_file("gdb.script", "\n".join(gdb_script)) as gdb_script_path: - dbg = ["gdb", "--silent", "--command", gdb_script_path] - - if debug_command: - dbg += ["--batch"] - - try: - # security note: dbg is built from content coming from - # the server, which is trusted in this context. - return subprocess.run( # nosec - dbg, stdout=subprocess.PIPE, stderr=subprocess.STDOUT - ).stdout.decode(errors="ignore") - except subprocess.CalledProcessError as err: - self.logger.error( - "debug failed: %s", err.output.decode(errors="ignore") - ) - raise err - else: - # security note: dbg is built from content coming from the - # server, which is trusted in this context. - subprocess.call(dbg) # nosec - return None - - def _dbg_windows( - self, - repro: models.Repro, - debug_command: Optional[str], - retry_limit: Optional[int], - ) -> Optional[str]: - """Setup an SSH tunnel, then connect via CDB over SSH tunnel""" - - if ( - repro.auth is None - or repro.ip is None - or repro.state != enums.VmState.running - ): - raise Exception("vm setup failed: %s" % repro.state) - - retry_count = 0 - bind_all = which("wslpath") is not None and repro.os == enums.OS.windows - proxy = "*:" + REPRO_SSH_FORWARD if bind_all else REPRO_SSH_FORWARD - while retry_limit is None or retry_count <= retry_limit: - if retry_limit: - retry_count = retry_count + 1 - with ssh_connect(repro.ip, repro.auth.private_key, proxy=proxy): - dbg = ["cdb.exe", "-remote", "tcp:port=1337,server=localhost"] - if debug_command: - dbg_script = [debug_command, "qq"] - with temp_file( - "db.script", "\r\n".join(dbg_script) - ) as dbg_script_path: - dbg += ["-cf", _wsl_path(dbg_script_path)] - - logging.debug("launching: %s", dbg) - try: - # security note: dbg is built from content coming from the server, - # which is trusted in this context. 
- return subprocess.run( # nosec - dbg, stdout=subprocess.PIPE, stderr=subprocess.STDOUT - ).stdout.decode(errors="ignore") - except subprocess.CalledProcessError as err: - if err.returncode == 0x8007274D: - self.logger.info( - "failed to connect to debug-server trying again in 10 seconds..." - ) - time.sleep(10.0) - else: - self.logger.error( - "debug failed: %s", - err.output.decode(errors="ignore"), - ) - raise err - else: - logging.debug("launching: %s", dbg) - # security note: dbg is built from content coming from the - # server, which is trusted in this context. - try: - subprocess.check_call(dbg) # nosec - return None - except subprocess.CalledProcessError as err: - if err.returncode == 0x8007274D: - self.logger.info( - "failed to connect to debug-server trying again in 10 seconds..." - ) - time.sleep(10.0) - else: - return None - - if retry_limit is not None: - self.logger.info( - f"failed to connect to debug-server after {retry_limit} attempts. Please try again later " - + f"with onefuzz debug connect {repro.vm_id}" - ) - return None - - def connect( - self, - vm_id: UUID_EXPANSION, - delete_after_use: bool = False, - debug_command: Optional[str] = None, - retry_limit: Optional[int] = None, - ) -> Optional[str]: - """Connect to an existing Reproduction VM""" - - self.logger.info("connecting to reproduction VM: %s", vm_id) - - if which("ssh") is None: - raise Exception("unable to find ssh on local machine") - - def missing_os() -> Tuple[bool, str, models.Repro]: - repro = self.get(vm_id) - return ( - repro.os is not None, - "waiting for os determination", - repro, - ) - - repro = wait(missing_os) - - if repro.os == enums.OS.windows: - if which("cdb.exe") is None: - raise Exception("unable to find cdb.exe on local machine") - if repro.os == enums.OS.linux: - if which("gdb") is None: - raise Exception("unable to find gdb on local machine") - - def func() -> Tuple[bool, str, models.Repro]: - repro = self.get(vm_id) - state = repro.state - return ( - repro.auth is not None - and repro.ip is not None - and state not in [enums.VmState.init, enums.VmState.extensions_launch], - "launching reproducing vm. 
current state: %s" % state, - repro, - ) - - repro = wait(func) - # give time for debug server to initialize - time.sleep(30.0) - result: Optional[str] = None - if repro.os == enums.OS.windows: - result = self._dbg_windows(repro, debug_command, retry_limit) - elif repro.os == enums.OS.linux: - result = self._dbg_linux(repro, debug_command) - else: - raise NotImplementedError - - if delete_after_use: - self.logger.debug("deleting vm %s", repro.vm_id) - self.delete(repro.vm_id) - - return result - - def create_and_connect( - self, - container: primitives.Container, - path: str, - duration: int = 24, - delete_after_use: bool = False, - debug_command: Optional[str] = None, - retry_limit: Optional[int] = None, - ) -> Optional[str]: - """Create and connect to a Reproduction VM""" - repro = self.create(container, path, duration=duration) - return self.connect( - repro.vm_id, - delete_after_use=delete_after_use, - debug_command=debug_command, - retry_limit=retry_limit, - ) - - class Notifications(Endpoint): """Interact with models.Notifications""" @@ -1900,7 +1588,6 @@ def __init__( client_secret=client_secret, ) self.containers = Containers(self) - self.repro = Repro(self) self.notifications = Notifications(self) self.tasks = Tasks(self) self.jobs = Jobs(self) diff --git a/src/cli/onefuzz/debug.py b/src/cli/onefuzz/debug.py index dc93f49f81..cde03adf57 100644 --- a/src/cli/onefuzz/debug.py +++ b/src/cli/onefuzz/debug.py @@ -28,9 +28,6 @@ from onefuzz.api import UUID_EXPANSION, Command, Endpoint, Onefuzz from .azure_identity_credential_adapter import AzureIdentityCredentialAdapter -from .backend import wait -from .rdp import rdp_connect -from .ssh import ssh_connect EMPTY_SHA256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" ZERO_SHA256 = "0" * len(EMPTY_SHA256) @@ -39,143 +36,6 @@ DEFAULT_TAIL_DELAY = 10.0 -class DebugRepro(Command): - """Debug repro instances""" - - def _disambiguate(self, vm_id: UUID_EXPANSION) -> str: - return str( - self.onefuzz.repro._disambiguate_uuid( - "vm_id", - vm_id, - lambda: [str(x.vm_id) for x in self.onefuzz.repro.list()], - ) - ) - - def _info(self) -> Tuple[str, str]: - info = self.onefuzz.info.get() - return info.resource_group, info.subscription - - def ssh(self, vm_id: str) -> None: - vm_id = self._disambiguate(vm_id) - repro = self.onefuzz.repro.get(vm_id) - if repro.ip is None: - raise Exception("missing IP: %s" % repro) - if repro.auth is None: - raise Exception("missing Auth: %s" % repro) - - with ssh_connect(repro.ip, repro.auth.private_key, call=True): - pass - - def rdp(self, vm_id: str) -> None: - vm_id = self._disambiguate(vm_id) - repro = self.onefuzz.repro.get(vm_id) - if repro.ip is None: - raise Exception("missing IP: %s" % repro) - if repro.auth is None: - raise Exception("missing Auth: %s" % repro) - - RDP_PORT = 3389 - with rdp_connect(repro.ip, repro.auth.password, port=RDP_PORT): - return - - -class DebugNode(Command): - """Debug a specific node on a scaleset""" - - def rdp(self, machine_id: UUID_EXPANSION, duration: Optional[int] = 1) -> None: - node = self.onefuzz.nodes.get(machine_id) - if node.scaleset_id is None: - raise Exception("node is not part of a scaleset") - self.onefuzz.debug.scalesets.rdp( - scaleset_id=node.scaleset_id, machine_id=node.machine_id, duration=duration - ) - - def ssh(self, machine_id: UUID_EXPANSION, duration: Optional[int] = 1) -> None: - node = self.onefuzz.nodes.get(machine_id) - if node.scaleset_id is None: - raise Exception("node is not part of a scaleset") - 
self.onefuzz.debug.scalesets.ssh( - scaleset_id=node.scaleset_id, machine_id=node.machine_id, duration=duration - ) - - -class DebugScaleset(Command): - """Debug tasks""" - - def _get_proxy_setup( - self, scaleset_id: str, machine_id: UUID, port: int, duration: Optional[int] - ) -> Tuple[bool, str, Optional[Tuple[str, int]]]: - proxy = self.onefuzz.scaleset_proxy.create( - scaleset_id, machine_id, port, duration=duration - ) - if proxy.ip is None: - return (False, "waiting on proxy ip", None) - - return (True, "waiting on proxy port", (proxy.ip, proxy.forward.src_port)) - - def rdp( - self, - scaleset_id: str, - machine_id: UUID_EXPANSION, - duration: Optional[int] = 1, - ) -> None: - ( - scaleset, - machine_id_expanded, - ) = self.onefuzz.scalesets._expand_scaleset_machine( - scaleset_id, machine_id, include_auth=True - ) - - RDP_PORT = 3389 - setup = wait( - lambda: self._get_proxy_setup( - scaleset.scaleset_id, machine_id_expanded, RDP_PORT, duration - ) - ) - if setup is None: - raise Exception("no proxy for RDP port configured") - - if scaleset.auth is None: - raise Exception("auth is not available for scaleset") - - ip, port = setup - with rdp_connect(ip, scaleset.auth.password, port=port): - return - - def ssh( - self, - scaleset_id: str, - machine_id: UUID_EXPANSION, - duration: Optional[int] = 1, - command: Optional[str] = None, - ) -> None: - ( - scaleset, - machine_id_expanded, - ) = self.onefuzz.scalesets._expand_scaleset_machine( - scaleset_id, machine_id, include_auth=True - ) - - SSH_PORT = 22 - setup = wait( - lambda: self._get_proxy_setup( - scaleset.scaleset_id, machine_id_expanded, SSH_PORT, duration - ) - ) - if setup is None: - raise Exception("no proxy for SSH port configured") - - ip, port = setup - - if scaleset.auth is None: - raise Exception("auth is not available for scaleset") - - with ssh_connect( - ip, scaleset.auth.private_key, port=port, call=True, command=command - ): - return - - class DebugTask(Command): """Debug a specific task""" @@ -202,26 +62,6 @@ def _get_node( raise Exception("unable to find scaleset node running on task") - def ssh( - self, - task_id: UUID_EXPANSION, - *, - node_id: Optional[UUID] = None, - duration: Optional[int] = 1, - ) -> None: - scaleset_id, node_id = self._get_node(task_id, node_id) - return self.onefuzz.debug.scalesets.ssh(scaleset_id, node_id, duration=duration) - - def rdp( - self, - task_id: UUID_EXPANSION, - *, - node_id: Optional[UUID] = None, - duration: Optional[int] = 1, - ) -> None: - scaleset_id, node_id = self._get_node(task_id, node_id) - return self.onefuzz.debug.scalesets.rdp(scaleset_id, node_id, duration=duration) - def libfuzzer_coverage( self, task_id: UUID_EXPANSION, @@ -276,37 +116,12 @@ def _get_task(self, job_id: UUID_EXPANSION, task_type: TaskType) -> UUID: "unable to find task type %s for job:%s" % (task_type.name, job_id) ) - def ssh( - self, - job_id: UUID_EXPANSION, - task_type: TaskType, - *, - duration: Optional[int] = 1, - ) -> None: - """SSH into the first node running the specified task type in the job""" - return self.onefuzz.debug.task.ssh( - self._get_task(job_id, task_type), duration=duration - ) - - def rdp( - self, - job_id: UUID_EXPANSION, - task_type: TaskType, - *, - duration: Optional[int] = 1, - ) -> None: - """RDP into the first node running the specified task type in the job""" - return self.onefuzz.debug.task.rdp( - self._get_task(job_id, task_type), duration=duration - ) - class DebugJob(Command): """Debug a specific Job""" def __init__(self, onefuzz: Any, logger: 
logging.Logger): super().__init__(onefuzz, logger) - self.task = DebugJobTask(onefuzz, logger) def libfuzzer_coverage( self, @@ -883,10 +698,7 @@ class Debug(Command): def __init__(self, onefuzz: Any, logger: logging.Logger): super().__init__(onefuzz, logger) - self.scalesets = DebugScaleset(onefuzz, logger) - self.repro = DebugRepro(onefuzz, logger) self.job = DebugJob(onefuzz, logger) self.notification = DebugNotification(onefuzz, logger) self.task = DebugTask(onefuzz, logger) self.logs = DebugLog(onefuzz, logger) - self.node = DebugNode(onefuzz, logger) diff --git a/src/integration-tests/integration-test.py b/src/integration-tests/integration-test.py index 15ffcfb9fe..1ba572e3f3 100755 --- a/src/integration-tests/integration-test.py +++ b/src/integration-tests/integration-test.py @@ -244,7 +244,7 @@ class Integration(BaseModel): "--test:{extra_setup_dir}", "--write_test_file={extra_output_dir}/test.txt", ], - pool=PoolName("mariner") + pool=PoolName("mariner"), ), "windows-libfuzzer": Integration( template=TemplateType.libfuzzer, @@ -401,10 +401,13 @@ def try_info_get(data: Any) -> None: self.of.pools.create(name, OS.linux) self.logger.info("creating scaleset for pool: %s", name) self.of.scalesets.create( - name, pool_size, region=region, initial_size=pool_size, image="MicrosoftCBLMariner:cbl-mariner:cbl-mariner-2-gen2:latest" + name, + pool_size, + region=region, + initial_size=pool_size, + image="MicrosoftCBLMariner:cbl-mariner:cbl-mariner-2-gen2:latest", ) - class UnmanagedPool: def __init__( self, @@ -644,7 +647,7 @@ def launch( setup = Directory(os.path.join(setup, config.nested_setup_dir)) job: Optional[Job] = None - + job = self.build_job( duration, pool, target, config, setup, target_exe, inputs ) @@ -1277,7 +1280,7 @@ def check_logs_for_errors(self) -> None: if seen_errors: raise Exception("logs included errors") - + def build_pool_name(self, os_type: str) -> PoolName: return PoolName(f"testpool-{os_type}-{self.test_id}") @@ -1462,18 +1465,6 @@ def check_results( job_ids=job_ids, ) - if skip_repro: - self.logger.warning("not testing crash repro") - else: - self.check_repros( - test_id, - endpoint=endpoint, - authority=authority, - client_id=client_id, - client_secret=client_secret, - job_ids=job_ids, - ) - def test_unmanaged( self, samples: Directory, From 2c8ecc9d391f52ee568811bfb0c3703fa24b3fe1 Mon Sep 17 00:00:00 2001 From: Teo Voinea <58236992+tevoinea@users.noreply.github.com> Date: Thu, 28 Sep 2023 08:50:30 -0400 Subject: [PATCH 37/88] Make modules case insenstive on windows (#3527) * Make modules and coverage allowlist case insensitive on Windows * Tests and fmt * PR comments * fmt * Debugging missing file coverage * fmt * Broken linux test * Add a case insensitive transformer for better perf * cargo fix --- src/agent/coverage/src/allowlist.rs | 7 ++++- src/agent/coverage/src/allowlist/tests.rs | 18 +++++++++++++ src/agent/coverage/src/source.rs | 26 +++++++++++++++++++ src/agent/coverage/tests/snapshot.rs | 14 ++++++++-- .../snapshot__windows_snapshot_tests.snap | 2 +- .../windows/{inlinee.cpp => Inlinee.cpp} | 0 src/agent/debugger/src/module.rs | 1 - src/agent/onefuzz/src/expand.rs | 9 ++++--- 8 files changed, 69 insertions(+), 8 deletions(-) rename src/agent/coverage/tests/windows/{inlinee.cpp => Inlinee.cpp} (100%) diff --git a/src/agent/coverage/src/allowlist.rs b/src/agent/coverage/src/allowlist.rs index 079f415004..2c67130375 100644 --- a/src/agent/coverage/src/allowlist.rs +++ b/src/agent/coverage/src/allowlist.rs @@ -142,7 +142,12 @@ fn glob_to_regex(expr: &str) 
-> Result { let expr = expr.replace(r"\*", ".*"); // Anchor to line start and end. - let expr = format!("^{expr}$"); + // On Windows we should also ignore case. + let expr = if cfg!(windows) { + format!("(?i)^{expr}$") + } else { + format!("^{expr}$") + }; Ok(Regex::new(&expr)?) } diff --git a/src/agent/coverage/src/allowlist/tests.rs b/src/agent/coverage/src/allowlist/tests.rs index 0f46ef3df8..8d22d93962 100644 --- a/src/agent/coverage/src/allowlist/tests.rs +++ b/src/agent/coverage/src/allowlist/tests.rs @@ -175,3 +175,21 @@ fn test_allowlist_escape() -> Result<()> { Ok(()) } + +#[cfg(target_os = "windows")] +#[test] +fn test_windows_allowlists_are_not_case_sensitive() -> Result<()> { + let allowlist = AllowList::parse("vccrt")?; + assert!(allowlist.is_allowed("VCCRT")); + + Ok(()) +} + +#[cfg(not(target_os = "windows"))] +#[test] +fn test_linux_allowlists_are_case_sensitive() -> Result<()> { + let allowlist = AllowList::parse("vccrt")?; + assert!(!allowlist.is_allowed("VCCRT")); + + Ok(()) +} diff --git a/src/agent/coverage/src/source.rs b/src/agent/coverage/src/source.rs index b556fe447a..e06e8aa285 100644 --- a/src/agent/coverage/src/source.rs +++ b/src/agent/coverage/src/source.rs @@ -2,6 +2,7 @@ // Licensed under the MIT License. use std::collections::{BTreeMap, BTreeSet}; + use std::num::NonZeroU32; use anyhow::{Context, Result}; @@ -11,6 +12,7 @@ use debuggable_module::load_module::LoadModule; use debuggable_module::loader::Loader; use debuggable_module::path::FilePath; use debuggable_module::{Module, Offset}; +use symbolic::symcache::transform::{SourceLocation, Transformer}; use crate::allowlist::AllowList; use crate::binary::BinaryCoverage; @@ -69,6 +71,30 @@ pub fn binary_to_source_coverage( let mut symcache = vec![]; let mut converter = SymCacheConverter::new(); + if cfg!(windows) { + use symbolic::symcache::transform::Function; + struct CaseInsensitive {} + impl Transformer for CaseInsensitive { + fn transform_function<'f>(&'f mut self, f: Function<'f>) -> Function<'f> { + f + } + + fn transform_source_location<'f>( + &'f mut self, + mut sl: SourceLocation<'f>, + ) -> SourceLocation<'f> { + sl.file.name = sl.file.name.to_ascii_lowercase().into(); + sl.file.directory = sl.file.directory.map(|d| d.to_ascii_lowercase().into()); + sl.file.comp_dir = sl.file.comp_dir.map(|d| d.to_ascii_lowercase().into()); + sl + } + } + + let case_insensitive_transformer = CaseInsensitive {}; + + converter.add_transformer(case_insensitive_transformer); + } + let exe = Object::parse(module.executable_data())?; converter.process_object(&exe)?; diff --git a/src/agent/coverage/tests/snapshot.rs b/src/agent/coverage/tests/snapshot.rs index 75d524e2da..7c6cb301b4 100644 --- a/src/agent/coverage/tests/snapshot.rs +++ b/src/agent/coverage/tests/snapshot.rs @@ -43,7 +43,8 @@ fn windows_snapshot_tests() { }; // filter to just the input test file: - let source_allowlist = AllowList::parse(&input_path.to_string_lossy()).unwrap(); + let source_allowlist = + AllowList::parse(&input_path.to_string_lossy().to_ascii_lowercase()).unwrap(); let exe_cmd = std::process::Command::new(&exe_name); let recorded = coverage::CoverageRecorder::new(exe_cmd) @@ -57,9 +58,18 @@ fn windows_snapshot_tests() { coverage::source::binary_to_source_coverage(&recorded.coverage, &source_allowlist) .expect("binary_to_source_coverage"); + println!("{:?}", source.files.keys()); + + // For Windows, the source coverage is tracked using case-insensitive paths. 
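The allowlist change at the top of this diff is small but load-bearing: on Windows the glob-derived regex is compiled with an inline ignore-case flag, so module and source allowlists match regardless of path casing, while Linux stays case-sensitive. An equivalent sketch in Python, mirroring the escape-then-anchor construction (the (?i) flag behaves the same way in Python's re):

    import re

    def glob_to_regex(expr: str, windows: bool) -> re.Pattern:
        # escape, expand glob stars, anchor, and fold case only on Windows
        escaped = re.escape(expr).replace(r"\*", ".*")
        pattern = f"(?i)^{escaped}$" if windows else f"^{escaped}$"
        return re.compile(pattern)

    assert glob_to_regex("vccrt", windows=True).match("VCCRT")
    assert glob_to_regex("vccrt", windows=False).match("VCCRT") is None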
+ // The conversion from case-sensitive to insensitive is done when converting from binary to source coverage. + // By naming our test file with a capital letter, we can ensure that the case-insensitive conversion is working. + source.files.keys().for_each(|k| { + assert_eq!(k.to_string().to_ascii_lowercase(), k.to_string()); + }); + let file_coverage = source .files - .get(&FilePath::new(input_path.to_string_lossy()).unwrap()) + .get(&FilePath::new(input_path.to_string_lossy().to_ascii_lowercase()).unwrap()) .expect("coverage for input"); let mut result = String::new(); diff --git a/src/agent/coverage/tests/snapshots/snapshot__windows_snapshot_tests.snap b/src/agent/coverage/tests/snapshots/snapshot__windows_snapshot_tests.snap index 016717f8ab..12a38f4ef0 100644 --- a/src/agent/coverage/tests/snapshots/snapshot__windows_snapshot_tests.snap +++ b/src/agent/coverage/tests/snapshots/snapshot__windows_snapshot_tests.snap @@ -1,7 +1,7 @@ --- source: coverage/tests/snapshot.rs expression: result -input_file: coverage/tests/windows/inlinee.cpp +input_file: coverage/tests/windows/Inlinee.cpp --- [ ] #include [ ] diff --git a/src/agent/coverage/tests/windows/inlinee.cpp b/src/agent/coverage/tests/windows/Inlinee.cpp similarity index 100% rename from src/agent/coverage/tests/windows/inlinee.cpp rename to src/agent/coverage/tests/windows/Inlinee.cpp diff --git a/src/agent/debugger/src/module.rs b/src/agent/debugger/src/module.rs index acea7ace7f..aefdb8a92e 100644 --- a/src/agent/debugger/src/module.rs +++ b/src/agent/debugger/src/module.rs @@ -46,7 +46,6 @@ impl Module { error!("Error getting path from file handle: {}", e); "???".into() }); - let image_details = get_image_details(&path)?; Ok(Module { diff --git a/src/agent/onefuzz/src/expand.rs b/src/agent/onefuzz/src/expand.rs index 93587a6b58..7f1813899f 100644 --- a/src/agent/onefuzz/src/expand.rs +++ b/src/agent/onefuzz/src/expand.rs @@ -128,7 +128,8 @@ impl<'a> Expand<'a> { fn input_file_sha256(&self) -> Result> { let Some(val) = self.values.get(PlaceHolder::Input.get_string()) else { - bail!("no value found for {}, unable to evaluate {}", + bail!( + "no value found for {}, unable to evaluate {}", PlaceHolder::Input.get_string(), PlaceHolder::InputFileSha256.get_string(), ) @@ -149,7 +150,8 @@ impl<'a> Expand<'a> { fn extract_file_name_no_ext(&self) -> Result> { let Some(val) = self.values.get(PlaceHolder::Input.get_string()) else { - bail!("no value found for {}, unable to evaluate {}", + bail!( + "no value found for {}, unable to evaluate {}", PlaceHolder::Input.get_string(), PlaceHolder::InputFileNameNoExt.get_string(), ) @@ -173,7 +175,8 @@ impl<'a> Expand<'a> { fn extract_file_name(&self) -> Result> { let Some(val) = self.values.get(PlaceHolder::Input.get_string()) else { - bail!("no value found for {}, unable to evaluate {}", + bail!( + "no value found for {}, unable to evaluate {}", PlaceHolder::Input.get_string(), PlaceHolder::InputFileName.get_string(), ) From e12b41e0ce78723671401f694e89a3b351146e34 Mon Sep 17 00:00:00 2001 From: Teo Voinea <58236992+tevoinea@users.noreply.github.com> Date: Thu, 28 Sep 2023 09:15:03 -0400 Subject: [PATCH 38/88] Update windows interceptor list (#3528) --- .../src/tasks/coverage/generic/windows-interceptor.list | 1 + 1 file changed, 1 insertion(+) diff --git a/src/agent/onefuzz-task/src/tasks/coverage/generic/windows-interceptor.list b/src/agent/onefuzz-task/src/tasks/coverage/generic/windows-interceptor.list index 3669bbadec..0994295944 100644 --- 
a/src/agent/onefuzz-task/src/tasks/coverage/generic/windows-interceptor.list +++ b/src/agent/onefuzz-task/src/tasks/coverage/generic/windows-interceptor.list @@ -1,5 +1,6 @@ # Required to avoid recording errors. ! *\llvm-project\compiler-rt\* +! *\llvm\compiler-rt\* ! *\vctools\crt\* ! *\Windows Kits\10\Include\*\ucrt\* ! *\ExternalAPIs\Windows\10\sdk\* From 552df4506b2a3ba911deaf8c4c2e766bea63f5a2 Mon Sep 17 00:00:00 2001 From: Teo Voinea <58236992+tevoinea@users.noreply.github.com> Date: Thu, 28 Sep 2023 09:44:46 -0400 Subject: [PATCH 39/88] Template creation command (#3531) * Tasks are selectable * Almost there * It works * fmt * remove dead code * Remove unnecessary comments * Improve instructions * fix bug * Add some dummy values for paths --- src/agent/onefuzz-task/src/local/cmd.rs | 10 +- src/agent/onefuzz-task/src/local/coverage.rs | 15 +- .../onefuzz-task/src/local/create_template.rs | 285 ++++++++++++++++++ .../src/local/generic_analysis.rs | 18 +- .../src/local/generic_crash_report.rs | 20 +- .../src/local/generic_generator.rs | 21 +- src/agent/onefuzz-task/src/local/libfuzzer.rs | 17 +- .../src/local/libfuzzer_crash_report.rs | 19 +- .../onefuzz-task/src/local/libfuzzer_merge.rs | 14 +- .../src/local/libfuzzer_regression.rs | 20 +- .../src/local/libfuzzer_test_input.rs | 16 +- src/agent/onefuzz-task/src/local/mod.rs | 1 + src/agent/onefuzz-task/src/local/template.rs | 15 +- .../onefuzz-task/src/local/test_input.rs | 19 +- 14 files changed, 470 insertions(+), 20 deletions(-) create mode 100644 src/agent/onefuzz-task/src/local/create_template.rs diff --git a/src/agent/onefuzz-task/src/local/cmd.rs b/src/agent/onefuzz-task/src/local/cmd.rs index eabefb71ee..cb800d445e 100644 --- a/src/agent/onefuzz-task/src/local/cmd.rs +++ b/src/agent/onefuzz-task/src/local/cmd.rs @@ -1,19 +1,18 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. +use super::{create_template, template}; #[cfg(any(target_os = "linux", target_os = "windows"))] use crate::local::coverage; use crate::local::{common::add_common_config, libfuzzer_fuzz, tui::TerminalUi}; use anyhow::{Context, Result}; + use clap::{Arg, ArgAction, Command}; use std::time::Duration; use std::{path::PathBuf, str::FromStr}; use strum::IntoEnumIterator; use strum_macros::{EnumIter, EnumString, IntoStaticStr}; use tokio::{select, time::timeout}; - -use super::template; - #[derive(Debug, PartialEq, Eq, EnumString, IntoStaticStr, EnumIter)] #[strum(serialize_all = "kebab-case")] enum Commands { @@ -21,6 +20,7 @@ enum Commands { Coverage, LibfuzzerFuzz, Template, + CreateTemplate, } const TIMEOUT: &str = "timeout"; @@ -43,7 +43,7 @@ pub async fn run(args: clap::ArgMatches) -> Result<()> { let sub_args = sub_args.clone(); - let terminal = if start_ui { + let terminal = if start_ui && command != Commands::CreateTemplate { Some(TerminalUi::init()?) 
} else { env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")).init(); @@ -62,6 +62,7 @@ pub async fn run(args: clap::ArgMatches) -> Result<()> { template::launch(config, event_sender).await } + Commands::CreateTemplate => create_template::run(), } }); @@ -116,6 +117,7 @@ pub fn args(name: &'static str) -> Command { .args(vec![Arg::new("config") .value_parser(value_parser!(std::path::PathBuf)) .required(true)]), + Commands::CreateTemplate => create_template::args(subcommand.into()), }; cmd = if add_common { diff --git a/src/agent/onefuzz-task/src/local/coverage.rs b/src/agent/onefuzz-task/src/local/coverage.rs index d091b70695..48e32cb861 100644 --- a/src/agent/onefuzz-task/src/local/coverage.rs +++ b/src/agent/onefuzz-task/src/local/coverage.rs @@ -148,7 +148,20 @@ pub struct Coverage { } #[async_trait] -impl Template for Coverage { +impl Template for Coverage { + fn example_values() -> Coverage { + Coverage { + target_exe: PathBuf::from("path_to_your_exe"), + target_env: HashMap::new(), + target_options: vec![], + target_timeout: None, + module_allowlist: None, + source_allowlist: None, + input_queue: Some(PathBuf::from("path_to_your_inputs")), + readonly_inputs: vec![PathBuf::from("path_to_readonly_inputs")], + coverage: PathBuf::from("path_to_where_you_want_coverage_to_be_output"), + } + } async fn run(&self, context: &RunContext) -> Result<()> { let ri: Result> = self .readonly_inputs diff --git a/src/agent/onefuzz-task/src/local/create_template.rs b/src/agent/onefuzz-task/src/local/create_template.rs new file mode 100644 index 0000000000..474b677ad0 --- /dev/null +++ b/src/agent/onefuzz-task/src/local/create_template.rs @@ -0,0 +1,285 @@ +use crate::local::template::CommonProperties; + +use super::template::{TaskConfig, TaskConfigDiscriminants, TaskGroup}; +use anyhow::Result; +use clap::Command; +use std::str::FromStr; +use std::{ + io, + path::{Path, PathBuf}, +}; + +use strum::VariantNames; + +use crate::local::{ + coverage::Coverage, generic_analysis::Analysis, generic_crash_report::CrashReport, + generic_generator::Generator, libfuzzer::LibFuzzer, + libfuzzer_crash_report::LibfuzzerCrashReport, libfuzzer_merge::LibfuzzerMerge, + libfuzzer_regression::LibfuzzerRegression, libfuzzer_test_input::LibfuzzerTestInput, + template::Template, test_input::TestInput, +}; + +use crossterm::{ + event::{self, DisableMouseCapture, EnableMouseCapture, Event, KeyCode, KeyEventKind}, + execute, + terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen}, +}; +use tui::{prelude::*, widgets::*}; + +pub fn args(name: &'static str) -> Command { + Command::new(name).about("interactively create a template") +} + +pub fn run() -> Result<()> { + // setup terminal + enable_raw_mode()?; + let mut stdout = io::stdout(); + execute!(stdout, EnterAlternateScreen, EnableMouseCapture)?; + let backend = CrosstermBackend::new(stdout); + let mut terminal = Terminal::new(backend)?; + + // create app and run it + let app = App::new(); + let res = run_app(&mut terminal, app); + + // restore terminal + disable_raw_mode()?; + execute!( + terminal.backend_mut(), + LeaveAlternateScreen, + DisableMouseCapture + )?; + terminal.show_cursor()?; + + match res { + Ok(None) => { /* user quit, do nothing */ } + Ok(Some(path)) => match path.canonicalize() { + Ok(canonical_path) => println!("Wrote the template to: {:?}", canonical_path), + _ => println!("Wrote the template to: {:?}", path), + }, + Err(e) => println!("Failed to write template due to {}", e), + } + + 
Ok(()) +} + +fn run_app(terminal: &mut Terminal, mut app: App) -> Result> { + loop { + terminal.draw(|f| ui(f, &mut app))?; + if let Event::Key(key) = event::read()? { + if key.kind == KeyEventKind::Press { + match key.code { + KeyCode::Char('q') => return Ok(None), + KeyCode::Char(' ') => app.items.toggle(), + KeyCode::Down => app.items.next(), + KeyCode::Up => app.items.previous(), + KeyCode::Enter => { + return match generate_template(app.items.items) { + Ok(p) => Ok(Some(p)), + Err(e) => Err(e), + } + } + _ => {} + } + } + } + } +} + +fn generate_template(items: Vec) -> Result { + let tasks: Vec = items + .iter() + .filter(|item| item.is_included) + .filter_map(|list_element| { + match TaskConfigDiscriminants::from_str(list_element.task_type) { + Err(e) => { + error!( + "Failed to match task config {:?} - {}", + list_element.task_type, e + ); + None + } + Ok(t) => match t { + TaskConfigDiscriminants::LibFuzzer => { + Some(TaskConfig::LibFuzzer(LibFuzzer::example_values())) + } + TaskConfigDiscriminants::Analysis => { + Some(TaskConfig::Analysis(Analysis::example_values())) + } + TaskConfigDiscriminants::Coverage => { + Some(TaskConfig::Coverage(Coverage::example_values())) + } + TaskConfigDiscriminants::CrashReport => { + Some(TaskConfig::CrashReport(CrashReport::example_values())) + } + TaskConfigDiscriminants::Generator => { + Some(TaskConfig::Generator(Generator::example_values())) + } + TaskConfigDiscriminants::LibfuzzerCrashReport => Some( + TaskConfig::LibfuzzerCrashReport(LibfuzzerCrashReport::example_values()), + ), + TaskConfigDiscriminants::LibfuzzerMerge => { + Some(TaskConfig::LibfuzzerMerge(LibfuzzerMerge::example_values())) + } + TaskConfigDiscriminants::LibfuzzerRegression => Some( + TaskConfig::LibfuzzerRegression(LibfuzzerRegression::example_values()), + ), + TaskConfigDiscriminants::LibfuzzerTestInput => Some( + TaskConfig::LibfuzzerTestInput(LibfuzzerTestInput::example_values()), + ), + TaskConfigDiscriminants::TestInput => { + Some(TaskConfig::TestInput(TestInput::example_values())) + } + TaskConfigDiscriminants::Radamsa => Some(TaskConfig::Radamsa), + }, + } + }) + .collect(); + + let definition = TaskGroup { + common: CommonProperties { + setup_dir: None, + extra_setup_dir: None, + extra_dir: None, + create_job_dir: false, + }, + tasks, + }; + + let filename = "template"; + let mut filepath = format!("./{}.yaml", filename); + let mut output_file = Path::new(&filepath); + let mut counter = 0; + while output_file.exists() { + filepath = format!("./{}-{}.yaml", filename, counter); + output_file = Path::new(&filepath); + counter += 1; + } + + std::fs::write(output_file, serde_yaml::to_string(&definition)?)?; + + Ok(output_file.into()) +} + +fn ui(f: &mut Frame, app: &mut App) { + let areas = Layout::default() + .direction(Direction::Vertical) + .constraints([Constraint::Percentage(100)]) + .split(f.size()); + // Build the list items, marking the tasks currently selected for inclusion. + let items: Vec = app + .items + .items + .iter() + .map(|list_element| { + let title = if list_element.is_included { + format!("✅ {}", list_element.task_type) + } else { + list_element.task_type.to_string() + }; + ListItem::new(title).style(Style::default().fg(Color::Black).bg(Color::White)) + }) + .collect(); + + // Create a List from all list items and highlight the currently selected one + let items = List::new(items) + .block( + Block::default() + .borders(Borders::ALL) + .title("Select which tasks you want to include in the template. Use ⬆/⬇ to navigate and <space> to select. 
Press <enter> when you're done."), + ) + .highlight_style( + Style::default() + .bg(Color::LightGreen) + .add_modifier(Modifier::BOLD), + ) + .highlight_symbol(">> "); + + // We can now render the item list + f.render_stateful_widget(items, areas[0], &mut app.items.state); +} + +struct ListElement<'a> { + pub task_type: &'a str, + pub is_included: bool, +} + +pub trait Toggle { + fn toggle(&mut self) {} +} + +impl<'a> Toggle for ListElement<'a> { + fn toggle(&mut self) { + self.is_included = !self.is_included + } +} + +struct App<'a> { + items: StatefulList>, +} + +impl<'a> App<'a> { + fn new() -> App<'a> { + App { + items: StatefulList::with_items( + TaskConfig::VARIANTS + .iter() + .map(|name| ListElement { + task_type: name, + is_included: false, + }) + .collect(), + ), + } + } +} + +struct StatefulList { + state: ListState, + items: Vec, +} + +impl StatefulList { + fn with_items(items: Vec) -> StatefulList { + StatefulList { + state: ListState::default(), + items, + } + } + + fn next(&mut self) { + let i = match self.state.selected() { + Some(i) => { + if self.items.first().is_some() { + (i + 1) % self.items.len() + } else { + 0 + } + } + None => 0, + }; + self.state.select(Some(i)); + } + + fn previous(&mut self) { + let i = match self.state.selected() { + Some(i) => { + if i == 0 { + self.items.len() - 1 + } else { + i - 1 + } + } + None => 0, + }; + self.state.select(Some(i)); + } + + fn toggle(&mut self) { + if let Some(index) = self.state.selected() { + if let Some(element) = self.items.get_mut(index) { + element.toggle() + } + } + } +} diff --git a/src/agent/onefuzz-task/src/local/generic_analysis.rs b/src/agent/onefuzz-task/src/local/generic_analysis.rs index 429e7b0e3b..cbb31a1ff9 100644 --- a/src/agent/onefuzz-task/src/local/generic_analysis.rs +++ b/src/agent/onefuzz-task/src/local/generic_analysis.rs @@ -27,7 +27,23 @@ pub struct Analysis { } #[async_trait] -impl Template for Analysis { +impl Template for Analysis { + fn example_values() -> Analysis { + Analysis { + analyzer_exe: String::new(), + analyzer_options: vec![], + analyzer_env: HashMap::new(), + target_exe: PathBuf::from("path_to_your_exe"), + target_options: vec![], + input_queue: Some(PathBuf::from("path_to_your_inputs")), + crashes: Some(PathBuf::from("path_where_crashes_written")), + analysis: PathBuf::new(), + tools: None, + reports: Some(PathBuf::from("path_where_reports_written")), + unique_reports: Some(PathBuf::from("path_where_reports_written")), + no_repro: Some(PathBuf::from("path_where_no_repro_reports_written")), + } + } async fn run(&self, context: &RunContext) -> Result<()> { let input_q = if let Some(w) = &self.input_queue { Some(context.monitor_dir(w).await?)
diff --git a/src/agent/onefuzz-task/src/local/generic_crash_report.rs b/src/agent/onefuzz-task/src/local/generic_crash_report.rs index 347a8cac76..91dec1ae44 100644 --- a/src/agent/onefuzz-task/src/local/generic_crash_report.rs +++ b/src/agent/onefuzz-task/src/local/generic_crash_report.rs @@ -39,7 +39,25 @@ pub struct CrashReport { minimized_stack_depth: Option, } #[async_trait] -impl Template for CrashReport { +impl Template for CrashReport { + fn example_values() -> CrashReport { + CrashReport { + target_exe: PathBuf::from("path_to_your_exe"), + target_options: vec![], + target_env: HashMap::new(), + input_queue: Some(PathBuf::from("path_to_your_inputs")), + crashes: Some(PathBuf::from("path_where_crashes_written")), + reports: Some(PathBuf::from("path_where_reports_written")), + unique_reports: Some(PathBuf::from("path_where_reports_written")), + no_repro: Some(PathBuf::from("path_where_no_repro_reports_written")), + target_timeout: None, + check_asan_log: true, + check_debugger: true, + check_retry_count: 5, + check_queue: false, + minimized_stack_depth: None, + } + } async fn run(&self, context: &RunContext) -> Result<()> { let input_q_fut: OptionFuture<_> = self .input_queue diff --git a/src/agent/onefuzz-task/src/local/generic_generator.rs b/src/agent/onefuzz-task/src/local/generic_generator.rs index ae9f6a3cc6..3c26af4cf8 100644 --- a/src/agent/onefuzz-task/src/local/generic_generator.rs +++ b/src/agent/onefuzz-task/src/local/generic_generator.rs @@ -35,7 +35,26 @@ pub struct Generator { } #[async_trait] -impl Template for Generator { +impl Template for Generator { + fn example_values() -> Generator { + Generator { + generator_exe: String::new(), + generator_env: HashMap::new(), + generator_options: vec![], + readonly_inputs: vec![PathBuf::from("path_to_readonly_inputs")], + crashes: PathBuf::new(), + tools: None, + target_exe: PathBuf::from("path_to_your_exe"), + target_env: HashMap::new(), + target_options: vec![], + target_timeout: None, + check_asan_log: true, + check_debugger: true, + check_retry_count: 5, + rename_output: false, + ensemble_sync_delay: None, + } + } async fn run(&self, context: &RunContext) -> Result<()> { let generator_config = crate::tasks::fuzz::generator::Config { generator_exe: self.generator_exe.clone(), diff --git a/src/agent/onefuzz-task/src/local/libfuzzer.rs b/src/agent/onefuzz-task/src/local/libfuzzer.rs index 433636be1c..472a6ae9e8 100644 --- a/src/agent/onefuzz-task/src/local/libfuzzer.rs +++ b/src/agent/onefuzz-task/src/local/libfuzzer.rs @@ -32,7 +32,22 @@ pub struct LibFuzzer { } #[async_trait] -impl Template for LibFuzzer { +impl Template for LibFuzzer { + fn example_values() -> LibFuzzer { + LibFuzzer { + inputs: PathBuf::new(), + readonly_inputs: vec![PathBuf::from("path_to_readonly_inputs")], + crashes: PathBuf::new(), + crashdumps: None, + target_exe: PathBuf::from("path_to_your_exe"), + target_env: HashMap::new(), + target_options: vec![], + target_workers: None, + ensemble_sync_delay: None, + check_fuzzer_help: true, + expect_crash_on_failure: true, + } + } async fn run(&self, context: &RunContext) -> Result<()> { let ri: Result> = self .readonly_inputs diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs b/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs index 04ba4f9225..9de1fc66ce 100644 --- a/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs +++ b/src/agent/onefuzz-task/src/local/libfuzzer_crash_report.rs @@ -36,7 +36,24 @@ pub struct LibfuzzerCrashReport { } #[async_trait] -impl Template for 
LibfuzzerCrashReport { +impl Template for LibfuzzerCrashReport { + fn example_values() -> LibfuzzerCrashReport { + LibfuzzerCrashReport { + target_exe: PathBuf::from("path_to_your_exe"), + target_env: HashMap::new(), + target_options: vec![], + target_timeout: None, + input_queue: Some(PathBuf::from("path_to_your_inputs")), + crashes: Some(PathBuf::from("path_where_crashes_written")), + reports: Some(PathBuf::from("path_where_reports_written")), + unique_reports: Some(PathBuf::from("path_where_reports_written")), + no_repro: Some(PathBuf::from("path_where_no_repro_reports_written")), + check_fuzzer_help: true, + check_retry_count: 5, + minimized_stack_depth: None, + check_queue: true, + } + } async fn run(&self, context: &RunContext) -> Result<()> { let input_q_fut: OptionFuture<_> = self .input_queue diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs b/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs index 4b3e4ce58f..d4915e6b4c 100644 --- a/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs +++ b/src/agent/onefuzz-task/src/local/libfuzzer_merge.rs @@ -27,7 +27,19 @@ pub struct LibfuzzerMerge { } #[async_trait] -impl Template for LibfuzzerMerge { +impl Template for LibfuzzerMerge { + fn example_values() -> LibfuzzerMerge { + LibfuzzerMerge { + target_exe: PathBuf::from("path_to_your_exe"), + target_env: HashMap::new(), + target_options: vec![], + input_queue: Some(PathBuf::from("path_to_your_inputs")), + inputs: vec![], + unique_inputs: PathBuf::new(), + preserve_existing_outputs: true, + check_fuzzer_help: true, + } + } async fn run(&self, context: &RunContext) -> Result<()> { let input_q_fut: OptionFuture<_> = self .input_queue diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs b/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs index 3fbb9f0bd6..b53fb84c22 100644 --- a/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs +++ b/src/agent/onefuzz-task/src/local/libfuzzer_regression.rs @@ -40,7 +40,25 @@ pub struct LibfuzzerRegression { } #[async_trait] -impl Template for LibfuzzerRegression { +impl Template for LibfuzzerRegression { + fn example_values() -> LibfuzzerRegression { + LibfuzzerRegression { + target_exe: PathBuf::from("path_to_your_exe"), + target_options: vec![], + target_env: HashMap::new(), + target_timeout: None, + crashes: PathBuf::new(), + regression_reports: PathBuf::new(), + report_list: None, + unique_reports: Some(PathBuf::from("path_where_reports_written")), + reports: Some(PathBuf::from("path_where_reports_written")), + no_repro: Some(PathBuf::from("path_where_no_repro_reports_written")), + readonly_inputs: None, + check_fuzzer_help: true, + check_retry_count: 5, + minimized_stack_depth: None, + } + } async fn run(&self, context: &RunContext) -> Result<()> { let libfuzzer_regression = crate::tasks::regression::libfuzzer::Config { target_exe: self.target_exe.clone(), diff --git a/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs b/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs index 5bef2347f7..88c3cd1a3d 100644 --- a/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs +++ b/src/agent/onefuzz-task/src/local/libfuzzer_test_input.rs @@ -24,7 +24,21 @@ pub struct LibfuzzerTestInput { } #[async_trait] -impl Template for LibfuzzerTestInput { +impl Template for LibfuzzerTestInput { + fn example_values() -> LibfuzzerTestInput { + LibfuzzerTestInput { + input: PathBuf::new(), + target_exe: PathBuf::from("path_to_your_exe"), + target_options: vec![], + target_env: HashMap::new(), + setup_dir: 
PathBuf::new(), + extra_setup_dir: None, + extra_output_dir: None, + target_timeout: None, + check_retry_count: 5, + minimized_stack_depth: None, + } + } async fn run(&self, context: &RunContext) -> Result<()> { let c = self.clone(); let t = tokio::spawn(async move { diff --git a/src/agent/onefuzz-task/src/local/mod.rs b/src/agent/onefuzz-task/src/local/mod.rs index 385ff8ffcd..6020cb0fa6 100644 --- a/src/agent/onefuzz-task/src/local/mod.rs +++ b/src/agent/onefuzz-task/src/local/mod.rs @@ -5,6 +5,7 @@ pub mod cmd; pub mod common; #[cfg(any(target_os = "linux", target_os = "windows"))] pub mod coverage; +pub mod create_template; pub mod generic_analysis; pub mod generic_crash_report; pub mod generic_generator; diff --git a/src/agent/onefuzz-task/src/local/template.rs b/src/agent/onefuzz-task/src/local/template.rs index 64b342744d..3393edd89a 100644 --- a/src/agent/onefuzz-task/src/local/template.rs +++ b/src/agent/onefuzz-task/src/local/template.rs @@ -5,6 +5,7 @@ use path_absolutize::Absolutize; use serde::Deserialize; use std::path::{Path, PathBuf}; use storage_queue::QueueClient; +use strum_macros::{EnumDiscriminants, EnumString, EnumVariantNames}; use tokio::{sync::Mutex, task::JoinHandle}; use url::Url; use uuid::Uuid; @@ -27,14 +28,14 @@ use schemars::JsonSchema; #[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] pub struct TaskGroup { #[serde(flatten)] - common: CommonProperties, + pub common: CommonProperties, /// The list of tasks - tasks: Vec, + pub tasks: Vec, } #[derive(Debug, Deserialize, Serialize, Clone, JsonSchema)] -struct CommonProperties { +pub struct CommonProperties { pub setup_dir: Option, pub extra_setup_dir: Option, pub extra_dir: Option, @@ -42,9 +43,10 @@ struct CommonProperties { pub create_job_dir: bool, } -#[derive(Debug, Serialize, Deserialize, Clone, JsonSchema)] +#[derive(Debug, Serialize, Deserialize, Clone, JsonSchema, EnumVariantNames, EnumDiscriminants)] +#[strum_discriminants(derive(EnumString))] #[serde(tag = "type")] -enum TaskConfig { +pub enum TaskConfig { LibFuzzer(LibFuzzer), Analysis(Analysis), Coverage(Coverage), @@ -61,7 +63,8 @@ enum TaskConfig { } #[async_trait] -pub trait Template { +pub trait Template { + fn example_values() -> T; async fn run(&self, context: &RunContext) -> Result<()>; } diff --git a/src/agent/onefuzz-task/src/local/test_input.rs b/src/agent/onefuzz-task/src/local/test_input.rs index b8027a7f41..0018494ec0 100644 --- a/src/agent/onefuzz-task/src/local/test_input.rs +++ b/src/agent/onefuzz-task/src/local/test_input.rs @@ -28,7 +28,24 @@ pub struct TestInput { } #[async_trait] -impl Template for TestInput { +impl Template for TestInput { + fn example_values() -> TestInput { + TestInput { + input: PathBuf::new(), + target_exe: PathBuf::from("path_to_your_exe"), + target_options: vec![], + target_env: HashMap::new(), + setup_dir: PathBuf::new(), + extra_setup_dir: None, + task_id: Uuid::new_v4(), + job_id: Uuid::new_v4(), + target_timeout: None, + check_retry_count: 5, + check_asan_log: true, + check_debugger: true, + minimized_stack_depth: None, + } + } async fn run(&self, context: &RunContext) -> Result<()> { let c = self.clone(); let t = tokio::spawn(async move { From e3b1e0e93f8021c4bf6d10aad3023e4cdf5bb331 Mon Sep 17 00:00:00 2001 From: Cheick Keita Date: Thu, 28 Sep 2023 17:54:32 -0700 Subject: [PATCH 40/88] Terminate process on timeout in windows for the coverage task (#3529) * Terminate process on timeout in windows for the coverage task * set the timeout before we start the debugger * split the target launch 
from the debugger initialization * wait for the process to finish on a separate thread * fix build * move comments --- src/agent/Cargo.lock | 1 + src/agent/coverage/Cargo.toml | 1 + src/agent/coverage/src/record.rs | 81 ++++++++++++++++++++++-------- src/agent/coverage/src/timer.rs | 1 + src/agent/debugger/src/debugger.rs | 27 ++++++---- 5 files changed, 80 insertions(+), 31 deletions(-) diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock index 7a79204631..eb35241201 100644 --- a/src/agent/Cargo.lock +++ b/src/agent/Cargo.lock @@ -572,6 +572,7 @@ dependencies = [ "nix", "pete", "pretty_assertions", + "process_control", "procfs", "regex", "symbolic", diff --git a/src/agent/coverage/Cargo.toml b/src/agent/coverage/Cargo.toml index 2a1170e3ae..e1ced7050f 100644 --- a/src/agent/coverage/Cargo.toml +++ b/src/agent/coverage/Cargo.toml @@ -20,6 +20,7 @@ symbolic = { version = "12.3", features = [ "symcache", ] } thiserror = "1.0" +process_control = "4.0" [target.'cfg(target_os = "windows")'.dependencies] debugger = { path = "../debugger" } diff --git a/src/agent/coverage/src/record.rs b/src/agent/coverage/src/record.rs index 534d1d4d63..44faded302 100644 --- a/src/agent/coverage/src/record.rs +++ b/src/agent/coverage/src/record.rs @@ -1,7 +1,7 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. -use std::process::{Command, ExitStatus, Stdio}; +use std::process::{Command, Stdio}; use std::sync::Arc; use std::time::Duration; @@ -120,32 +120,58 @@ impl CoverageRecorder { #[cfg(target_os = "windows")] pub fn record(self) -> Result { + use anyhow::bail; use debugger::Debugger; + use process_control::{ChildExt, Control}; use windows::WindowsRecorder; + let child = Debugger::create_child(self.cmd)?; + + // Spawn a thread to wait for the target process to exit. + let target_process = std::thread::spawn(move || { + let output = child + .controlled_with_output() + .time_limit(self.timeout) + .terminate_for_timeout() + .wait(); + output + }); + let loader = self.loader.clone(); + let mut recorder = + WindowsRecorder::new(&loader, self.module_allowlist, self.cache.as_ref()); - crate::timer::timed(self.timeout, move || { - let mut recorder = - WindowsRecorder::new(&loader, self.module_allowlist, self.cache.as_ref()); - let (mut dbg, child) = Debugger::init(self.cmd, &mut recorder)?; - dbg.run(&mut recorder)?; - - // If the debugger callbacks fail, this may return with a spurious clean exit. - let output = child.wait_with_output()?.into(); - - // Check if debugging was stopped due to a callback error. - // - // If so, the debugger terminated the target, and the recorded coverage and - // output are both invalid. - if let Some(err) = recorder.stop_error { - return Err(err); + // The debugger is initialized in the same thread that created the target process to be able to receive the debug events + let mut dbg = Debugger::init_debugger(&mut recorder)?; + dbg.run(&mut recorder)?; + + // If the debugger callbacks fail, this may return with a spurious clean exit. + let output = match target_process.join() { + Err(err) => { + bail!("failed to launch target thread: {:?}", err) + } + Ok(Err(err)) => { + bail!("failed to launch target process: {:?}", err) } + Ok(Ok(None)) => { + bail!(crate::timer::TimerError::Timeout(self.timeout)) + } + Ok(Ok(Some(output))) => output, + }; - let coverage = recorder.coverage; + // Check if debugging was stopped due to a callback error. + // + // If so, the debugger terminated the target, and the recorded coverage and + // output are both invalid.
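As a rough, self-contained sketch of the timeout pattern record() now uses (assuming the process_control crate pulled in above; the "sleep" target is a stand-in for the fuzz target):

    use process_control::{ChildExt, Control};
    use std::process::{Command, Stdio};
    use std::time::Duration;

    fn main() -> std::io::Result<()> {
        let child = Command::new("sleep")
            .arg("10")
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .spawn()?;

        // terminate_for_timeout kills the child once the limit expires
        // instead of leaving it running behind the recorder.
        let output = child
            .controlled_with_output()
            .time_limit(Duration::from_secs(1))
            .terminate_for_timeout()
            .wait()?;

        // None means the time limit elapsed; record() maps this case to
        // TimerError::Timeout.
        assert!(output.is_none());
        Ok(())
    }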
+ if let Some(err) = recorder.stop_error { return Err(err); } - let coverage = recorder.coverage; + let coverage = recorder.coverage; Ok(Recorded { + coverage, + output: output.into(), + }) } } @@ -157,11 +183,24 @@ pub struct Recorded { #[derive(Clone, Debug, Default)] pub struct Output { - pub status: Option, + pub status: Option, pub stderr: String, pub stdout: String, } +impl From for Output { + fn from(output: process_control::Output) -> Self { + let status = Some(output.status); + let stdout = String::from_utf8_lossy(&output.stdout).into_owned(); + let stderr = String::from_utf8_lossy(&output.stderr).into_owned(); + Self { + status, + stdout, + stderr, + } + } +} + impl From for Output { fn from(output: std::process::Output) -> Self { let status = Some(output.status); @@ -169,7 +208,7 @@ impl From for Output { let stderr = String::from_utf8_lossy(&output.stderr).into_owned(); Self { - status, + status: status.map(Into::into), stdout, stderr, } diff --git a/src/agent/coverage/src/timer.rs b/src/agent/coverage/src/timer.rs index 760e453b28..3271666d67 100644 --- a/src/agent/coverage/src/timer.rs +++ b/src/agent/coverage/src/timer.rs @@ -7,6 +7,7 @@ use std::time::Duration; use thiserror::Error; +#[allow(dead_code)] pub fn timed(timeout: Duration, function: F) -> Result where T: Send + 'static, diff --git a/src/agent/debugger/src/debugger.rs b/src/agent/debugger/src/debugger.rs index d7c0f1ba5e..ae67f66fed 100644 --- a/src/agent/debugger/src/debugger.rs +++ b/src/agent/debugger/src/debugger.rs @@ -134,15 +134,7 @@ pub struct Debugger { } impl Debugger { - pub fn init( - mut command: Command, - callbacks: &mut impl DebugEventHandler, - ) -> Result<(Self, Child)> { - let child = command - .creation_flags(DEBUG_ONLY_THIS_PROCESS.0) - .spawn() - .context("debugee failed to start")?; - + pub fn init_debugger(callbacks: &mut impl DebugEventHandler) -> Result { unsafe { DebugSetProcessKillOnExit(TRUE) } .ok() .context("Setting DebugSetProcessKillOnExit to TRUE")?; @@ -186,12 +178,27 @@ impl Debugger { return Err(last_os_error()); } - Ok((debugger, child)) + Ok(debugger) } else { anyhow::bail!("Unexpected event: {}", de) } } + pub fn create_child(mut command: Command) -> Result { + let child = command + .creation_flags(DEBUG_ONLY_THIS_PROCESS.0) + .spawn() + .context("debugee failed to start")?; + + Ok(child) + } + + pub fn init(command: Command, callbacks: &mut impl DebugEventHandler) -> Result<(Self, Child)> { + let child = Self::create_child(command)?; + let debugger = Self::init_debugger(callbacks)?; + Ok((debugger, child)) + } + pub fn target(&mut self) -> &mut Target { &mut self.target } From 434a435d3c429d5ebe879e69dbcae84704021664 Mon Sep 17 00:00:00 2001 From: Cheick Keita Date: Mon, 2 Oct 2023 10:55:07 -0700 Subject: [PATCH 41/88] Ignore regression update when the work item is in some states (#3532) * Ignore regression update when the work item is in some states * format * formatting * don't hide messages in the poison queue * fix typo * update regression logic * update test_template to support regression * build fix * mypy fix * build fix * move regression ignore state under ADODuplicateTemplate * replace extend with append * update set_tcp_keepalive * make mypy happy * copy ADODuplicateTemplate.OnDuplicate.RegressionIgnoreStates --- docs/webhook_events.md | 8 +++ .../ApiService/Functions/QueueFileChanges.cs | 24 ++++--- .../ApiService/OneFuzzTypes/Model.cs | 5 +- .../ApiService/OneFuzzTypes/Requests.cs | 2 +- .../ApiService/onefuzzlib/Reports.cs | 20 ++++++
.../onefuzzlib/notifications/Ado.cs | 67 +++++++++++-------- src/cli/onefuzz/cli.py | 13 +++- src/cli/onefuzz/debug.py | 41 +++++++++--- src/pytypes/onefuzztypes/models.py | 2 + src/pytypes/onefuzztypes/requests.py | 5 +- 10 files changed, 132 insertions(+), 55 deletions(-) diff --git a/docs/webhook_events.md b/docs/webhook_events.md index a417b7465f..cd8c5932f6 100644 --- a/docs/webhook_events.md +++ b/docs/webhook_events.md @@ -2033,6 +2033,10 @@ If webhook is set to have Event Grid message format then the payload will look a }, "original_crash_test_result": { "$ref": "#/definitions/CrashTestResult" + }, + "report_url": { + "title": "Report Url", + "type": "string" } }, "required": [ @@ -6427,6 +6431,10 @@ If webhook is set to have Event Grid message format then the payload will look a }, "original_crash_test_result": { "$ref": "#/definitions/CrashTestResult" + }, + "report_url": { + "title": "Report Url", + "type": "string" } }, "required": [ diff --git a/src/ApiService/ApiService/Functions/QueueFileChanges.cs b/src/ApiService/ApiService/Functions/QueueFileChanges.cs index f1c4711f9d..9e22f113ad 100644 --- a/src/ApiService/ApiService/Functions/QueueFileChanges.cs +++ b/src/ApiService/ApiService/Functions/QueueFileChanges.cs @@ -128,20 +128,22 @@ private async Async.Task RequeueMessage(string msg, TimeSpan? visibilityTimeout newCustomDequeueCount = json["data"]!["customDequeueCount"]!.GetValue(); } - var queueName = QueueFileChangesQueueName; if (newCustomDequeueCount > MAX_DEQUEUE_COUNT) { _log.LogWarning("Message retried more than {MAX_DEQUEUE_COUNT} times with no success: {msg}", MAX_DEQUEUE_COUNT, msg); - queueName = QueueFileChangesPoisonQueueName; + await _context.Queue.QueueObject( + QueueFileChangesPoisonQueueName, + json, + StorageType.Config) + .IgnoreResult(); + } else { + json!["data"]!["customDequeueCount"] = newCustomDequeueCount + 1; + await _context.Queue.QueueObject( + QueueFileChangesQueueName, + json, + StorageType.Config, + visibilityTimeout ?? CalculateExponentialBackoff(newCustomDequeueCount)) + .IgnoreResult(); } - - json!["data"]!["customDequeueCount"] = newCustomDequeueCount + 1; - - await _context.Queue.QueueObject( - queueName, - json, - StorageType.Config, - visibilityTimeout ?? CalculateExponentialBackoff(newCustomDequeueCount)) - .IgnoreResult(); } // Possible return values: diff --git a/src/ApiService/ApiService/OneFuzzTypes/Model.cs b/src/ApiService/ApiService/OneFuzzTypes/Model.cs index 424669899a..3d67de106d 100644 --- a/src/ApiService/ApiService/OneFuzzTypes/Model.cs +++ b/src/ApiService/ApiService/OneFuzzTypes/Model.cs @@ -678,7 +678,8 @@ public record ADODuplicateTemplate( Dictionary SetState, Dictionary AdoFields, string? Comment = null, - List>? Unless = null + List>? Unless = null, + List? RegressionIgnoreStates = null ); public record AdoTemplate( @@ -707,7 +708,7 @@ public record RenderedAdoTemplate( ADODuplicateTemplate OnDuplicate, Dictionary? AdoDuplicateFields = null, string? 
Comment = null - ) : AdoTemplate(BaseUrl, AuthToken, Project, Type, UniqueFields, AdoFields, OnDuplicate, AdoDuplicateFields, Comment); + ) : AdoTemplate(BaseUrl, AuthToken, Project, Type, UniqueFields, AdoFields, OnDuplicate, AdoDuplicateFields, Comment) { } public record TeamsTemplate(SecretData Url) : NotificationTemplate { public Task Validate() { diff --git a/src/ApiService/ApiService/OneFuzzTypes/Requests.cs b/src/ApiService/ApiService/OneFuzzTypes/Requests.cs index 8f3d16aa63..f3cc407b15 100644 --- a/src/ApiService/ApiService/OneFuzzTypes/Requests.cs +++ b/src/ApiService/ApiService/OneFuzzTypes/Requests.cs @@ -131,7 +131,7 @@ public record NotificationSearch( public record NotificationTest( - [property: Required] Report Report, + [property: Required] IReport Report, [property: Required] Notification Notification ) : BaseRequest; diff --git a/src/ApiService/ApiService/onefuzzlib/Reports.cs b/src/ApiService/ApiService/onefuzzlib/Reports.cs index c1c4aad3be..fdda7259e9 100644 --- a/src/ApiService/ApiService/onefuzzlib/Reports.cs +++ b/src/ApiService/ApiService/onefuzzlib/Reports.cs @@ -1,7 +1,10 @@ using System.Text.Json; +using System.Text.Json.Serialization; using System.Threading.Tasks; using Microsoft.Extensions.Logging; using Microsoft.OneFuzz.Service.OneFuzzLib.Orm; + + namespace Microsoft.OneFuzz.Service; public interface IReports { @@ -85,6 +88,7 @@ public static IReport ParseReportOrRegression(string content, Uri reportUrl) { } } +[JsonConverter(typeof(ReportConverter))] public interface IReport { Uri? ReportUrl { init; @@ -95,3 +99,19 @@ public string FileName() { return string.Concat(segments); } }; + +public class ReportConverter : JsonConverter { + + public override IReport? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) { + using var templateJson = JsonDocument.ParseValue(ref reader); + + if (templateJson.RootElement.TryGetProperty("crash_test_result", out _)) { + return templateJson.Deserialize(options); + } + return templateJson.Deserialize(options); + } + + public override void Write(Utf8JsonWriter writer, IReport value, JsonSerializerOptions options) { + throw new NotImplementedException(); + } +} diff --git a/src/ApiService/ApiService/onefuzzlib/notifications/Ado.cs b/src/ApiService/ApiService/onefuzzlib/notifications/Ado.cs index b1442851ba..2b01afb37f 100644 --- a/src/ApiService/ApiService/onefuzzlib/notifications/Ado.cs +++ b/src/ApiService/ApiService/onefuzzlib/notifications/Ado.cs @@ -19,6 +19,7 @@ public class Ado : NotificationsBase, IAdo { // https://github.com/MicrosoftDocs/azure-devops-docs/issues/5890#issuecomment-539632059 private const int MAX_SYSTEM_TITLE_LENGTH = 128; private const string TITLE_FIELD = "System.Title"; + private static List DEFAULT_REGRESSION_IGNORE_STATES = new() { "New", "Committed", "Active" }; public Ado(ILogger logTracer, IOnefuzzContext context) : base(logTracer, context) { } @@ -56,7 +57,7 @@ public async Async.Task NotifyAdo(AdoTemplate config, Contain _logTracer.LogEvent(adoEventType); try { - await ProcessNotification(_context, container, filename, config, report, _logTracer, notificationInfo); + await ProcessNotification(_context, container, filename, config, report, _logTracer, notificationInfo, isRegression: reportable is RegressionReport); } catch (Exception e) when (e is VssUnauthorizedException || e is VssAuthenticationException || e is VssServiceException) { if (config.AdoFields.TryGetValue("System.AssignedTo", out var assignedTo)) { @@ -298,7 +299,7 @@ private static async Async.Task>
GetValidFiel .ToDictionary(field => field.ReferenceName.ToLowerInvariant()); } - private static async Async.Task ProcessNotification(IOnefuzzContext context, Container container, string filename, AdoTemplate config, Report report, ILogger logTracer, IList<(string, string)> notificationInfo, Renderer? renderer = null) { + private static async Async.Task ProcessNotification(IOnefuzzContext context, Container container, string filename, AdoTemplate config, Report report, ILogger logTracer, IList<(string, string)> notificationInfo, Renderer? renderer = null, bool isRegression = false) { if (!config.AdoFields.TryGetValue(TITLE_FIELD, out var issueTitle)) { issueTitle = "{{ report.crash_site }} - {{ report.executable }}"; } @@ -311,7 +312,7 @@ private static async Async.Task ProcessNotification(IOnefuzzContext context, Con var renderedConfig = RenderAdoTemplate(logTracer, renderer, config, instanceUrl); var ado = new AdoConnector(renderedConfig, project!, client, instanceUrl, logTracer, await GetValidFields(client, project)); - await ado.Process(notificationInfo); + await ado.Process(notificationInfo, isRegression); } public static RenderedAdoTemplate RenderAdoTemplate(ILogger logTracer, Renderer renderer, AdoTemplate original, Uri instanceUrl) { @@ -352,7 +353,8 @@ public static RenderedAdoTemplate RenderAdoTemplate(ILogger logTracer, Renderer original.OnDuplicate.SetState, onDuplicateAdoFields, original.OnDuplicate.Comment != null ? Render(renderer, original.OnDuplicate.Comment, instanceUrl, logTracer) : null, - onDuplicateUnless + onDuplicateUnless, + original.OnDuplicate.RegressionIgnoreStates ); return new RenderedAdoTemplate( @@ -598,7 +600,7 @@ private async Async.Task CreateNew() { return (taskType, document); } - public async Async.Task Process(IList<(string, string)> notificationInfo) { + public async Async.Task Process(IList<(string, string)> notificationInfo, bool isRegression) { var updated = false; WorkItem? oldestWorkItem = null; await foreach (var workItem in ExistingWorkItems(notificationInfo)) { @@ -612,6 +614,13 @@ public async Async.Task Process(IList<(string, string)> notificationInfo) { continue; } + var regressionStatesToIgnore = _config.OnDuplicate.RegressionIgnoreStates != null ? _config.OnDuplicate.RegressionIgnoreStates : DEFAULT_REGRESSION_IGNORE_STATES; + if (isRegression) { + var state = (string)workItem.Fields["System.State"]; + if (regressionStatesToIgnore.Contains(state, StringComparer.InvariantCultureIgnoreCase)) + continue; + } + using (_logTracer.BeginScope("Non-duplicate work item")) { _logTracer.AddTags(new List<(string, string)> { ("NonDuplicateWorkItemId", $"{workItem.Id}") }); _logTracer.LogInformation("Found matching non-duplicate work item"); @@ -621,30 +630,32 @@ public async Async.Task Process(IList<(string, string)> notificationInfo) { updated = true; } - if (!updated) { - if (oldestWorkItem != null) { - // We have matching work items but all are duplicates - _logTracer.AddTags(notificationInfo); - _logTracer.LogInformation($"All matching work items were duplicates, re-opening the oldest one"); - var stateChanged = await UpdateExisting(oldestWorkItem, notificationInfo); - if (stateChanged) { - // add a comment if we re-opened the bug - _ = await _client.AddCommentAsync( - new CommentCreate() { - Text = - "This work item was re-opened because OneFuzz could only find related work items that are marked as duplicate." 
- }, - _project, - (int)oldestWorkItem.Id!); - } - } else { - // We never saw a work item like this before, it must be new - var entry = await CreateNew(); - var adoEventType = "AdoNewItem"; - _logTracer.AddTags(notificationInfo); - _logTracer.AddTag("WorkItemId", entry.Id.HasValue ? entry.Id.Value.ToString() : ""); - _logTracer.LogEvent(adoEventType); + if (updated || isRegression) { + return; + } + + if (oldestWorkItem != null) { + // We have matching work items but all are duplicates + _logTracer.AddTags(notificationInfo); + _logTracer.LogInformation($"All matching work items were duplicates, re-opening the oldest one"); + var stateChanged = await UpdateExisting(oldestWorkItem, notificationInfo); + if (stateChanged) { + // add a comment if we re-opened the bug + _ = await _client.AddCommentAsync( + new CommentCreate() { + Text = + "This work item was re-opened because OneFuzz could only find related work items that are marked as duplicate." + }, + _project, + (int)oldestWorkItem.Id!); + } + } else { + // We never saw a work item like this before, it must be new + var entry = await CreateNew(); + var adoEventType = "AdoNewItem"; + _logTracer.AddTags(notificationInfo); + _logTracer.AddTag("WorkItemId", entry.Id.HasValue ? entry.Id.Value.ToString() : ""); + _logTracer.LogEvent(adoEventType); } } diff --git a/src/cli/onefuzz/cli.py b/src/cli/onefuzz/cli.py index 4456f52d49..2ae55046cf 100644 --- a/src/cli/onefuzz/cli.py +++ b/src/cli/onefuzz/cli.py @@ -28,6 +28,7 @@ Type, TypeVar, Union, + cast, ) from uuid import UUID @@ -551,8 +552,16 @@ def set_tcp_keepalive() -> None: # Azure Load Balancer default timeout (4 minutes) # # https://urllib3.readthedocs.io/en/stable/reference/urllib3.connection.html?highlight=keep-alive#:~:text=For%20example%2C%20if,socket.SO_KEEPALIVE%2C%201)%2C%0A%5D - if value not in urllib3.connection.HTTPConnection.default_socket_options: - urllib3.connection.HTTPConnection.default_socket_options.extend((value,)) + + default_socket_options = cast( + List[Tuple[int, int, int]], + urllib3.connection.HTTPConnection.default_socket_options, + ) + + if value not in default_socket_options: + default_socket_options.append(value) def execute_api(api: Any, api_types: List[Any], version: str) -> int: diff --git a/src/cli/onefuzz/debug.py b/src/cli/onefuzz/debug.py index cde03adf57..0182cb19a1 100644 --- a/src/cli/onefuzz/debug.py +++ b/src/cli/onefuzz/debug.py @@ -21,7 +21,15 @@ from azure.storage.blob import ContainerClient from onefuzztypes import models, requests, responses from onefuzztypes.enums import ContainerType, TaskType -from onefuzztypes.models import BlobRef, Job, NodeAssignment, Report, Task, TaskConfig +from onefuzztypes.models import ( + BlobRef, + Job, + NodeAssignment, + RegressionReport, + Report, + Task, + TaskConfig, +) from onefuzztypes.primitives import Container, Directory, PoolName from onefuzztypes.responses import TemplateValidationResponse @@ -633,35 +641,50 @@ def test_template( self, notificationConfig: models.NotificationConfig, task_id: Optional[UUID] = None, - report: Optional[Report] = None, + report: Optional[str] = None, ) -> responses.NotificationTestResponse: """Test a notification template""" + the_report: Union[Report, RegressionReport, None] = None + + if report is not None: + try: + the_report = RegressionReport.parse_raw(report) + print("testing regression report") + except Exception: + the_report = Report.parse_raw(report) + print("testing normal report") + if task_id is not None: task = self.onefuzz.tasks.get(task_id) - if report is
None: + if the_report is None: input_blob_ref = BlobRef( account="dummy-storage-account", container="test-notification-crashes", name="fake-crash-sample", ) - report = self._create_report( + the_report = self._create_report( task.job_id, task.task_id, "fake_target.exe", input_blob_ref ) + elif isinstance(the_report, RegressionReport): + if the_report.crash_test_result.crash_report is None: + raise Exception("invalid regression report: no crash report") + the_report.crash_test_result.crash_report.task_id = task.task_id + the_report.crash_test_result.crash_report.job_id = task.job_id else: - report.task_id = task.task_id - report.job_id = task.job_id - elif report is None: + the_report.task_id = task.task_id + the_report.job_id = task.job_id + elif the_report is None: raise Exception("must specify either task_id or report") - report.report_url = "https://dummy-container.blob.core.windows.net/dummy-reports/dummy-report.json" + the_report.report_url = "https://dummy-container.blob.core.windows.net/dummy-reports/dummy-report.json" endpoint = Endpoint(self.onefuzz) return endpoint._req_model( "POST", responses.NotificationTestResponse, data=requests.NotificationTest( - report=report, + report=the_report, notification=models.Notification( container=Container("test-notification-reports"), notification_id=uuid.uuid4(), diff --git a/src/pytypes/onefuzztypes/models.py b/src/pytypes/onefuzztypes/models.py index c888621600..4b115a3c79 100644 --- a/src/pytypes/onefuzztypes/models.py +++ b/src/pytypes/onefuzztypes/models.py @@ -256,6 +256,7 @@ class CrashTestResult(BaseModel): class RegressionReport(BaseModel): crash_test_result: CrashTestResult original_crash_test_result: Optional[CrashTestResult] + report_url: Optional[str] class ADODuplicateTemplate(BaseModel): @@ -263,6 +264,7 @@ class ADODuplicateTemplate(BaseModel): comment: Optional[str] set_state: Dict[str, str] ado_fields: Dict[str, str] + regression_ignore_states: Optional[List[str]] class ADOTemplate(BaseModel): diff --git a/src/pytypes/onefuzztypes/requests.py b/src/pytypes/onefuzztypes/requests.py index ae6da006e0..d284fb416d 100644 --- a/src/pytypes/onefuzztypes/requests.py +++ b/src/pytypes/onefuzztypes/requests.py @@ -3,7 +3,7 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional, Union from uuid import UUID from pydantic import AnyHttpUrl, BaseModel, Field, root_validator @@ -26,6 +26,7 @@ AutoScaleConfig, InstanceConfig, NotificationConfig, + RegressionReport, Report, TemplateRenderContext, ) @@ -280,7 +281,7 @@ class EventsGet(BaseModel): class NotificationTest(BaseModel): - report: Report + report: Union[Report, RegressionReport] notification: models.Notification From 16fd614b4c6804941506e65bc96fb5667b93c239 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Mon, 2 Oct 2023 21:52:06 +0000 Subject: [PATCH 42/88] Updating IterationCount to be Task-based. 
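The diff below replaces the job-wide IterationCount with a map keyed first by task id and, in the follow-up commit, by machine id. A minimal sketch of the aggregation idea (hypothetical names; the service code uses a C# Dictionary keyed by GUID):

    use std::collections::HashMap;

    fn main() {
        // reporter id -> latest total iteration count it sent
        let mut iterations: HashMap<&str, f64> = HashMap::new();

        iterations.insert("task-a", 1_000.0);
        iterations.insert("task-b", 2_500.0);
        // A repeat report from task-a replaces its own slot instead of
        // inflating a shared counter.
        iterations.insert("task-a", 1_200.0);

        let job_total: f64 = iterations.values().sum();
        assert_eq!(job_total, 3_700.0);
        println!("job total iterations: {job_total}");
    }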
--- .../ApiService/Functions/QueueJobResult.cs | 2 +- .../ApiService/OneFuzzTypes/Model.cs | 4 +-- .../onefuzzlib/JobResultOperations.cs | 29 ++++++++++++++----- 3 files changed, 24 insertions(+), 11 deletions(-) diff --git a/src/ApiService/ApiService/Functions/QueueJobResult.cs b/src/ApiService/ApiService/Functions/QueueJobResult.cs index d781a4d1e1..bdb4ee1b2c 100644 --- a/src/ApiService/ApiService/Functions/QueueJobResult.cs +++ b/src/ApiService/ApiService/Functions/QueueJobResult.cs @@ -52,7 +52,7 @@ public async Async.Task Run([QueueTrigger("job-result", Connection = "AzureWebJo return; } - var jobResult = await _context.JobResultOperations.CreateOrUpdate(job.JobId, jobResultType, value); + var jobResult = await _context.JobResultOperations.CreateOrUpdate(job.JobId, jr.TaskId, jobResultType, value); if (!jobResult.IsOk) { _log.LogError("failed to create or update with job result {JobId}", job.JobId); } diff --git a/src/ApiService/ApiService/OneFuzzTypes/Model.cs b/src/ApiService/ApiService/OneFuzzTypes/Model.cs index 3d67de106d..ba33886482 100644 --- a/src/ApiService/ApiService/OneFuzzTypes/Model.cs +++ b/src/ApiService/ApiService/OneFuzzTypes/Model.cs @@ -931,12 +931,12 @@ public record JobResult( double InstructionsCovered = 0, double TotalInstructions = 0, double CoverageRate = 0, - double IterationCount = 0 + Dictionary? IterationDictionary = null ) : EntityBase() { public JobResult(Guid JobId, string Project, string Name) : this( JobId: JobId, Project: Project, - Name: Name, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) { } + Name: Name, 0, 0, 0, 0, 0, 0, 0, 0, 0) { } } public record JobConfig( diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index 1166cf91d4..770d5b5de4 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -6,7 +6,7 @@ namespace Microsoft.OneFuzz.Service; public interface IJobResultOperations : IOrm { Async.Task GetJobResult(Guid jobId); - Async.Task CreateOrUpdate(Guid jobId, JobResultType resultType, Dictionary resultValue); + Async.Task CreateOrUpdate(Guid jobId, Guid taskId, JobResultType resultType, Dictionary resultValue); } public class JobResultOperations : Orm, IJobResultOperations { @@ -19,7 +19,7 @@ public JobResultOperations(ILogger log, IOnefuzzContext con return await SearchByPartitionKeys(new[] { jobId.ToString() }).SingleOrDefaultAsync(); } - private JobResult UpdateResult(JobResult result, JobResultType type, Dictionary resultValue) { + private JobResult UpdateResult(JobResult result, Guid taskId, JobResultType type, Dictionary resultValue) { var newResult = result; double newValue; @@ -44,6 +44,10 @@ private JobResult UpdateResult(JobResult result, JobResultType type, Dictionary< newValue = result.NewCrashDump + resultValue["count"]; newResult = result with { NewCrashDump = newValue }; break; + case JobResultType.NoReproCrashingInput: + newValue = result.NoReproCrashingInput + resultValue["count"]; + newResult = result with { NoReproCrashingInput = newValue }; + break; case JobResultType.CoverageData: double newCovered = resultValue["covered"]; double newTotalCovered = resultValue["features"]; @@ -52,7 +56,16 @@ private JobResult UpdateResult(JobResult result, JobResultType type, Dictionary< break; case JobResultType.RuntimeStats: double newTotalIterations = resultValue["total_count"]; - newResult = result with { IterationCount = newTotalIterations }; + Dictionary? 
resultDictionary = result.IterationDictionary; + if (resultDictionary == null) { + resultDictionary = new Dictionary() { + { taskId, newTotalIterations } + }; + } else { + resultDictionary[taskId] = newTotalIterations; + } + + newResult = result with { IterationDictionary = resultDictionary }; break; default: _logTracer.LogWarning($"Invalid Field {type}."); @@ -62,7 +75,7 @@ private JobResult UpdateResult(JobResult result, JobResultType type, Dictionary< return newResult; } - private async Async.Task TryUpdate(Job job, JobResultType resultType, Dictionary resultValue) { + private async Async.Task TryUpdate(Job job, Guid taskId, JobResultType resultType, Dictionary resultValue) { var jobId = job.JobId; var jobResult = await GetJobResult(jobId); @@ -72,7 +85,7 @@ private async Async.Task TryUpdate(Job job, JobResultType resultType, Dict var entry = new JobResult(JobId: jobId, Project: job.Config.Project, Name: job.Config.Name); - jobResult = UpdateResult(entry, resultType, resultValue); + jobResult = UpdateResult(entry, taskId, resultType, resultValue); var r = await Insert(jobResult); if (!r.IsOk) { @@ -82,7 +95,7 @@ private async Async.Task TryUpdate(Job job, JobResultType resultType, Dict } else { _logTracer.LogInformation("Updating existing JobResult entry for Job {JobId}", jobId); - jobResult = UpdateResult(jobResult, resultType, resultValue); + jobResult = UpdateResult(jobResult, taskId, resultType, resultValue); var r = await Update(jobResult); if (!r.IsOk) { @@ -94,7 +107,7 @@ private async Async.Task TryUpdate(Job job, JobResultType resultType, Dict return true; } - public async Async.Task CreateOrUpdate(Guid jobId, JobResultType resultType, Dictionary resultValue) { + public async Async.Task CreateOrUpdate(Guid jobId, Guid taskId, JobResultType resultType, Dictionary resultValue) { var job = await _context.JobOperations.Get(jobId); if (job == null) { @@ -106,7 +119,7 @@ public async Async.Task CreateOrUpdate(Guid jobId, JobResultT _logTracer.LogInformation("attempt to update job result {JobId}", job.JobId); var policy = Policy.Handle().WaitAndRetryAsync(50, _ => new TimeSpan(0, 0, 5)); await policy.ExecuteAsync(async () => { - success = await TryUpdate(job, resultType, resultValue); + success = await TryUpdate(job, taskId, resultType, resultValue); _logTracer.LogInformation("attempt {success}", success); }); return OneFuzzResultVoid.Ok; From a344badb9d5f94e0593d0c6bc3ebdf532f340ce2 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Mon, 2 Oct 2023 23:46:08 +0000 Subject: [PATCH 43/88] Changing to machine_id based --- .../ApiService/Functions/QueueJobResult.cs | 2 +- .../onefuzzlib/JobResultOperations.cs | 18 +++++++++--------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/src/ApiService/ApiService/Functions/QueueJobResult.cs b/src/ApiService/ApiService/Functions/QueueJobResult.cs index bdb4ee1b2c..4eb817db66 100644 --- a/src/ApiService/ApiService/Functions/QueueJobResult.cs +++ b/src/ApiService/ApiService/Functions/QueueJobResult.cs @@ -52,7 +52,7 @@ public async Async.Task Run([QueueTrigger("job-result", Connection = "AzureWebJo return; } - var jobResult = await _context.JobResultOperations.CreateOrUpdate(job.JobId, jr.TaskId, jobResultType, value); + var jobResult = await _context.JobResultOperations.CreateOrUpdate(job.JobId, jr.MachineId, jobResultType, value); if (!jobResult.IsOk) { _log.LogError("failed to create or update with job result {JobId}", job.JobId); } diff --git 
a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index 770d5b5de4..58aee99ed4 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -6,7 +6,7 @@ namespace Microsoft.OneFuzz.Service; public interface IJobResultOperations : IOrm { Async.Task GetJobResult(Guid jobId); - Async.Task CreateOrUpdate(Guid jobId, Guid taskId, JobResultType resultType, Dictionary resultValue); + Async.Task CreateOrUpdate(Guid jobId, Guid machineId, JobResultType resultType, Dictionary resultValue); } public class JobResultOperations : Orm, IJobResultOperations { @@ -19,7 +19,7 @@ public JobResultOperations(ILogger log, IOnefuzzContext con return await SearchByPartitionKeys(new[] { jobId.ToString() }).SingleOrDefaultAsync(); } - private JobResult UpdateResult(JobResult result, Guid taskId, JobResultType type, Dictionary resultValue) { + private JobResult UpdateResult(JobResult result, Guid machineId, JobResultType type, Dictionary resultValue) { var newResult = result; double newValue; @@ -59,10 +59,10 @@ private JobResult UpdateResult(JobResult result, Guid taskId, JobResultType type Dictionary? resultDictionary = result.IterationDictionary; if (resultDictionary == null) { resultDictionary = new Dictionary() { - { taskId, newTotalIterations } + { machineId, newTotalIterations } }; } else { - resultDictionary[taskId] = newTotalIterations; + resultDictionary[machineId] = newTotalIterations; } newResult = result with { IterationDictionary = resultDictionary }; @@ -75,7 +75,7 @@ private JobResult UpdateResult(JobResult result, Guid taskId, JobResultType type return newResult; } - private async Async.Task TryUpdate(Job job, Guid taskId, JobResultType resultType, Dictionary resultValue) { + private async Async.Task TryUpdate(Job job, Guid machineId, JobResultType resultType, Dictionary resultValue) { var jobId = job.JobId; var jobResult = await GetJobResult(jobId); @@ -85,7 +85,7 @@ private async Async.Task TryUpdate(Job job, Guid taskId, JobResultType res var entry = new JobResult(JobId: jobId, Project: job.Config.Project, Name: job.Config.Name); - jobResult = UpdateResult(entry, taskId, resultType, resultValue); + jobResult = UpdateResult(entry, machineId, resultType, resultValue); var r = await Insert(jobResult); if (!r.IsOk) { @@ -95,7 +95,7 @@ private async Async.Task TryUpdate(Job job, Guid taskId, JobResultType res } else { _logTracer.LogInformation("Updating existing JobResult entry for Job {JobId}", jobId); - jobResult = UpdateResult(jobResult, taskId, resultType, resultValue); + jobResult = UpdateResult(jobResult, machineId, resultType, resultValue); var r = await Update(jobResult); if (!r.IsOk) { @@ -107,7 +107,7 @@ private async Async.Task TryUpdate(Job job, Guid taskId, JobResultType res return true; } - public async Async.Task CreateOrUpdate(Guid jobId, Guid taskId, JobResultType resultType, Dictionary resultValue) { + public async Async.Task CreateOrUpdate(Guid jobId, Guid machineId, JobResultType resultType, Dictionary resultValue) { var job = await _context.JobOperations.Get(jobId); if (job == null) { @@ -119,7 +119,7 @@ public async Async.Task CreateOrUpdate(Guid jobId, Guid taskI _logTracer.LogInformation("attempt to update job result {JobId}", job.JobId); var policy = Policy.Handle().WaitAndRetryAsync(50, _ => new TimeSpan(0, 0, 5)); await policy.ExecuteAsync(async () => { - success = await TryUpdate(job, taskId, resultType, resultValue); + 
success = await TryUpdate(job, machineId, resultType, resultValue); _logTracer.LogInformation("attempt {success}", success); }); return OneFuzzResultVoid.Ok; From 19c7dbb3dfba0a14cbb9c2f1fb11703b44fe516a Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Tue, 3 Oct 2023 00:42:18 +0000 Subject: [PATCH 44/88] Fixing repro event name. --- src/ApiService/ApiService/OneFuzzTypes/Model.cs | 4 ++-- src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/ApiService/ApiService/OneFuzzTypes/Model.cs b/src/ApiService/ApiService/OneFuzzTypes/Model.cs index ba33886482..b535fd40fb 100644 --- a/src/ApiService/ApiService/OneFuzzTypes/Model.cs +++ b/src/ApiService/ApiService/OneFuzzTypes/Model.cs @@ -36,7 +36,7 @@ public enum HeartbeatType { [SkipRename] public enum JobResultType { NewCrashingInput, - NoReproCrashingInput, + NewUnableToReproduce, NewReport, NewUniqueReport, NewRegressionReport, @@ -923,7 +923,7 @@ public record JobResult( string Project, string Name, double NewCrashingInput = 0, - double NoReproCrashingInput = 0, + double NewUnableToReproduce = 0, double NewReport = 0, double NewUniqueReport = 0, double NewRegressionReport = 0, diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index 58aee99ed4..b40957d5e0 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -44,9 +44,9 @@ private JobResult UpdateResult(JobResult result, Guid machineId, JobResultType t newValue = result.NewCrashDump + resultValue["count"]; newResult = result with { NewCrashDump = newValue }; break; - case JobResultType.NoReproCrashingInput: - newValue = result.NoReproCrashingInput + resultValue["count"]; - newResult = result with { NoReproCrashingInput = newValue }; + case JobResultType.NewUnableToReproduce: + newValue = result.NewUnableToReproduce + resultValue["count"]; + newResult = result with { NewUnableToReproduce = newValue }; break; case JobResultType.CoverageData: double newCovered = resultValue["covered"]; From 69cc9399975d614c33549dd9f2a64cfe2693b532 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Tue, 3 Oct 2023 21:17:56 +0000 Subject: [PATCH 45/88] Updating iteration pr. --- src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index b40957d5e0..20755deb9c 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -71,7 +71,7 @@ private JobResult UpdateResult(JobResult result, Guid machineId, JobResultType t _logTracer.LogWarning($"Invalid Field {type}."); break; } - _logTracer.LogInformation($"Attempting to log new result: {newResult}"); + _logTracer.LogInformation($"Attempting to log result: {newResult}"); return newResult; } From 1b77a855cf04c80e1ff1e96d1b261bd225922591 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Wed, 4 Oct 2023 17:02:37 +0000 Subject: [PATCH 46/88] Single entry results. 
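This collapses the aggregated per-job row into one result entry per task and machine. A minimal sketch of the record shape this patch introduces, with the generic type parameters written out in full; Dictionary<string, double> is inferred from how resultValue["count"] is used elsewhere in the series, not copied verbatim from the diff, and PartitionKey, RowKey, and EntityBase are the service's own ORM types as used throughout these diffs:

    // Sketch: one table entity per (task, machine) metric report.
    public record JobResult(
        [PartitionKey] Guid TaskId,
        [RowKey] Guid MachineId,
        Guid JobId,
        string Project,
        string Name,
        JobResultType Type,
        Dictionary<string, double>? MetricValue = null) : EntityBase();

Partitioning on TaskId keeps one task's results in a single partition while still separating reports coming from different machines.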
--- .../ApiService/Functions/QueueJobResult.cs | 2 +- .../ApiService/OneFuzzTypes/Model.cs | 40 ++-- .../onefuzzlib/JobResultOperations.cs | 183 +++++++++--------- 3 files changed, 115 insertions(+), 110 deletions(-) diff --git a/src/ApiService/ApiService/Functions/QueueJobResult.cs b/src/ApiService/ApiService/Functions/QueueJobResult.cs index 4eb817db66..725367d8d5 100644 --- a/src/ApiService/ApiService/Functions/QueueJobResult.cs +++ b/src/ApiService/ApiService/Functions/QueueJobResult.cs @@ -52,7 +52,7 @@ public async Async.Task Run([QueueTrigger("job-result", Connection = "AzureWebJo return; } - var jobResult = await _context.JobResultOperations.CreateOrUpdate(job.JobId, jr.MachineId, jobResultType, value); + var jobResult = await _context.JobResultOperations.CreateOrUpdate(job.JobId, jr.TaskId, jr.MachineId, jobResultType, value); if (!jobResult.IsOk) { _log.LogError("failed to create or update with job result {JobId}", job.JobId); } diff --git a/src/ApiService/ApiService/OneFuzzTypes/Model.cs b/src/ApiService/ApiService/OneFuzzTypes/Model.cs index b535fd40fb..97497373e7 100644 --- a/src/ApiService/ApiService/OneFuzzTypes/Model.cs +++ b/src/ApiService/ApiService/OneFuzzTypes/Model.cs @@ -919,25 +919,31 @@ public record SecretData(ISecret Secret) { } public record JobResult( - [PartitionKey][RowKey] Guid JobId, + [PartitionKey] Guid TaskId, + [RowKey] Guid MachineId, + Guid JobId, string Project, string Name, - double NewCrashingInput = 0, - double NewUnableToReproduce = 0, - double NewReport = 0, - double NewUniqueReport = 0, - double NewRegressionReport = 0, - double NewCrashDump = 0, - double InstructionsCovered = 0, - double TotalInstructions = 0, - double CoverageRate = 0, - Dictionary? IterationDictionary = null -) : EntityBase() { - public JobResult(Guid JobId, string Project, string Name) : this( - JobId: JobId, - Project: Project, - Name: Name, 0, 0, 0, 0, 0, 0, 0, 0, 0) { } -} + JobResultType Type, + Dictionary? MetricValue = null +// double NewCrashingInput = 0, +// double NewUnableToReproduce = 0, +// double NewReport = 0, +// double NewUniqueReport = 0, +// double NewRegressionReport = 0, +// double NewCrashDump = 0, +// double InstructionsCovered = 0, +// double TotalInstructions = 0, +// double CoverageRate = 0, +// Dictionary? 
IterationDictionary = null +) : EntityBase(); +// public JobResult(Guid TaskId, Guid MachineId, Guid JobId, string Project, string Name) : this( +// TaskId: TaskId, +// MachineId: MachineId, +// JobId: JobId, +// Project: Project, +// Name: Name) { } + public record JobConfig( string Project, diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index 20755deb9c..adc60c4dac 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -6,7 +6,7 @@ namespace Microsoft.OneFuzz.Service; public interface IJobResultOperations : IOrm { Async.Task GetJobResult(Guid jobId); - Async.Task CreateOrUpdate(Guid jobId, Guid machineId, JobResultType resultType, Dictionary resultValue); + Async.Task CreateOrUpdate(Guid jobId, Guid taskId, Guid machineId, JobResultType resultType, Dictionary resultValue); } public class JobResultOperations : Orm, IJobResultOperations { @@ -19,116 +19,115 @@ public JobResultOperations(ILogger log, IOnefuzzContext con return await SearchByPartitionKeys(new[] { jobId.ToString() }).SingleOrDefaultAsync(); } - private JobResult UpdateResult(JobResult result, Guid machineId, JobResultType type, Dictionary resultValue) { - - var newResult = result; - double newValue; - switch (type) { - case JobResultType.NewCrashingInput: - newValue = result.NewCrashingInput + resultValue["count"]; - newResult = result with { NewCrashingInput = newValue }; - break; - case JobResultType.NewReport: - newValue = result.NewReport + resultValue["count"]; - newResult = result with { NewReport = newValue }; - break; - case JobResultType.NewUniqueReport: - newValue = result.NewUniqueReport + resultValue["count"]; - newResult = result with { NewUniqueReport = newValue }; - break; - case JobResultType.NewRegressionReport: - newValue = result.NewRegressionReport + resultValue["count"]; - newResult = result with { NewRegressionReport = newValue }; - break; - case JobResultType.NewCrashDump: - newValue = result.NewCrashDump + resultValue["count"]; - newResult = result with { NewCrashDump = newValue }; - break; - case JobResultType.NewUnableToReproduce: - newValue = result.NewUnableToReproduce + resultValue["count"]; - newResult = result with { NewUnableToReproduce = newValue }; - break; - case JobResultType.CoverageData: - double newCovered = resultValue["covered"]; - double newTotalCovered = resultValue["features"]; - double newCoverageRate = resultValue["rate"]; - newResult = result with { InstructionsCovered = newCovered, TotalInstructions = newTotalCovered, CoverageRate = newCoverageRate }; - break; - case JobResultType.RuntimeStats: - double newTotalIterations = resultValue["total_count"]; - Dictionary? 
resultDictionary = result.IterationDictionary; - if (resultDictionary == null) { - resultDictionary = new Dictionary() { - { machineId, newTotalIterations } - }; - } else { - resultDictionary[machineId] = newTotalIterations; - } - - newResult = result with { IterationDictionary = resultDictionary }; - break; - default: - _logTracer.LogWarning($"Invalid Field {type}."); - break; - } - _logTracer.LogInformation($"Attempting to log result: {newResult}"); - return newResult; - } - - private async Async.Task TryUpdate(Job job, Guid machineId, JobResultType resultType, Dictionary resultValue) { + // // private JobResult UpdateResult(JobResult result, Guid machineId, JobResultType type, Dictionary resultValue) { + + // // var newResult = result; + // // double newValue; + // // switch (type) { + // // case JobResultType.NewCrashingInput: + // // newValue = result.NewCrashingInput + resultValue["count"]; + // // newResult = result with { NewCrashingInput = newValue }; + // // break; + // // case JobResultType.NewReport: + // // newValue = result.NewReport + resultValue["count"]; + // // newResult = result with { NewReport = newValue }; + // // break; + // // case JobResultType.NewUniqueReport: + // // newValue = result.NewUniqueReport + resultValue["count"]; + // // newResult = result with { NewUniqueReport = newValue }; + // // break; + // // case JobResultType.NewRegressionReport: + // // newValue = result.NewRegressionReport + resultValue["count"]; + // // newResult = result with { NewRegressionReport = newValue }; + // // break; + // // case JobResultType.NewCrashDump: + // // newValue = result.NewCrashDump + resultValue["count"]; + // // newResult = result with { NewCrashDump = newValue }; + // // break; + // // case JobResultType.NewUnableToReproduce: + // // newValue = result.NewUnableToReproduce + resultValue["count"]; + // // newResult = result with { NewUnableToReproduce = newValue }; + // // break; + // // case JobResultType.CoverageData: + // // double newCovered = resultValue["covered"]; + // // double newTotalCovered = resultValue["features"]; + // // double newCoverageRate = resultValue["rate"]; + // // newResult = result with { InstructionsCovered = newCovered, TotalInstructions = newTotalCovered, CoverageRate = newCoverageRate }; + // // break; + // // case JobResultType.RuntimeStats: + // // double newTotalIterations = resultValue["total_count"]; + // // Dictionary? 
resultDictionary = result.IterationDictionary; + // // if (resultDictionary == null) { + // // resultDictionary = new Dictionary() { + // // { machineId, newTotalIterations } + // // }; + // // } else { + // // resultDictionary[machineId] = newTotalIterations; + // // } + + // // newResult = result with { IterationDictionary = resultDictionary }; + // // break; + // // default: + // // _logTracer.LogWarning($"Invalid Field {type}."); + // // break; + // // } + // // _logTracer.LogInformation($"Attempting to log result: {newResult}"); + // // return newResult; + // } + + private async Async.Task InsertEntry(Job job, Guid taskId, Guid machineId, JobResultType resultType, Dictionary resultValue) { var jobId = job.JobId; - var jobResult = await GetJobResult(jobId); - - if (jobResult == null) { - _logTracer.LogInformation("Creating new JobResult for Job {JobId}", jobId); - - var entry = new JobResult(JobId: jobId, Project: job.Config.Project, Name: job.Config.Name); - - jobResult = UpdateResult(entry, machineId, resultType, resultValue); + _logTracer.LogInformation("Creating new JobResult for Job {JobId}", jobId); - var r = await Insert(jobResult); - if (!r.IsOk) { - throw new InvalidOperationException($"failed to insert job result {jobResult.JobId}"); - } - _logTracer.LogInformation("created job result {JobId}", jobResult.JobId); - } else { - _logTracer.LogInformation("Updating existing JobResult entry for Job {JobId}", jobId); - - jobResult = UpdateResult(jobResult, machineId, resultType, resultValue); + var entry = new JobResult(TaskId: taskId, MachineId: machineId, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, Type: resultType, MetricValue: resultValue); - var r = await Update(jobResult); - if (!r.IsOk) { - throw new InvalidOperationException($"failed to insert job result {jobResult.JobId}"); - } - _logTracer.LogInformation("updated job result {JobId}", jobResult.JobId); + // do we need retries for job results? 
+ var r = await Insert(entry); + if (!r.IsOk) { + throw new InvalidOperationException($"failed to insert job result {jobId}"); } + _logTracer.LogInformation("created job result {JobId}", jobId); return true; } - public async Async.Task CreateOrUpdate(Guid jobId, Guid machineId, JobResultType resultType, Dictionary resultValue) { + public async Async.Task CreateOrUpdate(Guid jobId, Guid taskId, Guid machineId, JobResultType resultType, Dictionary resultValue) { var job = await _context.JobOperations.Get(jobId); if (job == null) { return OneFuzzResultVoid.Error(ErrorCode.INVALID_REQUEST, "invalid job"); } - var success = false; - try { - _logTracer.LogInformation("attempt to update job result {JobId}", job.JobId); - var policy = Policy.Handle().WaitAndRetryAsync(50, _ => new TimeSpan(0, 0, 5)); - await policy.ExecuteAsync(async () => { - success = await TryUpdate(job, machineId, resultType, resultValue); - _logTracer.LogInformation("attempt {success}", success); - }); + bool success; + _logTracer.LogInformation("attempt to update job result table with entry for {JobId}", job.JobId); + success = await InsertEntry(job, taskId, machineId, resultType, resultValue); + _logTracer.LogInformation("attempt {success}", success); + + if (success) { return OneFuzzResultVoid.Ok; - } catch (Exception e) { + } else { return OneFuzzResultVoid.Error(ErrorCode.UNABLE_TO_UPDATE, new string[] { - $"Unexpected failure when attempting to update job result for {job.JobId}", - $"Exception: {e}" + $"Unexpected failure when attempting to update job result for {job.JobId}" }); } + + + // var success = false; + // try { + // _logTracer.LogInformation("attempt to update job result {JobId}", job.JobId); + // var policy = Policy.Handle().WaitAndRetryAsync(50, _ => new TimeSpan(0, 0, 5)); + // await policy.ExecuteAsync(async () => { + // success = await TryUpdate(job, machineId, resultType, resultValue); + // _logTracer.LogInformation("attempt {success}", success); + // }); + // return OneFuzzResultVoid.Ok; + // } catch (Exception e) { + // return OneFuzzResultVoid.Error(ErrorCode.UNABLE_TO_UPDATE, new string[] { + // $"Unexpected failure when attempting to update job result for {job.JobId}", + // $"Exception: {e}" + // }); + // } } } From 0ba78d14c9ecfad23a93306381bfb547df3746f6 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Wed, 4 Oct 2023 17:11:40 +0000 Subject: [PATCH 47/88] Retry. --- src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index adc60c4dac..1cc74780ca 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -1,6 +1,5 @@ using ApiService.OneFuzzLib.Orm; using Microsoft.Extensions.Logging; -using Polly; namespace Microsoft.OneFuzz.Service; public interface IJobResultOperations : IOrm { From f691c99ec191362481ac6c945be7fd1e82d8353b Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Wed, 4 Oct 2023 18:29:29 +0000 Subject: [PATCH 48/88] trying with unique guid. 
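With a freshly generated Guid as the partition key, every reported metric becomes a blind insert, so two machines can never contend on the same entity. A sketch of the write path this sets up, assuming the same Dictionary<string, double> payload type as above:

    // Insert-only write path: a unique partition key per report means no
    // Replace/Update calls and therefore no ETag conflicts to retry.
    var entry = new JobResult(
        ResultId: Guid.NewGuid(),   // unique partition key for this report
        JobId: jobId,
        TaskId: taskId,
        MachineId: machineId,
        Project: job.Config.Project,
        Name: job.Config.Name,
        Type: resultType,
        MetricValue: resultValue);
    var r = await Insert(entry);    // never collides with an existing row

The trade-off is on the read side: finding all results for a job now requires a filtered scan rather than a point lookup.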
--- src/ApiService/ApiService/OneFuzzTypes/Model.cs | 7 ++++--- .../ApiService/onefuzzlib/JobResultOperations.cs | 2 +- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/src/ApiService/ApiService/OneFuzzTypes/Model.cs b/src/ApiService/ApiService/OneFuzzTypes/Model.cs index 97497373e7..dcd57e4bff 100644 --- a/src/ApiService/ApiService/OneFuzzTypes/Model.cs +++ b/src/ApiService/ApiService/OneFuzzTypes/Model.cs @@ -919,9 +919,10 @@ public record SecretData(ISecret Secret) { } public record JobResult( - [PartitionKey] Guid TaskId, - [RowKey] Guid MachineId, - Guid JobId, + [PartitionKey] Guid ResultId, + [RowKey] Guid JobId, + Guid TaskId, + Guid MachineId, string Project, string Name, JobResultType Type, diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index 1cc74780ca..5e39390ffd 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -79,7 +79,7 @@ private async Async.Task InsertEntry(Job job, Guid taskId, Guid machineId, _logTracer.LogInformation("Creating new JobResult for Job {JobId}", jobId); - var entry = new JobResult(TaskId: taskId, MachineId: machineId, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, Type: resultType, MetricValue: resultValue); + var entry = new JobResult(ResultId: Guid.NewGuid(), JobId: jobId, TaskId: taskId, MachineId: machineId, Project: job.Config.Project, Name: job.Config.Name, Type: resultType, MetricValue: resultValue); // do we need retries for job results? var r = await Insert(entry); From 0a78fd1460d53e1d52a3b610cecf91d6c0e4ce2c Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Wed, 4 Oct 2023 18:31:42 +0000 Subject: [PATCH 49/88] Generic string type. --- .../ApiService/OneFuzzTypes/Model.cs | 28 +++++++++---------- .../onefuzzlib/JobResultOperations.cs | 20 ++++++------- 2 files changed, 24 insertions(+), 24 deletions(-) diff --git a/src/ApiService/ApiService/OneFuzzTypes/Model.cs b/src/ApiService/ApiService/OneFuzzTypes/Model.cs index dcd57e4bff..a1e7385c70 100644 --- a/src/ApiService/ApiService/OneFuzzTypes/Model.cs +++ b/src/ApiService/ApiService/OneFuzzTypes/Model.cs @@ -33,18 +33,18 @@ public enum HeartbeatType { TaskAlive, } -[SkipRename] -public enum JobResultType { - NewCrashingInput, - NewUnableToReproduce, - NewReport, - NewUniqueReport, - NewRegressionReport, - NewCoverage, - NewCrashDump, - CoverageData, - RuntimeStats, -} +// [SkipRename] +// public enum JobResultType { +// NewCrashingInput, +// NewUnableToReproduce, +// NewReport, +// NewUniqueReport, +// NewRegressionReport, +// NewCoverage, +// NewCrashDump, +// CoverageData, +// RuntimeStats, +// } public record HeartbeatData(HeartbeatType Type); @@ -54,7 +54,7 @@ public record TaskHeartbeatEntry( Guid MachineId, HeartbeatData[] Data); -public record JobResultData(JobResultType Type); +public record JobResultData(string Type); public record TaskJobResultEntry( Guid TaskId, @@ -925,7 +925,7 @@ public record JobResult( Guid MachineId, string Project, string Name, - JobResultType Type, + string Type, Dictionary? 
MetricValue = null // double NewCrashingInput = 0, // double NewUnableToReproduce = 0, diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index 5e39390ffd..37ed548f1e 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -4,19 +4,19 @@ namespace Microsoft.OneFuzz.Service; public interface IJobResultOperations : IOrm { - Async.Task GetJobResult(Guid jobId); - Async.Task CreateOrUpdate(Guid jobId, Guid taskId, Guid machineId, JobResultType resultType, Dictionary resultValue); + // Async.Task GetJobResult(Guid jobId); + Async.Task CreateOrUpdate(Guid jobId, Guid taskId, Guid machineId, string resultType, Dictionary resultValue); } public class JobResultOperations : Orm, IJobResultOperations { - public JobResultOperations(ILogger log, IOnefuzzContext context) - : base(log, context) { - } + // public JobResultOperations(ILogger log, IOnefuzzContext context) + // : base(log, context) { + // } - public async Async.Task GetJobResult(Guid jobId) { - return await SearchByPartitionKeys(new[] { jobId.ToString() }).SingleOrDefaultAsync(); - } + // public async Async.Task GetJobResult(Guid jobId) { + // return await SearchByPartitionKeys(new[] { jobId.ToString() }).SingleOrDefaultAsync(); + // } // // private JobResult UpdateResult(JobResult result, Guid machineId, JobResultType type, Dictionary resultValue) { @@ -74,7 +74,7 @@ public JobResultOperations(ILogger log, IOnefuzzContext con // // return newResult; // } - private async Async.Task InsertEntry(Job job, Guid taskId, Guid machineId, JobResultType resultType, Dictionary resultValue) { + private async Async.Task InsertEntry(Job job, Guid taskId, Guid machineId, string resultType, Dictionary resultValue) { var jobId = job.JobId; _logTracer.LogInformation("Creating new JobResult for Job {JobId}", jobId); @@ -91,7 +91,7 @@ private async Async.Task InsertEntry(Job job, Guid taskId, Guid machineId, return true; } - public async Async.Task CreateOrUpdate(Guid jobId, Guid taskId, Guid machineId, JobResultType resultType, Dictionary resultValue) { + public async Async.Task CreateOrUpdate(Guid jobId, Guid taskId, Guid machineId, string resultType, Dictionary resultValue) { var job = await _context.JobOperations.Get(jobId); if (job == null) { From 806388d412b83ff0034b458d2e26f4714c910ec1 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Wed, 4 Oct 2023 18:36:42 +0000 Subject: [PATCH 50/88] putting it back. 
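The constructor comes back unchanged; for reference, here it is with the generic arguments spelled out (ILogger<JobResultOperations> and Orm<JobResult> are inferred from the surrounding classes, not verbatim from the diff):

    public class JobResultOperations : Orm<JobResult>, IJobResultOperations {
        public JobResultOperations(ILogger<JobResultOperations> log, IOnefuzzContext context)
            : base(log, context) {
        }
    }

Without it, dependency injection has no way to construct the operations class, which is why commenting it out in the earlier patch could not stand.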
--- src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index 37ed548f1e..c7d71e10fb 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -10,9 +10,9 @@ public interface IJobResultOperations : IOrm { } public class JobResultOperations : Orm, IJobResultOperations { - // public JobResultOperations(ILogger log, IOnefuzzContext context) - // : base(log, context) { - // } + public JobResultOperations(ILogger log, IOnefuzzContext context) + : base(log, context) { + } // public async Async.Task GetJobResult(Guid jobId) { // return await SearchByPartitionKeys(new[] { jobId.ToString() }).SingleOrDefaultAsync(); From 5fc9f071e3ca248ee582205b2e8098ccbec25b23 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Wed, 4 Oct 2023 20:50:11 +0000 Subject: [PATCH 51/88] removing old update code. --- .../onefuzzlib/JobResultOperations.cs | 77 ------------------- 1 file changed, 77 deletions(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index c7d71e10fb..2e3dc816ec 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -4,7 +4,6 @@ namespace Microsoft.OneFuzz.Service; public interface IJobResultOperations : IOrm { - // Async.Task GetJobResult(Guid jobId); Async.Task CreateOrUpdate(Guid jobId, Guid taskId, Guid machineId, string resultType, Dictionary resultValue); } @@ -14,66 +13,6 @@ public JobResultOperations(ILogger log, IOnefuzzContext con : base(log, context) { } - // public async Async.Task GetJobResult(Guid jobId) { - // return await SearchByPartitionKeys(new[] { jobId.ToString() }).SingleOrDefaultAsync(); - // } - - // // private JobResult UpdateResult(JobResult result, Guid machineId, JobResultType type, Dictionary resultValue) { - - // // var newResult = result; - // // double newValue; - // // switch (type) { - // // case JobResultType.NewCrashingInput: - // // newValue = result.NewCrashingInput + resultValue["count"]; - // // newResult = result with { NewCrashingInput = newValue }; - // // break; - // // case JobResultType.NewReport: - // // newValue = result.NewReport + resultValue["count"]; - // // newResult = result with { NewReport = newValue }; - // // break; - // // case JobResultType.NewUniqueReport: - // // newValue = result.NewUniqueReport + resultValue["count"]; - // // newResult = result with { NewUniqueReport = newValue }; - // // break; - // // case JobResultType.NewRegressionReport: - // // newValue = result.NewRegressionReport + resultValue["count"]; - // // newResult = result with { NewRegressionReport = newValue }; - // // break; - // // case JobResultType.NewCrashDump: - // // newValue = result.NewCrashDump + resultValue["count"]; - // // newResult = result with { NewCrashDump = newValue }; - // // break; - // // case JobResultType.NewUnableToReproduce: - // // newValue = result.NewUnableToReproduce + resultValue["count"]; - // // newResult = result with { NewUnableToReproduce = newValue }; - // // break; - // // case JobResultType.CoverageData: - // // double newCovered = resultValue["covered"]; - // // double newTotalCovered = resultValue["features"]; - // // double 
newCoverageRate = resultValue["rate"]; - // // newResult = result with { InstructionsCovered = newCovered, TotalInstructions = newTotalCovered, CoverageRate = newCoverageRate }; - // // break; - // // case JobResultType.RuntimeStats: - // // double newTotalIterations = resultValue["total_count"]; - // // Dictionary? resultDictionary = result.IterationDictionary; - // // if (resultDictionary == null) { - // // resultDictionary = new Dictionary() { - // // { machineId, newTotalIterations } - // // }; - // // } else { - // // resultDictionary[machineId] = newTotalIterations; - // // } - - // // newResult = result with { IterationDictionary = resultDictionary }; - // // break; - // // default: - // // _logTracer.LogWarning($"Invalid Field {type}."); - // // break; - // // } - // // _logTracer.LogInformation($"Attempting to log result: {newResult}"); - // // return newResult; - // } - private async Async.Task InsertEntry(Job job, Guid taskId, Guid machineId, string resultType, Dictionary resultValue) { var jobId = job.JobId; @@ -111,22 +50,6 @@ public async Async.Task CreateOrUpdate(Guid jobId, Guid taskI }); } - - // var success = false; - // try { - // _logTracer.LogInformation("attempt to update job result {JobId}", job.JobId); - // var policy = Policy.Handle().WaitAndRetryAsync(50, _ => new TimeSpan(0, 0, 5)); - // await policy.ExecuteAsync(async () => { - // success = await TryUpdate(job, machineId, resultType, resultValue); - // _logTracer.LogInformation("attempt {success}", success); - // }); - // return OneFuzzResultVoid.Ok; - // } catch (Exception e) { - // return OneFuzzResultVoid.Error(ErrorCode.UNABLE_TO_UPDATE, new string[] { - // $"Unexpected failure when attempting to update job result for {job.JobId}", - // $"Exception: {e}" - // }); - // } } } From a7373c599206ba1146571b399542690a65d0cb43 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Wed, 4 Oct 2023 20:51:41 +0000 Subject: [PATCH 52/88] removing comment. --- src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs | 1 - src/proxy-manager/Cargo.lock | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index 2e3dc816ec..626be785da 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -20,7 +20,6 @@ private async Async.Task InsertEntry(Job job, Guid taskId, Guid machineId, var entry = new JobResult(ResultId: Guid.NewGuid(), JobId: jobId, TaskId: taskId, MachineId: machineId, Project: job.Config.Project, Name: job.Config.Name, Type: resultType, MetricValue: resultValue); - // do we need retries for job results? 
var r = await Insert(entry); if (!r.IsOk) { throw new InvalidOperationException($"failed to insert job result {jobId}"); diff --git a/src/proxy-manager/Cargo.lock b/src/proxy-manager/Cargo.lock index e86da294a7..a9a7b85c6d 100644 --- a/src/proxy-manager/Cargo.lock +++ b/src/proxy-manager/Cargo.lock @@ -199,9 +199,9 @@ checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1" [[package]] name = "bytes" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" +checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" dependencies = [ "serde", ] From 5d3b6f4de51b1723ae75eb86635a18fc40bdeb79 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Wed, 4 Oct 2023 23:00:10 +0000 Subject: [PATCH 53/88] Attempting to partition on task_id, machine_id, and event type. --- .../ApiService/OneFuzzTypes/Model.cs | 9 ++-- .../onefuzzlib/JobResultOperations.cs | 44 +++++++++++++------ 2 files changed, 34 insertions(+), 19 deletions(-) diff --git a/src/ApiService/ApiService/OneFuzzTypes/Model.cs b/src/ApiService/ApiService/OneFuzzTypes/Model.cs index a1e7385c70..32f15cc7b3 100644 --- a/src/ApiService/ApiService/OneFuzzTypes/Model.cs +++ b/src/ApiService/ApiService/OneFuzzTypes/Model.cs @@ -919,14 +919,13 @@ public record SecretData(ISecret Secret) { } public record JobResult( - [PartitionKey] Guid ResultId, - [RowKey] Guid JobId, - Guid TaskId, - Guid MachineId, + [PartitionKey] Guid TaskId, + [RowKey] string MachineIdMetric, + Guid JobId, string Project, string Name, string Type, - Dictionary? MetricValue = null + Dictionary MetricValue // double NewCrashingInput = 0, // double NewUnableToReproduce = 0, // double NewReport = 0, diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index 626be785da..2132764777 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -1,9 +1,11 @@ using ApiService.OneFuzzLib.Orm; using Microsoft.Extensions.Logging; +using Polly; namespace Microsoft.OneFuzz.Service; public interface IJobResultOperations : IOrm { + Async.Task GetJobResult(Guid taskId, string machineIdMetric); Async.Task CreateOrUpdate(Guid jobId, Guid taskId, Guid machineId, string resultType, Dictionary resultValue); } @@ -13,18 +15,30 @@ public JobResultOperations(ILogger log, IOnefuzzContext con : base(log, context) { } - private async Async.Task InsertEntry(Job job, Guid taskId, Guid machineId, string resultType, Dictionary resultValue) { + public async Async.Task GetJobResult(Guid taskId, string machineIdMetric) { + return await GetEntityAsync(taskId.ToString(), machineIdMetric); + } + + private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, string resultType, Dictionary resultValue) { var jobId = job.JobId; + var machineIdMetric = string.Concat(machineId, resultType); - _logTracer.LogInformation("Creating new JobResult for Job {JobId}", jobId); + Dictionary newResultValue; + var jobResult = await GetJobResult(taskId, machineIdMetric); - var entry = new JobResult(ResultId: Guid.NewGuid(), JobId: jobId, TaskId: taskId, MachineId: machineId, Project: job.Config.Project, Name: job.Config.Name, Type: resultType, MetricValue: resultValue); + if (resultType.Equals("CoverageData") || resultType.Equals("RuntimeStats") || 
jobResult == null) { + newResultValue = resultValue; + } else { + jobResult.MetricValue["count"]++; + newResultValue = jobResult.MetricValue; + } + var entry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, newResultValue); var r = await Insert(entry); if (!r.IsOk) { - throw new InvalidOperationException($"failed to insert job result {jobId}"); + throw new InvalidOperationException($"failed to insert job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); } - _logTracer.LogInformation("created job result {JobId}", jobId); + _logTracer.LogInformation($"created job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); return true; } @@ -36,19 +50,21 @@ public async Async.Task CreateOrUpdate(Guid jobId, Guid taskI return OneFuzzResultVoid.Error(ErrorCode.INVALID_REQUEST, "invalid job"); } - bool success; - _logTracer.LogInformation("attempt to update job result table with entry for {JobId}", job.JobId); - success = await InsertEntry(job, taskId, machineId, resultType, resultValue); - _logTracer.LogInformation("attempt {success}", success); - - if (success) { + var success = false; + try { + _logTracer.LogInformation("attempt to update job result {JobId}", job.JobId); + var policy = Policy.Handle().WaitAndRetryAsync(50, _ => new TimeSpan(0, 0, 5)); + await policy.ExecuteAsync(async () => { + success = await TryUpdate(job, taskId, machineId, resultType, resultValue); + _logTracer.LogInformation("attempt {success}", success); + }); return OneFuzzResultVoid.Ok; - } else { + } catch (Exception e) { return OneFuzzResultVoid.Error(ErrorCode.UNABLE_TO_UPDATE, new string[] { - $"Unexpected failure when attempting to update job result for {job.JobId}" + $"Unexpected failure when attempting to update job result for {job.JobId}", + $"Exception: {e}" }); } - } } From ea37e7f170f1edaaa2b58f34f1a5eb56f6dd226f Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Wed, 4 Oct 2023 23:11:19 +0000 Subject: [PATCH 54/88] Using replace and update. 
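Replace and Update differ in how they treat concurrent writers. A sketch of the intended mapping, assuming the Orm wrappers correspond to the usual Azure Table operations (that correspondence is an assumption, not stated in the diff, and IsSnapshotMetric is a hypothetical helper):

    // Insert  -> add: fails if the (PartitionKey, RowKey) pair already exists.
    // Replace -> unconditional upsert: the incoming row overwrites whatever is there.
    // Update  -> ETag-guarded write: rejected if another writer changed the row
    //            since it was read, forcing a retry with fresh data.
    if (jobResult == null) {
        _ = await Insert(entry);    // first report of this metric
    } else if (IsSnapshotMetric(resultType)) {
        _ = await Replace(entry);   // CoverageData / RuntimeStats: latest snapshot wins
    } else {
        _ = await Update(entry);    // counters: read-modify-write under optimistic concurrency
    }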
--- .../ApiService/onefuzzlib/JobResultOperations.cs | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index 2132764777..eb80b5616e 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -28,16 +28,21 @@ private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, s if (resultType.Equals("CoverageData") || resultType.Equals("RuntimeStats") || jobResult == null) { newResultValue = resultValue; + var entry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, newResultValue); + var r = await Replace(entry); + if (!r.IsOk) { + throw new InvalidOperationException($"failed to insert or replace job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); + } } else { jobResult.MetricValue["count"]++; newResultValue = jobResult.MetricValue; + var entry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, newResultValue); + var r = await Update(entry); + if (!r.IsOk) { + throw new InvalidOperationException($"failed to update job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); + } } - var entry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, newResultValue); - var r = await Insert(entry); - if (!r.IsOk) { - throw new InvalidOperationException($"failed to insert job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); - } _logTracer.LogInformation($"created job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); return true; From 810bdac05d7d0014844dead7f545653865a15a0e Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Thu, 5 Oct 2023 00:02:48 +0000 Subject: [PATCH 55/88] Add logging statement. 
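One caveat with the interpolated $"..." form used for these statements: every call produces a different message template, so structured log backends cannot group the events or index the ids. A sketch of the template-based form that Microsoft.Extensions.Logging supports:

    // The message template stays constant; TaskId and MachineIdMetric become
    // structured properties instead of being baked into the string.
    _logTracer.LogInformation(
        "attempt to insert or replace job result {TaskId} and machineId+metricType {MachineIdMetric}",
        taskId, machineIdMetric);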
--- src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index eb80b5616e..298238160e 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -27,6 +27,7 @@ private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, s var jobResult = await GetJobResult(taskId, machineIdMetric); if (resultType.Equals("CoverageData") || resultType.Equals("RuntimeStats") || jobResult == null) { + _logTracer.LogInformation($"attempt to insert or replace job result {taskId} and machineId+metricType {machineIdMetric}"); newResultValue = resultValue; var entry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, newResultValue); var r = await Replace(entry); @@ -34,6 +35,7 @@ private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, s throw new InvalidOperationException($"failed to insert or replace job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); } } else { + _logTracer.LogInformation($"attempt to update job result {taskId} and machineId+metricType {machineIdMetric}"); jobResult.MetricValue["count"]++; newResultValue = jobResult.MetricValue; var entry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, newResultValue); From 251ad2ab17ba2f4c4c968b0505fe00b68b01f872 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Thu, 5 Oct 2023 00:09:52 +0000 Subject: [PATCH 56/88] Updating such that we query. --- src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index 298238160e..376969a27f 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -16,7 +16,8 @@ public JobResultOperations(ILogger log, IOnefuzzContext con } public async Async.Task GetJobResult(Guid taskId, string machineIdMetric) { - return await GetEntityAsync(taskId.ToString(), machineIdMetric); + var data = QueryAsync(Query.SingleEntity(taskId.ToString(), machineIdMetric)); + return await data.FirstOrDefaultAsync(); } private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, string resultType, Dictionary resultValue) { From a6bb6d1aad68dcbd6a6f22f7e1ff920f554f0435 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Mon, 9 Oct 2023 18:40:46 +0000 Subject: [PATCH 57/88] attempting to try update. 
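This and the next patch lean on C# record `with` copies, which are shallow: the copy shares the same MetricValue dictionary instance as the original. A self-contained demo of that semantics (Sample is a stand-in type, not from this codebase):

    using System;
    using System.Collections.Generic;

    record Sample(string Name, Dictionary<string, double> MetricValue);

    class WithDemo {
        static void Main() {
            var counters = new Dictionary<string, double> { ["count"] = 1 };
            var original = new Sample("RuntimeStats", counters);
            counters["count"]++;            // mutates the shared dictionary
            var copy = original with { };   // shallow copy: same dictionary reference
            Console.WriteLine(copy.MetricValue["count"]);                            // prints 2
            Console.WriteLine(ReferenceEquals(original.MetricValue, copy.MetricValue)); // True
        }
    }

This is why incrementing jobResult.MetricValue["count"] before the `with` copy still works: both records observe the same mutated dictionary, while the copy is a fresh entity for the table write.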
--- src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index 376969a27f..6ecf53541b 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -36,10 +36,10 @@ private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, s throw new InvalidOperationException($"failed to insert or replace job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); } } else { - _logTracer.LogInformation($"attempt to update job result {taskId} and machineId+metricType {machineIdMetric}"); jobResult.MetricValue["count"]++; newResultValue = jobResult.MetricValue; var entry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, newResultValue); + _logTracer.LogInformation($"attempt to update job result {taskId} and machineId+metricType {machineIdMetric} with new count: {newResultValue}"); var r = await Update(entry); if (!r.IsOk) { throw new InvalidOperationException($"failed to update job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); From 19adb42a8d467258e83a904e998e7d4c2db88692 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Mon, 9 Oct 2023 18:50:18 +0000 Subject: [PATCH 58/88] Trying different update mechanism. --- src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index 6ecf53541b..23ed3df0d7 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -38,9 +38,10 @@ private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, s } else { jobResult.MetricValue["count"]++; newResultValue = jobResult.MetricValue; - var entry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, newResultValue); + // var entry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, newResultValue); + var newResult = jobResult with { MetricValue = newResultValue }; _logTracer.LogInformation($"attempt to update job result {taskId} and machineId+metricType {machineIdMetric} with new count: {newResultValue}"); - var r = await Update(entry); + var r = await Update(newResult); if (!r.IsOk) { throw new InvalidOperationException($"failed to update job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); } From e19c99fb449c0521b8026b71850e6af57d372f25 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Tue, 10 Oct 2023 18:25:38 +0000 Subject: [PATCH 59/88] Checking previous value. 
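Queue messages can arrive late or out of order, so a stale snapshot must not clobber a newer one; this patch only replaces when the incoming value is strictly larger. The guard, isolated as a predicate (a sketch: the key names "rate" and "total_count" are taken from the diff, the helper itself is hypothetical):

    static bool ShouldReplace(string type,
                              IReadOnlyDictionary<string, double> stored,
                              IReadOnlyDictionary<string, double> incoming) =>
        type switch {
            "CoverageData" => stored["rate"] < incoming["rate"],               // coverage only improves
            "RuntimeStats" => stored["total_count"] < incoming["total_count"], // iteration totals only grow
            _ => false,    // counter metrics accumulate via Update instead
        };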
--- .../onefuzzlib/JobResultOperations.cs | 23 +++++++++++++++---- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index 23ed3df0d7..9b567dd652 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -27,13 +27,26 @@ private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, s Dictionary newResultValue; var jobResult = await GetJobResult(taskId, machineIdMetric); - if (resultType.Equals("CoverageData") || resultType.Equals("RuntimeStats") || jobResult == null) { - _logTracer.LogInformation($"attempt to insert or replace job result {taskId} and machineId+metricType {machineIdMetric}"); - newResultValue = resultValue; - var entry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, newResultValue); + if (jobResult == null) { + _logTracer.LogInformation($"attempt to insert new job result {taskId} and machineId+metricType {machineIdMetric}"); + var entry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, resultValue); + var r = await Insert(entry); + if (!r.IsOk) { + throw new InvalidOperationException($"failed to insert job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); + } + } else if (resultType.Equals("CoverageData") && jobResult.MetricValue["rate"] < resultValue["rate"]) { + _logTracer.LogInformation($"attempt to replace coverage job result for {taskId} and machineId+metricType {machineIdMetric}"); + var entry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, resultValue); + var r = await Replace(entry); + if (!r.IsOk) { + throw new InvalidOperationException($"failed to insert job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); + } + } else if (resultType.Equals("RuntimeStats") && jobResult.MetricValue["total_count"] < resultValue["total_count"]) { + _logTracer.LogInformation($"attempt to replace runtime stats job result for {taskId} and machineId+metricType {machineIdMetric}"); + var entry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, resultValue); var r = await Replace(entry); if (!r.IsOk) { - throw new InvalidOperationException($"failed to insert or replace job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); + throw new InvalidOperationException($"failed to insert job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); } } else { jobResult.MetricValue["count"]++; From ffde6cd486f1b349f69b6422bee27187fadf6f92 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Tue, 10 Oct 2023 20:41:37 +0000 Subject: [PATCH 60/88] cleanup. 
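The InvalidOperationExceptions whose messages are corrected here are what the retry policy in CreateOrUpdate reacts to. The Policy.Handle() call there needs a type argument; since TryUpdate signals conflicts by throwing InvalidOperationException, that is the natural candidate. A fully typed sketch (the exception type is an inference, not verbatim from the diff):

    using Polly;

    var policy = Policy
        .Handle<InvalidOperationException>()                   // retry only on storage conflicts
        .WaitAndRetryAsync(50, _ => TimeSpan.FromSeconds(5));  // up to 50 attempts, 5s apart
    await policy.ExecuteAsync(async () => {
        success = await TryUpdate(job, taskId, machineId, resultType, resultValue);
        _logTracer.LogInformation("attempt {success}", success);
    });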
--- src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index 9b567dd652..185322bc99 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -39,19 +39,18 @@ private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, s var entry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, resultValue); var r = await Replace(entry); if (!r.IsOk) { - throw new InvalidOperationException($"failed to insert job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); + throw new InvalidOperationException($"failed to replace job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); } } else if (resultType.Equals("RuntimeStats") && jobResult.MetricValue["total_count"] < resultValue["total_count"]) { _logTracer.LogInformation($"attempt to replace runtime stats job result for {taskId} and machineId+metricType {machineIdMetric}"); var entry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, resultValue); var r = await Replace(entry); if (!r.IsOk) { - throw new InvalidOperationException($"failed to insert job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); + throw new InvalidOperationException($"failed to replace job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); } } else { jobResult.MetricValue["count"]++; newResultValue = jobResult.MetricValue; - // var entry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, newResultValue); var newResult = jobResult with { MetricValue = newResultValue }; _logTracer.LogInformation($"attempt to update job result {taskId} and machineId+metricType {machineIdMetric} with new count: {newResultValue}"); var r = await Update(newResult); From 54158641665828c522f626a429a86835e51f89c4 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Tue, 10 Oct 2023 20:43:27 +0000 Subject: [PATCH 61/88] Removing old model. 
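With the JobResultType enum deleted, Type is a free-form string straight off the queue, so a misspelled metric name silently becomes a new kind of result. A cheap guard at the queue boundary would contain that (hypothetical helper; the metric names are the ones from the deleted enum):

    private static readonly HashSet<string> KnownMetrics = new() {
        "NewCrashingInput", "NewUnableToReproduce", "NewReport", "NewUniqueReport",
        "NewRegressionReport", "NewCoverage", "NewCrashDump", "CoverageData", "RuntimeStats",
    };

    private static bool IsKnownMetric(string type) => KnownMetrics.Contains(type);

The flip side is flexibility: agents can report new metric types without a service redeploy, which fits the generic MetricValue payload.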
--- .../ApiService/OneFuzzTypes/Model.cs | 30 ------------------- 1 file changed, 30 deletions(-) diff --git a/src/ApiService/ApiService/OneFuzzTypes/Model.cs b/src/ApiService/ApiService/OneFuzzTypes/Model.cs index 3a5c639771..4559856cc0 100644 --- a/src/ApiService/ApiService/OneFuzzTypes/Model.cs +++ b/src/ApiService/ApiService/OneFuzzTypes/Model.cs @@ -33,19 +33,6 @@ public enum HeartbeatType { TaskAlive, } -// [SkipRename] -// public enum JobResultType { -// NewCrashingInput, -// NewUnableToReproduce, -// NewReport, -// NewUniqueReport, -// NewRegressionReport, -// NewCoverage, -// NewCrashDump, -// CoverageData, -// RuntimeStats, -// } - public record HeartbeatData(HeartbeatType Type); public record TaskHeartbeatEntry( @@ -926,24 +913,7 @@ public record JobResult( string Name, string Type, Dictionary MetricValue -// double NewCrashingInput = 0, -// double NewUnableToReproduce = 0, -// double NewReport = 0, -// double NewUniqueReport = 0, -// double NewRegressionReport = 0, -// double NewCrashDump = 0, -// double InstructionsCovered = 0, -// double TotalInstructions = 0, -// double CoverageRate = 0, -// Dictionary? IterationDictionary = null ) : EntityBase(); -// public JobResult(Guid TaskId, Guid MachineId, Guid JobId, string Project, string Name) : this( -// TaskId: TaskId, -// MachineId: MachineId, -// JobId: JobId, -// Project: Project, -// Name: Name) { } - public record JobConfig( string Project, From 25372957d8007f8f8df31605d946407d170ccb72 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Tue, 10 Oct 2023 22:39:38 +0000 Subject: [PATCH 62/88] Case guard for better readability. --- .../onefuzzlib/JobResultOperations.cs | 74 +++++++++++-------- 1 file changed, 44 insertions(+), 30 deletions(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index 185322bc99..d85471d6d8 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -1,6 +1,7 @@ using ApiService.OneFuzzLib.Orm; using Microsoft.Extensions.Logging; using Polly; +using System.Net; namespace Microsoft.OneFuzz.Service; public interface IJobResultOperations : IOrm { @@ -20,49 +21,62 @@ public JobResultOperations(ILogger log, IOnefuzzContext con return await data.FirstOrDefaultAsync(); } + public async Async.Task> ToUpdate(string resultType, Dictionary newValue, JobResult prevEntry, JobResult newEntry) => resultType switch { + "CoverageData" when prevEntry.MetricValue["rate"] < newValue["rate"] => await Replace(newEntry), + "RuntimeStats" when prevEntry.MetricValue["total_count"] < newValue["total_count"] => await Replace(newEntry), + _ => await Insert(prevEntry with { MetricValue = new Dictionary() { { resultType, prevEntry.MetricValue["count"]++ } } }), + }; + private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, string resultType, Dictionary resultValue) { var jobId = job.JobId; var machineIdMetric = string.Concat(machineId, resultType); - Dictionary newResultValue; - var jobResult = await GetJobResult(taskId, machineIdMetric); + // Dictionary newResultValue; + var oldEntry = await GetJobResult(taskId, machineIdMetric); - if (jobResult == null) { + if (oldEntry == null) { _logTracer.LogInformation($"attempt to insert new job result {taskId} and machineId+metricType {machineIdMetric}"); var entry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: 
job.Config.Project, Name: job.Config.Name, resultType, resultValue); - var r = await Insert(entry); - if (!r.IsOk) { + var result = await Insert(entry); + if (!result.IsOk) { throw new InvalidOperationException($"failed to insert job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); } - } else if (resultType.Equals("CoverageData") && jobResult.MetricValue["rate"] < resultValue["rate"]) { - _logTracer.LogInformation($"attempt to replace coverage job result for {taskId} and machineId+metricType {machineIdMetric}"); - var entry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, resultValue); - var r = await Replace(entry); - if (!r.IsOk) { - throw new InvalidOperationException($"failed to replace job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); - } - } else if (resultType.Equals("RuntimeStats") && jobResult.MetricValue["total_count"] < resultValue["total_count"]) { - _logTracer.LogInformation($"attempt to replace runtime stats job result for {taskId} and machineId+metricType {machineIdMetric}"); - var entry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, resultValue); - var r = await Replace(entry); - if (!r.IsOk) { - throw new InvalidOperationException($"failed to replace job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); - } - } else { - jobResult.MetricValue["count"]++; - newResultValue = jobResult.MetricValue; - var newResult = jobResult with { MetricValue = newResultValue }; - _logTracer.LogInformation($"attempt to update job result {taskId} and machineId+metricType {machineIdMetric} with new count: {newResultValue}"); - var r = await Update(newResult); - if (!r.IsOk) { - throw new InvalidOperationException($"failed to update job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); - } + return true; } - _logTracer.LogInformation($"created job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); - + _logTracer.LogInformation($"attempt to replace job result for {taskId} and machineId+metricType {machineIdMetric}"); + var newEntry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, resultValue); + var r = await ToUpdate(resultType, resultValue, oldEntry, newEntry); + if (!r.IsOk) { + throw new InvalidOperationException($"failed to replace job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); + } return true; } + // } else if (resultType.Equals("CoverageData") && jobResult.MetricValue["rate"] < resultValue["rate"]) { + // _logTracer.LogInformation($"attempt to replace coverage job result for {taskId} and machineId+metricType {machineIdMetric}"); + // var entry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, resultValue); + // var r = await Replace(entry); + // if (!r.IsOk) { + // throw new InvalidOperationException($"failed to replace job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); + // } + // } else if (resultType.Equals("RuntimeStats") && jobResult.MetricValue["total_count"] < resultValue["total_count"]) { + // _logTracer.LogInformation($"attempt to replace runtime stats job result for {taskId} and machineId+metricType {machineIdMetric}"); + // 
var entry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, resultValue); + // var r = await Replace(entry); + // if (!r.IsOk) { + // throw new InvalidOperationException($"failed to replace job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); + // } + // } else { + // jobResult.MetricValue["count"]++; + // newResultValue = jobResult.MetricValue; + // var newResult = jobResult with { MetricValue = newResultValue }; + // _logTracer.LogInformation($"attempt to update job result {taskId} and machineId+metricType {machineIdMetric} with new count: {newResultValue}"); + // var r = await Update(newResult); + // if (!r.IsOk) { + // throw new InvalidOperationException($"failed to update job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); + // } + // } + // } public async Async.Task CreateOrUpdate(Guid jobId, Guid taskId, Guid machineId, string resultType, Dictionary resultValue) { From 469e4eebafdb9cb7493e4e62127cf98d0ccd3396 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Tue, 10 Oct 2023 22:41:52 +0000 Subject: [PATCH 63/88] Fix import ordering. --- src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index d85471d6d8..a322588639 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -1,8 +1,8 @@ using ApiService.OneFuzzLib.Orm; using Microsoft.Extensions.Logging; using Polly; -using System.Net; namespace Microsoft.OneFuzz.Service; +using System.Net; public interface IJobResultOperations : IOrm { From 1253ba834cf44a6f8323699c61dbaf3167c33328 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Tue, 10 Oct 2023 22:43:36 +0000 Subject: [PATCH 64/88] Removing duplicate code. 
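This revision hoists the JobResult construction that was previously repeated in both the insert and the replace paths, so the candidate entity is built once before the null check. A minimal sketch of the pattern, using hypothetical stand-in types (Entry, Store) rather than the real Orm classes:

    using System.Collections.Generic;
    using System.Threading.Tasks;

    public sealed record Entry(string Key, double Value);

    // Hypothetical in-memory stand-in for the table-backed Orm, illustration only.
    public sealed class Store {
        private readonly Dictionary<string, Entry> _rows = new();
        public Task<Entry?> Get(string key) =>
            Task.FromResult<Entry?>(_rows.TryGetValue(key, out var e) ? e : null);
        public Task<bool> Insert(Entry e) => Task.FromResult(_rows.TryAdd(e.Key, e));
        public Task<bool> Replace(Entry e) { _rows[e.Key] = e; return Task.FromResult(true); }
    }

    public static class UpsertSketch {
        // Build the candidate entity once, then branch on whether a row already
        // exists; this mirrors the deduplication applied to TryUpdate below.
        public static async Task<bool> Upsert(Store store, string key, double value) {
            var oldEntry = await store.Get(key);
            var newEntry = new Entry(key, value);
            return oldEntry == null ? await store.Insert(newEntry)
                                    : await store.Replace(newEntry);
        }
    }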
--- .../ApiService/onefuzzlib/JobResultOperations.cs | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index a322588639..ded470a42d 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -33,19 +33,18 @@ private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, s // Dictionary newResultValue; var oldEntry = await GetJobResult(taskId, machineIdMetric); + var newEntry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, resultValue); if (oldEntry == null) { _logTracer.LogInformation($"attempt to insert new job result {taskId} and machineId+metricType {machineIdMetric}"); - var entry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, resultValue); - var result = await Insert(entry); + var result = await Insert(newEntry); if (!result.IsOk) { throw new InvalidOperationException($"failed to insert job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); } return true; } - _logTracer.LogInformation($"attempt to replace job result for {taskId} and machineId+metricType {machineIdMetric}"); - var newEntry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, resultValue); + _logTracer.LogInformation($"attempt to replace job result {taskId} and machineId+metricType {machineIdMetric}"); var r = await ToUpdate(resultType, resultValue, oldEntry, newEntry); if (!r.IsOk) { throw new InvalidOperationException($"failed to replace job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); From 2bff2aa8b6800773ce07b49c75dce2387041dc70 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Tue, 10 Oct 2023 23:22:52 +0000 Subject: [PATCH 65/88] accidentally didn't include update. 
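Context for the one-line fix that follows: the default arm operates on a row that is already known to exist (the null case returned earlier), so it must go through Update rather than Insert, which is assumed here to reject rows whose keys already exist. A tiny sketch of that distinction with a hypothetical in-memory store:

    using System.Collections.Generic;

    public sealed class KvStore {
        private readonly Dictionary<string, double> _rows = new();
        // Insert succeeds only when the key is new...
        public bool Insert(string key, double value) => _rows.TryAdd(key, value);
        // ...while Update succeeds only when the key already exists.
        public bool Update(string key, double value) {
            if (!_rows.ContainsKey(key)) return false;
            _rows[key] = value;
            return true;
        }
    }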
--- src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index ded470a42d..c613a67698 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -24,7 +24,7 @@ public JobResultOperations(ILogger log, IOnefuzzContext con public async Async.Task> ToUpdate(string resultType, Dictionary newValue, JobResult prevEntry, JobResult newEntry) => resultType switch { "CoverageData" when prevEntry.MetricValue["rate"] < newValue["rate"] => await Replace(newEntry), "RuntimeStats" when prevEntry.MetricValue["total_count"] < newValue["total_count"] => await Replace(newEntry), - _ => await Insert(prevEntry with { MetricValue = new Dictionary() { { resultType, prevEntry.MetricValue["count"]++ } } }), + _ => await Update(prevEntry with { MetricValue = new Dictionary() { { resultType, prevEntry.MetricValue["count"]++ } } }), }; private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, string resultType, Dictionary resultValue) { From 3241f8a4563b7cac478ed86773d57448e2870b05 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Wed, 11 Oct 2023 00:19:26 +0000 Subject: [PATCH 66/88] Constructing the wrong way. --- src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index c613a67698..6319b9a23a 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -24,7 +24,7 @@ public JobResultOperations(ILogger log, IOnefuzzContext con public async Async.Task> ToUpdate(string resultType, Dictionary newValue, JobResult prevEntry, JobResult newEntry) => resultType switch { "CoverageData" when prevEntry.MetricValue["rate"] < newValue["rate"] => await Replace(newEntry), "RuntimeStats" when prevEntry.MetricValue["total_count"] < newValue["total_count"] => await Replace(newEntry), - _ => await Update(prevEntry with { MetricValue = new Dictionary() { { resultType, prevEntry.MetricValue["count"]++ } } }), + _ => await Update(prevEntry with { MetricValue = new Dictionary() { { "count", prevEntry.MetricValue["count"]++ } } }), }; private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, string resultType, Dictionary resultValue) { From 713b83d5d39c532cbfcbaedf4d2840f71cab3058 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Wed, 11 Oct 2023 17:01:57 +0000 Subject: [PATCH 67/88] Moving back to standard switch. 
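Besides readability, leaving the expression form sidesteps a subtle hazard in its default arm: prevEntry.MetricValue["count"]++ is a post-increment, so the freshly built dictionary received the pre-increment count while the old dictionary was mutated as a side effect. A two-line demonstration in plain C#, independent of this codebase:

    using System.Collections.Generic;

    var old = new Dictionary<string, double> { ["count"] = 1 };
    var fresh = new Dictionary<string, double> { ["count"] = old["count"]++ };
    // fresh["count"] is 1 (post-increment yields the value before the bump),
    // while old["count"] has been mutated to 2 as a side effect.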
--- .../onefuzzlib/JobResultOperations.cs | 38 +++++++++++++++---- 1 file changed, 30 insertions(+), 8 deletions(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index 6319b9a23a..ce198ff3af 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -21,11 +21,11 @@ public JobResultOperations(ILogger log, IOnefuzzContext con return await data.FirstOrDefaultAsync(); } - public async Async.Task> ToUpdate(string resultType, Dictionary newValue, JobResult prevEntry, JobResult newEntry) => resultType switch { - "CoverageData" when prevEntry.MetricValue["rate"] < newValue["rate"] => await Replace(newEntry), - "RuntimeStats" when prevEntry.MetricValue["total_count"] < newValue["total_count"] => await Replace(newEntry), - _ => await Update(prevEntry with { MetricValue = new Dictionary() { { "count", prevEntry.MetricValue["count"]++ } } }), - }; + // public async Async.Task> ToUpdate(string resultType, JobResult oldEntry, JobResult newEntry) => resultType switch { + // "CoverageData" when oldEntry.MetricValue["rate"] < newEntry.MetricValue["rate"] => await Replace(newEntry), + // "RuntimeStats" when oldEntry.MetricValue["total_count"] < newEntry.MetricValue["total_count"] => await Replace(newEntry), + // _ => await Update(oldEntry with { MetricValue = new Dictionary() { { "count", oldEntry.MetricValue["count"]++ } } }), + // }; private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, string resultType, Dictionary resultValue) { var jobId = job.JobId; @@ -44,12 +44,34 @@ private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, s return true; } - _logTracer.LogInformation($"attempt to replace job result {taskId} and machineId+metricType {machineIdMetric}"); - var r = await ToUpdate(resultType, resultValue, oldEntry, newEntry); + ResultVoid<(HttpStatusCode Status, string Reason)> r; + switch (resultType) { + case "CoverageData" when oldEntry.MetricValue["rate"] < newEntry.MetricValue["rate"]: + r = await Replace(newEntry); + break; + case "RuntimeStats" when oldEntry.MetricValue["total_count"] < newEntry.MetricValue["total_count"]: + r = await Replace(newEntry); + break; + default: + _logTracer.LogInformation($"attempt to update job result {taskId} and machineId+metricType {machineIdMetric}"); + oldEntry.MetricValue["count"]++; + var newResult = oldEntry with { MetricValue = oldEntry.MetricValue }; + r = await Update(newResult); + break; + } + if (!r.IsOk) { - throw new InvalidOperationException($"failed to replace job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); + throw new InvalidOperationException($"failed to replace or update job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); } + return true; + + // _logTracer.LogInformation($"attempt to replace job result {taskId} and machineId+metricType {machineIdMetric}"); + // var r = await ToUpdate(resultType, oldEntry, newEntry); + // if (!r.IsOk) { + // throw new InvalidOperationException($"failed to replace job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); + // } + // return true; } // } else if (resultType.Equals("CoverageData") && jobResult.MetricValue["rate"] < resultValue["rate"]) { // _logTracer.LogInformation($"attempt to replace coverage job result for {taskId} and machineId+metricType {machineIdMetric}"); From 64aaefe60c1b9efc80c3aa1ea16580467741f461 Mon Sep 17 
00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Wed, 11 Oct 2023 17:40:42 +0000 Subject: [PATCH 68/88] Removing old code. --- .../onefuzzlib/JobResultOperations.cs | 37 ------------------- 1 file changed, 37 deletions(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index ce198ff3af..614ef359db 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -21,12 +21,6 @@ public JobResultOperations(ILogger log, IOnefuzzContext con return await data.FirstOrDefaultAsync(); } - // public async Async.Task> ToUpdate(string resultType, JobResult oldEntry, JobResult newEntry) => resultType switch { - // "CoverageData" when oldEntry.MetricValue["rate"] < newEntry.MetricValue["rate"] => await Replace(newEntry), - // "RuntimeStats" when oldEntry.MetricValue["total_count"] < newEntry.MetricValue["total_count"] => await Replace(newEntry), - // _ => await Update(oldEntry with { MetricValue = new Dictionary() { { "count", oldEntry.MetricValue["count"]++ } } }), - // }; - private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, string resultType, Dictionary resultValue) { var jobId = job.JobId; var machineIdMetric = string.Concat(machineId, resultType); @@ -66,38 +60,7 @@ private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, s return true; - // _logTracer.LogInformation($"attempt to replace job result {taskId} and machineId+metricType {machineIdMetric}"); - // var r = await ToUpdate(resultType, oldEntry, newEntry); - // if (!r.IsOk) { - // throw new InvalidOperationException($"failed to replace job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); - // } - // return true; } - // } else if (resultType.Equals("CoverageData") && jobResult.MetricValue["rate"] < resultValue["rate"]) { - // _logTracer.LogInformation($"attempt to replace coverage job result for {taskId} and machineId+metricType {machineIdMetric}"); - // var entry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, resultValue); - // var r = await Replace(entry); - // if (!r.IsOk) { - // throw new InvalidOperationException($"failed to replace job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); - // } - // } else if (resultType.Equals("RuntimeStats") && jobResult.MetricValue["total_count"] < resultValue["total_count"]) { - // _logTracer.LogInformation($"attempt to replace runtime stats job result for {taskId} and machineId+metricType {machineIdMetric}"); - // var entry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, resultValue); - // var r = await Replace(entry); - // if (!r.IsOk) { - // throw new InvalidOperationException($"failed to replace job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); - // } - // } else { - // jobResult.MetricValue["count"]++; - // newResultValue = jobResult.MetricValue; - // var newResult = jobResult with { MetricValue = newResultValue }; - // _logTracer.LogInformation($"attempt to update job result {taskId} and machineId+metricType {machineIdMetric} with new count: {newResultValue}"); - // var r = await Update(newResult); - // if (!r.IsOk) { - // throw new InvalidOperationException($"failed to update job result 
with taskId {taskId} and machineId+metricType {machineIdMetric}"); - // } - // } - // } public async Async.Task CreateOrUpdate(Guid jobId, Guid taskId, Guid machineId, string resultType, Dictionary resultValue) { From 7c4c41acc6c3aa785c5c18b87b960f109cb94f32 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Wed, 11 Oct 2023 17:51:32 +0000 Subject: [PATCH 69/88] Removing more old code. --- src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index 614ef359db..0126b73c0e 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -25,7 +25,6 @@ private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, s var jobId = job.JobId; var machineIdMetric = string.Concat(machineId, resultType); - // Dictionary newResultValue; var oldEntry = await GetJobResult(taskId, machineIdMetric); var newEntry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, resultValue); From 8f557025c191d646d2bcce6abed56c036e4ea0c7 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Wed, 11 Oct 2023 20:24:42 +0000 Subject: [PATCH 70/88] Using constants. --- .../ApiService/OneFuzzTypes/Model.cs | 36 +++++++++++++++++++ .../onefuzzlib/JobResultOperations.cs | 7 ++-- 2 files changed, 41 insertions(+), 2 deletions(-) diff --git a/src/ApiService/ApiService/OneFuzzTypes/Model.cs b/src/ApiService/ApiService/OneFuzzTypes/Model.cs index 4559856cc0..303698ab20 100644 --- a/src/ApiService/ApiService/OneFuzzTypes/Model.cs +++ b/src/ApiService/ApiService/OneFuzzTypes/Model.cs @@ -905,6 +905,12 @@ public record SecretAddress(Uri Url) : ISecret { public record SecretData(ISecret Secret) { } +[SkipRename] +public enum JobResultType { + CoverageData, + RuntimeStats, +} + public record JobResult( [PartitionKey] Guid TaskId, [RowKey] string MachineIdMetric, @@ -915,6 +921,36 @@ public record JobResult( Dictionary MetricValue ) : EntityBase(); +public record CoverageMetricValue(IDictionary inner) { + public double Rate { + get { + return inner["rate"]; + } + } + public double Covered { + get { + return inner["rate"]; + } + } + public double Features { + get { + return inner["Features"]; + } + } +} +public record RuntimeMetricValue(IDictionary inner) { + public double TotalCount { + get { + return inner["total_count"]; + } + } + public double ExecsSec { + get { + return inner["execs_sec"]; + } + } +} + public record JobConfig( string Project, string Name, diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index 0126b73c0e..c2f59668d7 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -12,6 +12,9 @@ public interface IJobResultOperations : IOrm { } public class JobResultOperations : Orm, IJobResultOperations { + const string COVERAGE_DATA = "CoverageData"; + const string RUNTIME_STATS = "RuntimeStats"; + public JobResultOperations(ILogger log, IOnefuzzContext context) : base(log, context) { } @@ -39,10 +42,10 @@ private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, s 
ResultVoid<(HttpStatusCode Status, string Reason)> r; switch (resultType) { - case "CoverageData" when oldEntry.MetricValue["rate"] < newEntry.MetricValue["rate"]: + case COVERAGE_DATA when oldEntry.MetricValue["rate"] < newEntry.MetricValue["rate"]: r = await Replace(newEntry); break; - case "RuntimeStats" when oldEntry.MetricValue["total_count"] < newEntry.MetricValue["total_count"]: + case RUNTIME_STATS when oldEntry.MetricValue["total_count"] < newEntry.MetricValue["total_count"]: r = await Replace(newEntry); break; default: From d9b85ccaadadcf534707d8c65f5cb0e6d79cedb7 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Fri, 13 Oct 2023 16:45:07 +0000 Subject: [PATCH 71/88] Addressing comments. --- .../ApiService/OneFuzzTypes/Model.cs | 7 ++-- .../onefuzzlib/JobResultOperations.cs | 36 ++++++++++++------- 2 files changed, 27 insertions(+), 16 deletions(-) diff --git a/src/ApiService/ApiService/OneFuzzTypes/Model.cs b/src/ApiService/ApiService/OneFuzzTypes/Model.cs index 303698ab20..4676acd76c 100644 --- a/src/ApiService/ApiService/OneFuzzTypes/Model.cs +++ b/src/ApiService/ApiService/OneFuzzTypes/Model.cs @@ -912,9 +912,10 @@ public enum JobResultType { } public record JobResult( - [PartitionKey] Guid TaskId, - [RowKey] string MachineIdMetric, - Guid JobId, + [PartitionKey] Guid JobId, + [RowKey] string TaskIdMachineIdMetric, + Guid TaskId, + Guid MachineId, string Project, string Name, string Type, diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index c2f59668d7..a1a6a9712a 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -6,7 +6,8 @@ namespace Microsoft.OneFuzz.Service; public interface IJobResultOperations : IOrm { - Async.Task GetJobResult(Guid taskId, string machineIdMetric); + Async.Task GetJobResult(Guid jobId, Guid taskId, Guid machineId, string metricType); + Async.Task GetJobResults(Guid jobId); Async.Task CreateOrUpdate(Guid jobId, Guid taskId, Guid machineId, string resultType, Dictionary resultValue); } @@ -19,37 +20,46 @@ public JobResultOperations(ILogger log, IOnefuzzContext con : base(log, context) { } - public async Async.Task GetJobResult(Guid taskId, string machineIdMetric) { - var data = QueryAsync(Query.SingleEntity(taskId.ToString(), machineIdMetric)); + public async Async.Task GetJobResult(Guid jobId, Guid taskId, Guid machineId, string metricType) { + var data = QueryAsync(Query.SingleEntity(jobId.ToString(), string.Concat(taskId, machineId, metricType))); + return await data.FirstOrDefaultAsync(); + } + + public async Async.Task GetJobResults(Guid jobId) { + var data = QueryAsync(Query.PartitionKey(jobId.ToString())); return await data.FirstOrDefaultAsync(); } private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, string resultType, Dictionary resultValue) { var jobId = job.JobId; - var machineIdMetric = string.Concat(machineId, resultType); + var taskIdMachineIdMetric = string.Concat(taskId, machineId, resultType); - var oldEntry = await GetJobResult(taskId, machineIdMetric); - var newEntry = new JobResult(TaskId: taskId, MachineIdMetric: machineIdMetric, JobId: jobId, Project: job.Config.Project, Name: job.Config.Name, resultType, resultValue); + var oldEntry = await GetJobResult(jobId, taskId, machineId, resultType); + var newEntry = new JobResult(JobId: jobId, TaskIdMachineIdMetric: taskIdMachineIdMetric, 
TaskId: taskId, MachineId: machineId, Project: job.Config.Project, Name: job.Config.Name, resultType, resultValue); if (oldEntry == null) { - _logTracer.LogInformation($"attempt to insert new job result {taskId} and machineId+metricType {machineIdMetric}"); + _logTracer.LogInformation($"attempt to insert new job result {taskId} and taskId+machineId+metricType {taskIdMachineIdMetric}"); var result = await Insert(newEntry); if (!result.IsOk) { - throw new InvalidOperationException($"failed to insert job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); + throw new InvalidOperationException($"failed to insert job result with taskId {taskId} and taskId+machineId+metricType {taskIdMachineIdMetric}"); } return true; } ResultVoid<(HttpStatusCode Status, string Reason)> r; switch (resultType) { - case COVERAGE_DATA when oldEntry.MetricValue["rate"] < newEntry.MetricValue["rate"]: - r = await Replace(newEntry); + case COVERAGE_DATA: + if (oldEntry.MetricValue["rate"] < newEntry.MetricValue["rate"]) { + r = await Replace(newEntry); + } break; - case RUNTIME_STATS when oldEntry.MetricValue["total_count"] < newEntry.MetricValue["total_count"]: - r = await Replace(newEntry); + case RUNTIME_STATS: + if (oldEntry.MetricValue["total_count"] < newEntry.MetricValue["total_count"]) { + r = await Replace(newEntry); + } break; default: - _logTracer.LogInformation($"attempt to update job result {taskId} and machineId+metricType {machineIdMetric}"); + _logTracer.LogInformation($"attempt to update job result {taskId} and taskId+machineId+metricType {taskIdMachineIdMetric}"); oldEntry.MetricValue["count"]++; var newResult = oldEntry with { MetricValue = oldEntry.MetricValue }; r = await Update(newResult); From b7d94a70226b886d64cbc9523c6868c51730910f Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Fri, 13 Oct 2023 17:03:15 +0000 Subject: [PATCH 72/88] Corrected. --- src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index a1a6a9712a..5aa5c5c6cd 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -67,7 +67,7 @@ private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, s } if (!r.IsOk) { - throw new InvalidOperationException($"failed to replace or update job result with taskId {taskId} and machineId+metricType {machineIdMetric}"); + throw new InvalidOperationException($"failed to replace or update job result with taskId {taskId} and machineId+metricType {taskIdMachineIdMetric}"); } return true; From ba714185447d3af42466e012e4deccfa1c7273fd Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Fri, 13 Oct 2023 17:07:05 +0000 Subject: [PATCH 73/88] Updating. 
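The change below pushes the IsOk check into each case so the exception names the operation that actually failed (replace vs. update) instead of one combined message. If that duplication grows, one option is a small guard helper; a sketch with stand-in names (Result and EnsureOk are hypothetical, not part of the Orm):

    using System;

    public sealed record Result(bool IsOk, string Reason);

    public static class Guards {
        // Throws with an operation-specific message, keeping call sites to one
        // line, e.g. Guards.EnsureOk(result, "replace", taskIdMachineIdMetric).
        public static void EnsureOk(Result r, string operation, string key) {
            if (!r.IsOk)
                throw new InvalidOperationException($"failed to {operation} job result {key}: {r.Reason}");
        }
    }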
--- .../ApiService/onefuzzlib/JobResultOperations.cs | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index 5aa5c5c6cd..f64d5123ae 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -51,11 +51,17 @@ private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, s case COVERAGE_DATA: if (oldEntry.MetricValue["rate"] < newEntry.MetricValue["rate"]) { r = await Replace(newEntry); + if (!r.IsOk) { + throw new InvalidOperationException($"failed to replace job result with taskId {taskId} and machineId+metricType {taskIdMachineIdMetric}"); + } } break; case RUNTIME_STATS: if (oldEntry.MetricValue["total_count"] < newEntry.MetricValue["total_count"]) { r = await Replace(newEntry); + if (!r.IsOk) { + throw new InvalidOperationException($"failed to replace job result with taskId {taskId} and machineId+metricType {taskIdMachineIdMetric}"); + } } break; default: @@ -63,12 +69,12 @@ private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, s oldEntry.MetricValue["count"]++; var newResult = oldEntry with { MetricValue = oldEntry.MetricValue }; r = await Update(newResult); + if (!r.IsOk) { + throw new InvalidOperationException($"failed to update job result with taskId {taskId} and machineId+metricType {taskIdMachineIdMetric}"); + } break; } - if (!r.IsOk) { - throw new InvalidOperationException($"failed to replace or update job result with taskId {taskId} and machineId+metricType {taskIdMachineIdMetric}"); - } return true; From ec295229c6026af7b80b73e79cc3dfb21abc524d Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Mon, 16 Oct 2023 17:37:10 +0000 Subject: [PATCH 74/88] Comment. --- src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index f64d5123ae..7e56f2410e 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -67,8 +67,8 @@ private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, s default: _logTracer.LogInformation($"attempt to update job result {taskId} and taskId+machineId+metricType {taskIdMachineIdMetric}"); oldEntry.MetricValue["count"]++; - var newResult = oldEntry with { MetricValue = oldEntry.MetricValue }; - r = await Update(newResult); + oldEntry = oldEntry with { MetricValue = oldEntry.MetricValue }; + r = await Update(oldEntry); if (!r.IsOk) { throw new InvalidOperationException($"failed to update job result with taskId {taskId} and machineId+metricType {taskIdMachineIdMetric}"); } From 773765e11240b94795de6090b4c7a17e836f0111 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Mon, 16 Oct 2023 23:51:30 +0000 Subject: [PATCH 75/88] Replacing with Update to deal with edge cases. 
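The edge cases here are concurrent writers. The swap below assumes the Orm's Update is an ETag-conditional write, so a racing writer fails and the Polly retry in CreateOrUpdate re-reads and tries again, whereas Replace would overwrite whatever is stored unconditionally. A sketch of that read-modify-write pattern directly against Azure.Data.Tables, for illustration (the table layout and the "count" column are simplified stand-ins):

    using System.Threading.Tasks;
    using Azure.Data.Tables;

    public static class EtagSketch {
        // The update is conditional on the ETag captured at read time and fails
        // with a 412 RequestFailedException if another writer got there first.
        public static async Task BumpCountAsync(TableClient table, string pk, string rk) {
            TableEntity row = await table.GetEntityAsync<TableEntity>(pk, rk);
            row["count"] = (double)row["count"] + 1;
            await table.UpdateEntityAsync(row, row.ETag, TableUpdateMode.Replace);
        }
    }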
--- src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index 7e56f2410e..4e344de0d0 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -50,7 +50,7 @@ private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, s switch (resultType) { case COVERAGE_DATA: if (oldEntry.MetricValue["rate"] < newEntry.MetricValue["rate"]) { - r = await Replace(newEntry); + r = await Update(newEntry); if (!r.IsOk) { throw new InvalidOperationException($"failed to replace job result with taskId {taskId} and machineId+metricType {taskIdMachineIdMetric}"); } @@ -58,7 +58,7 @@ private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, s break; case RUNTIME_STATS: if (oldEntry.MetricValue["total_count"] < newEntry.MetricValue["total_count"]) { - r = await Replace(newEntry); + r = await Update(newEntry); if (!r.IsOk) { throw new InvalidOperationException($"failed to replace job result with taskId {taskId} and machineId+metricType {taskIdMachineIdMetric}"); } From 6077e649943313e81e598b16b8aae7017e4f6e32 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Tue, 17 Oct 2023 21:44:54 +0000 Subject: [PATCH 76/88] Using timestamp. --- src/agent/Cargo.lock | 1 + src/agent/onefuzz-result/Cargo.toml | 5 ++++- src/agent/onefuzz-result/src/job_result.rs | 5 ++++- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock index eb35241201..7051b93fa2 100644 --- a/src/agent/Cargo.lock +++ b/src/agent/Cargo.lock @@ -2219,6 +2219,7 @@ version = "0.2.0" dependencies = [ "anyhow", "async-trait", + "chrono", "log", "onefuzz-telemetry", "reqwest", diff --git a/src/agent/onefuzz-result/Cargo.toml b/src/agent/onefuzz-result/Cargo.toml index 7c7de6615c..0f419a815b 100644 --- a/src/agent/onefuzz-result/Cargo.toml +++ b/src/agent/onefuzz-result/Cargo.toml @@ -9,10 +9,13 @@ license = "MIT" [dependencies] anyhow = { version = "1.0", features = ["backtrace"] } async-trait = "0.1" +chrono = { version = "0.4", default-features = false, features = [ + "clock", + "std", +] } reqwest = "0.11" serde = "1.0" storage-queue = { path = "../storage-queue" } uuid = { version = "1.4", features = ["serde", "v4"] } onefuzz-telemetry = { path = "../onefuzz-telemetry" } log = "0.4" - diff --git a/src/agent/onefuzz-result/src/job_result.rs b/src/agent/onefuzz-result/src/job_result.rs index 08f7bbc1ee..9d75ff3e42 100644 --- a/src/agent/onefuzz-result/src/job_result.rs +++ b/src/agent/onefuzz-result/src/job_result.rs @@ -3,6 +3,7 @@ use anyhow::Result; use async_trait::async_trait; +pub use chrono::Utc; use onefuzz_telemetry::warn; use reqwest::Url; use serde::{self, Deserialize, Serialize}; @@ -32,6 +33,7 @@ struct JobResult { job_id: Uuid, machine_id: Uuid, machine_name: String, + created_at: String, data: JobResultData, value: HashMap, } @@ -103,7 +105,7 @@ impl JobResultSender for TaskJobResultClient { let job_id = self.context.state.job_id; let machine_id = self.context.state.machine_id; let machine_name = self.context.state.machine_name.clone(); - + let created_at = chrono::Utc::now().to_string(); let _ = self .context .queue_client @@ -112,6 +114,7 @@ impl JobResultSender for TaskJobResultClient { job_id, machine_id, machine_name, + created_at, 
data, value, }) From 44ec10937bf8a9472dd4f568c930097b44ff6f8f Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Tue, 17 Oct 2023 21:54:06 +0000 Subject: [PATCH 77/88] Adding CreatedAt field. --- .../ApiService/Functions/QueueJobResult.cs | 2 +- .../ApiService/OneFuzzTypes/Model.cs | 32 ++----------------- .../onefuzzlib/JobResultOperations.cs | 10 +++--- src/agent/Cargo.lock | 1 + src/agent/onefuzz-result/Cargo.toml | 1 + src/agent/onefuzz-result/src/job_result.rs | 5 +-- 6 files changed, 13 insertions(+), 38 deletions(-) diff --git a/src/ApiService/ApiService/Functions/QueueJobResult.cs b/src/ApiService/ApiService/Functions/QueueJobResult.cs index 725367d8d5..e5114d10c4 100644 --- a/src/ApiService/ApiService/Functions/QueueJobResult.cs +++ b/src/ApiService/ApiService/Functions/QueueJobResult.cs @@ -52,7 +52,7 @@ public async Async.Task Run([QueueTrigger("job-result", Connection = "AzureWebJo return; } - var jobResult = await _context.JobResultOperations.CreateOrUpdate(job.JobId, jr.TaskId, jr.MachineId, jobResultType, value); + var jobResult = await _context.JobResultOperations.CreateOrUpdate(job.JobId, jr.TaskId, jr.MachineId, jr.CreatedAt, jobResultType, value); if (!jobResult.IsOk) { _log.LogError("failed to create or update with job result {JobId}", job.JobId); } diff --git a/src/ApiService/ApiService/OneFuzzTypes/Model.cs b/src/ApiService/ApiService/OneFuzzTypes/Model.cs index 4676acd76c..53029b76f3 100644 --- a/src/ApiService/ApiService/OneFuzzTypes/Model.cs +++ b/src/ApiService/ApiService/OneFuzzTypes/Model.cs @@ -47,6 +47,7 @@ public record TaskJobResultEntry( Guid TaskId, Guid? JobId, Guid MachineId, + DateTime CreatedAt, JobResultData Data, Dictionary Value ); @@ -916,42 +917,13 @@ public record JobResult( [RowKey] string TaskIdMachineIdMetric, Guid TaskId, Guid MachineId, + DateTime CreatedAt, string Project, string Name, string Type, Dictionary MetricValue ) : EntityBase(); -public record CoverageMetricValue(IDictionary inner) { - public double Rate { - get { - return inner["rate"]; - } - } - public double Covered { - get { - return inner["rate"]; - } - } - public double Features { - get { - return inner["Features"]; - } - } -} -public record RuntimeMetricValue(IDictionary inner) { - public double TotalCount { - get { - return inner["total_count"]; - } - } - public double ExecsSec { - get { - return inner["execs_sec"]; - } - } -} - public record JobConfig( string Project, string Name, diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index 4e344de0d0..a8a4e3da9a 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -8,7 +8,7 @@ public interface IJobResultOperations : IOrm { Async.Task GetJobResult(Guid jobId, Guid taskId, Guid machineId, string metricType); Async.Task GetJobResults(Guid jobId); - Async.Task CreateOrUpdate(Guid jobId, Guid taskId, Guid machineId, string resultType, Dictionary resultValue); + Async.Task CreateOrUpdate(Guid jobId, Guid taskId, Guid machineId, DateTime createdAt, string resultType, Dictionary resultValue); } public class JobResultOperations : Orm, IJobResultOperations { @@ -30,12 +30,12 @@ public JobResultOperations(ILogger log, IOnefuzzContext con return await data.FirstOrDefaultAsync(); } - private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, string resultType, Dictionary resultValue) { + private async 
Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, DateTime createdAt, string resultType, Dictionary resultValue) { var jobId = job.JobId; var taskIdMachineIdMetric = string.Concat(taskId, machineId, resultType); var oldEntry = await GetJobResult(jobId, taskId, machineId, resultType); - var newEntry = new JobResult(JobId: jobId, TaskIdMachineIdMetric: taskIdMachineIdMetric, TaskId: taskId, MachineId: machineId, Project: job.Config.Project, Name: job.Config.Name, resultType, resultValue); + var newEntry = new JobResult(JobId: jobId, TaskIdMachineIdMetric: taskIdMachineIdMetric, TaskId: taskId, MachineId: machineId, CreatedAt: createdAt, Project: job.Config.Project, Name: job.Config.Name, resultType, resultValue); if (oldEntry == null) { _logTracer.LogInformation($"attempt to insert new job result {taskId} and taskId+machineId+metricType {taskIdMachineIdMetric}"); @@ -80,7 +80,7 @@ private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, s } - public async Async.Task CreateOrUpdate(Guid jobId, Guid taskId, Guid machineId, string resultType, Dictionary resultValue) { + public async Async.Task CreateOrUpdate(Guid jobId, Guid taskId, Guid machineId, DateTime createdAt, string resultType, Dictionary resultValue) { var job = await _context.JobOperations.Get(jobId); if (job == null) { @@ -92,7 +92,7 @@ public async Async.Task CreateOrUpdate(Guid jobId, Guid taskI _logTracer.LogInformation("attempt to update job result {JobId}", job.JobId); var policy = Policy.Handle().WaitAndRetryAsync(50, _ => new TimeSpan(0, 0, 5)); await policy.ExecuteAsync(async () => { - success = await TryUpdate(job, taskId, machineId, resultType, resultValue); + success = await TryUpdate(job, taskId, machineId, createdAt, resultType, resultValue); _logTracer.LogInformation("attempt {success}", success); }); return OneFuzzResultVoid.Ok; diff --git a/src/agent/Cargo.lock b/src/agent/Cargo.lock index 7051b93fa2..566aa77ca3 100644 --- a/src/agent/Cargo.lock +++ b/src/agent/Cargo.lock @@ -448,6 +448,7 @@ dependencies = [ "android-tzdata", "iana-time-zone", "num-traits", + "serde", "winapi 0.3.9", ] diff --git a/src/agent/onefuzz-result/Cargo.toml b/src/agent/onefuzz-result/Cargo.toml index 0f419a815b..7e156ac91d 100644 --- a/src/agent/onefuzz-result/Cargo.toml +++ b/src/agent/onefuzz-result/Cargo.toml @@ -12,6 +12,7 @@ async-trait = "0.1" chrono = { version = "0.4", default-features = false, features = [ "clock", "std", + "serde" ] } reqwest = "0.11" serde = "1.0" diff --git a/src/agent/onefuzz-result/src/job_result.rs b/src/agent/onefuzz-result/src/job_result.rs index 9d75ff3e42..ace45d0522 100644 --- a/src/agent/onefuzz-result/src/job_result.rs +++ b/src/agent/onefuzz-result/src/job_result.rs @@ -3,6 +3,7 @@ use anyhow::Result; use async_trait::async_trait; +use chrono::DateTime; pub use chrono::Utc; use onefuzz_telemetry::warn; use reqwest::Url; @@ -33,7 +34,7 @@ struct JobResult { job_id: Uuid, machine_id: Uuid, machine_name: String, - created_at: String, + created_at: DateTime, data: JobResultData, value: HashMap, } @@ -105,7 +106,7 @@ impl JobResultSender for TaskJobResultClient { let job_id = self.context.state.job_id; let machine_id = self.context.state.machine_id; let machine_name = self.context.state.machine_name.clone(); - let created_at = chrono::Utc::now().to_string(); + let created_at = chrono::Utc::now(); let _ = self .context .queue_client From 09bc214078d4bae84532c494259b7f5d3c3b5e10 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: 
Tue, 17 Oct 2023 21:55:13 +0000 Subject: [PATCH 78/88] Comparing timestamps. --- .../ApiService/onefuzzlib/JobResultOperations.cs | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index a8a4e3da9a..052824a300 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -49,15 +49,8 @@ private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, D ResultVoid<(HttpStatusCode Status, string Reason)> r; switch (resultType) { case COVERAGE_DATA: - if (oldEntry.MetricValue["rate"] < newEntry.MetricValue["rate"]) { - r = await Update(newEntry); - if (!r.IsOk) { - throw new InvalidOperationException($"failed to replace job result with taskId {taskId} and machineId+metricType {taskIdMachineIdMetric}"); - } - } - break; case RUNTIME_STATS: - if (oldEntry.MetricValue["total_count"] < newEntry.MetricValue["total_count"]) { + if (oldEntry.CreatedAt < newEntry.CreatedAt) { r = await Update(newEntry); if (!r.IsOk) { throw new InvalidOperationException($"failed to replace job result with taskId {taskId} and machineId+metricType {taskIdMachineIdMetric}"); } From 85b88a13e70be82c80ecbc8c45e234493a8c0cf0 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Tue, 17 Oct 2023 23:32:59 +0000 Subject: [PATCH 79/88] Setting property. --- .../ApiService/onefuzzlib/JobResultOperations.cs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index 052824a300..5ebfdf0640 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -35,10 +35,10 @@ private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, D var taskIdMachineIdMetric = string.Concat(taskId, machineId, resultType); var oldEntry = await GetJobResult(jobId, taskId, machineId, resultType); - var newEntry = new JobResult(JobId: jobId, TaskIdMachineIdMetric: taskIdMachineIdMetric, TaskId: taskId, MachineId: machineId, CreatedAt: createdAt, Project: job.Config.Project, Name: job.Config.Name, resultType, resultValue); if (oldEntry == null) { _logTracer.LogInformation($"attempt to insert new job result {taskId} and taskId+machineId+metricType {taskIdMachineIdMetric}"); + var newEntry = new JobResult(JobId: jobId, TaskIdMachineIdMetric: taskIdMachineIdMetric, TaskId: taskId, MachineId: machineId, CreatedAt: createdAt, Project: job.Config.Project, Name: job.Config.Name, resultType, resultValue); var result = await Insert(newEntry); if (!result.IsOk) { throw new InvalidOperationException($"failed to insert job result with taskId {taskId} and taskId+machineId+metricType {taskIdMachineIdMetric}"); @@ -50,8 +50,9 @@ private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, D switch (resultType) { case COVERAGE_DATA: case RUNTIME_STATS: - if (oldEntry.CreatedAt < newEntry.CreatedAt) { - r = await Update(newEntry); + if (oldEntry.CreatedAt < createdAt) { + oldEntry = oldEntry with { MetricValue = resultValue }; + r = await Update(oldEntry); if (!r.IsOk) { throw new InvalidOperationException($"failed to replace job result with taskId {taskId} and machineId+metricType {taskIdMachineIdMetric}"); } From ba2be6184421085a029ec0763b3ef05f1e6eadcb
Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Tue, 24 Oct 2023 21:33:15 +0000 Subject: [PATCH 80/88] Adding check for CreatedAt. --- src/ApiService/ApiService/Functions/QueueJobResult.cs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/ApiService/ApiService/Functions/QueueJobResult.cs b/src/ApiService/ApiService/Functions/QueueJobResult.cs index e5114d10c4..f787df690a 100644 --- a/src/ApiService/ApiService/Functions/QueueJobResult.cs +++ b/src/ApiService/ApiService/Functions/QueueJobResult.cs @@ -30,11 +30,10 @@ public async Async.Task Run([QueueTrigger("job-result", Connection = "AzureWebJo } var job = await _jobs.Get(task.JobId); - if (job == null) { - _log.LogWarning("invalid {JobId}", task.JobId); + if (job == null || jr.CreatedAt == null) { + _log.LogWarning("invalid message {JobId}", task.JobId); return; } - JobResultData? data = jr.Data; if (data == null) { _log.LogWarning($"job result data is empty, throwing out: {jr}"); From 52c115626101b63e86917b4f9fab7e7c45d8fa91 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Tue, 24 Oct 2023 21:44:58 +0000 Subject: [PATCH 81/88] Making created_at optional. --- src/ApiService/ApiService/Functions/QueueJobResult.cs | 8 +++++++- src/ApiService/ApiService/OneFuzzTypes/Model.cs | 2 +- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/src/ApiService/ApiService/Functions/QueueJobResult.cs b/src/ApiService/ApiService/Functions/QueueJobResult.cs index f787df690a..40858ea46e 100644 --- a/src/ApiService/ApiService/Functions/QueueJobResult.cs +++ b/src/ApiService/ApiService/Functions/QueueJobResult.cs @@ -30,10 +30,16 @@ public async Async.Task Run([QueueTrigger("job-result", Connection = "AzureWebJo } var job = await _jobs.Get(task.JobId); - if (job == null || jr.CreatedAt == null) { + if (job == null) { _log.LogWarning("invalid message {JobId}", task.JobId); return; } + + if (jr.CreatedAt == null) { + _log.LogWarning("invalid message, no created_at field {JobId}", task.JobId); + return; + } + JobResultData? data = jr.Data; if (data == null) { _log.LogWarning($"job result data is empty, throwing out: {jr}"); diff --git a/src/ApiService/ApiService/OneFuzzTypes/Model.cs b/src/ApiService/ApiService/OneFuzzTypes/Model.cs index 84a8f02554..0bbc34a7e6 100644 --- a/src/ApiService/ApiService/OneFuzzTypes/Model.cs +++ b/src/ApiService/ApiService/OneFuzzTypes/Model.cs @@ -47,7 +47,7 @@ public record TaskJobResultEntry( Guid TaskId, Guid? JobId, Guid MachineId, - DateTime CreatedAt, + DateTime? CreatedAt, JobResultData Data, Dictionary Value ); From 42d30f77efa334555e370d5024433b4cc5e7bcd7 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Tue, 24 Oct 2023 21:57:09 +0000 Subject: [PATCH 82/88] Trying again. 
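For the record, this is the compile fix that making CreatedAt nullable (patch 81) required: DateTime? no longer converts implicitly to the DateTime parameter of CreateOrUpdate, so after the null guard the value has to be unwrapped explicitly. A minimal illustration:

    using System;

    public static class NullableSketch {
        public static void Consume(DateTime createdAt) { }

        public static void Handle(DateTime? createdAt) {
            if (createdAt == null) return;  // mirrors the guard added in patch 81
            // Consume(createdAt);          // would not compile: no implicit DateTime? -> DateTime
            Consume(createdAt.Value);       // explicit unwrap after the null check
        }
    }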
--- src/ApiService/ApiService/Functions/QueueJobResult.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ApiService/ApiService/Functions/QueueJobResult.cs b/src/ApiService/ApiService/Functions/QueueJobResult.cs index 40858ea46e..5e0151efb7 100644 --- a/src/ApiService/ApiService/Functions/QueueJobResult.cs +++ b/src/ApiService/ApiService/Functions/QueueJobResult.cs @@ -57,7 +57,7 @@ public async Async.Task Run([QueueTrigger("job-result", Connection = "AzureWebJo return; } - var jobResult = await _context.JobResultOperations.CreateOrUpdate(job.JobId, jr.TaskId, jr.MachineId, jr.CreatedAt, jobResultType, value); + var jobResult = await _context.JobResultOperations.CreateOrUpdate(job.JobId, jr.TaskId, jr.MachineId, jr.CreatedAt.Value, jobResultType, value); if (!jobResult.IsOk) { _log.LogError("failed to create or update with job result {JobId}", job.JobId); } From 50ef2f8f90814cd5785a7c7ae2a47c3efaac69f1 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Wed, 25 Oct 2023 19:55:03 +0000 Subject: [PATCH 83/88] Remove. --- src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs | 5 ----- 1 file changed, 5 deletions(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index 5ebfdf0640..ad3b46ade5 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -25,11 +25,6 @@ public JobResultOperations(ILogger log, IOnefuzzContext con return await data.FirstOrDefaultAsync(); } - public async Async.Task GetJobResults(Guid jobId) { - var data = QueryAsync(Query.PartitionKey(jobId.ToString())); - return await data.FirstOrDefaultAsync(); - } - private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, DateTime createdAt, string resultType, Dictionary resultValue) { var jobId = job.JobId; var taskIdMachineIdMetric = string.Concat(taskId, machineId, resultType); From c9859e8738ea1b6ffc978f750c7c02ec86c65962 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Wed, 25 Oct 2023 19:57:08 +0000 Subject: [PATCH 84/88] Add log statement. --- src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index ad3b46ade5..cee4dc5a1a 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -51,6 +51,8 @@ private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, D if (!r.IsOk) { throw new InvalidOperationException($"failed to replace job result with taskId {taskId} and machineId+metricType {taskIdMachineIdMetric}"); } + } else { + _logTracer.LogInformation($"received an out-of-date metric. 
skipping."); } break; default: From 53c5c326046aa2a52c336d4a4b05c6c4238ed1dd Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Wed, 25 Oct 2023 19:57:35 +0000 Subject: [PATCH 85/88] Remove function handle.: --- src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index cee4dc5a1a..bb223fd9e8 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -7,7 +7,6 @@ namespace Microsoft.OneFuzz.Service; public interface IJobResultOperations : IOrm { Async.Task GetJobResult(Guid jobId, Guid taskId, Guid machineId, string metricType); - Async.Task GetJobResults(Guid jobId); Async.Task CreateOrUpdate(Guid jobId, Guid taskId, Guid machineId, DateTime createdAt, string resultType, Dictionary resultValue); } From 9b5c40fe26f182b6bac24f7b41b79cb25734ff67 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Wed, 25 Oct 2023 20:02:54 +0000 Subject: [PATCH 86/88] Updating --- src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index bb223fd9e8..d2c4361098 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -45,7 +45,7 @@ private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, D case COVERAGE_DATA: case RUNTIME_STATS: if (oldEntry.CreatedAt < createdAt) { - oldEntry = oldEntry with { MetricValue = resultValue }; + oldEntry = oldEntry with { CreatedAt = createdAt, MetricValue = resultValue }; r = await Update(oldEntry); if (!r.IsOk) { throw new InvalidOperationException($"failed to replace job result with taskId {taskId} and machineId+metricType {taskIdMachineIdMetric}"); From a3537ba7f9b7a156ee77f30232a0edbc2326718e Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Wed, 25 Oct 2023 20:26:22 +0000 Subject: [PATCH 87/88] Concat names --- src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index d2c4361098..45e016fc3c 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -20,13 +20,13 @@ public JobResultOperations(ILogger log, IOnefuzzContext con } public async Async.Task GetJobResult(Guid jobId, Guid taskId, Guid machineId, string metricType) { - var data = QueryAsync(Query.SingleEntity(jobId.ToString(), string.Concat(taskId, machineId, metricType))); + var data = QueryAsync(Query.SingleEntity(jobId.ToString(), string.Concat(taskId, "-", machineId, "-", metricType))); return await data.FirstOrDefaultAsync(); } private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, DateTime createdAt, string resultType, Dictionary resultValue) { var jobId = job.JobId; - var taskIdMachineIdMetric = string.Concat(taskId, machineId, resultType); + var taskIdMachineIdMetric = string.Concat(taskId, "-", machineId, 
"-", resultType); var oldEntry = await GetJobResult(jobId, taskId, machineId, resultType); From 4d5c87da82a8829d5ee8a999754851645afbc698 Mon Sep 17 00:00:00 2001 From: Noah McGregor Harper <74685766+nharper285@users.noreply.github.com> Date: Wed, 25 Oct 2023 21:17:35 +0000 Subject: [PATCH 88/88] Set version. --- src/ApiService/ApiService/Functions/QueueJobResult.cs | 2 +- src/ApiService/ApiService/OneFuzzTypes/Model.cs | 2 ++ .../ApiService/onefuzzlib/JobResultOperations.cs | 10 +++++----- src/agent/onefuzz-result/src/job_result.rs | 3 +++ 4 files changed, 11 insertions(+), 6 deletions(-) diff --git a/src/ApiService/ApiService/Functions/QueueJobResult.cs b/src/ApiService/ApiService/Functions/QueueJobResult.cs index ac49731e2a..31b39802d6 100644 --- a/src/ApiService/ApiService/Functions/QueueJobResult.cs +++ b/src/ApiService/ApiService/Functions/QueueJobResult.cs @@ -57,7 +57,7 @@ public async Async.Task Run([QueueTrigger("job-result", Connection = "AzureWebJo return; } - var jobResult = await _context.JobResultOperations.CreateOrUpdate(job.JobId, jr.TaskId, jr.MachineId, jr.CreatedAt.Value, jobResultType, value); + var jobResult = await _context.JobResultOperations.CreateOrUpdate(job.JobId, jr.TaskId, jr.MachineId, jr.CreatedAt.Value, jr.Version, jobResultType, value); if (!jobResult.IsOk) { _log.LogError("failed to create or update with job result {JobId}", job.JobId); } diff --git a/src/ApiService/ApiService/OneFuzzTypes/Model.cs b/src/ApiService/ApiService/OneFuzzTypes/Model.cs index 720f0103ca..4dd4000283 100644 --- a/src/ApiService/ApiService/OneFuzzTypes/Model.cs +++ b/src/ApiService/ApiService/OneFuzzTypes/Model.cs @@ -49,6 +49,7 @@ public record TaskJobResultEntry( Guid? JobId, Guid MachineId, DateTime? CreatedAt, + double Version, JobResultData Data, Dictionary Value ); @@ -924,6 +925,7 @@ public record JobResult( string Project, string Name, string Type, + double Version, Dictionary MetricValue ) : EntityBase(); diff --git a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs index 45e016fc3c..b39c654642 100644 --- a/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs +++ b/src/ApiService/ApiService/onefuzzlib/JobResultOperations.cs @@ -7,7 +7,7 @@ namespace Microsoft.OneFuzz.Service; public interface IJobResultOperations : IOrm { Async.Task GetJobResult(Guid jobId, Guid taskId, Guid machineId, string metricType); - Async.Task CreateOrUpdate(Guid jobId, Guid taskId, Guid machineId, DateTime createdAt, string resultType, Dictionary resultValue); + Async.Task CreateOrUpdate(Guid jobId, Guid taskId, Guid machineId, DateTime createdAt, double version, string resultType, Dictionary resultValue); } public class JobResultOperations : Orm, IJobResultOperations { @@ -24,7 +24,7 @@ public JobResultOperations(ILogger log, IOnefuzzContext con return await data.FirstOrDefaultAsync(); } - private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, DateTime createdAt, string resultType, Dictionary resultValue) { + private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, DateTime createdAt, double version, string resultType, Dictionary resultValue) { var jobId = job.JobId; var taskIdMachineIdMetric = string.Concat(taskId, "-", machineId, "-", resultType); @@ -32,7 +32,7 @@ private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, D if (oldEntry == null) { _logTracer.LogInformation($"attempt to insert new job result {taskId} and taskId+machineId+metricType 
{taskIdMachineIdMetric}"); - var newEntry = new JobResult(JobId: jobId, TaskIdMachineIdMetric: taskIdMachineIdMetric, TaskId: taskId, MachineId: machineId, CreatedAt: createdAt, Project: job.Config.Project, Name: job.Config.Name, resultType, resultValue); + var newEntry = new JobResult(JobId: jobId, TaskIdMachineIdMetric: taskIdMachineIdMetric, TaskId: taskId, MachineId: machineId, CreatedAt: createdAt, Project: job.Config.Project, Name: job.Config.Name, resultType, Version: version, resultValue); var result = await Insert(newEntry); if (!result.IsOk) { throw new InvalidOperationException($"failed to insert job result with taskId {taskId} and taskId+machineId+metricType {taskIdMachineIdMetric}"); @@ -70,7 +70,7 @@ private async Async.Task TryUpdate(Job job, Guid taskId, Guid machineId, D } - public async Async.Task CreateOrUpdate(Guid jobId, Guid taskId, Guid machineId, DateTime createdAt, string resultType, Dictionary resultValue) { + public async Async.Task CreateOrUpdate(Guid jobId, Guid taskId, Guid machineId, DateTime createdAt, double version, string resultType, Dictionary resultValue) { var job = await _context.JobOperations.Get(jobId); if (job == null) { @@ -82,7 +82,7 @@ public async Async.Task CreateOrUpdate(Guid jobId, Guid taskI _logTracer.LogInformation("attempt to update job result {JobId}", job.JobId); var policy = Policy.Handle().WaitAndRetryAsync(50, _ => new TimeSpan(0, 0, 5)); await policy.ExecuteAsync(async () => { - success = await TryUpdate(job, taskId, machineId, createdAt, resultType, resultValue); + success = await TryUpdate(job, taskId, machineId, createdAt, version, resultType, resultValue); _logTracer.LogInformation("attempt {success}", success); }); return OneFuzzResultVoid.Ok; diff --git a/src/agent/onefuzz-result/src/job_result.rs b/src/agent/onefuzz-result/src/job_result.rs index ace45d0522..e6b4f50377 100644 --- a/src/agent/onefuzz-result/src/job_result.rs +++ b/src/agent/onefuzz-result/src/job_result.rs @@ -35,6 +35,7 @@ struct JobResult { machine_id: Uuid, machine_name: String, created_at: DateTime, + version: f64, data: JobResultData, value: HashMap, } @@ -107,6 +108,7 @@ impl JobResultSender for TaskJobResultClient { let machine_id = self.context.state.machine_id; let machine_name = self.context.state.machine_name.clone(); let created_at = chrono::Utc::now(); + let version = 1.0; let _ = self .context .queue_client @@ -116,6 +118,7 @@ impl JobResultSender for TaskJobResultClient { machine_id, machine_name, created_at, + version, data, value, })