diff --git a/azure-pipelines.qodana.yml b/azure-pipelines.qodana.yml new file mode 100644 index 000000000..5d2c9f18c --- /dev/null +++ b/azure-pipelines.qodana.yml @@ -0,0 +1,15 @@ +pool: + vmImage: 'ubuntu-latest' + +jobs: + - job: QodanaScan + displayName: 'Qodana Scan' + steps: + - task: Cache@2 # Not required, but Qodana will open projects with cache faster. + inputs: + key: '"$(Build.Repository.Name)" | "$(Build.SourceBranchName)" | "$(Build.SourceVersion)"' + path: '$(Agent.TempDirectory)/qodana/cache' + restoreKeys: | + "$(Build.Repository.Name)" | "$(Build.SourceBranchName)" + "$(Build.Repository.Name)" + - task: QodanaScan@2023 diff --git a/docs/01-getting-started/01-installation.md b/docs/01-getting-started/01-installation.md new file mode 100644 index 000000000..b63bcb8ad --- /dev/null +++ b/docs/01-getting-started/01-installation.md @@ -0,0 +1,39 @@ +--- +title: Installation +--- + +Before you can set up a build project, you need to install NUKE's dedicated [.NET global tool](https://docs.microsoft.com/en-us/dotnet/core/tools/global-tools): + +```powershell +# terminal-command +dotnet tool install Nuke.GlobalTool --global +``` + +From now on, you can use the global tool to: + +- [Set up new builds](02-setup.md) +- [Run existing builds](03-execution.md) +- [Leverage shell completion](../06-global-tool/00-shell-completion.md) +- [Add tool & library packages](../06-global-tool/01-packages.md) +- [Navigate around root directories](../06-global-tool/03-navigation.md) +- [Convert CAKE build scripts](../06-global-tool/04-cake.md) +- [Manage secrets in parameter files](../06-global-tool/02-secrets.md) + +:::note +If you're running on a Linux-based system, it's worth checking whether the `nuke` global tool is available. This can be verified with `which nuke`. If the global tool is not found, you have to manually add `$HOME/.dotnet/tools` to your terminal configuration: + + + + +```powershell +# terminal-command +echo 'export PATH=$HOME/.dotnet/tools:$PATH' >> ~/.zshrc +``` + + + +::: + +:::info +While you could theoretically use NUKE by only adding its main NuGet package, we highly recommend using the dedicated global tool to set up new builds. This ensures that your repository will run consistently in different environments and that your build implementation is always properly formatted. +::: diff --git a/docs/01-getting-started/02-setup.md b/docs/01-getting-started/02-setup.md new file mode 100644 index 000000000..73641e1ac --- /dev/null +++ b/docs/01-getting-started/02-setup.md @@ -0,0 +1,142 @@ +--- +title: Build Setup +--- + +import AsciinemaPlayer from '@site/src/components/AsciinemaPlayer'; + +After [installing the NUKE global tool](01-installation.md), you can call it from anywhere on your machine to set up a new build: + +```powershell +# terminal-command +nuke :setup +``` + +:::tip +Preferably, you should run the setup from inside an existing repository. NUKE will search upwards for the nearest `.git` or `.svn` directory to determine the _build root directory_. If neither is found, it will use the current directory. You can also pass the `--root` parameter to specify that the current directory should be used as the root directory. +::: + +During the setup, you'll be asked several questions to configure your build to your preferences: + +

+ +

+ +**Congratulations!** πŸ₯³ Your first build has now been set up, and you can [run the build](03-execution.md) with the default implementation! + +## Effective Changes + +The setup will create a number of files in your repository and – if you chose to – add the build project to your solution file. Below, you can examine the structure of added files and what they are used for: + +```bash + +β”œβ”€β”€ .nuke # Root directory marker +β”‚ β”œβ”€β”€ build.schema.json # Build schema file +β”‚ └── parameters.json # Default parameters file +β”‚ +β”œβ”€β”€ build +β”‚ β”œβ”€β”€ .editorconfig # Common formatting +β”‚ β”œβ”€β”€ _build.csproj # Build project file +β”‚ β”œβ”€β”€ _build.csproj.DotSettings # ReSharper/Rider formatting +β”‚ β”œβ”€β”€ Build.cs # Default build implementation +β”‚ β”œβ”€β”€ Directory.Build.props # MSBuild stop files +β”‚ └── Directory.Build.targets +β”‚ +β”œβ”€β”€ build.cmd # Cross-platform bootstrapping +β”œβ”€β”€ build.ps1 # Windows/PowerShell bootstrapping +└── build.sh # Linux/Shell bootstrapping +``` + +:::note +If you prefer, you _may_ choose to delete any of the bootstrapping scripts, MSBuild stop files, or formatting settings. For instance, when you're sure that no other MSBuild files will interfere with the build project, or you don't rely on either Roslyn or ReSharper/Rider for formatting. However, note that the **bootstrapping scripts play an essential role** in CI/CD environments, and are also used for the configuration generation feature. +::: + +## Project Structure + +While you can enjoy writing most build-relevant logic inside your build console application, there is still a large number of files involved in the general process of build automation. NUKE organizes these files in different folders as linked files in the build project for you: + + + + +```powershell + +β”œβ”€β”€ .nuke +β”‚ β”œβ”€β”€ parameters.json # Parameters files +β”‚ └── parameters.*.json +β”‚ +β”œβ”€β”€ GitVersion.yml # GitVersion configuration +β”œβ”€β”€ global.json # SDK version +β”œβ”€β”€ nuget.config # NuGet feeds configuration +└── version.json # Nerdbank GitVersioning configuration +``` + + + + +```powershell + +β”œβ”€β”€ .github +β”‚ └── workflows # GitHub Actions +β”‚ └── *.yml +β”‚ +β”œβ”€β”€ .teamcity # TeamCity +β”‚ └── settings.kts +β”‚ +β”œβ”€β”€ .gitlab-ci.yml # GitLab CI +β”œβ”€β”€ .space.kts # JetBrains Space +β”œβ”€β”€ .travis.yml # Travis CI +β”œβ”€β”€ appveyor.yml # AppVeyor +β”œβ”€β”€ appveyor.*.yml +β”œβ”€β”€ azure-pipelines.yml # Azure Pipelines +β”œβ”€β”€ azure-pipelines.*.yml +└── bitrise.yml # Bitrise +``` + + + + +```powershell + +β”œβ”€β”€ build.cmd # Cross-platform +β”œβ”€β”€ build.ps1 # Windows/PowerShell +└── build.sh # Linux/Shell +``` + + + + +```powershell + +└── ** + β”œβ”€β”€ Directory.Build.props + └── Directory.Build.targets +``` + + + + +:::info +You can deactivate linking of the above files by removing the `NukeRootDirectory` and `NukeScriptDirectory` properties from the build project file. + +```xml title="_build.csproj" +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + // highlight-start + <NukeRootDirectory>..</NukeRootDirectory> + <NukeScriptDirectory>..</NukeScriptDirectory> + // highlight-end + </PropertyGroup> + +</Project> +``` + +::: + +[^1]: Interface default members behave like explicit interface implementations, which means that to access their members, the `this` reference must be cast explicitly to the interface type. For instance, `((IComponent)this).Target`.
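+ +For illustration, a minimal sketch of such a cast (the `IComponent` interface and its `Custom` target are hypothetical names, not part of the generated setup): + +```csharp +interface IComponent +{ + // A default member provided by the interface + Target Custom => _ => _ + .Executes(() => { }); +} + +class Build : NukeBuild, IComponent +{ + // The default member is not part of the class's own surface, + // so it must be accessed through an explicit cast + Target MyTarget => _ => _ + .DependsOn(((IComponent)this).Custom) + .Executes(() => { }); +} +```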
diff --git a/docs/01-getting-started/03-execution.md b/docs/01-getting-started/03-execution.md new file mode 100644 index 000000000..f656f0c61 --- /dev/null +++ b/docs/01-getting-started/03-execution.md @@ -0,0 +1,239 @@ +--- +title: Build Execution +--- + +import AsciinemaPlayer from '@site/src/components/AsciinemaPlayer'; + +After you've [set up a build](02-setup.md) you can run it either through the global tool or one of the installed bootstrapping scripts: + + + + +```powershell +# terminal-command +nuke [arguments] +``` + + + + +```powershell +# terminal-command +.\build.cmd [arguments] +``` + + + + +```powershell +# terminal-command +./build.sh [arguments] +``` + + + + +:::info +This document discusses the default build arguments (also referred to as parameters). You will learn how to [define custom parameters](../02-fundamentals/06-parameters.md) in a following chapter. +::: + +:::tip +The global tool makes running builds a lot easier. Once you've configured the [shell completion](../06-global-tool/00-shell-completion.md), you can enter arguments much faster and avoid any typos. It also allows you to run a build from anywhere below the root directory without having to go back to where the bootstrapping scripts are located. +::: + +## Build Summary + +Once a build has finished running an execution plan, it will print a comprehensive summary with all involved targets, their outcome, duration, and additional metadata: + + +═══════════════════════════════════════{'\n'} +Target Status Duration{'\n'} +───────────────────────────────────────{'\n'} +Restore Succeeded 0:16{'\n'} +Compile Succeeded 0:59 // Version: 5.3.0-alpha.35{'\n'} +Test Succeeded 0:41 // Passed: 327, Skipped: 6{'\n'} +Pack Succeeded 0:10 // Packages: 4{'\n'} +───────────────────────────────────────{'\n'} +Total 2:08{'\n'} +═══════════════════════════════════════{'\n'} +{'\n'} +Build succeeded on {new Date().toLocaleString()}. οΌΌοΌˆοΌΎα΄—οΌΎοΌ‰οΌ + + +[//]: # (## Default Parameters) +[//]: # () +[//]: # (| Parameter | Comment |) +[//]: # (|:--------------|:----------------------------------------------------------|) +[//]: # (| `--target` | List of targets to be invoked |) +[//]: # (| `--skip` | List of targets to be skipped (empty for all non-invoked) |) +[//]: # (| `--help` | Shows the help text |) +[//]: # (| `--host` | Forcefully sets the `Host` implementation |) +[//]: # (| `--profile` | List of `parameters..json` files to load |) +[//]: # (| `--plan` | Shows the HTML dependency graph |) +[//]: # (| `--verbosity` | Sets the verbosity used for logging |) +[//]: # (| `--continue` | Continues the build from last point of failure |) +[//]: # (| `--root` | Forcefully sets the root directory |) + +## Invoking Targets + +You can invoke a single target or a set of targets either through positional or named arguments: + + + + +```powershell +# terminal-command +nuke [other-targets...] +``` + + + + +```powershell +# terminal-command +nuke [arguments...] --targets [other-targets...] +``` + + + + +:::tip +Passing targets as named arguments allows you to quickly overwrite the invoked targets without moving the caret to the front of a long invocation command. +::: + +## Skipping Targets + +You can skip all or individual targets from the execution plan that are not specifically invoked: + + + + +```powershell +# terminal-command +nuke [targets] --skip +``` + + + + +```powershell +# terminal-command +nuke [targets] --skip +``` + + + + +:::tip +Skipping targets can greatly improve your troubleshooting experience. 
Irrelevant targets won't waste execution time, and there is no need to temporarily change dependencies between targets. +::: + +## Aborting Builds + +At any moment during execution, you can hit `Ctrl-C` to abort the build with a [SIGINT signal](https://docs.microsoft.com/en-us/windows/console/ctrl-c-and-ctrl-break-signals). Targets that were running at the time will be marked with the `Aborted` status: + +``` +═══════════════════════════════════════ +Target Status Duration +─────────────────────────────────────── +Restore Succeeded 0:16 +Compile Aborted 0:01 +Pack NotRun +─────────────────────────────────────── +Total 0:17 +═══════════════════════════════════════ +``` + +## Continuing Builds + +You can continue a failed or aborted build from the first point of failure: + +```powershell +# terminal-command +nuke [arguments...] --continue +``` + +All previously succeeded targets will be skipped automatically, which can save a lot of unnecessary execution time: + +``` +═══════════════════════════════════════ +Target Status Duration +─────────────────────────────────────── +Restore Skipped +Compile Succeeded 0:15 +Pack Succeeded 0:05 +─────────────────────────────────────── +Total 0:20 +═══════════════════════════════════════ +``` + +:::tip +When you combine the `--continue` argument with the [`dotnet watch`](https://docs.microsoft.com/dotnet/core/tools/dotnet-watch) command, you can establish a very tight feedback loop while working on your target implementation. Just go to the build project directory and call: + +```powershell +# terminal-command +dotnet watch run -- [arguments..] --continue +``` +::: + +:::caution +The state of the build instance is NOT serialized. I.e., if a succeeded target produced data for a failed target, that data won't be available during the continuation of the build. + +Moreover, a build can only reliably continue when the invocation is the same as in the previous attempt. That means that you can only add the `--continue` switch but not change any other arguments. If this rule is violated, the build will start from the very beginning. +::: + +## Help Text + +When you're coming back to a repository or build you haven't worked on in a while, you can bring up the integrated help text by calling: + +```powershell +# terminal-command +nuke --help +``` + +This allows you to inspect all available targets with their direct dependencies as well as parameters with their descriptions: + +```text +Targets (with their direct dependencies): + + Clean + Restore + Compile (default) -> Restore + +Parameters: + + --configuration Configuration to build - Default is 'Debug' (local) or + 'Release' (server). + + --continue Indicates to continue a previously failed build attempt. + --help Shows the help text for this build assembly. + --host Host for execution. Default is 'automatic'. + --no-logo Disables displaying the NUKE logo. + --plan Shows the execution plan (HTML). + --profile Defines the profiles to load. + --root Root directory during build execution. + --skip List of targets to be skipped. Empty list skips all + dependencies. + --target List of targets to be invoked. Default is 'Compile'. + --verbosity Logging verbosity during build execution. Default is + 'Normal'. 
+``` + +## Execution Plans + +In order to get a better understanding of how your builds are structured, you can load a visual representation of the different execution plans by calling: + +```powershell +# terminal-command +nuke --plan +``` + +Hovering a target will show its individual execution plan, that is, all targets that are going to be executed when that specific target is invoked. The style of an edge (solid/dashed/yellow) between two targets indicates their [dependency relation](../02-fundamentals/05-targets.md#dependencies) (execution/ordering/trigger): + +![Visualizing Execution Plans](plan.gif) + +:::info + +When no targets are hovered, the execution plan for the [default targets](../02-fundamentals/04-builds.md) is highlighted. + +::: diff --git a/docs/01-getting-started/07-telemetry.md b/docs/01-getting-started/07-telemetry.md new file mode 100644 index 000000000..a04a9df4c --- /dev/null +++ b/docs/01-getting-started/07-telemetry.md @@ -0,0 +1,81 @@ +--- +title: Telemetry +--- + +In an effort to improve NUKE and provide you with a better, more tailored experience, we include a telemetry feature that collects anonymous usage data and enables us to make more informed decisions for the future. + +We want you to be fully aware of telemetry, which is why the global tool will show a disclosure notice on first start. In addition, every build project must define a `NukeTelemetryVersion` property: + +```xml title="_build.csproj" +<PropertyGroup> + <NukeTelemetryVersion>1</NukeTelemetryVersion> +</PropertyGroup> +``` + +We will increase the telemetry version whenever we add or change significant data points. With every version change and after updating the `Nuke.Common` package, you will be prompted again for confirmation. + +## Disclosure + +NUKE will display a prompt similar to the following when executing a build project without the `NukeTelemetryVersion` property being set or when executing the global tool for the first time: + +```text +Telemetry +--------- +NUKE collects anonymous usage data in order to help us improve your experience. + +Read more about scope, data points, and opt-out: https://nuke.build/telemetry +``` + +Once you confirm the notice, NUKE will either: + +- Create an awareness cookie under `~/.nuke/telemetry-awareness/v1` for the respective global tool, or +- Add the `NukeTelemetryVersion` property to the project file. + +## Scope + +As a global tool and library, NUKE has [multiple events](https://github.com/nuke-build/nuke/blob/master/source/Nuke.Common/Execution/Telemetry.Events.cs) where telemetry is collected: + +- `BuildStarted` – when a build was started +- `TargetSucceeded` – when a target succeeded (only `Restore`, `Compile`, `Test`) +- `BuildSetup` – when setting up a build via `nuke [:setup]` +- `CakeConvert` – when converting Cake files via `nuke :cake-convert` + +:::info +Data for `BuildStarted` and `TargetSucceeded` is only collected when `IsServerBuild` returns `true` (i.e., CI build), or when the build is invoked via the global tool. That is, a contributor executing `build.ps1` or `build.sh` will not have telemetry enabled unknowingly. Likewise, when a build project targets a higher telemetry version than the installed global tool, the lower version will be used. +::: + +## Data Points + +The [telemetry data points](https://github.com/nuke-build/nuke/blob/master/source/Nuke.Common/Execution/Telemetry.Properties.cs) do not include personal data, such as usernames or email addresses.
The data is sent securely to Microsoft servers using [Azure Monitor](https://azure.microsoft.com/services/monitor/) technology, held under restricted access, and published under strict security controls from secure [Azure Storage](https://azure.microsoft.com/services/storage/) systems. + +Protecting your privacy is important to us. If you suspect the telemetry is collecting sensitive data or the data is being insecurely or inappropriately handled, file an issue in the [nuke-build/nuke](https://github.com/nuke-build/nuke) repository or [email us](mailto:info@nuke.build?subject=Telemetry) for investigation. + +The telemetry feature collects the following data: + +| Version | Data | +|:--------|:------------------------------------------------------------------------------------------| +| All | Timestamp of invocation | +| All | Operating system | +| All | Version of .NET SDK | +| All | Repository provider (GitHub, GitLab, Bitbucket, etc.) | +| All | Repository Branch (`main`, `develop`, `feature`, `hotfix`, custom) | +| All | Hashed Repository URL (SHA256; first 6 characters) | +| All | Hashed Commit Sha (SHA256; first 6 characters) | +| All | Compile time of build project in seconds | +| All | Target framework | +| All | Version of `Nuke.Common` and `Nuke.GlobalTool` | +| All | Host implementation (only non-custom) | +| All | Build type (project/global tool) | +| All | Number of executable targets | +| All | Number of custom extensions | +| All | Number of custom components | +| All | Used configuration generators and build components (only non-custom) | +| All | Target execution time in seconds (only for targets named _Restore_, _Compile_, or _Test_) | + +:::note +Whenever a type does not originate from the `Nuke` namespace, it is replaced with ``. +::: + +## How to opt out + +The telemetry feature is enabled by default. To opt out, set the `NUKE_TELEMETRY_OPTOUT` environment variable to `1` or `true`. diff --git a/docs/01-getting-started/_category_.json b/docs/01-getting-started/_category_.json new file mode 100644 index 000000000..41f4c00e7 --- /dev/null +++ b/docs/01-getting-started/_category_.json @@ -0,0 +1,3 @@ +{ + "label": "Getting Started" +} diff --git a/docs/01-getting-started/plan.gif b/docs/01-getting-started/plan.gif new file mode 100644 index 000000000..7f7e52188 Binary files /dev/null and b/docs/01-getting-started/plan.gif differ diff --git a/docs/02-fundamentals/04-builds.md b/docs/02-fundamentals/04-builds.md new file mode 100644 index 000000000..a8d0c02d5 --- /dev/null +++ b/docs/02-fundamentals/04-builds.md @@ -0,0 +1,133 @@ +--- +title: Build Anatomy +--- + +A build project is a regular .NET console application. However, unlike regular console applications, NUKE chooses to name the main class `Build` instead of `Program`. This establishes a convention and allows easier navigation in your solution. 
The `Build` class must inherit from the `NukeBuild` base class and define a `Main` method that invokes the build execution with any number of default targets: + + + + +```csharp title="Build.cs" +class Build : NukeBuild +{ + public static int Main() => Execute<Build>(x => x.Compile); + + // Target definitions +} +``` + + + + +```csharp title="Build.cs" +class Build : NukeBuild +{ + public static int Main() => Execute<Build>(x => x.Test, x => x.Pack); + + // Target definitions +} +``` + + + + +```csharp title="Build.cs" +class Build : NukeBuild +{ + public static int Main() => Execute<Build>(); + + // Target definitions +} +``` + + + + +:::info +You will learn how to [write target definitions](05-targets.md) in the next chapter. +::: + +## Base Properties + +The `NukeBuild` base class offers great insight into your build through various properties. + +### Build Environment + +Properties related to the build environment provide information about where the build is running and where various files are located: + +```csharp title="NukeBuild.cs" +abstract class NukeBuild +{ + static Host Host { get; } + static bool IsLocalBuild { get; } + static bool IsServerBuild { get; } + + static AbsolutePath RootDirectory { get; } + static AbsolutePath TemporaryDirectory { get; } + + static AbsolutePath BuildAssemblyFile { get; } + static AbsolutePath BuildAssemblyDirectory { get; } + static AbsolutePath BuildProjectFile { get; } + static AbsolutePath BuildProjectDirectory { get; } +} +``` + +:::tip +With the `Host` property you can determine the running environment, for instance with `Host is TeamCity`. Make sure to explore other implementations of the `Host` base class through your IDE. + +--- + +Since `Host`, `IsLocalBuild`, and `IsServerBuild` are static properties, you can conveniently use them in [static conditions](05-targets.md#conditional-execution) to skip targets (including their dependencies) in local or server builds. +::: + +:::info +Learn more about the `AbsolutePath` class and how it's used for [path construction](../03-common/03-paths.md). +::: + +### Build Status + +Properties related to the build status allow you to examine the status of your targets and the overall build: + +```csharp title="NukeBuild.cs" +abstract class NukeBuild +{ + IReadOnlyCollection<ExecutableTarget> InvokedTargets { get; } + IReadOnlyCollection<ExecutableTarget> SkippedTargets { get; } + + bool IsSuccessful { get; } + bool IsFailing { get; } + bool IsFinished { get; } + int? ExitCode { get; set; } + + IReadOnlyCollection<ExecutableTarget> ExecutionPlan { get; } + + IReadOnlyCollection<ExecutableTarget> ScheduledTargets { get; } + IReadOnlyCollection<ExecutableTarget> RunningTargets { get; } + IReadOnlyCollection<ExecutableTarget> AbortedTargets { get; } + IReadOnlyCollection<ExecutableTarget> FailedTargets { get; } + IReadOnlyCollection<ExecutableTarget> SucceededTargets { get; } + IReadOnlyCollection<ExecutableTarget> FinishedTargets { get; } +} +``` + +:::tip +You can examine the status of targets by using any of the appropriate collections. For instance, to check if a target has failed, you can write `FailedTargets.Contains(MyTarget)`. This pattern is especially useful with [dynamic conditions](05-targets.md#conditional-execution).
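+ +As a sketch, a reporting target that only runs when another target has failed (the target names are illustrative): + +```csharp +Target Report => _ => _ + .TriggeredBy(Compile) + .AssuredAfterFailure() + // highlight-start + .OnlyWhenDynamic(() => FailedTargets.Contains(Compile)) + // highlight-end + .Executes(() => Log.Warning("Compilation failed, see the log above")); +```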
+::: + +## Build Events + +For implementing cross-cutting concerns, like telemetry and similar, you can hook into various build events: + +```csharp title="NukeBuild.cs" +abstract class NukeBuild +{ + virtual void OnBuildCreated(); + virtual void OnBuildInitialized(); + virtual void OnBuildFinished(); + + virtual void OnTargetRunning(string target); + virtual void OnTargetSkipped(string target); + virtual void OnTargetFailed(string target); + virtual void OnTargetSucceeded(string target); +} +``` diff --git a/docs/02-fundamentals/05-targets.md b/docs/02-fundamentals/05-targets.md new file mode 100644 index 000000000..5f53a633d --- /dev/null +++ b/docs/02-fundamentals/05-targets.md @@ -0,0 +1,332 @@ +--- +title: Target Definitions +--- + +Inside a `Build` class, you can define your build steps as `Target` properties. The implementation for a build step is provided as a lambda function through the `Executes` method: + + + + +```csharp title="Build.cs" +class Build : NukeBuild +{ + public static int Main() => Execute(); + + Target MyTarget => _ => _ + .Executes(() => + { + Console.WriteLine("Hello!"); + }); +} +``` + + + + + +```csharp title="Build.cs" +class Build : NukeBuild +{ + public static int Main() => Execute(); + + Target MyTarget => _ => _ + .Executes(async () => + { + await Console.Out.WriteLineAsync("Hello!"); + }); +} +``` + +:::caution +Async targets are just a convenience feature that allows you using async APIs in a straightforward way. Behind the scenes, they are still run synchronously. +::: + + + + +## Dependencies + +Specifying dependencies is essential to let targets run in a meaningful and predictable order. There are 3 different types of dependencies, each of them can be defined from both directions. + + + + + + +Define that target `A` must run before target `B` unless `A` is skipped: + +```csharp title="Build.cs" +class Build : NukeBuild +{ + Target A => _ => _ + // highlight-start + .DependentFor(B) // Choose this... + // highlight-end + .Executes(() => { }); + + Target B => _ => _ + // highlight-start + .DependsOn(A) // ...or this! + // highlight-end + .Executes(() => { }); +} +``` + + + + + + +Define that target `A` runs before target `B` if both are scheduled: + +```csharp title="Build.cs" +class Build : NukeBuild +{ + Target A => _ => _ + // highlight-start + .Before(B) // Choose this... + // highlight-end + .Executes(() => { }); + + Target B => _ => _ + // highlight-start + .After(A) // ...or this! + // highlight-end + .Executes(() => { }); +} +``` + + + + + + +Define that target `A` invokes target `B` once it completes: + + +```csharp title="Build.cs" +class Build : NukeBuild +{ + Target A => _ => _ + // highlight-start + .Triggers(B) // Choose this... + // highlight-end + .Executes(() => { }); + + Target B => _ => _ + // highlight-start + .TriggeredBy(A) // ...or this! + // highlight-end + .Executes(() => { }); +} +``` + + + + +:::tip +When choosing a direction, you should ask yourself which target should know about the existence of the other. For instance, should a `Release` target _trigger_ a `Tweet` target? Or should a `Tweet` target _be triggered_ by a `Release` target? + +::: + +:::caution + +Dependencies between targets are solely defined between the individual targets and _not_ through the position they take in a dependency call. The following examples illustrate the difference between the **partial and total order** of targets: + + + + +The execution is nondeterministic between `A->B->C` and `B->A->C`. 
This isn't necessarily problematic, but something to be aware of. In particular, it allows different targets to run in parallel (currently only in compatible CI/CD environments). + +```csharp title="Build.cs" +class Build : NukeBuild +{ + Target A => _ => _ + .Executes(() => { }); + + Target B => _ => _ + .Executes(() => { }); + + Target C => _ => _ + // highlight-start + .DependsOn(A, B) + // highlight-end + .Executes(() => { }); +} +``` + + + + +The execution is always deterministic with `A->B->C`. + +```csharp title="Build.cs" +class Build : NukeBuild +{ + Target A => _ => _ + .Executes(() => { }); + + Target B => _ => _ + // highlight-start + .DependsOn(A) + // highlight-end + .Executes(() => { }); + + Target C => _ => _ + // highlight-start + .DependsOn(B) + // highlight-end + .Executes(() => { }); +} +``` + + + + +::: + +## Conditional Execution + +Apart from [skipping targets manually](../01-getting-started/03-execution.md#skipping-targets), you can also programmatically decide whether a target should be skipped. Depending on the use-case, you can choose between dynamic and static conditions. + + + + +Define a condition that is checked right before target `B` executes: + +```csharp +class Build : NukeBuild +{ + readonly List<string> Data = new(); + + Target A => _ => _ + .Executes(() => { /* Populate Data */ }); + + Target B => _ => _ + .DependsOn(A) + // highlight-start + .OnlyWhenDynamic(() => Data.Any()) + // highlight-end + .Executes(() => { }); +} +``` + + + + +Define a condition that is checked before target `A` and `B` execute: + +```csharp +class Build : NukeBuild +{ + Target A => _ => _ + .Executes(() => { }); + + Target B => _ => _ + // highlight-start + .OnlyWhenStatic(() => IsLocalBuild) + // By default, dependencies are skipped + .WhenSkipped(DependencyBehavior.Execute) + // highlight-end + .DependsOn(A) + .Executes(() => { }); +} +``` + + + + +:::tip +When a condition is not met, the skip reason is created from the boolean expression. For more complex conditions, you can extract the logic into a separate method or property to make the message more readable. +::: + +## Requirements + +You can define target requirements that are checked right at the beginning of the build execution before any other targets are executed: + +```csharp +class Build : NukeBuild +{ + Target A => _ => _ + // highlight-start + .Requires(() => IsServerBuild) + // highlight-end + .Executes(() => { }); +} +``` + +:::note +Target requirements are an important aspect of achieving a [fail-fast behavior](https://en.wikipedia.org/wiki/Fail-fast). Preceding targets won't waste any execution time only to discover that a condition that was known right from the beginning was not met. +::: + +:::tip +When a requirement is not met, the exception message is created from the boolean expression. For more complex requirements, you can extract the logic into a separate method or property to make the message more readable. +::: + +## Failure Handling + +Not every failing target should completely stop the build. Targets that are not essential can allow the execution to continue, while other targets are important enough to run even if a preceding target has failed. For these use-cases, you can configure the failure handling.
+ +Define that execution continues after target `A` throws: + +```csharp +class Build : NukeBuild +{ + Target A => _ => _ + // highlight-start + .ProceedAfterFailure() + // highlight-end + .Executes(() => + { + Assert.Fail("error"); + }); + + Target B => _ => _ + .DependsOn(A) + .Executes(() => { }); +} +``` + + + + +Define that target `B` executes even if another target fails: + +```csharp +class Build : NukeBuild +{ + Target A => _ => _ + .Executes(() => + { + Assert.Fail("error"); + }); + + Target B => _ => _ + // highlight-start + .AssuredAfterFailure() + // highlight-end + .DependsOn(A) + .Executes(() => { }); +} +``` + + + + +## Unlisting Targets + +It is good practice to follow the [single-responsibility principle](https://en.wikipedia.org/wiki/Single-responsibility_principle) when implementing targets. However, you may not want to expose every target through the [build help text](../01-getting-started/03-execution.md#help-text). For cases like this, you can unlist a target: + +```csharp +class Build : NukeBuild +{ + Target A => _ => _ + // highlight-start + .Unlisted() + // highlight-end + .Executes(() => { }); +} +``` diff --git a/docs/02-fundamentals/06-parameters.md b/docs/02-fundamentals/06-parameters.md new file mode 100644 index 000000000..318391bc1 --- /dev/null +++ b/docs/02-fundamentals/06-parameters.md @@ -0,0 +1,244 @@ +--- +title: Parameters +--- + +Another important aspect of build automation is the ability to pass input values to your build. These input values can be anything from plain strings, numeric and enum values, file and directory paths, arrays of the aforementioned, boolean flags, or secrets. NUKE comes with a succinct way to declare parameters and lets you set their values in various ways. + +You can declare a parameter by adding the `Parameter` attribute to a field or property: + +```csharp +[Parameter("Description")] +readonly string MyParameter; +``` + +:::tip +You can set default values for parameters as you would normally do through field and property initializers. You can also use static [build base properties](04-builds.md#base-properties) like `IsLocalBuild` or `IsServerBuild` for environmental adjustments: + +```csharp +[Parameter] +readonly Configuration Configuration = IsServerBuild + ? Configuration.Release + : Configuration.Debug; +``` + +Following best practices, you should mark all your parameters as `readonly`. +::: + +## Passing Parameter Values + +Parameters are resolved through different mechanisms, each supporting a different automation use-case. These mechanisms are explained in the following sections by their resolution priority. + +### Passing Values through the Command-Line + +In the most straightforward way, you can pass parameter values from the command-line through their [kebab-case](https://www.theserverside.com/definition/Kebab-case) names prefixed with a double-dash: + +```powershell +# terminal-command +nuke --my-parameter <value> +``` + +:::tip +With the global tool installed and [shell completion](../06-global-tool/00-shell-completion.md) configured, you can pass parameters much faster and avoid any typos. +::: + +### Passing Values through Parameter Files + +Instead of providing default values in your `Build` class or repeatedly specifying them through the command-line, you can also define them in so-called parameter files (JSON).
These files are located under the `.nuke` directory: + +```json title=".nuke/parameters.json" +{ + "$schema": "./build.schema.json", + "MyParameter": "value" +} +``` + +Besides the default `parameters.json` file, you can create additional profiles following the `parameters.<profile>.json` naming pattern. These profiles can be loaded on demand: + +```powershell +# terminal-command +nuke --profile <profile> [other-profiles...] +``` + +:::info +Profiles are applied in the order they are passed, with the default parameters file always coming first. +::: + +:::tip +Based on the `build.schema.json` file, you can easily configure your parameters inside your IDE using schema-completion: + +

+ +![Completion in Parameter Files](parameter-file-completion-light.webp#gh-light-mode-only) +![Completion in Parameter Files](parameter-file-completion-dark.webp#gh-dark-mode-only) + +

+ +Remember that the `build.schema.json` file must be regenerated whenever you add or change a parameter, for instance by calling `nuke --help`. +::: + +### Passing Values through Environment Variables + +You can set parameter values through environment variables, which can be really helpful when setting up global values in CI/CD pipelines. Casing and underscores are completely ignored during resolution. Also, you can use the `NUKE_` prefix to easily distinguish them from others: + +```powershell +SET MY_PARAMETER=<value> +SET NUKE_MY_PARAMETER=<value> +``` + +## Required Parameters + +You can specify a parameter as a [target requirement](../02-fundamentals/05-targets.md#requirements) using the following shorthand syntax: + + +```cs +Target Deploy => _ => _ + .Requires(() => ApiKey) + .Executes(() => + { + }); +``` + + +:::tip +Using the shorthand syntax allows you to provide the value interactively when the build is executed locally. +::: + +## Secret Parameters + +When parameters are meant to hold **secret values** like passwords or authentication tokens, you can add the `Secret` attribute: + + +```cs +[Parameter] [Secret] readonly string NuGetApiKey; +``` + + +Marking a parameter as a secret allows you to use the [secret management](../06-global-tool/02-secrets.md) through the global tool. + +## Unlisting Parameters + +Just like targets, your parameters will automatically show up in the [build help text](../01-getting-started/03-execution.md#help-text) along with their description. You can opt out from this behavior by setting the `List` property: + +```csharp +[Parameter(List = false)] +readonly string MyParameter; +``` + +Unlisted parameters can be passed as normal and are still available through [shell completion](../06-global-tool/00-shell-completion.md). + +## Supported Types + +Parameters **support a wide range of primitive and complex types**, including their nullable and array counterparts: + + +```cs +[Parameter] readonly string StringValue; +[Parameter] readonly bool BoolValue; +[Parameter] readonly int? IntegerValue; +[Parameter] readonly string[] StringArray; + +[Parameter] readonly MSBuildVersion MSBuildVersion; +[Parameter] readonly Configuration Configuration; +[Parameter] readonly AbsolutePath AbsolutePath; + +Target Print => _ => _ + .Executes(() => + { + Log.Information("StringValue = {Value}", StringValue); + Log.Information("BoolValue = {Value}", BoolValue); + Log.Information("IntegerValue = {Value}", IntegerValue?.ToString() ?? "<null>"); + Log.Information("StringArray = {Value}", StringArray?.JoinComma() ?? "<null>"); + + Log.Information("MSBuildVersion = {Value}", MSBuildVersion); + Log.Information("Configuration = {Value}", Configuration); + Log.Information("AbsolutePath = {Value}", AbsolutePath); + }); +``` + + +:::note +By default, the whitespace character is used to pass multiple values for an array parameter. You can quote your values to treat them as single elements. Additionally, you can provide a custom separator through the attribute (whitespace will still work as a separator): + +```csharp +[Parameter(Separator = '+')] +readonly int[] Numbers; +``` +::: + +### Custom Types + +All the supported types from above use the [type converter](https://docs.microsoft.com/en-us/dotnet/api/system.componentmodel.typeconverter) infrastructure to convert string values to their respective type instances. This works independently of how the parameter is resolved. It's worth noting that parameter files are also converted to strings internally.
+ +Depending on your use-case, you can choose one of the following approaches to define your custom type that can convert from string values: + + + + +[Enumeration types](https://docs.microsoft.com/en-us/dotnet/csharp/language-reference/builtin-types/enum) define a set of named constants with underlying numeric value: + +```csharp +enum CustomType +{ + One, + Two, + Three +} +``` + +:::info +Members of enumeration types automatically show up during [shell completion](../06-global-tool/00-shell-completion.md). +::: + + + + +[Enumeration classes](https://docs.microsoft.com/en-us/dotnet/architecture/microservices/microservice-ddd-cqrs-patterns/enumeration-classes-over-enum-types) are open for external extension. The `Enumeration` base class defines equality operators and implicit string conversion: + +```csharp +[TypeConverter(typeof(TypeConverter))] +public class Configuration : Enumeration +{ + public static Configuration Debug = new () { Value = nameof(Debug) }; + public static Configuration Release = new () { Value = nameof(Release) }; +} +``` + +:::info +Members of enumeration classes automatically show up during [shell completion](../06-global-tool/00-shell-completion.md). +::: + + + + +```csharp +[TypeConverter(typeof(TypeConverter))] +public class CustomType +{ + public class TypeConverter : System.ComponentModel.TypeConverter + { + public override bool CanConvertFrom(ITypeDescriptorContext context, Type sourceType) + { + return sourceType == typeof(string) || base.CanConvertFrom(context, sourceType); + } + + public override object ConvertFrom(ITypeDescriptorContext context, CultureInfo culture, object value) + { + if (value is string data) + return new CustomType(data); + + if (value is null) + return null; + + return base.ConvertFrom(context, culture, value); + } + } + + public CustomType(string data) + { + // ... + } +} +``` + + + diff --git a/docs/02-fundamentals/10-logging.md b/docs/02-fundamentals/10-logging.md new file mode 100644 index 000000000..2f88e8667 --- /dev/null +++ b/docs/02-fundamentals/10-logging.md @@ -0,0 +1,176 @@ +--- +title: Logging +--- + +As with any other application, good logging greatly reduces the time to detect the source of errors and fix them quickly. NUKE integrates with [Serilog](https://serilog.net/) and prepares a console and file logger for you. Most functions with side effects will automatically log their performed actions. This also includes [invocations of CLI tools](../03-common/08-cli-tools.md). Of course, you can also add your own log messages: + +```csharp +// using Serilog; + +Log.Verbose("This is a verbose message"); +Log.Debug("This is a debug message"); +Log.Information("This is an information message"); +Log.Warning("This is a warning message"); +Log.Error("This is an error message"); +``` + +:::tip +For error messages, you most certainly want to use [assertions](14-assertions.md) instead to also fail the build. +::: + +## Console Sink + +Based on your IDE and CI/CD service, the console sink is automatically configured with the [best-looking themes](https://github.com/serilog/serilog-sinks-console#themes). When your terminal supports [ANSI colors](https://en.wikipedia.org/wiki/ANSI_escape_code) (`TERM=xterm`), an ANSI theme is chosen. Otherwise, a simple [system-color](https://docs.microsoft.com/en-us/dotnet/api/system.consolecolor) theme is used. + +:::info +Adaptive themes are particularly great for consistent colors in your CI/CD environment. +::: + +Log messages are only written to console when the appropriate `LogLevel` is set. 
You can change it by passing the `--verbosity` parameter: + + + + +```powershell +# terminal-command +nuke --verbosity verbose +``` + + + + +```powershell +# terminal-command +nuke --verbosity normal +``` + + + + +```powershell +# terminal-command +nuke --verbosity minimal +``` + + + + +```powershell +# terminal-command +nuke --verbosity quiet +``` + + + + +Or by setting it directly in the build implementation: + + + + + +```csharp +Logging.Level = LogLevel.Trace; +``` + + + + + +```csharp +Logging.Level = LogLevel.Normal; +``` + + + + +```csharp +Logging.Level = LogLevel.Warning; +``` + + + + +```csharp +Logging.Level = LogLevel.Error; +``` + + + + +In the following image you can see that the verbose message is hidden because the current log level was set to `Normal`: + +

+ +![Logging Output in Console](logging-console-light.webp#gh-light-mode-only) +![Logging Output in Console](logging-console-dark.webp#gh-dark-mode-only) + +

+ +:::tip +Error and warning log messages are repeated right before the [build summary](../01-getting-started/03-execution.md#build-summary) to give you a quick look at what went wrong. +::: + +## File Sinks + +For each build, a new log file is written to the temporary directory. The Serilog message template is pre-configured as: + +```text title="Message Template" +{Timestamp:HH:mm:ss.fff} | {Level:u1} | {Target} | {Message:l}{NewLine}{Exception} +``` + +With the sample logging from above, the file would look roughly like this: + +```log title=".nuke/temp/build.log" +03:57:38.208 | V | Compile | This is a verbose message +03:57:38.208 | D | Compile | This is a debug message +03:57:38.208 | I | Compile | This is an information message +03:57:38.208 | W | Compile | This is a warning message +03:57:38.208 | E | Compile | This is an error message +``` + +:::tip +With the [Ideolog plugin](https://plugins.jetbrains.com/plugin/9746-ideolog) for [JetBrains Rider](https://jetbrains.com/rider/) you can view and inspect log files more comfortably. It automatically highlights messages according to their log level, allows collapsing irrelevant messages based on search terms, and enables navigation for exception stack traces. + +

+ +![Ideolog plugin in JetBrains Rider](logging-ideolog-light.webp#gh-light-mode-only) +![Ideolog plugin in JetBrains Rider](logging-ideolog-dark.webp#gh-dark-mode-only) + +

+ +::: + +### Comparing Log Files + +For the purpose of log comparison, local builds will create another log file with the current timestamp in its name but without the timestamp in the message template: + +```text title="Message Template" +{Level:u1} | {Target} | {Message:l}{NewLine}{Exception} +``` + +:::info +Only the last 5 build logs are kept. +::: + +With the same sample logging from above, the file now looks like this: + +```log +V | Compile | This is a verbose message +D | Compile | This is a debug message +I | Compile | This is an information message +W | Compile | This is a warning message +E | Compile | This is an error message +``` + +With the comparison tool of your choice, you can then select two files and compare them. For instance, when you remove the debug message and add another warning message, the comparison tool will show the following: + +```diff title="Diff Output" + V | Compile | This is a verbose message +- D | Compile | This is a debug message + I | Compile | This is an information message + W | Compile | This is a warning message ++ W | Compile | This is another warning message + E | Compile | This is an error message +``` diff --git a/docs/02-fundamentals/14-assertions.md b/docs/02-fundamentals/14-assertions.md new file mode 100644 index 000000000..8b1b84300 --- /dev/null +++ b/docs/02-fundamentals/14-assertions.md @@ -0,0 +1,66 @@ +--- +title: Assertions +--- + +As in any other codebase, it is good practice to assert assumptions before continuing with heavier procedures in your build automation. When an assertion is violated, it usually entails that the build should fail immediately. + +In its simplest form, you can fail a build by calling: + +```csharp +Assert.Fail("This was unexpected!"); +``` + +Furthermore, you can use one of the following more specific assertion methods: + + + + +```csharp +// Assert not-null fluently +obj.NotNull().ToString(); + +// Assert not-null explicitly +Assert.NotNull(obj); +``` + + + + +```csharp +// Assert true condition +Assert.True(response.IsSuccessStatusCode); + +// Assert false condition +Assert.False(repository.IsOnMainBranch()); +``` + + + + +```csharp +// Assert collection is not empty or empty +Assert.NotEmpty(releaseNotes); +Assert.Empty(errors); + +// Assert collection count +Assert.Count(packages, length: 5); +Assert.HasSingleItem(matchingEntries); +``` + + + + +```csharp +// Assert file exists +Assert.FileExists(file); + +// Assert directory exists +Assert.DirectoryExists(directory); +``` + + + + +:::info +Each of the above methods uses the [`CallerArgumentExpressionAttribute`](https://docs.microsoft.com/en-us/dotnet/csharp/language-reference/proposals/csharp-10.0/caller-argument-expression) to capture usage details from the call-site. If you want to provide a more comprehensive explanation, you can pass the `message` parameter instead.
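+ +For example, a sketch with an explicit message (the condition and wording are illustrative): + +```csharp +// Without a message, the failure text echoes the call-site expression +Assert.True(response.IsSuccessStatusCode); + +// With an explicit message for more context +Assert.True(response.IsSuccessStatusCode, "The staging endpoint rejected the request"); +```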
+::: diff --git a/docs/02-fundamentals/_category_.json b/docs/02-fundamentals/_category_.json new file mode 100644 index 000000000..3d3a48f26 --- /dev/null +++ b/docs/02-fundamentals/_category_.json @@ -0,0 +1,3 @@ +{ + "label": "Fundamentals" +} diff --git a/docs/02-fundamentals/logging-console-dark.png b/docs/02-fundamentals/logging-console-dark.png new file mode 100644 index 000000000..4488ef4b0 Binary files /dev/null and b/docs/02-fundamentals/logging-console-dark.png differ diff --git a/docs/02-fundamentals/logging-console-dark.webp b/docs/02-fundamentals/logging-console-dark.webp new file mode 100644 index 000000000..0afdbac5e Binary files /dev/null and b/docs/02-fundamentals/logging-console-dark.webp differ diff --git a/docs/02-fundamentals/logging-console-light.png b/docs/02-fundamentals/logging-console-light.png new file mode 100644 index 000000000..83b6c4dbd Binary files /dev/null and b/docs/02-fundamentals/logging-console-light.png differ diff --git a/docs/02-fundamentals/logging-console-light.webp b/docs/02-fundamentals/logging-console-light.webp new file mode 100644 index 000000000..dbcc4fff5 Binary files /dev/null and b/docs/02-fundamentals/logging-console-light.webp differ diff --git a/docs/02-fundamentals/logging-ideolog-dark.png b/docs/02-fundamentals/logging-ideolog-dark.png new file mode 100644 index 000000000..45ec583ce Binary files /dev/null and b/docs/02-fundamentals/logging-ideolog-dark.png differ diff --git a/docs/02-fundamentals/logging-ideolog-dark.webp b/docs/02-fundamentals/logging-ideolog-dark.webp new file mode 100644 index 000000000..ccdf3894e Binary files /dev/null and b/docs/02-fundamentals/logging-ideolog-dark.webp differ diff --git a/docs/02-fundamentals/logging-ideolog-light.png b/docs/02-fundamentals/logging-ideolog-light.png new file mode 100644 index 000000000..b5a6218df Binary files /dev/null and b/docs/02-fundamentals/logging-ideolog-light.png differ diff --git a/docs/02-fundamentals/logging-ideolog-light.webp b/docs/02-fundamentals/logging-ideolog-light.webp new file mode 100644 index 000000000..8f761677f Binary files /dev/null and b/docs/02-fundamentals/logging-ideolog-light.webp differ diff --git a/docs/02-fundamentals/parameter-file-completion-dark.png b/docs/02-fundamentals/parameter-file-completion-dark.png new file mode 100644 index 000000000..5895593fa Binary files /dev/null and b/docs/02-fundamentals/parameter-file-completion-dark.png differ diff --git a/docs/02-fundamentals/parameter-file-completion-dark.webp b/docs/02-fundamentals/parameter-file-completion-dark.webp new file mode 100644 index 000000000..e136140ab Binary files /dev/null and b/docs/02-fundamentals/parameter-file-completion-dark.webp differ diff --git a/docs/02-fundamentals/parameter-file-completion-light.png b/docs/02-fundamentals/parameter-file-completion-light.png new file mode 100644 index 000000000..c3c495d5e Binary files /dev/null and b/docs/02-fundamentals/parameter-file-completion-light.png differ diff --git a/docs/02-fundamentals/parameter-file-completion-light.webp b/docs/02-fundamentals/parameter-file-completion-light.webp new file mode 100644 index 000000000..56e17dbac Binary files /dev/null and b/docs/02-fundamentals/parameter-file-completion-light.webp differ diff --git a/docs/03-common/03-paths.md b/docs/03-common/03-paths.md new file mode 100644 index 000000000..7d823b071 --- /dev/null +++ b/docs/03-common/03-paths.md @@ -0,0 +1,73 @@ +--- +title: Constructing Paths +--- + +Referencing files and directories seems like a trivial task. 
Nevertheless, developers often run into problems where relative paths no longer match the current working directory, or find themselves fixing path separator issues that stem from [historical design decisions](https://www.youtube.com/watch?v=5T3IJfBfBmI). NUKE follows the approach to use absolute paths whenever possible, which ensures explicitness and allows copying [tool invocations](08-cli-tools.md) from the log and executing them from anywhere you are. + +Central to the idea of absolute paths is the `AbsolutePath` type and the `NukeBuild.RootDirectory` property. From there on, you can easily construct paths through the [overloaded division operator](https://docs.microsoft.com/en-us/dotnet/csharp/language-reference/operators/operator-overloading): + + +```cs +AbsolutePath SourceDirectory => RootDirectory / "src"; +AbsolutePath OutputDirectory => RootDirectory / "output"; +AbsolutePath IndexFile => RootDirectory / "docs" / "index.md"; +``` + + +## Common Methods + +While `AbsolutePath` is agnostic to whether it points to a file or directory, it provides several commonly used methods for interaction: + + +```cs +// Get names +var nameWithExtension = IndexFile.Name; +var nameWithoutExtension = IndexFile.NameWithoutExtension; +var extensionWithDot = IndexFile.Extension; + +// Get the parent directory +var parent1 = IndexFile.Parent; +var parent2 = IndexFile / ".."; // gets normalized + +// Check if one path contains another +var containsFile = SourceDirectory.Contains(IndexFile); + +// Check if a directory or file exists +var directoryExists = SourceDirectory.DirectoryExists(); +var fileExists = IndexFile.FileExists(); +var pathExists = (RootDirectory / "dirOrFile").Exists(); // checks for both +``` + + +## Relative Paths + +Occasionally, you may actually want relative paths, for instance to include them in manifest files that get shipped with your artifacts. In this case, you can make use of `RelativePath`, which uses the path separator dictated by the operating system, or one of types `WinRelativePath` or `UnixRelativePath`, which enforce using backslash or slash respectively: + + +```cs +// Get the relative path to the index file +var indexRelativeFile = RootDirectory.GetRelativePathTo(IndexFile); + +// Get relative path for Unix +var indexUnixRelativePath1 = RootDirectory.GetUnixRelativePathTo(IndexFile); +var indexUnixRelativePath2 = (UnixRelativePath)indexRelativeFile; +``` + + +All relative path types support using the division operator. + +## Globbing + +Through the integrated [Glob](https://github.com/kthompson/glob) NuGet package, you can use [globbing patterns](https://en.wikipedia.org/wiki/Glob_(programming)) to collect files or directories from a base directory: + + +```cs +// Collect all package files from the output directory +var packageFiles = OutputDirectory.GlobFiles("*.nupkg"); + +// Collect and delete all /obj and /bin directories in all sub-directories +SourceDirectory + .GlobDirectories("**/{obj,bin}", otherPatterns) + .DeleteDirectories(); +``` + diff --git a/docs/03-common/05-repository.md b/docs/03-common/05-repository.md new file mode 100644 index 000000000..7b44f2920 --- /dev/null +++ b/docs/03-common/05-repository.md @@ -0,0 +1,114 @@ +--- +title: Repository Insights +--- + +Having knowledge about the current branch, applied tags, and the repository origin is eminently important in various scenarios. For instance, the deployment destination for an application is different whether executed from a release or personal branch. 
An announcement target may only be executed when running on the main branch. In many cases, it is advisable to include repository metadata, like origin and commit hash, into the artifacts for better traceability. + +You can use the `GitRepositoryAttribute` on a `GitRepository` field or property to automatically load all relevant information for the current revision at the beginning of build execution: + + +```cs +[GitRepository] readonly GitRepository Repository; + +Target Print => _ => _ + .Executes(() => + { + Log.Information("Commit = {Value}", Repository.Commit); + Log.Information("Branch = {Value}", Repository.Branch); + Log.Information("Tags = {Value}", Repository.Tags); + + Log.Information("main branch = {Value}", Repository.IsOnMainBranch()); + Log.Information("main/master branch = {Value}", Repository.IsOnMainOrMasterBranch()); + Log.Information("release/* branch = {Value}", Repository.IsOnReleaseBranch()); + Log.Information("hotfix/* branch = {Value}", Repository.IsOnHotfixBranch()); + + Log.Information("Https URL = {Value}", Repository.HttpsUrl); + Log.Information("SSH URL = {Value}", Repository.SshUrl); + }); +``` + + +:::tip +Repository insights allow you to design your targets in a flexible manner using [requirements](../02-fundamentals/05-targets.md#requirements), [conditional execution](../02-fundamentals/05-targets.md#conditional-execution), or hybrid implementations: + + +```cs +[GitRepository] readonly GitRepository Repository; +string OriginalRepositoryUrl => "https://github.com/nuke-build/nuke"; + +Target Deploy => _ => _ + .Requires(() => Repository.IsOnMainOrMasterBranch()); + +Target CheckMilestone => _ => _ + .OnlyWhenStatic(() => Repository.HttpsUrl.EqualsOrdinalIgnoreCase(OriginalRepositoryUrl)); + +Target Hotfix => _ => _ + .Executes(() => + { + if (Repository.IsOnHotfixBranch()) + FinishHotfix(); + else + CreateHotfix(); + }); +``` + +::: + +:::info +You can also manually create a `GitRepository` instance: + +```csharp +var repository1 = GitRepository.FromLocalDirectory(directory); +var repository2 = GitRepository.FromUrl(url); +``` + +The only difference between `FromUrl` and `FromLocalDirectory` is that the latter can initialize more properties, including `Commit`, `Tags`, and `RemoteBranch`.
+::: + +## GitHub Integration + +As one of the most popular Git hosting services, NUKE provides several methods to retrieve GitHub-specific **identifiers and links** from a repository: + + +```cs +// Get repository owner and name +var (owner, name) = (Repository.GetGitHubOwner(), Repository.GetGitHubName()); + +// Get commit details URL when Repository is fully-synced +var commitUrl = Repository.GetGitHubCommitUrl(Repository.Commit); + +// Get comparison URL between tags +var comparisonUrl = Repository.GetGitHubCompareTagsUrl("1.0.1", "1.0.2"); + +// Get file download URL +var downloadUrl = Repository.GetGitHubDownloadUrl(RootDirectory / "CHANGELOG.md", branch: "main"); +``` + + +You can also further interact with the repository using the [Octokit.NET](https://github.com/octokit/octokit.net) integration: + + +```cs +// Get the default branch +var defaultBranch = Repository.GetDefaultBranch(); + +// Get the latest release +var latestRelease = Repository.GetLatestRelease(includePrerelease: false); +``` + + +For certain operations, you may initialize an **authorized client**: + + +```cs +// Set credentials for authorized actions +var credentials = new Credentials(GitHubActions.Instance.Token); +GitHubTasks.GitHubClient = new GitHubClient( + new ProductHeaderValue(nameof(NukeBuild)), + new InMemoryCredentialStore(credentials)); + +// Create and close a milestone +Repository.CreateGitHubMilestone("5.1.0"); +Repository.CloseGitHubMilestone("5.1.0", enableIssueChecks: true); +``` + diff --git a/docs/03-common/06-serialization.md b/docs/03-common/06-serialization.md new file mode 100644 index 000000000..9cb8eebbf --- /dev/null +++ b/docs/03-common/06-serialization.md @@ -0,0 +1,134 @@ +--- +title: Data Serialization +--- + +Structured data plays an essential role in build automation. You may want to read a list of repositories to be checked out, write data that's consumed by another tool, or update version numbers of SDKs and tools you consume. The central entry point for data serialization is the `SerializationTasks` class, which comes with support for JSON, XML, and YAML. + + + + +:::note +Please read the [Newtonsoft.Json documentation](https://www.newtonsoft.com/json/help/html/Introduction.htm) before proceeding. +::: + + + + +:::note +Please read the [XDocument documentation](https://docs.microsoft.com/en-us/dotnet/standard/linq/xdocument-class-overview) before proceeding. +::: + + + + +:::note +Please read the [YamlDotNet documentation](https://github.com/aaubry/YamlDotNet/wiki) before proceeding. 
+:::
+
+## String Serialization
+
+You can serialize data to strings and deserialize back from them as follows:
+
+```csharp title="Build.cs"
+// Strongly-typed (Configuration is your own model type)
+var configuration = json.GetJson<Configuration>();
+var jsonText = configuration.ToJson();
+
+// Dynamically-typed
+var jobject = json.GetJson();
+```
+
+```csharp title="Build.cs"
+// Strongly-typed
+var configuration = xml.GetXml<Configuration>();
+var xmlText = configuration.ToXml();
+```
+
+```csharp title="Build.cs"
+// Strongly-typed
+var configuration = yaml.GetYaml<Configuration>();
+var yamlText = configuration.ToYaml();
+```
+
+## File Serialization
+
+You can serialize data to files and deserialize back from them as follows:
+
+```csharp title="Build.cs"
+// Strongly-typed
+var configuration = jsonFile.ReadJson<Configuration>();
+jsonFile.WriteJson(configuration);
+
+// Dynamically-typed
+var jobject = jsonFile.ReadJson();
+```
+
+```csharp title="Build.cs"
+// Strongly-typed
+var configuration = xmlFile.ReadXml<Configuration>();
+xmlFile.WriteXml(configuration);
+```
+
+```csharp title="Build.cs"
+// Strongly-typed
+var configuration = yamlFile.ReadYaml<Configuration>();
+yamlFile.WriteYaml(configuration);
+```
+
+### Updating Files
+
+Instead of reading, updating, and writing files in separate steps, you can also use the atomic functions below:
+
+```csharp title="Build.cs"
+jsonFile.UpdateJson<Configuration>(
+    update: x => x.Value = "new-value");
+```
+
+```csharp title="Build.cs"
+xmlFile.UpdateXml<Configuration>(
+    update: x => x.Value = "new-value");
+```
+
+```csharp title="Build.cs"
+yamlFile.UpdateYaml<Configuration>(
+    update: x => x.Value = "new-value");
+```
diff --git a/docs/03-common/06-versioning.md b/docs/03-common/06-versioning.md
new file mode 100644
index 000000000..042e233e0
--- /dev/null
+++ b/docs/03-common/06-versioning.md
@@ -0,0 +1,187 @@
+---
+title: Versioning Artifacts
+---
+
+Whenever a build produces artifacts, those should be identifiable with a unique version number. This avoids wrong expectations about available features or fixed bugs, and allows for clear discussions between developers, the QA team, and product users. The most common versioning approaches are [semantic versioning](https://semver.org/) and [calendar versioning](https://calver.org/).
+
+## Repository-based Versioning
+
+NUKE supports four different tools that help generate version numbers from your repository and its commits. Each of these tools comes with its own attribute that populates the attributed field with the calculated information:
+
+:::note
+Please refer to the [GitVersion documentation](https://gitversion.net/docs/reference/configuration) for any questions.
+:::
+
+```powershell title="Tool Installation"
+# terminal-command
+nuke :add-package GitVersion.Tool
+```
+
+```csharp title="Build.cs"
+[GitVersion]
+readonly GitVersion GitVersion;
+
+Target Print => _ => _
+    .Executes(() =>
+    {
+        Log.Information("GitVersion = {Value}", GitVersion.MajorMinorPatch);
+    });
+```
+
+:::note
+Please refer to the [Nerdbank.GitVersioning documentation](https://github.com/dotnet/Nerdbank.GitVersioning/blob/master/doc/versionJson.md) for any questions.
+:::
+
+```powershell title="Tool Installation"
+# terminal-command
+nuke :add-package Nerdbank.GitVersioning
+```
+
+```csharp title="Build.cs"
+[NerdbankGitVersioning]
+readonly NerdbankGitVersioning NerdbankVersioning;
+
+Target Print => _ => _
+    .Executes(() =>
+    {
+        Log.Information("NerdbankVersioning = {Value}", NerdbankVersioning.SimpleVersion);
+    });
+```
+
+:::note
+Please refer to the [OctoVersion documentation](https://github.com/OctopusDeploy/OctoVersion#configuration) for any questions.
+:::
+
+```powershell title="Tool Installation"
+# terminal-command
+nuke :add-package Octopus.OctoVersion.Tool
+```
+
+```csharp title="Build.cs"
+[OctoVersion]
+readonly OctoVersionInfo OctoVersionInfo;
+
+Target Print => _ => _
+    .Executes(() =>
+    {
+        Log.Information("OctoVersionInfo = {Value}", OctoVersionInfo.MajorMinorPatch);
+    });
+```
+
+:::note
+Please refer to the [MinVer documentation](https://github.com/adamralph/minver#usage) for any questions.
+:::
+
+```powershell title="Tool Installation"
+# terminal-command
+nuke :add-package MinVer
+```
+
+```csharp title="Build.cs"
+[MinVer]
+readonly MinVer MinVer;
+
+Target Print => _ => _
+    .Executes(() =>
+    {
+        Log.Information("MinVer = {Value}", MinVer.Version);
+    });
+```
+
+:::info
+Note that when the versioning tool fails to calculate version numbers, a warning will be logged and the attributed field remains uninitialized. In that case, you can try executing the issued command manually or run `nuke --verbosity verbose` to reveal more detailed information. In most cases, the repository is either not initialized, has no commits, or is missing the tool-specific configuration file.
+:::
+
+## Dependency-based Versioning
+
+When your versioning is affected by external dependencies, NUKE provides another mechanism to load their latest versions prior to build execution.
+Each attribute provides various properties to control which versions should be considered and how the retrieved value should be transformed:
+
+```csharp title="Build.cs"
+[LatestNuGetVersion(
+    packageId: "JetBrains.ReSharper.SDK",
+    IncludePrerelease = true)]
+readonly NuGetVersion ReSharperVersion;
+
+Target Print => _ => _
+    .Executes(() =>
+    {
+        Log.Information("ReSharperVersion = {Value}", ReSharperVersion);
+    });
+```
+
+```csharp title="Build.cs"
+[LatestGitHubRelease(
+    identifier: "JetBrains/gradle-intellij-plugin",
+    TrimPrefix = true)]
+readonly string GradlePluginVersion;
+
+Target Print => _ => _
+    .Executes(() =>
+    {
+        Log.Information("GradlePluginVersion = {Value}", GradlePluginVersion);
+    });
+```
+
+```csharp title="Build.cs"
+[LatestMyGetVersion(
+    feed: "rd-snapshots",
+    package: "rd-gen")]
+readonly string RdGenVersion;
+
+Target Print => _ => _
+    .Executes(() =>
+    {
+        Log.Information("RdGenVersion = {Value}", RdGenVersion);
+    });
+```
+
+```csharp title="Build.cs"
+[LatestMavenVersion(
+    repository: "plugins.gradle.org/m2",
+    groupId: "org.jetbrains.kotlin.jvm",
+    artifactId: "org.jetbrains.kotlin.jvm.gradle.plugin")]
+readonly string KotlinJvmVersion;
+
+Target Print => _ => _
+    .Executes(() =>
+    {
+        Log.Information("KotlinJvmVersion = {Value}", KotlinJvmVersion);
+    });
+```
+
+## Related Resources
+
+You can learn more about different versioning aspects from the following resources:
+
+- [Why I don't start versions at 0.x any more](https://codeblog.jonskeet.uk/2019/10/20/why-i-dont-start-versions-at-0-x-any-more/) by Jon Skeet
+- [Versioning, and how it makes our heads hurt](https://www.youtube.com/watch?v=GLr72TnSnPw) by Jon Skeet
diff --git a/docs/03-common/07-solution-project-model.md b/docs/03-common/07-solution-project-model.md
new file mode 100644
index 000000000..5410610ed
--- /dev/null
+++ b/docs/03-common/07-solution-project-model.md
@@ -0,0 +1,107 @@
+---
+title: Solution & Project Model
+---
+
+Particularly when building .NET applications, your build may require information related to solution or project files. Such information is often duplicated in string literals and quickly becomes out-of-date. For instance, when publishing a project, you want to build for every target framework that is defined in the project file. NUKE has best-in-class support for reading and modifying the .NET solution and project model.
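+For example, instead of hard-coding target framework monikers, a publish target can enumerate them straight from the project file. A minimal sketch combining the `Solution` injection with the helper methods described below (the project name is a placeholder):
+
+```csharp
+[Solution] readonly Solution Solution;
+
+Target Publish => _ => _
+    .Executes(() =>
+    {
+        // "MyProject" is hypothetical; GetTargetFrameworks reads the project file
+        var project = Solution.GetProject("MyProject");
+        foreach (var framework in project.GetTargetFrameworks())
+            DotNetTasks.DotNetPublish(_ => _
+                .SetProject(project)
+                .SetFramework(framework));
+    });
+```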
+
+## Working with Solutions
+
+The easiest way to load your solution is to create a new `Solution` field, add the `SolutionAttribute`, and define the file path in the default [parameters file](../02-fundamentals/06-parameters.md#passing-values-through-parameter-files):
+
+```csharp
+[Solution]
+readonly Solution Solution;
+
+Target Print => _ => _
+    .Executes(() =>
+    {
+        Log.Information("Solution path = {Value}", Solution);
+        Log.Information("Solution directory = {Value}", Solution.Directory);
+    });
+```
+
+You can also manually load solutions with the `AbsolutePath` extension method or through `ProjectModelTasks`:
+
+```csharp
+var solution1 = SolutionFile.ReadSolution();
+var solution2 = ProjectModelTasks.ParseSolution("/path/to/file");
+```
+
+### Read & Write
+
+With an instance of the `Solution` type, you can **read and write the solution** with regard to projects, solution folders, items, and build configurations:
+
+```csharp
+// Gather projects
+var globalToolProject = Solution.GetProject("Nuke.GlobalTool");
+var testProjects = Solution.GetProjects("*.Tests");
+
+// Gather all solution items
+var allItems = Solution.AllSolutionFolders.SelectMany(x => x.Items);
+
+// Add a new project to solution
+var project = Solution.AddProject(
+    name: "DummyProject",
+    typeId: ProjectType.CSharpProject,
+    path: RootDirectory / "DummyProject.csproj");
+Solution.Save();
+```
+
+### Strongly-Typed Project Access
+
+Using the `GenerateProjects` property, you can enable a [source generator](https://devblogs.microsoft.com/dotnet/introducing-c-source-generators/) that provides **strongly-typed access to the solution structure**. This greatly improves how you can reference individual projects:
+
+```csharp
+[Solution(GenerateProjects = true)]
+readonly Solution Solution;
+
+Project GlobalToolProject => Solution.Nuke_GlobalTool;
+```
+
+:::info
+For every `SolutionAttribute` with the `GenerateProjects` property enabled, the source generator will create a new type with the same name as the field. In the example above, the type `Nuke.Common.ProjectModel.Solution` is silently replaced by a new type `global::Solution` that is local to your project. Therefore, the field name and its type name must always be the same.
+:::
+
+### Creating Solutions
+
+Apart from reading and writing existing solutions, you can also **create new solution files**. This can be very helpful to generate a global solution for many decoupled solutions in different repositories:
+
+```csharp
+var globalSolution = CreateSolution(
+    fileName: "global.generated.sln",
+    solutions: new[] { MainSolution }.Concat(ExternalSolutions),
+    folderNameProvider: x => x == Solution ? null : x.Name);
+
+globalSolution.Save();
+```
+
+## Working with Projects through MSBuild
+
+Apart from reading the path and directory of a project through a `Solution` object, you can also use the [Microsoft.Build](https://www.nuget.org/packages/Microsoft.Build) integration to access the MSBuild project model:
+
+```csharp
+var msbuildProject = project.GetMSBuildProject();
+```
+
+Again, you can also manually load the project using:
+
+```csharp
+var msbuildProject = ProjectModelTasks.ParseProject("/path/to/file");
+```
+
+Some of the most important information, like target frameworks, runtime identifiers, output type, properties, and item groups, can also be retrieved with **predefined helper methods**:
+
+```csharp
+var targetFrameworks = project.GetTargetFrameworks();
+var runtimeIdentifiers = project.GetRuntimeIdentifiers();
+var outputType = project.GetOutputType();
+
+var isPackable = project.GetProperty("IsPackable");
+var compiledFiles = project.GetItems("Compile");
+```
+
+However, behind the scenes, these methods will still load the project through the `Microsoft.Build` package.
+
+:::caution
+It is **strongly discouraged** to use anything but MSBuild to examine project files. Other approaches, like reading and parsing the XML, are very fragile against the complex evaluation logic that is inherent to project files.
+:::
diff --git a/docs/03-common/08-cli-tools.md b/docs/03-common/08-cli-tools.md
new file mode 100644
index 000000000..e8842501e
--- /dev/null
+++ b/docs/03-common/08-cli-tools.md
@@ -0,0 +1,326 @@
+---
+title: Executing CLI Tools
+---
+
+import ToolConfirmation from '@site/src/components/ToolConfirmation';
+
+Interacting with third-party command-line interface tools (CLIs) is an essential task in build automation. This includes a wide range of aspects, such as resolution of the tool path, construction of arguments to be passed, evaluation of the exit code, and capturing of standard and error output. NUKE hides these concerns in dedicated auto-generated CLI wrappers.
+Exhaustive list of supported tools + +| Tool | Supported Commands | +|:----------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [AzureSignTool](https://github.com/vcsjones/AzureSignTool) | `sign` | +| [BenchmarkDotNet](https://benchmarkdotnet.org/) | _Single top-level command_ | +| [Boots](https://github.com/jonathanpeppers/boots) | _Single top-level command_ | +| [Chocolatey](https://chocolatey.org/) | `find`, `list`, `new`, `outdated`, `pack`, `push`, `search` | +| [CloudFoundry](https://docs.cloudfoundry.org/cf-cli/cf-help.html) | `api`, `auth`, `bind-service`, `bind-service`, `create-route`, `create-service`, `create-space`, `cups`, `curl`, `delete`, `delete-service`, `delete-space`, `login`, `map-route`, `push`, `restage`, `restart`, `scale -f`, `service`, `set-env`, 
`set-env`, `start`, `stop`, `target`, `unmap-route` | +| [Codecov](https://about.codecov.io/) | _Single top-level command_ | +| [CodeMetrics](https://docs.microsoft.com/en-us/visualstudio/code-quality/code-metrics-values) | _Single top-level command_ | +| [CorFlags](https://docs.microsoft.com/en-us/dotnet/framework/tools/corflags-exe-corflags-conversion-tool) | _Single top-level command_ | +| [CoverallsNet](https://coverallsnet.readthedocs.io) | _Single top-level command_ | +| [Coverlet](https://github.com/tonerdo/coverlet/) | _Single top-level command_ | +| [DocFX](https://dotnet.github.io/docfx/) | `build`, `dependency`, `download`, `help`, `init`, `merge`, `metadata`, `pdf`, `serve`, `template` | +| [Docker](https://www.docker.com/) | `attach`, `build`, `builder`, `builder build`, `builder prune`, `buildx build`, `checkpoint`, `checkpoint create`, `checkpoint ls`, `checkpoint rm`, `commit`, `config`, `config create`, `config inspect`, `config ls`, `config rm`, `container`, `container attach`, `container commit`, `container create`, `container diff`, `container exec`, `container export`, `container inspect`, `container kill`, `container logs`, `container ls`, `container pause`, `container port`, `container prune`, `container rename`, `container restart`, `container rm`, `container run`, `container start`, `container stats`, `container stop`, `container top [ps`, `container unpause`, `container update`, `container wait`, `context`, `context create`, `context export`, `context import`, `context inspect`, `context ls`, `context rm`, `context update`, `context use`, `create`, `deploy`, `diff`, `engine`, `engine activate`, `engine check`, `engine update`, `events`, `exec`, `export`, `history`, `image`, `image build`, `image history`, `image import`, `image inspect`, `image load`, `image ls`, `image prune`, `image pull`, `image push`, `image rm`, `image save`, `image tag`, `images`, `import`, `info`, `inspect`, `kill`, `load`, `login`, `logout`, `logs`, `manifest`, `manifest annotate`, `manifest create`, `manifest inspect`, `manifest push`, `network`, `network connect`, `network create`, `network disconnect`, `network inspect`, `network ls`, `network prune`, `network rm`, `node`, `node demote`, `node inspect`, `node ls`, `node promote`, `node ps`, `node rm`, `node update`, `pause`, `plugin`, `plugin create`, `plugin disable`, `plugin enable`, `plugin inspect`, `plugin install`, `plugin ls`, `plugin push`, `plugin rm`, `plugin set`, `plugin upgrade`, `port`, `ps`, `pull`, `push`, `rename`, `restart`, `rm`, `rmi`, `run`, `save`, `search`, `secret`, `secret create`, `secret inspect`, `secret ls`, `secret rm`, `service`, `service create`, `service inspect`, `service logs`, `service ls`, `service ps`, `service rm`, `service rollback`, `service scale`, `service update`, `stack`, `stack deploy`, `stack ls`, `stack ps`, `stack rm`, `stack services`, `start`, `stats`, `stop`, `swarm`, `swarm ca`, `swarm init`, `swarm join HOST:PORT`, `swarm join-token`, `swarm leave`, `swarm unlock`, `swarm unlock-key`, `swarm update`, `system`, `system df`, `system events`, `system info`, `system prune`, `tag`, `top [ps`, `trust`, `trust inspect`, `trust key`, `trust key generate`, `trust key load`, `trust revoke`, `trust sign IMAGE:TAG`, `trust signer`, `trust signer add`, `trust signer remove`, `unpause`, `update`, `version`, `volume`, `volume create`, `volume inspect`, `volume ls`, `volume prune`, `volume rm`, `wait` | +| [DotCover](https://www.jetbrains.com/dotcover) | `analyse`, `cover`, `delete`, `dotnet`, 
`merge`, `report`, `zip` | +| [DotNet](https://docs.microsoft.com/en-us/dotnet/core/tools/) | `build`, `clean`, `msbuild`, `nuget add source`, `nuget push`, `pack`, `publish`, `restore`, `run`, `test`, `tool install`, `tool restore`, `tool uninstall`, `tool update` | +| [EntityFramework](https://docs.microsoft.com/en-us/ef/core/miscellaneous/cli/dotnet) | `ef database drop`, `ef database update`, `ef dbcontext info`, `ef dbcontext list`, `ef dbcontext scaffold`, `ef dbcontext script`, `ef migrations add`, `ef migrations list`, `ef migrations remove`, `ef migrations script` | +| [Fixie](https://fixie.github.io/) | _Single top-level command_ | +| [GitLink](https://github.com/GitTools/GitLink/) | _Single top-level command_ | +| [GitReleaseManager](https://gitreleasemanager.readthedocs.io) | `addasset`, `close`, `create`, `export`, `publish` | +| [GitVersion](http://gitversion.readthedocs.io/en/stable/) | _Single top-level command_ | +| [Helm](https://helm.sh/) | `completion`, `create`, `delete`, `dependency build`, `dependency list`, `dependency update`, `fetch`, `get`, `get hooks`, `get manifest`, `get notes`, `get values`, `history`, `home`, `init`, `inspect`, `inspect chart`, `inspect readme`, `inspect values`, `install`, `lint`, `list`, `package`, `plugin install`, `plugin list`, `plugin remove`, `plugin update`, `repo add`, `repo index`, `repo list`, `repo remove`, `repo update`, `reset`, `rollback`, `search`, `serve`, `status`, `template`, `test`, `upgrade`, `verify`, `version` | +| [ILRepack](https://github.com/gluck/il-repack#readme) | _Single top-level command_ | +| [InnoSetup](http://www.jrsoftware.org/isinfo.php) | _Single top-level command_ | +| [Kubernetes](https://kubernetes.io/) | `alpha`, `annotate`, `api-resources`, `api-versions`, `apply`, `apply -k`, `attach`, `auth`, `autoscale`, `certificate`, `cluster-info`, `completion`, `config`, `convert`, `cordon`, `cp`, `create`, `delete`, `describe`, `drain`, `edit`, `exec`, `explain`, `expose`, `get`, `kubectl`, `label`, `logs`, `options`, `patch`, `plugin`, `port-forward`, `proxy`, `replace`, `rolling-update`, `rollout`, `run`, `run-container`, `scale`, `set`, `taint`, `top`, `uncordon`, `version`, `wait` | +| [MauiCheck](https://github.com/Redth/dotnet-maui-check) | `config` | +| [MinVer](https://github.com/adamralph/minver) | _Single top-level command_ | +| [MSBuild](https://msdn.microsoft.com/en-us/library/ms164311.aspx) | _Single top-level command_ | +| [MSpec](https://github.com/machine/machine.specifications) | _Single top-level command_ | +| [NerdbankGitVersioning](https://github.com/AArnott/Nerdbank.GitVersioning) | `cloud`, `get-commits`, `get-version`, `install`, `prepare-release`, `set-version`, `tag` | +| [Netlify](https://docs.netlify.com/cli/get-started/) | `netlify deploy`, `netlify sites:create`, `netlify sites:delete` | +| [Npm](https://www.npmjs.com/) | `ci`, `install`, `run` | +| [NSwag](https://github.com/RSuter/NSwag) | `aspnetcore2openapi`, `aspnetcore2swagger`, `jsonschema2csclient`, `jsonschema2tsclient`, `list-controllers`, `list-types`, `new`, `openapi2csclient`, `openapi2cscontroller`, `openapi2tsclient`, `run`, `swagger2csclient`, `swagger2cscontroller`, `swagger2tsclient`, `types2openapi`, `types2swagger`, `version`, `webapi2openapi`, `webapi2swagger` | +| [NuGet](https://docs.microsoft.com/en-us/nuget/tools/nuget-exe-cli-reference) | `install`, `pack`, `push`, `restore`, `sources add`, `sources disable`, `sources enable`, `sources list`, `sources remove`, `sources update` | +| 
[NUnit](https://www.nunit.org/) | _Single top-level command_ | +| [Octopus](https://octopus.com/) | `build-information`, `create-release`, `deploy-release`, `pack`, `push` | +| [OctoVersion](https://github.com/OctopusDeploy/OctoVersion) | `octoversion`, `octoversion` | +| [OpenCover](https://github.com/OpenCover/opencover) | _Single top-level command_ | +| [Paket](https://fsprojects.github.io/paket) | `pack`, `push`, `restore`, `update` | +| [PowerShell](https://docs.microsoft.com/en-us/powershell/) | _Single top-level command_ | +| [Pulumi](https://www.pulumi.com/) | `config`, `config cp`, `config get`, `config refresh`, `config rm`, `config set`, `destroy`, `new`, `preview`, `stack`, `stack change-secrets-provider`, `stack export`, `stack graph`, `stack history`, `stack import`, `stack init`, `stack ls`, `stack output`, `stack rename`, `stack rm`, `stack select`, `stack tag get`, `stack tag ls`, `stack tag rm`, `stack tag set`, `up` | +| [ReportGenerator](https://github.com/danielpalme/ReportGenerator) | _Single top-level command_ | +| [ReSharper](https://www.jetbrains.com/help/resharper/ReSharper_Command_Line_Tools.html) | `cleanupcode`, `dupfinder`, `inspectcode` | +| [SignClient](https://discoverdot.net/projects/sign-service) | `sign` | +| [SignTool](https://docs.microsoft.com/en-us/dotnet/framework/tools/signtool-exe) | `sign` | +| [SonarScanner](https://www.sonarqube.org/) | `begin`, `end` | +| [SpecFlow](https://specflow.org/) | `buildserverrun`, `mstestexecutionreport`, `nunitexecutionreport`, `register`, `register`, `register`, `run`, `stepdefinitionreport` | +| [Squirrel](https://github.com/Squirrel/Squirrel.Windows) | _Single top-level command_ | +| [TestCloud](https://developer.xamarin.com/guides/testcloud/) | `submit` | +| [Unity](https://unity3d.com/) | `-createManualActivationFile`, `-returnlicense` | +| [VSTest](https://msdn.microsoft.com/en-us/library/jj155796.aspx) | _Single top-level command_ | +| [VSWhere](https://github.com/Microsoft/vswhere) | _Single top-level command_ | +| [WebConfigTransformRunner](https://github.com/erichexter/WebConfigTransformRunner) | _Single top-level command_ | +| [Xunit](https://xunit.github.io) | _Single top-level command_ | + +
+
+You can execute MSBuild using the [lightweight API](#lightweight-api) as follows:
+
+```csharp
+MSBuildTasks.MSBuild($"{SolutionFile} /target:Rebuild /p:Configuration={Configuration} /nr:false");
+```
+
+The returned object is a collection of standard and error output.
+
+:::info
+Many CLI tasks require adding a package reference to the build project file. For instance, when using `NUnitTasks` there should be one of the following entries to ensure the tool is available:
+
+```xml title="_build.csproj"
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <ItemGroup>
+    <!-- Adjust the version as needed -->
+    <PackageReference Include="NUnit.ConsoleRunner" Version="3.12.0" />
+  </ItemGroup>
+
+</Project>
+```
+
+```xml title="_build.csproj"
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <ItemGroup>
+    <!-- Adjust the version as needed -->
+    <PackageDownload Include="NUnit.ConsoleRunner" Version="[3.12.0]" />
+  </ItemGroup>
+
+</Project>
+```
+
+While it would be possible to magically download required packages, this explicit approach ensures that your builds are reproducible at any time. If a package is not referenced, the resulting error message will include a command to [install the package via the global tool](../06-global-tool/01-packages.md).
+:::
+
+## Fluent API
+
+While the example above is quite easy to understand, it also illustrates certain weaknesses. What if `SolutionFile` contains a space and must be quoted? What is the separator for passing multiple targets? Is the configuration actually passed as a dedicated argument or as an MSBuild property? What does the `/nr` switch stand for? To solve these issues, you can use the individual fluent interfaces:
+
+```csharp
+MSBuildTasks.MSBuild(_ => _
+    .SetTargetPath(SolutionFile)
+    .SetTargets("Clean", "Build")
+    .SetConfiguration(Configuration)
+    .EnableNodeReuse());
+```
+
+You can also use the fluent interfaces to manipulate the process invocation, including tool path, arguments, working directory, timeout, and environment variables.
+
+:::info
+All fluent interfaces implement a variation of the [builder pattern](https://en.wikipedia.org/wiki/Builder_pattern), in which every fluent call will create an immutable copy of the current `ToolSettings` instance with the intended changes applied. This enables great flexibility in composing similar process invocations.
+:::
+
+### Conditional Modifications
+
+In some cases, you may want to apply certain options only when a particular condition is met. This can be done fluently too, by using the `When` extension:
+
+```csharp
+DotNetTasks.DotNetTest(_ => _
+    .SetProjectFile(ProjectFile)
+    .SetConfiguration(Configuration)
+    .EnableNoBuild()
+    .When(PublishTestResults, _ => _
+        .SetLogger("trx")
+        .SetResultsDirectory(TestResultsDirectory)));
+```
+
+### Combinatorial Modifications
+
+A typical situation when using MSBuild for compilation is to compile for different configurations, target frameworks, or runtimes.
+You can use the `CombineWith` method to create the different combinations for invocation:
+
+```csharp
+var publishCombinations =
+    from project in new[] { FirstProject, SecondProject }
+    from framework in project.GetTargetFrameworks()
+    from runtime in new[] { "win10-x86", "osx-x64", "linux-x64" }
+    select new { project, framework, runtime };
+
+DotNetTasks.DotNetPublish(_ => _
+    .EnableNoRestore()
+    .SetConfiguration(Configuration)
+    .CombineWith(publishCombinations, (_, v) => _
+        .SetProject(v.project)
+        .SetFramework(v.framework)
+        .SetRuntime(v.runtime)));
+```
+
+### Multiple Invocations
+
+Based on [combinatorial modifications](#combinatorial-modifications), it is possible to set a `degreeOfParallelism` (default `1`) and a `continueOnFailure` flag (default `false`):
+
+```csharp
+DotNetTasks.DotNetNuGetPush(_ => _
+        .SetSource(Source)
+        .SetSymbolSource(SymbolSource)
+        .SetApiKey(ApiKey)
+        .CombineWith(
+            OutputDirectory.GlobFiles("*.nupkg").NotEmpty(), (_, v) => _
+                .SetTargetPath(v)),
+    degreeOfParallelism: 5,
+    continueOnFailure: true);
+```
+
+This example will push at most 5 packages simultaneously. Possible exceptions, for instance when a package already exists, are accumulated into an `AggregateException` and thrown when all invocations have been processed. The console output is buffered until all invocations are completed.
+
+### Custom Arguments
+
+It may happen that certain arguments are not available from the fluent interface. In this case, the `SetProcessArgumentConfigurator` method can be used to add them manually:
+
+```csharp
+MSBuildTasks.MSBuild(_ => _
+    .SetTargetPath(SolutionFile)
+    .SetProcessArgumentConfigurator(_ => _
+        .Add("/r")));
+```
+
+### Exit Code Handling
+
+By default, every invocation is asserted to have a zero exit code. However, you can also override this behavior using `SetProcessExitHandler` when required:
+
+```csharp
+NUnitTasks.NUnit3(_ => _
+    .SetInputFiles(Assemblies)
+    .SetProcessExitHandler(p => p.ExitCode switch
+    {
+        -1 => throw new Exception("Invalid args"),
+        > 0 => throw new Exception($"{p.ExitCode} tests have failed"),
+        _ => null
+    }));
+```
+
+```csharp
+NUnitTasks.NUnit3(_ => _
+    .SetInputFiles(Assemblies)
+    .SetProcessExitHandler(p =>
+    {
+        switch (p.ExitCode)
+        {
+            case -1:
+                throw new Exception("Invalid args");
+            case > 0:
+                throw new Exception($"{p.ExitCode} tests have failed");
+        }
+    }));
+```
+
+As a shorthand syntax, you can also disable the default assertion of a zero exit code, which essentially sets an empty delegate as the exit handler:
+
+```csharp
+NUnitTasks.NUnit3(_ => _
+    .SetInputFiles(Assemblies)
+    .DisableProcessAssertZeroExitCode());
+```
+
+### Verbosity Mapping
+
+Using the `VerbosityMappingAttribute`, it is possible to automatically map the verbosity passed via `--verbosity` to individual tools. The attribute must be applied at the build class level:
+
+```csharp
+[VerbosityMapping(typeof(MSBuildVerbosity),
+    Quiet = nameof(MSBuildVerbosity.Quiet),
+    Minimal = nameof(MSBuildVerbosity.Minimal),
+    Normal = nameof(MSBuildVerbosity.Normal),
+    Verbose = nameof(MSBuildVerbosity.Detailed))]
+class Build : NukeBuild
+{
+    // ...
+}
+```
+
+## Lightweight API
+
+Many of the most popular tools are already implemented.
+In case a certain tool is not yet supported with a proper CLI task class, NUKE allows you to use the following **injection attributes** to load it:
+
+```csharp
+[PathVariable]
+readonly Tool Git;
+
+[NuGetPackage(
+    packageId: "Redth.Net.Maui.Check",
+    packageExecutable: "MauiCheck.dll",
+    // Must be set for tools shipping multiple versions
+    Framework = "net6.0")]
+readonly Tool MauiCheck;
+
+// Relative to root directory or absolute path
+[LocalPath("./tools/corflags.exe")]
+readonly Tool CorFlags;
+
+// Different path on Windows and Unix
+[LocalPath(
+    windowsPath: "gradlew.bat",
+    unixPath: "gradlew")]
+readonly Tool Gradle;
+```
+
+The injected `Tool` delegate allows passing arguments, working directory, environment variables, and many more process-specific options:
+
+```csharp
+// Pass arguments with string interpolation
+Git($"checkout -b {Branch}");
+
+// Change working directory and environment variables
+CorFlags(
+    arguments: $"...",
+    workingDirectory: SourceDirectory,
+    environmentVariables: EnvironmentInfo.Variables
+        .ToDictionary(x => x.Key, x => x.Value)
+        .SetKeyValue("key", "value").AsReadOnly());
+
+// Only execute when available
+// Requires:
+MauiCheck?.Invoke($"--fix --preview");
+```
diff --git a/docs/03-common/09-compression.md b/docs/03-common/09-compression.md
new file mode 100644
index 000000000..f102f8aaa
--- /dev/null
+++ b/docs/03-common/09-compression.md
@@ -0,0 +1,84 @@
+---
+title: Archive Compression
+---
+
+In many situations you have to deal with compressed archives. Good examples are when you want to provide additional assets for your GitHub releases, or when you depend on other projects' release assets yourself and need to extract them before they can be used.
+
+:::note
+Please refer to the [SharpZipLib documentation](https://github.com/icsharpcode/SharpZipLib) for any questions.
+:::
+
+## Compressing Archives
+
+You can create a compressed archive from a directory as follows:
+
+```csharp title="Build.cs"
+PublishDirectory.ZipTo(
+    ArchiveFile,
+    filter: x => !x.HasExtension(ExcludedExtensions),
+    compressionLevel: CompressionLevel.SmallestSize,
+    fileMode: FileMode.CreateNew);
+```
+
+```csharp title="Build.cs"
+PublishDirectory.TarGZipTo(
+    ArchiveFile,
+    filter: x => !x.HasExtension(ExcludedExtensions),
+    fileMode: FileMode.CreateNew);
+```
+
+```csharp title="Build.cs"
+PublishDirectory.TarBZip2To(
+    ArchiveFile,
+    filter: x => !x.HasExtension(ExcludedExtensions),
+    fileMode: FileMode.CreateNew);
+```
+
+:::tip
+If you want to allow your consumers to verify the integrity of your archive files, you can calculate their MD5 checksums and publish them publicly:
+
+```csharp title="Build.cs"
+var checksum = ArchiveFile.GetFileHash();
+```
+
+:::
+
+## Extracting Archives
+
+You can extract an existing archive file to a directory:
+
+```csharp title="Build.cs"
+ArchiveFile.UnZipTo(PublishDirectory);
+```
+
+```csharp title="Build.cs"
+ArchiveFile.UnTarGZip(PublishDirectory);
+```
+
+```csharp title="Build.cs"
+ArchiveFile.UnTarBZip2(PublishDirectory);
+```
diff --git a/docs/03-common/11-chats.md b/docs/03-common/11-chats.md
new file mode 100644
index 000000000..905754085
--- /dev/null
+++ b/docs/03-common/11-chats.md
@@ -0,0 +1,104 @@
+---
+title: Chats & Social Media
+---
+
+As a final step of your build automation process, you may want to report errors or announce a new version through different chat and social media channels. NUKE comes with basic support for the most common platforms.
+
+You can send a [Slack](https://slack.com/) message as follows:
+
+```csharp
+// using static Nuke.Common.Tools.Slack.SlackTasks;
+
+[Parameter] [Secret] readonly string SlackWebhook;
+
+Target Send => _ => _
+    .Executes(async () =>
+    {
+        await SendSlackMessageAsync(_ => _
+                .SetText("Hello from NUKE!"),
+            SlackWebhook);
+    });
+```
+
+:::note
+For more advanced scenarios, check out the [SlackAPI](https://github.com/Inumedia/SlackAPI) or [SlackNet](https://github.com/soxtoby/SlackNet) project.
+:::
+
+You can send a [Microsoft Teams](https://www.microsoft.com/en/microsoft-teams/group-chat-software) message as follows:
+
+```csharp
+// using static Nuke.Common.Tools.Teams.TeamsTasks;
+
+[Parameter] [Secret] readonly string TeamsWebhook;
+
+Target Send => _ => _
+    .Executes(async () =>
+    {
+        await SendTeamsMessageAsync(_ => _
+                .SetText("Hello from NUKE!"),
+            TeamsWebhook);
+    });
+```
+
+You can send a [Twitter](https://twitter.com/) message as follows:
+
+```csharp
+// using static Nuke.Common.Tools.Twitter.TwitterTasks;
+
+[Parameter] [Secret] readonly string TwitterConsumerKey;
+[Parameter] [Secret] readonly string TwitterConsumerSecret;
+[Parameter] [Secret] readonly string TwitterAccessToken;
+[Parameter] [Secret] readonly string TwitterAccessTokenSecret;
+
+Target Send => _ => _
+    .Executes(async () =>
+    {
+        await SendTweetAsync(
+            message: "Hello from NUKE",
+            TwitterConsumerKey,
+            TwitterConsumerSecret,
+            TwitterAccessToken,
+            TwitterAccessTokenSecret);
+    });
+```
+
+:::note
+For more advanced scenarios, check out the [Tweetinvi](https://github.com/linvi/tweetinvi) project.
+:::
+
+You can send a [Gitter](https://gitter.im/) message as follows:
+
+```csharp
+// using static Nuke.Common.Tools.Gitter.GitterTasks;
+
+[Parameter] readonly string GitterRoomId;
+[Parameter] [Secret] readonly string GitterAuthToken;
+
+Target Send => _ => _
+    .Executes(() =>
+    {
+        SendGitterMessage(
+            message: "Hello from NUKE",
+            GitterRoomId,
+            GitterAuthToken);
+    });
+```
+
+:::note
+For more advanced scenarios, check out the [gitter-api-pcl](https://github.com/uwp-squad/gitter-api-pcl) project.
+:::
diff --git a/docs/03-common/_category_.json b/docs/03-common/_category_.json
new file mode 100644
index 000000000..182e612cb
--- /dev/null
+++ b/docs/03-common/_category_.json
@@ -0,0 +1,3 @@
+{
+  "label": "Common Tasks"
+}
diff --git a/docs/04-sharing/01-global-builds.md b/docs/04-sharing/01-global-builds.md
new file mode 100644
index 000000000..332cdc5ca
--- /dev/null
+++ b/docs/04-sharing/01-global-builds.md
@@ -0,0 +1,89 @@
+---
+title: Global Builds
+---
+
+Instead of adding and maintaining build projects in all your repositories, you can also build them by convention using a global build. Global builds are based on the concept of [.NET global tools](https://docs.microsoft.com/en-us/dotnet/core/tools/global-tools) and additionally include all the necessary tools referenced through NuGet packages. That means that for building one of your repositories, you only need to install and execute your pre-packaged build.
+
+## Packaging
+
+As a first step, you need to extend the build project file with the [necessary information for global tool packaging](https://docs.microsoft.com/en-us/dotnet/core/tools/global-tools-how-to-create#setup-the-global-tool).
+In particular, this includes the `PackAsTool` and `ToolCommandName` properties:
+
+```xml title="MyBuild.csproj"
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net6.0</TargetFramework>
+    // highlight-start
+    <PackAsTool>true</PackAsTool>
+    <ToolCommandName>my-build</ToolCommandName>
+    // highlight-end
+  </PropertyGroup>
+
+</Project>
+```
+
+Afterwards, the project can be packaged and deployed as usual:
+
+```powershell
+# terminal-command
+dotnet pack --version <version>
+# terminal-command
+dotnet nuget push MyBuild.<version>.nupkg --source <source> --api-key <api-key>
+```
+
+:::note
+Currently, [single-file deployments](https://docs.microsoft.com/en-us/dotnet/core/deploying/single-file/overview) are not supported. That means that the operating system must have the .NET SDK installed. Feel free to track the [related GitHub issue](https://github.com/nuke-build/nuke/issues/822) for any updates.
+:::
+
+## Installation
+
+Once the global build is packaged and deployed, you can install it either locally to a repository or globally on your development machine:
+
+```powershell
+# terminal-command
+dotnet new tool-manifest
+# terminal-command
+dotnet tool install MyBuild
+```
+
+```powershell
+# terminal-command
+dotnet tool install -g MyBuild
+```
+
+:::tip
+When you want to guarantee reproducibility, local tools are the better fit since the version is pinned individually for every repository. Global tools, on the other hand, provide more convenience in that you're always building with the same version. This is especially helpful when your conventions, like folder structure and naming, are already well evolved.
+:::
+
+## Execution
+
+After installation, you can invoke the build through the command that you've specified in `ToolCommandName`. As per the example above:
+
+```powershell
+# terminal-command
+dotnet my-build [args]
+```
+
+```powershell
+# terminal-command
+my-build [args]
+```
diff --git a/docs/04-sharing/02-build-components.md b/docs/04-sharing/02-build-components.md
new file mode 100644
index 000000000..63c783012
--- /dev/null
+++ b/docs/04-sharing/02-build-components.md
@@ -0,0 +1,311 @@
+---
+title: Build Components
+---
+
+With build components you can implement your build infrastructure once, and compose individual builds across different repositories. Central to the idea of build components are [interface default implementations](https://devblogs.microsoft.com/dotnet/default-implementations-in-interfaces/), which allow you to separate targets by their concerns following the [single-responsibility principle](https://en.wikipedia.org/wiki/Single-responsibility_principle) and to pull them into your build just by inheriting the interface. A typical build based on components could look like this:
+
+```mermaid
+classDiagram
+    direction BT
+
+    INukeBuild <|.. IPack
+    INukeBuild <|.. ICompile
+    INukeBuild <|.. ITest
+    IPack <|.. Build
+    ICompile <|.. Build
+    ITest <|.. Build
+
+    <<interface>> INukeBuild
+
+    <<interface>> IPack
+    IPack : + Target Pack
+
+    <<interface>> ICompile
+    ICompile : + Target Compile
+
+    <<interface>> ITest
+    ITest : + Target Test
+```
+
+The component stubs from above can be translated into code as follows, where the `INukeBuild` base interface allows the components to use [build base properties](../02-fundamentals/04-builds.md#base-properties):
+
+```csharp
+interface ICompile : INukeBuild
+{
+    Target Compile => _ => _
+        .Executes(() => { /* Implementation */ });
+}
+
+interface IPack : INukeBuild
+{
+    Target Pack => _ => _
+        .Executes(() => { /* Implementation */ });
+}
+
+interface ITest : INukeBuild
+{
+    Target Test => _ => _
+        .Executes(() => { /* Implementation */ });
+}
+```
+
+In the actual `Build` class, all you have to do is inherit the components:
+
+```csharp
+class Build : NukeBuild, ICompile, IPack, ITest
+{
+    // Targets are inherited
+}
+```
+
+## Parameters
+
+In build components, you can use [parameters](../02-fundamentals/06-parameters.md) and other auto-injection attributes, like [`GitRepositoryAttribute`](../03-common/05-repository.md) or [`SolutionAttribute`](../03-common/07-solution-project-model.md), similar to regular build classes. However, since interfaces can't define instance fields (and therefore no auto-properties), the `INukeBuild` base interface provides a helper method that caches and returns resolved values for you:
+
+```csharp
+interface IComponent : INukeBuild
+{
+    [Parameter]
+    string Parameter => TryGetValue(() => Parameter);
+
+    [Solution]
+    Solution Solution => TryGetValue(() => Solution);
+}
+```
+
+:::tip
+The `TryGetValue` method can return `null`, for instance, when a parameter is not available. If you want to provide a default value, you can use the [null-coalescing operator](https://docs.microsoft.com/en-us/dotnet/csharp/language-reference/operators/null-coalescing-operator):
+
+```csharp
+interface IComponent : INukeBuild
+{
+    [Parameter]
+    string Parameter => TryGetValue(() => Parameter) ?? "default";
+}
+```
+
+Note that the fallback value is created on every property access, so you might want to cache it in a static field.
+:::
+
+### Parameter Prefixes
+
+For better distinction of similarly named component parameters and to avoid [smurf naming](https://blog.codinghorror.com/new-programming-jargon/#21) techniques, you can use the `ParameterPrefixAttribute` to introduce a common prefix for all parameters in a component:
+
+```csharp
+[ParameterPrefix(nameof(IComponent1))]
+interface IComponent1 : INukeBuild
+{
+    // Resolved as IComponent1Value
+    [Parameter] string Value => TryGetValue(() => Value);
+}
+
+[ParameterPrefix(nameof(IComponent2))]
+interface IComponent2 : INukeBuild
+{
+    // Resolved as IComponent2Value
+    [Parameter] string Value => TryGetValue(() => Value);
+}
+```
+
+## Dependencies
+
+You can define [dependencies](../02-fundamentals/05-targets.md#dependencies) between targets similar to regular build classes. Since targets from components cannot easily be referenced from their inheritors[^1], you must pass the component type as a generic parameter and provide the target through a lambda expression:
+
+```csharp
+class Build : NukeBuild, IComponent
+{
+    Target MyTarget => _ => _
+        .DependsOn<IComponent>(x => x.Target)
+        .Executes(() =>
+        {
+        });
+}
+```
+
+:::tip
+When a build component only defines a single target, you can use the shorthand syntax and omit the lambda that specifies the target.
+For instance, the above example can become:
+
+```csharp
+class Build : NukeBuild, IComponent
+{
+    Target MyTarget => _ => _
+        .DependsOn<IComponent>()
+        .Executes(() =>
+        {
+        });
+}
+```
+:::
+
+### Loose Dependencies
+
+Apart from [regular dependencies](../02-fundamentals/05-targets.md#dependencies), you can also define loose dependencies that only get applied when the respective component is also inherited. This allows you to compose your build more flexibly without imposing a particular inheritance chain:
+
+```csharp title="Build.cs"
+interface IComponent1 : INukeBuild
+{
+    Target A => _ => _
+        // highlight-start
+        .TryDependentFor<IComponent2>() // Choose this...
+        // highlight-end
+        .Executes(() => { });
+}
+
+interface IComponent2 : INukeBuild
+{
+    Target B => _ => _
+        // highlight-start
+        .TryDependsOn<IComponent1>() // ...or this!
+        // highlight-end
+        .Executes(() => { });
+}
+```
+
+```csharp title="Build.cs"
+interface IComponent1 : INukeBuild
+{
+    Target A => _ => _
+        // highlight-start
+        .TryBefore<IComponent2>() // Choose this...
+        // highlight-end
+        .Executes(() => { });
+}
+
+interface IComponent2 : INukeBuild
+{
+    Target B => _ => _
+        // highlight-start
+        .TryAfter<IComponent1>() // ...or this!
+        // highlight-end
+        .Executes(() => { });
+}
+```
+
+```csharp title="Build.cs"
+interface IComponent1 : INukeBuild
+{
+    Target A => _ => _
+        // highlight-start
+        .TryTriggers<IComponent2>() // Choose this...
+        // highlight-end
+        .Executes(() => { });
+}
+
+interface IComponent2 : INukeBuild
+{
+    Target B => _ => _
+        // highlight-start
+        .TryTriggeredBy<IComponent1>() // ...or this!
+        // highlight-end
+        .Executes(() => { });
+}
+```
+
+## Extensions and Overrides
+
+Another SOLID design principle that can be applied to build components is the [open-closed principle](https://en.wikipedia.org/wiki/Open%E2%80%93closed_principle). Once you have pulled a target into your build, it can be extended or overridden using [explicit interface implementations](https://docs.microsoft.com/en-us/dotnet/csharp/programming-guide/interfaces/explicit-interface-implementation):
+
+```csharp
+class Build : NukeBuild, IComponent
+{
+    Target IComponent.Target => _ => _
+        .Inherit<IComponent>()
+        .Executes(() => { });
+}
+```
+
+```csharp
+class Build : NukeBuild, IComponent
+{
+    Target IComponent.Target => _ => _
+        .Executes(() => { });
+}
+```
+
+:::tip
+With build components you can push the separation of concerns as far as you wish. For instance, consider the following example where a common `ICompile` component only defines the dependency on the `IRestore` component. Another two derived types of `ICompile` provide the actual implementation of the target using the .NET CLI and MSBuild:
+
+```mermaid
+classDiagram
+    direction RL
+
+    ICompile --|> IRestore : TryDependsOn
+    ICompileWithDotNet <|.. ICompile
+    ICompileWithMSBuild <|.. ICompile
+
+    <<interface>> IRestore
+    IRestore : + Target Restore
+
+    <<interface>> ICompile
+    ICompile : + Target Compile
+
+    <<interface>> ICompileWithDotNet
+    ICompileWithDotNet : + Target Compile
+
+    <<interface>> ICompileWithMSBuild
+    ICompileWithMSBuild : + Target Compile
+```
+
+```csharp
+interface IRestore : INukeBuild
+{
+    Target Restore => _ => _
+        .Executes(() => { /* Implementation */ });
+}
+
+interface ICompile : INukeBuild
+{
+    Target Compile => _ => _
+        .TryDependsOn<IRestore>();
+}
+
+interface ICompileWithDotNet : ICompile
+{
+    Target ICompile.Compile => _ => _
+        .Inherit<ICompile>()
+        .Executes(() => { /* .NET CLI implementation */ });
+}
+
+interface ICompileWithMSBuild : ICompile
+{
+    Target ICompile.Compile => _ => _
+        .Inherit<ICompile>()
+        .Executes(() => { /* MSBuild implementation */ });
+}
+```
+
+Targets that follow later in the execution plan can now reference the implementation-agnostic definition:
+
+```csharp
+class Build : NukeBuild, ICompileWithDotNet
+{
+    Target Pack => _ => _
+        .DependsOn<ICompile>()
+        .Executes(() => { /* Implementation */ });
+}
+```
+:::
+
+[^1]: Interface default members behave like explicit interface implementations, which means that to access their members, the `this` reference must be cast explicitly to the interface type. For instance, `((IComponent)this).Target`.
diff --git a/docs/04-sharing/_category_.json b/docs/04-sharing/_category_.json
new file mode 100644
index 000000000..873c78fc8
--- /dev/null
+++ b/docs/04-sharing/_category_.json
@@ -0,0 +1,3 @@
+{
+  "label": "Build Sharing"
+}
diff --git a/docs/05-cicd/_category_.json b/docs/05-cicd/_category_.json
new file mode 100644
index 000000000..8d508dedf
--- /dev/null
+++ b/docs/05-cicd/_category_.json
@@ -0,0 +1,3 @@
+{
+  "label": "CI/CD Support"
+}
diff --git a/docs/05-cicd/appveyor-artifacts.png b/docs/05-cicd/appveyor-artifacts.png
new file mode 100644
index 000000000..c48c1ae77
Binary files /dev/null and b/docs/05-cicd/appveyor-artifacts.png differ
diff --git a/docs/05-cicd/appveyor-artifacts.webp b/docs/05-cicd/appveyor-artifacts.webp
new file mode 100644
index 000000000..1040215b4
Binary files /dev/null and b/docs/05-cicd/appveyor-artifacts.webp differ
diff --git a/docs/05-cicd/appveyor.md b/docs/05-cicd/appveyor.md
new file mode 100644
index 000000000..d675570b7
--- /dev/null
+++ b/docs/05-cicd/appveyor.md
@@ -0,0 +1,128 @@
+---
+title: AppVeyor
+---
+
+Running on [AppVeyor](https://www.appveyor.com/) will automatically enable custom theming for your build log output:
+
+![AppVeyor Log Output](appveyor.webp)
+
+:::info
+Please refer to the official [AppVeyor documentation](https://www.appveyor.com/docs/) for questions not covered here.
+:::
+
+## Environment Variables
+
+You can access [predefined environment variables](https://www.appveyor.com/docs/environment-variables/) by using the `AppVeyor` class:
+
+```csharp
+AppVeyor AppVeyor => AppVeyor.Instance;
+
+Target Print => _ => _
+    .Executes(() =>
+    {
+        Log.Information("Branch = {Branch}", AppVeyor.RepositoryBranch);
+        Log.Information("Commit = {Commit}", AppVeyor.RepositoryCommitSha);
+    });
+```
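+Outside of AppVeyor, `AppVeyor.Instance` is typically `null`, so it can also double as a guard. A minimal sketch that skips a target for pull request builds (the target name and condition are illustrative):
+
+```csharp
+Target Announce => _ => _
+    // Run only on AppVeyor and skip pull request builds
+    .OnlyWhenStatic(() => AppVeyor.Instance != null && AppVeyor.Instance.PullRequestNumber == null)
+    .Executes(() => { /* Implementation */ });
+```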
+Exhaustive list of strongly-typed properties + +```csharp +class AppVeyor +{ + string AccountName { get; } + string ApiUrl { get; } + string BuildFolder { get; } + int BuildId { get; } + int BuildNumber { get; } + string BuildVersion { get; } + string BuildWorkerImage { get; } + Tool Cli { get; } + string Configuration { get; } + bool ForcedBuild { get; } + string JobId { get; } + string JobName { get; } + int JobNumber { get; } + string Platform { get; } + int ProjectId { get; } + string ProjectName { get; } + string ProjectSlug { get; } + int? PullRequestNumber { get; } + string PullRequestTitle { get; } + bool Rebuild { get; } + string RepositoryBranch { get; } + string RepositoryCommitAuthor { get; } + string RepositoryCommitAuthorEmail { get; } + string RepositoryCommitMessage { get; } + string RepositoryCommitMessageExtended { get; } + string RepositoryCommitSha { get; } + DateTime RepositoryCommitTimestamp { get; } + string RepositoryName { get; } + string RepositoryProvider { get; } + string RepositoryScm { get; } + bool RepositoryTag { get; } + string RepositoryTagName { get; } + bool ScheduledBuild { get; } + string Url { get; } +} +``` + +
+
+## Configuration Generation
+
+You can generate [build pipeline files](https://appveyor.com/docs/appveyor-yml/) from your existing target definitions by adding the `AppVeyor` attribute. For instance, you can run the `Compile` target on every push with the Visual Studio 2022 image:
+
+```csharp title="Build.cs"
+[AppVeyor(
+    AppVeyorImage.VisualStudio2022,
+    InvokedTargets = new[] { nameof(Compile) })]
+class Build : NukeBuild { /* ... */ }
+```
+Generated output + +```yaml title="appveyor.yml" + +image: + - Visual Studio 2022 + +build_script: + - cmd: .\build.cmd Compile + - sh: ./build.cmd Compile +``` + +
+ +:::info +Whenever you make changes to the attribute, you have to [run the build](../01-getting-started/03-execution.md) at least once to regenerate the pipelines file. +::: + +## Artifacts + +If your targets produce artifacts, like packages or coverage reports, you can publish those directly from the target definition: + +```csharp +Target Pack => _ => _ + .Produces(PackagesDirectory / "*.nupkg") + .Executes(() => { /* Implementation */ }); +``` + +
+Generated output + +```yaml title="appveyor.yml" +artifacts: + - path: output/packages/*.nupkg +``` +
+ +After your build has finished, those artifacts will be listed under the artifacts tab: + +

+ +![AppVeyor Artifacts Tab](appveyor-artifacts.webp) + +

diff --git a/docs/05-cicd/appveyor.png b/docs/05-cicd/appveyor.png new file mode 100644 index 000000000..ea0bc93bf Binary files /dev/null and b/docs/05-cicd/appveyor.png differ diff --git a/docs/05-cicd/appveyor.webp b/docs/05-cicd/appveyor.webp new file mode 100644 index 000000000..53b900482 Binary files /dev/null and b/docs/05-cicd/appveyor.webp differ diff --git a/docs/05-cicd/azure-pipelines-artifacts-dark.png b/docs/05-cicd/azure-pipelines-artifacts-dark.png new file mode 100644 index 000000000..7c91c55f8 Binary files /dev/null and b/docs/05-cicd/azure-pipelines-artifacts-dark.png differ diff --git a/docs/05-cicd/azure-pipelines-artifacts-dark.webp b/docs/05-cicd/azure-pipelines-artifacts-dark.webp new file mode 100644 index 000000000..b87310eab Binary files /dev/null and b/docs/05-cicd/azure-pipelines-artifacts-dark.webp differ diff --git a/docs/05-cicd/azure-pipelines-artifacts-light.png b/docs/05-cicd/azure-pipelines-artifacts-light.png new file mode 100644 index 000000000..bd1a9a557 Binary files /dev/null and b/docs/05-cicd/azure-pipelines-artifacts-light.png differ diff --git a/docs/05-cicd/azure-pipelines-artifacts-light.webp b/docs/05-cicd/azure-pipelines-artifacts-light.webp new file mode 100644 index 000000000..fbb4e7dc5 Binary files /dev/null and b/docs/05-cicd/azure-pipelines-artifacts-light.webp differ diff --git a/docs/05-cicd/azure-pipelines.md b/docs/05-cicd/azure-pipelines.md new file mode 100644 index 000000000..e18d337a8 --- /dev/null +++ b/docs/05-cicd/azure-pipelines.md @@ -0,0 +1,255 @@ +--- +title: Azure Pipelines +--- + +Running on [Azure Pipelines](https://azure.microsoft.com/en-us/services/devops/pipelines/) will automatically enable custom theming for your build log output including [collapsible sections](https://docs.microsoft.com/en-us/azure/devops/pipelines/scripts/logging-commands#formatting-commands) for better structuring: + +![Azure Pipelines Log Output](azure-pipelines.webp) + +:::info +Please refer to the official [Azure Pipelines documentation](https://docs.microsoft.com/en-us/azure/devops/pipelines/?view=azure-devops) for questions not covered here. +::: + +## Environment Variables + +You can access [predefined environment variables](https://docs.microsoft.com/en-us/azure/devops/pipelines/build/variables) by using the `AzurePipelines` class: + +```csharp +AzurePipelines AzurePipelines => AzurePipelines.Instance; + +Target Print => _ => _ + .Executes(() => + { + Log.Information("Branch = {Branch}", AzurePipelines.SourceBranch); + Log.Information("Commit = {Commit}", AzurePipelines.SourceVersion); + }); +``` + +
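+Beyond reading variables, the `AzurePipelines` instance also exposes helpers that emit [logging commands](https://docs.microsoft.com/en-us/azure/devops/pipelines/scripts/logging-commands). A sketch that sets the build number shown in the UI (assuming the `UpdateBuildNumber` helper; verify against your NUKE version):
+
+```csharp
+Target SetBuildNumber => _ => _
+    .OnlyWhenStatic(() => AzurePipelines.Instance != null)
+    .Executes(() =>
+    {
+        // Assumed helper that emits ##vso[build.updatebuildnumber]
+        AzurePipelines.Instance.UpdateBuildNumber($"1.0.{AzurePipelines.Instance.BuildId}");
+    });
+```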
+Exhaustive list of strongly-typed properties + +```csharp +class AzurePipelines +{ + string AccessToken { get; } + string AgentBuildDirectory { get; } + string AgentHomeDirectory { get; } + long AgentId { get; } + AzurePipelinesJobStatus AgentJobStatus { get; } + string AgentMachineName { get; } + string AgentName { get; } + string AgentWorkFolder { get; } + string ArtifactStagingDirectory { get; } + string BinariesDirectory { get; } + long BuildId { get; } + string BuildNumber { get; } + AzurePipelinesBuildReason BuildReason { get; } + string BuildUri { get; } + Guid CollectionId { get; } + string DefaultWorkingDirectory { get; } + long DefinitionId { get; } + string DefinitionName { get; } + long DefinitionVersion { get; } + string JobDisplayName { get; } + Guid JobId { get; } + string PhaseName { get; } + long? PullRequestId { get; } + string PullRequestSourceBranch { get; } + string PullRequestTargetBranch { get; } + string QueuedBy { get; } + Guid QueuedById { get; } + bool RepositoryClean { get; } + bool RepositoryGitSubmoduleCheckout { get; } + string RepositoryLocalPath { get; } + string RepositoryName { get; } + AzurePipelinesRepositoryType RepositoryProvider { get; } + string RepositoryTfvcWorkspace { get; } + string RepositoryUri { get; } + string RequestedFor { get; } + string RequestedForEmail { get; } + Guid RequestedForId { get; } + string SourceBranch { get; } + string SourceBranchName { get; } + string SourceDirectory { get; } + string SourceTfvcShelveset { get; } + string SourceVersion { get; } + string StageDisplayName { get; } + string StageName { get; } + string StagingDirectory { get; } + Guid TaskInstanceId { get; } + string TeamFoundationCollectionUri { get; } + string TeamProject { get; } + Guid TeamProjectId { get; } + string TestResultsDirectory { get; } +} +``` + +
+ +## Configuration Generation + +You can generate [build pipeline files](https://docs.microsoft.com/en-us/azure/devops/pipelines/create-first-pipeline) from your existing target definitions by adding the `AzurePipelines` attribute. For instance, you can run the `Compile` target on every push with the latest Ubuntu image: + +```csharp title="Build.cs" +[AzurePipelines( + AzurePipelinesImage.UbuntuLatest, + InvokedTargets = new[] { nameof(Compile) })] +class Build : NukeBuild { /* ... */ } +``` + +
+Generated output + +```yaml title="azure-pipelines.yml" +stages: + - stage: ubuntu_latest + displayName: 'ubuntu-latest' + pool: + vmImage: 'ubuntu-latest' + jobs: + - job: Compile + displayName: 'Compile' + steps: + - task: CmdLine@2 + inputs: + script: './build.cmd Compile --skip' +``` + +
+ +:::info +Whenever you make changes to the attribute, you have to [run the build](../01-getting-started/03-execution.md) at least once to regenerate the pipelines file. +::: + +### Artifacts + +If your targets produce artifacts, like packages or coverage reports, you can publish those directly from the target definition: + +```csharp +Target Pack => _ => _ + .Produces(PackagesDirectory / "*.nupkg") + .Executes(() => { /* Implementation */ }); +``` + +
+Generated output + +```yaml title="azure-pipelines.yml" +- task: PublishBuildArtifacts@1 + inputs: + artifactName: packages + pathtoPublish: 'output/packages' +``` +
+ +After your build has finished, those artifacts will be listed under the artifacts tab: + +

+ +![Azure Pipelines Artifacts Tab](azure-pipelines-artifacts-light.webp#gh-light-mode-only) +![Azure Pipelines Artifacts Tab](azure-pipelines-artifacts-dark.webp#gh-dark-mode-only) + +

+ +### Importing Secrets + +If you want to use [secret variables](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/variables#secret-variables) from your repository, you can use the `ImportSecrets` property to automatically load them into a [secret parameter](../02-fundamentals/06-parameters.md#secret-parameters) defined in your build: + +```csharp title="Build.cs" +[AzurePipelines( + // ... + ImportSecrets = new[] { nameof(NuGetApiKey) })] +class Build : NukeBuild +{ + [Parameter] [Secret] readonly string NuGetApiKey; +} +``` + +
+Generated output + +```yaml title="azure-pipelines.yml" +- task: CmdLine@2 + inputs: + script: './build.cmd Publish --skip' + env: + NuGetApiKey: $(NuGetApiKey) +``` + +
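+
+Inside the build, the imported value can then be consumed like any other secret parameter. A rough sketch (assuming the `Nuke.Common.Tools.DotNet` tasks and a hypothetical `PackagesDirectory`):
+
+```csharp
+Target Publish => _ => _
+    // Fail fast if the secret was not supplied
+    .Requires(() => NuGetApiKey)
+    .Executes(() =>
+    {
+        DotNetNuGetPush(_ => _
+            .SetTargetPath(PackagesDirectory / "*.nupkg")
+            .SetSource("https://api.nuget.org/v3/index.json")
+            .SetApiKey(NuGetApiKey));
+    });
+```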
+ +:::note +If you're facing any issues, make sure that the name in the Azure Pipelines settings is the same as generated into the pipelines file. +::: + +### Using Access Tokens + +For every pipeline run, Azure Pipelines generates a [one-time token](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/access-tokens) with [adequate permissions](https://docs.microsoft.com/en-us/azure/devops/pipelines/process/access-tokens#manage-build-service-account-permissions) that you can use to authenticate with the Azure Pipelines API. You can enable the access token in the attribute as follows: + +```csharp title="Build.cs" +[AzurePipelines( + // ... + EnableAccessToken = true)] +class Build : NukeBuild +{ + AzurePipelines AzurePipelines => AzurePipelines.Instance; + + Target Request => _ => _ + .Executes(() => + { + Log.Information("Access Token = {Token}", AzurePipelines.AccessToken); + }); +} +``` + +
+Generated output + +```yaml title="azure-pipelines.yml" +- task: CmdLine@2 + inputs: + script: './build.cmd Release --skip' + env: + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + +``` + +
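+
+The token can be used, for example, to call the Azure DevOps REST API. A rough sketch (the endpoint, API version, and `System.Net.Http` usings are assumptions for illustration):
+
+```csharp
+Target ListBuilds => _ => _
+    .Executes(async () =>
+    {
+        using var client = new HttpClient();
+        client.DefaultRequestHeaders.Authorization =
+            new AuthenticationHeaderValue("Bearer", AzurePipelines.AccessToken);
+        // The predefined variables conveniently provide the collection URI and project
+        var url = $"{AzurePipelines.TeamFoundationCollectionUri}{AzurePipelines.TeamProject}/_apis/build/builds?api-version=6.0";
+        Log.Information("Builds = {Response}", await client.GetStringAsync(url));
+    });
+```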
+ +### Caching + +By default, the generated pipeline file will include [caching tasks](https://docs.microsoft.com/en-us/azure/devops/pipelines/release/caching) to reduce the time for installing the .NET SDK (if not preinstalled) and restoring NuGet packages. + +
+Generated output + +```yaml title="azure-pipelines.yml" +- task: Cache@2 + displayName: Cache (nuke-temp) + inputs: + key: $(Agent.OS) | nuke-temp | **/global.json, **/*.csproj + restoreKeys: $(Agent.OS) | nuke-temp + path: .nuke/temp +- task: Cache@2 + displayName: Cache (nuget-packages) + inputs: + key: $(Agent.OS) | nuget-packages | **/global.json, **/*.csproj + restoreKeys: $(Agent.OS) | nuget-packages + path: $(HOME)/.nuget/packages +``` + +
+ +You can customize the caching tasks by overwriting the following properties: + +```csharp title="Build.cs" +[AzurePipelines( + // ... + CacheKeyFiles = new[] { "**/global.json", "**/*.csproj" }, + CachePaths = new[] + { + AzurePipelinesCachePaths.Nuke, + AzurePipelinesCachePaths.NuGet + })] +class Build : NukeBuild { /* ... */ } +``` diff --git a/docs/05-cicd/azure-pipelines.png b/docs/05-cicd/azure-pipelines.png new file mode 100644 index 000000000..f0fc36032 Binary files /dev/null and b/docs/05-cicd/azure-pipelines.png differ diff --git a/docs/05-cicd/azure-pipelines.webp b/docs/05-cicd/azure-pipelines.webp new file mode 100644 index 000000000..c9bae7cce Binary files /dev/null and b/docs/05-cicd/azure-pipelines.webp differ diff --git a/docs/05-cicd/bitbucket.md b/docs/05-cicd/bitbucket.md new file mode 100644 index 000000000..af6d32f92 --- /dev/null +++ b/docs/05-cicd/bitbucket.md @@ -0,0 +1,60 @@ +--- +title: Bitbucket +--- + +Running on [Bitbucket](https://bitbucket.org/) will use the standard theming for your build log output. + +:::info +Please refer to the official [Bitbucket documentation](https://confluence.atlassian.com/bitbucketserver/) for questions not covered here. +::: + +## Environment Variables + +You can access [predefined environment variables](https://support.atlassian.com/bitbucket-cloud/docs/variables-and-secrets/) by using the `Bitbucket` class: + +```csharp +Bitbucket Bitbucket => Bitbucket.Instance; + +Target Print => _ => _ + .Executes(() => + { + Log.Information("Branch = {Branch}", Bitbucket.Branch); + Log.Information("Commit = {Commit}", Bitbucket.Commit); + }); +``` + +
+Exhaustive list of strongly-typed properties + +```csharp +class Bitbucket +{ + string Bookmark { get; } + string Branch { get; } + long BuildNumber { get; } + string CloneDirectory { get; } + string Commit { get; } + string DeploymentEnvironment { get; } + string DeploymentEnvironmentUuid { get; } + string ExitCode { get; } + string GitHttpOrigin { get; } + string GitSshOrigin { get; } + int ParallelStep { get; } + int ParallelStepCount { get; } + string PipelineUuid { get; } + string ProjectKey { get; } + string ProjectUuid { get; } + string PullRequestDestinationBranch { get; } + int PullRequestId { get; } + string RepositoryFullName { get; } + string RepositorySlug { get; } + string RepositoryUuid { get; } + string StepOidcToken { get; } + string StepTriggererUuid { get; } + string StepUuid { get; } + string Tag { get; } + string Workspace { get; } +} +``` + +
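+
+For example, the `ParallelStep` and `ParallelStepCount` properties lend themselves to sharding work across parallel steps. A rough sketch (the test project layout and the `DotNetTest` usage are illustrative):
+
+```csharp
+Target Test => _ => _
+    .Executes(() =>
+    {
+        // Each parallel step picks its own slice of the test projects
+        var shard = RootDirectory.GlobFiles("tests/**/*.Tests.csproj")
+            .Where((_, index) => index % Bitbucket.ParallelStepCount == Bitbucket.ParallelStep);
+        foreach (var project in shard)
+            DotNetTest(_ => _.SetProjectFile(project));
+    });
+```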
diff --git a/docs/05-cicd/github-actions-artifacts-dark.png b/docs/05-cicd/github-actions-artifacts-dark.png new file mode 100644 index 000000000..88c704134 Binary files /dev/null and b/docs/05-cicd/github-actions-artifacts-dark.png differ diff --git a/docs/05-cicd/github-actions-artifacts-dark.webp b/docs/05-cicd/github-actions-artifacts-dark.webp new file mode 100644 index 000000000..e954cee61 Binary files /dev/null and b/docs/05-cicd/github-actions-artifacts-dark.webp differ diff --git a/docs/05-cicd/github-actions-artifacts-light.png b/docs/05-cicd/github-actions-artifacts-light.png new file mode 100644 index 000000000..75f4e5f93 Binary files /dev/null and b/docs/05-cicd/github-actions-artifacts-light.png differ diff --git a/docs/05-cicd/github-actions-artifacts-light.webp b/docs/05-cicd/github-actions-artifacts-light.webp new file mode 100644 index 000000000..8fb535a25 Binary files /dev/null and b/docs/05-cicd/github-actions-artifacts-light.webp differ diff --git a/docs/05-cicd/github-actions.md b/docs/05-cicd/github-actions.md new file mode 100644 index 000000000..80ec11c9f --- /dev/null +++ b/docs/05-cicd/github-actions.md @@ -0,0 +1,220 @@ +--- +title: GitHub Actions +--- + +Running on [GitHub Actions](https://github.com/features/actions) will automatically enable custom theming for your build log output including [collapsible groups](https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#grouping-log-lines) for better structuring: + +![GitHub Actions Log Output](github-actions.webp) + +:::info +Please refer to the official [GitHub Actions documentation](https://docs.github.com/en/actions) for questions not covered here. +::: + +## Environment Variables + +You can access [predefined environment variables](https://docs.github.com/en/actions/learn-github-actions/environment-variables) by using the `GitHubActions` class: + +```csharp +GitHubActions GitHubActions => GitHubActions.Instance; + +Target Print => _ => _ + .Executes(() => + { + Log.Information("Branch = {Branch}", GitHubActions.Ref); + Log.Information("Commit = {Commit}", GitHubActions.Sha); + }); +``` + +
+Exhaustive list of strongly-typed properties + +```csharp +class GitHubActions +{ + string Action { get; } + string Actor { get; } + string BaseRef { get; } + string EventName { get; } + string EventPath { get; } + JObject GitHubContext { get; } + JObject GitHubEvent { get; } + string HeadRef { get; } + string Home { get; } + bool IsPullRequest { get; } + string Job { get; } + long JobId { get; } + string PullRequestAction { get; } + int? PullRequestNumber { get; } + string Ref { get; } + string Repository { get; } + string RepositoryOwner { get; } + long RunId { get; } + long RunNumber { get; } + string ServerUrl { get; } + string Sha { get; } + string Token { get; } + string Workflow { get; } + string Workspace { get; } +} +``` + +
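+
+The `GitHubEvent` property exposes the raw webhook payload as a `JObject`, whose shape depends on the trigger. A rough sketch for push events (the accessed JSON properties are illustrative):
+
+```csharp
+Target PrintCommitMessage => _ => _
+    .Executes(() =>
+    {
+        // Only push events carry a head_commit object
+        var message = GitHubActions.GitHubEvent["head_commit"]?["message"]?.ToString();
+        Log.Information("Commit Message = {Message}", message);
+    });
+```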
+ +## Configuration Generation + +You can generate [workflow files](https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions) from your existing target definitions by adding the `GitHubActions` attribute. For instance, you can run the `Compile` target on every push with the latest Ubuntu image: + +```csharp title="Build.cs" +[GitHubActions( + "continuous", + GitHubActionsImage.UbuntuLatest, + On = new[] { GitHubActionsTrigger.Push }, + InvokedTargets = new[] { nameof(Compile) })] +class Build : NukeBuild { /* ... */ } +``` + +
+Generated output + +```yaml title=".github/workflows/continuous.yml" +name: continuous + +on: [push] + +jobs: + ubuntu-latest: + name: ubuntu-latest + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Run './build.cmd Compile' + run: ./build.cmd Compile +``` + +
+ +:::info +Whenever you make changes to the attribute, you have to [run the build](../01-getting-started/03-execution.md) at least once to regenerate the workflow file. +::: + +### Artifacts + +If your targets produce artifacts, like packages or coverage reports, you can publish those directly from the target definition: + +```csharp +Target Pack => _ => _ + .Produces(PackagesDirectory / "*.nupkg") + .Executes(() => { /* Implementation */ }); +``` + +
+Generated output + +```yaml title=".github/workflows/continuous.yml" +- uses: actions/upload-artifact@v1 + with: + name: packages + path: output/packages +``` +
+ +After your build has finished, those artifacts will be listed under the _Summary_ tab: + +

+ +![GitHub Actions Artifacts Tab](github-actions-artifacts-light.webp#gh-light-mode-only) +![GitHub Actions Artifacts Tab](github-actions-artifacts-dark.webp#gh-dark-mode-only) + +

+ +### Importing Secrets + +If you want to use [encrypted secrets](https://docs.github.com/en/actions/security-guides/encrypted-secrets#about-encrypted-secrets) from your organization or repository, you can use the `ImportSecrets` property to automatically load them into a [secret parameter](../02-fundamentals/06-parameters.md#secret-parameters) defined in your build: + +```csharp title="Build.cs" +[GitHubActions( + // ... + ImportSecrets = new[] { nameof(NuGetApiKey) })] +class Build : NukeBuild +{ + [Parameter] [Secret] readonly string NuGetApiKey; +} +``` + +
+Generated output + +```yaml title=".github/workflows/continuous.yml" +- name: Run './build.cmd Publish' + run: ./build.cmd Publish + env: + NuGetApiKey: ${{ secrets.NUGET_API_KEY }} +``` + +
+ +:::note +If you're facing any issues, make sure that the name in the GitHub settings is the same as generated into the workflow file. +::: + +### Using the GitHub Token + +For every workflow run, GitHub generates a [one-time token](https://docs.github.com/en/actions/security-guides/automatic-token-authentication) with [adequate permissions](https://docs.github.com/en/actions/security-guides/automatic-token-authentication#permissions-for-the-github_token) that you can use to authenticate with the GitHub API. You can enable the GitHub token in your attribute as follows: + +```csharp title="Build.cs" +[GitHubActions( + // ... + EnableGitHubToken = true)] +class Build : NukeBuild +{ + GitHubActions GitHubActions => GitHubActions.Instance; + + Target Request => _ => _ + .Executes(() => + { + Log.Information("GitHub Token = {Token}", GitHubActions.Token); + }); +} +``` + +
+Generated output
+
+```yaml title=".github/workflows/continuous.yml"
+- name: Run './build.cmd Release'
+  run: ./build.cmd Release
+  env:
+    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+```
+
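+
+The token can then authenticate API calls, for instance via [Octokit](https://github.com/octokit/octokit.net). A rough sketch (the Octokit reference and the queried data are assumptions for illustration):
+
+```csharp
+Target QueryRepository => _ => _
+    .Executes(async () =>
+    {
+        var client = new GitHubClient(new ProductHeaderValue("nuke-build"))
+        {
+            Credentials = new Credentials(GitHubActions.Token)
+        };
+        // GITHUB_REPOSITORY has the form 'owner/name'
+        var parts = GitHubActions.Repository.Split('/');
+        var repository = await client.Repository.Get(parts[0], parts[1]);
+        Log.Information("Stars = {Stars}", repository.StargazersCount);
+    });
+```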
+ +### Caching + +By default, the generated workflow file will include a [caching step](https://github.com/actions/cache) to reduce the time for installing the .NET SDK (if not preinstalled) and restoring NuGet packages. + +
+Generated output + +```yaml title=".github/workflows/continuous.yml" +- name: Cache .nuke/temp, ~/.nuget/packages + uses: actions/cache@v2 + with: + path: | + .nuke/temp + ~/.nuget/packages + key: ${{ runner.os }}-${{ hashFiles('global.json', 'source/**/*.csproj') }} +``` + +
+ +You can customize the caching step by overwriting the following properties: + +```csharp title="Build.cs" +[GitHubActions( + // ... + CacheKeyFiles = new[] { "**/global.json", "**/*.csproj" }, + CacheIncludePatterns = new[] { ".nuke/temp", "~/.nuget/packages" }, + CacheExcludePatterns = new string[0])] +class Build : NukeBuild { /* ... */ } +``` diff --git a/docs/05-cicd/github-actions.png b/docs/05-cicd/github-actions.png new file mode 100644 index 000000000..3b81365d3 Binary files /dev/null and b/docs/05-cicd/github-actions.png differ diff --git a/docs/05-cicd/github-actions.webp b/docs/05-cicd/github-actions.webp new file mode 100644 index 000000000..b0680a987 Binary files /dev/null and b/docs/05-cicd/github-actions.webp differ diff --git a/docs/05-cicd/gitlab.md b/docs/05-cicd/gitlab.md new file mode 100644 index 000000000..9c9c9f553 --- /dev/null +++ b/docs/05-cicd/gitlab.md @@ -0,0 +1,75 @@ +--- +title: GitLab +--- + +Running on [GitLab](https://about.gitlab.com/) will automatically enable custom theming for your build log output including [collapsible sections](https://docs.gitlab.com/ee/ci/jobs/#expand-and-collapse-job-log-sections) for better structuring: + +![GitLab Log Output](gitlab.webp) + +:::info +Please refer to the official [GitLab documentation](https://docs.gitlab.com/) for questions not covered here. +::: + +## Environment Variables + +You can access [predefined environment variables](https://docs.gitlab.com/ee/ci/variables/predefined_variables.html) by using the `GitLab` class: + +```csharp +GitLab GitLab => GitLab.Instance; + +Target Print => _ => _ + .Executes(() => + { + Log.Information("Branch = {Branch}", GitLab.CommitRefName); + Log.Information("Commit = {Commit}", GitLab.CommitSha); + }); +``` + +
+Exhaustive list of strongly-typed properties + +```csharp +class GitLab +{ + bool Ci { get; } + string CommitRefName { get; } + string CommitRefSlug { get; } + string CommitSha { get; } + string CommitTag { get; } + string ConfigPath { get; } + bool DisposableEnvironment { get; } + string GitLabUserEmail { get; } + long GitLabUserId { get; } + string GitLabUserLogin { get; } + string GitLabUserName { get; } + long JobId { get; } + bool JobManual { get; } + string JobName { get; } + string JobStage { get; } + string JobToken { get; } + long PipelineId { get; } + string PipelineSource { get; } + bool PipelineTriggered { get; } + string ProjectDirectory { get; } + long ProjectId { get; } + string ProjectName { get; } + string ProjectNamespace { get; } + string ProjectPath { get; } + string ProjectPathSlug { get; } + string ProjectUrl { get; } + GitLabProjectVisibility ProjectVisibility { get; } + string Registry { get; } + string RegistryImage { get; } + string RegistryPassword { get; } + string RegistryUser { get; } + string RepositoryUrl { get; } + string RunnerDescription { get; } + long RunnerId { get; } + string RunnerTags { get; } + string ServerName { get; } + string ServerRevision { get; } + string ServerVersion { get; } +} +``` + +
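+
+For example, tag pipelines can drive package versioning. A rough sketch (the fallback version and the `Solution` field are hypothetical):
+
+```csharp
+Target Pack => _ => _
+    .Executes(() =>
+    {
+        // Use the pushed tag as the package version; fall back for local and branch builds
+        var version = GitLab.Instance?.CommitTag ?? "0.0.0-local";
+        DotNetPack(_ => _
+            .SetProject(Solution)
+            .SetProperty("Version", version));
+    });
+```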
diff --git a/docs/05-cicd/gitlab.png b/docs/05-cicd/gitlab.png new file mode 100644 index 000000000..e156ea699 Binary files /dev/null and b/docs/05-cicd/gitlab.png differ diff --git a/docs/05-cicd/gitlab.webp b/docs/05-cicd/gitlab.webp new file mode 100644 index 000000000..802b5daeb Binary files /dev/null and b/docs/05-cicd/gitlab.webp differ diff --git a/docs/05-cicd/jenkins.md b/docs/05-cicd/jenkins.md new file mode 100644 index 000000000..1be78447d --- /dev/null +++ b/docs/05-cicd/jenkins.md @@ -0,0 +1,56 @@ +--- +title: Jenkins +--- + +Running on [Jenkins](https://www.jenkins.io/) will use the standard theming for your build log output. + +:::info +Please refer to the official [Jenkins documentation](https://www.jenkins.io/doc/) for questions not covered here. +::: + +## Environment Variables + +You can access [predefined environment variables](https://wiki.jenkins.io/display/JENKINS/Building+a+software+project#Buildingasoftwareproject-belowJenkinsSetEnvironmentVariables) by using the `Jenkins` class: + +```csharp +Jenkins Jenkins => Jenkins.Instance; + +Target Print => _ => _ + .Executes(() => + { + Log.Information("Branch = {Branch}", Jenkins.GitBranch); + Log.Information("Commit = {Commit}", Jenkins.GitCommit); + }); +``` + +
+Exhaustive list of strongly-typed properties + +```csharp +class Jenkins +{ + string BranchName { get; } + string BuilDisplayName { get; } + int BuildNumber { get; } + string BuildTag { get; } + string ChangeId { get; } + int ExecutorNumber { get; } + string GitBranch { get; } + string GitCommit { get; } + string GitPreviousCommit { get; } + string GitPreviousSuccessfulCommit { get; } + string GitUrl { get; } + string JenkinsHome { get; } + string JenkinsServerCookie { get; } + string JobBaseName { get; } + string JobDisplayUrl { get; } + string JobName { get; } + string NodeLabels { get; } + string NodeName { get; } + string RunChangesDisplayUrl { get; } + string RunDisplayUrl { get; } + string Workspace { get; } +} +``` + +
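+
+For example, the previous-commit properties make it easy to inspect what changed since the last successful build. A rough sketch (assuming the `Nuke.Common.Tools.Git` tasks, and that a previous successful build exists):
+
+```csharp
+Target PrintChanges => _ => _
+    .Executes(() =>
+    {
+        // List commits between the last green build and the current one
+        var range = $"{Jenkins.Instance.GitPreviousSuccessfulCommit}..{Jenkins.Instance.GitCommit}";
+        Git($"log --oneline {range}");
+    });
+```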
diff --git a/docs/05-cicd/space-automation-dark.png b/docs/05-cicd/space-automation-dark.png new file mode 100644 index 000000000..2a7e30890 Binary files /dev/null and b/docs/05-cicd/space-automation-dark.png differ diff --git a/docs/05-cicd/space-automation-dark.webp b/docs/05-cicd/space-automation-dark.webp new file mode 100644 index 000000000..a2b9904b2 Binary files /dev/null and b/docs/05-cicd/space-automation-dark.webp differ diff --git a/docs/05-cicd/space-automation-light.png b/docs/05-cicd/space-automation-light.png new file mode 100644 index 000000000..943507155 Binary files /dev/null and b/docs/05-cicd/space-automation-light.png differ diff --git a/docs/05-cicd/space-automation-light.webp b/docs/05-cicd/space-automation-light.webp new file mode 100644 index 000000000..ce5f680d9 Binary files /dev/null and b/docs/05-cicd/space-automation-light.webp differ diff --git a/docs/05-cicd/space-automation.md b/docs/05-cicd/space-automation.md new file mode 100644 index 000000000..c2e0f406b --- /dev/null +++ b/docs/05-cicd/space-automation.md @@ -0,0 +1,46 @@ +--- +title: Space Automation +--- + +Running on [JetBrains Space](https://www.jetbrains.com/space/) will use the standard theming for your build log output: + +![Space Automation Log Output](space-automation-light.webp#gh-light-mode-only) +![Space Automation Log Output](space-automation-dark.webp#gh-dark-mode-only) + +:::info +Please refer to the official [Space Automation documentation](https://www.jetbrains.com/help/space/getting-started.html) for questions not covered here. +::: + +## Environment Variables + +You can access [predefined environment variables](https://www.jetbrains.com/help/space/automation-environment-variables.html) by using the `SpaceAutomation` class: + +```csharp +SpaceAutomation SpaceAutomation => SpaceAutomation.Instance; + +Target Print => _ => _ + .Executes(() => + { + Log.Information("Branch = {Branch}", SpaceAutomation.GitBranch); + Log.Information("Commit = {Commit}", SpaceAutomation.GitRevision); + }); +``` + +
+Exhaustive list of strongly-typed properties + +```csharp +class SpaceAutomation +{ + string ApiUrl { get; } + string ClientId { get; } + string ClientSecret { get; } + string ExecutionNumber { get; } + string GitBranch { get; } + string GitRevision { get; } + string ProjectKey { get; } + string RepositoryName { get; } +} +``` + +
diff --git a/docs/05-cicd/teamcity-artifacts.png b/docs/05-cicd/teamcity-artifacts.png new file mode 100644 index 000000000..054f754e8 Binary files /dev/null and b/docs/05-cicd/teamcity-artifacts.png differ diff --git a/docs/05-cicd/teamcity-artifacts.webp b/docs/05-cicd/teamcity-artifacts.webp new file mode 100644 index 000000000..a8ec674f1 Binary files /dev/null and b/docs/05-cicd/teamcity-artifacts.webp differ diff --git a/docs/05-cicd/teamcity-dark.png b/docs/05-cicd/teamcity-dark.png new file mode 100644 index 000000000..81494487f Binary files /dev/null and b/docs/05-cicd/teamcity-dark.png differ diff --git a/docs/05-cicd/teamcity-dark.webp b/docs/05-cicd/teamcity-dark.webp new file mode 100644 index 000000000..ea0e928d4 Binary files /dev/null and b/docs/05-cicd/teamcity-dark.webp differ diff --git a/docs/05-cicd/teamcity-light.png b/docs/05-cicd/teamcity-light.png new file mode 100644 index 000000000..bcd9cefde Binary files /dev/null and b/docs/05-cicd/teamcity-light.png differ diff --git a/docs/05-cicd/teamcity-light.webp b/docs/05-cicd/teamcity-light.webp new file mode 100644 index 000000000..0e2950c89 Binary files /dev/null and b/docs/05-cicd/teamcity-light.webp differ diff --git a/docs/05-cicd/teamcity.md b/docs/05-cicd/teamcity.md new file mode 100644 index 000000000..d418795f4 --- /dev/null +++ b/docs/05-cicd/teamcity.md @@ -0,0 +1,198 @@ +--- +title: TeamCity +--- + +Running on [TeamCity](https://www.jetbrains.com/teamcity/) will automatically enable custom theming for your build log output including [collapsible blocks](https://www.jetbrains.com/help/teamcity/service-messages.html#Blocks+of+Service+Messages) for better structuring: + +![TeamCity Log Output](teamcity-light.webp#gh-light-mode-only) +![TeamCity Log Output](teamcity-dark.webp#gh-dark-mode-only) + +:::info +Please refer to the official [TeamCity documentation](https://www.jetbrains.com/help/teamcity/teamcity-documentation.html) for questions not covered here. +::: + +## Environment Variables + +You can access [predefined parameters](https://www.jetbrains.com/help/teamcity/predefined-build-parameters.html) by using the `TeamCity` class: + +```csharp +TeamCity TeamCity => TeamCity.Instance; + +Target Print => _ => _ + .Executes(() => + { + Log.Information("Branch = {Branch}", TeamCity.BranchName); + Log.Information("Commit = {Commit}", TeamCity.BuildVcsNumber); + }); +``` + +
+Exhaustive list of strongly-typed properties
+
+```csharp
+class TeamCity
+{
+    string AuthPassword { get; }
+    string AuthUserId { get; }
+    string BranchName { get; }
+    string BuildConfiguration { get; }
+    long BuildId { get; }
+    string BuildNumber { get; }
+    string BuildTypeId { get; }
+    string BuildVcsNumber { get; }
+    IReadOnlyDictionary<string, string> ConfigurationProperties { get; }
+    bool IsBuildPersonal { get; }
+    bool IsPullRequest { get; }
+    string ProjectId { get; }
+    string ProjectName { get; }
+    long? PullRequestNumber { get; }
+    string PullRequestSourceBranch { get; }
+    string PullRequestTargetBranch { get; }
+    string PullRequestTitle { get; }
+    IReadOnlyCollection<string> RecentlyFailedTests { get; }
+    IReadOnlyDictionary<string, string> RunnerProperties { get; }
+    string ServerUrl { get; }
+    IReadOnlyDictionary<string, string> SystemProperties { get; }
+    string Version { get; }
+}
+```
+
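+
+Beyond the scalar values, the property dictionaries allow arbitrary lookups. A rough sketch (the key is a common TeamCity configuration parameter, used here purely for illustration):
+
+```csharp
+Target PrintAgentInfo => _ => _
+    .Executes(() =>
+    {
+        // Available keys depend on the server and agent setup
+        if (TeamCity.Instance.ConfigurationProperties.TryGetValue("teamcity.agent.name", out var agentName))
+            Log.Information("Agent = {AgentName}", agentName);
+    });
+```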
+
+## Configuration Generation
+
+You can generate [build configuration files](https://www.jetbrains.com/help/teamcity/kotlin-dsl.html) from your existing target definitions by adding the `TeamCity` attribute. For instance, you can run the `Compile` target on every VCS change:
+
+```csharp title="Build.cs"
+[TeamCity(
+    VcsTriggeredTargets = new[] { nameof(Compile) })]
+class Build : NukeBuild { /* ... */ }
+```
+
+Generated output + +```kotlin title=".teamcity/settings.kts" +project { + buildType(Compile) +} + +object Compile : BuildType({ + name = "Compile" + vcs { + root(DslContext.settingsRoot) + cleanCheckout = true + } + steps { + exec { + path = "build.cmd" + arguments = "Compile" + conditions { contains("teamcity.agent.jvm.os.name", "Windows") } + } + exec { + path = "build.sh" + arguments = "Compile" + conditions { doesNotContain("teamcity.agent.jvm.os.name", "Windows") } + } + } + params { + text( + "teamcity.ui.runButton.caption", + "Compile", + display = ParameterDisplay.HIDDEN) + } + triggers { + vcs { + triggerRules = "+:**" + } + } +}) +``` + +
+
+:::info
+Whenever you make changes to the attribute, you have to [run the build](../01-getting-started/03-execution.md) at least once to regenerate the settings file.
+:::
+
+### Artifacts
+
+If your targets produce artifacts, like packages or coverage reports, you can publish those directly from the target definition:
+
+```csharp
+Target Pack => _ => _
+    .Produces(PackagesDirectory / "*.nupkg")
+    .Executes(() => { /* Implementation */ });
+```
+
+Generated output
+
+```kotlin title=".teamcity/settings.kts"
+object Pack : BuildType({
+    artifactRules = "output/packages/*.nupkg => output/packages"
+})
+```
+ +After your build has finished, those artifacts will be listed under the artifacts tab: + +

+ +![TeamCity Artifacts Tab](teamcity-artifacts.webp) + +

+ +### Importing Secrets + +If you want to use [secret variables](https://www.jetbrains.com/help/teamcity/storing-project-settings-in-version-control.html#Storing+Secure+Settings) from your TeamCity project, you can use the `ImportSecrets` property and `TeamCityToken` attribute to automatically load them into a [secret parameter](../02-fundamentals/06-parameters.md#secret-parameters) defined in your build: + +```csharp title="Build.cs" +[TeamCity( + // ... + ImportSecrets = new[] { nameof(NuGetApiKey) })] +[TeamCityToken(nameof(NuGetApiKey), "")] +class Build : NukeBuild +{ + [Parameter] [Secret] readonly string NuGetApiKey; +} +``` + +
+Generated output + +```yaml title=".teamcity/settings.kts" +project { + params { + password ( + "env.NuGetApiKey", + label = "NuGetApiKey", + value = "credentialsJSON:", + display = ParameterDisplay.HIDDEN) + } +} +``` + +
+ +:::note +If you're facing any issues, make sure that the name in the TeamCity settings is the same as generated into the pipelines file. +::: + +## Using Credentials + +For every build run, TeamCity generates a pair of [one-time credentials](https://www.jetbrains.com/help/teamcity/rest/teamcity-rest-api-documentation.html#REST+Authentication) that you can use to authenticate with the [TeamCity API](https://www.jetbrains.com/help/teamcity/rest/teamcity-rest-api-documentation.html): + +```csharp title="Build.cs" +class Build : NukeBuild +{ + TeamCity TeamCity => TeamCity.Instance; + + Target Request => _ => _ + .Executes(() => + { + Log.Information("UserId = {UserId}", TeamCity.AuthUserId); + Log.Information("Password = {Password}", TeamCity.AuthPassword); + }); +} +``` diff --git a/docs/06-global-tool/00-shell-completion.md b/docs/06-global-tool/00-shell-completion.md new file mode 100644 index 000000000..0194549b0 --- /dev/null +++ b/docs/06-global-tool/00-shell-completion.md @@ -0,0 +1,79 @@ +--- +title: Shell Completion +--- + +import AsciinemaPlayer from '@site/src/components/AsciinemaPlayer'; + +Typing long target names or parameters can be tedious and error-prone. The global tool helps you to invoke commands more quickly and without any typos, similar to [tab completion for the .NET CLI](https://learn.microsoft.com/en-us/dotnet/core/tools/enable-tab-autocomplete). + +:::info +The shell completion feature relies on the presence of an up-to-date `.nuke/build.schema.json` file. This file is updated with every execution of your build project. + +Whenever you add or change one of your targets or parameters, it is recommended to trigger your build once, for instance by calling `nuke --help`. +::: + +## Configuration + +Add the following snippets to the configuration file of your shell: + + + + +```powershell title="Microsoft.PowerShell_profile.ps1" +Register-ArgumentCompleter -Native -CommandName nuke -ScriptBlock { + param($commandName, $wordToComplete, $cursorPosition) + nuke :complete "$wordToComplete" | ForEach-Object { + [System.Management.Automation.CompletionResult]::new($_, $_, 'ParameterValue', $_) + } +} +``` + + + + +```bash title=".zshrc" +_nuke_zsh_complete() +{ + local completions=("$(nuke :complete "$words")") + reply=( "${(ps:\n:)completions}" ) +} +compctl -K _nuke_zsh_complete nuke +``` + + + + +```bash title=".bashrc" +_nuke_bash_complete() +{ + local word=${COMP_WORDS[COMP_CWORD]} + local completions="$(nuke :complete "${COMP_LINE}")" + COMPREPLY=( $(compgen -W "$completions" -- "$word") ) +} +complete -f -F _nuke_bash_complete nuke +``` + + + + +```bash title="config.fish" +complete -fc nuke --arguments '(nuke :complete (commandline -cp))' +``` + + + + +## Usage + +You can complete targets, parameters, and enumeration values by hitting the TAB key: + +

+ +

+
diff --git a/docs/06-global-tool/01-packages.md b/docs/06-global-tool/01-packages.md
new file mode 100644
index 000000000..ef894d17f
--- /dev/null
+++ b/docs/06-global-tool/01-packages.md
@@ -0,0 +1,68 @@
+---
+title: Adding NuGet Packages
+---
+
+In many cases, build automation relies on third-party tools. NUKE provides you with a great [API for working with CLI tools](../03-common/08-cli-tools.md). However, it is the responsibility of a build project to reference these tools in the form of NuGet packages and to define their exact versions.
+
+You can add a NuGet package to a build project by calling:
+
+```powershell
+# terminal-command
+nuke :add-package <package-id> [--version <package-version>]
+```
+
+:::info
+When no version is provided, the latest version will be used. The major benefit compared to the `dotnet add package` command is that NUKE will automatically determine if the package should be referenced through `PackageReference`, i.e. as a normal library, or through `PackageDownload`, i.e. [without affecting the dependency resolution graph](https://github.com/NuGet/Home/wiki/%5BSpec%5D-PackageDownload-support#solution):
+
+```xml title="_build.csproj"
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <ItemGroup>
+    <PackageReference Include="<package-id>" Version="<package-version>" />
+  </ItemGroup>
+
+</Project>
+```
+
+```xml title="_build.csproj"
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <ItemGroup>
+    <PackageDownload Include="<package-id>" Version="[<package-version>]" />
+  </ItemGroup>
+
+</Project>
+```
+
+:::
+
+## NuGet.org Instruction Tab
+
+If you're browsing NuGet packages on [NuGet.org](https://nuget.org), you can also use the dedicated instruction tab to quickly copy the `add-package` command for the respective tool and version (only for global tools):
+

+ +![Adding packages from NuGet.org](nuget.webp) + +

+
+[//]: # (NuGet Package Installation for NUKE projects)
+
+:::tip
+When you're using a CLI task that depends on a NuGet package that is not yet installed, for instance `coverlet.console`, you will receive an error message with the appropriate `add-package` command:
+
+```text
+Missing package reference/download.
+Run one of the following commands to install the package:
+  - nuke :add-package coverlet.console --version 3.1.0
+```
+:::
diff --git a/docs/06-global-tool/02-secrets.md b/docs/06-global-tool/02-secrets.md
new file mode 100644
index 000000000..bad4a7cfe
--- /dev/null
+++ b/docs/06-global-tool/02-secrets.md
@@ -0,0 +1,62 @@
+---
+title: Managing Secrets
+---
+
+import AsciinemaPlayer from '@site/src/components/AsciinemaPlayer';
+
+Historically, secret values like passwords or auth tokens are often saved as environment variables on local machines or CI/CD servers. This imposes both security issues, because other processes can access these environment variables, and inconveniences, when a build must be executed locally for emergency reasons (such as a server downtime). NUKE has an integrated encryption utility, which can be used to save and load secret values to and from [parameter files](../02-fundamentals/06-parameters.md#passing-values-through-parameter-files).
+
+:::danger
+Our [custom encryption utility](https://github.com/nuke-build/nuke/blob/develop/source/Nuke.Common/Utilities/EncryptionUtility.cs) is provided "AS IS" without warranty of any kind.
+
+The implementation uses your password, a static salt, 10,000 iterations, and SHA256 to generate a [key-derivation function](https://docs.microsoft.com/en-us/dotnet/api/system.security.cryptography.rfc2898derivebytes) ([RFC2898](https://datatracker.ietf.org/doc/html/rfc2898)), which is then used to create a crypto-stream to encrypt and decrypt values via the [Advanced Encryption Standard (AES)](https://en.wikipedia.org/wiki/Advanced_Encryption_Standard).
+
+**Please review the implementation carefully and [contact us](mailto:info@nuke.build) about any possible flaws.**
+:::
+
+## Adding & Updating Secrets
+
+You can start managing your secrets by calling:
+
+```powershell
+# terminal-command
+nuke :secrets [profile]
+```
+
+When your parameter file does not contain secrets yet, you'll be prompted to choose a password. Otherwise, you have to provide the previously chosen password.
+
+:::tip
+On macOS you can also choose to generate a password and save it to your [keychain](https://support.apple.com/guide/mac-help/use-keychains-to-store-passwords-mchlf375f392/mac) in order to benefit from native security tooling.
+

+ +![macOS Keychain Integration](secrets-macos.webp) + +

+:::
+
+Afterwards, you can choose from a list of secret parameters to either set or update their values, and finally accept or discard your changes:
+

+ +

+
+When secrets are saved to a parameters file, they are prefixed with `v1:` to indicate the underlying encryption method:
+
+```json title=".nuke/parameters.json"
+{
+  "$schema": "./build.schema.json",
+  "NuGetApiKey": "v1:4VDyDmFs4Pf6IX8UvosDdjOgb23g0IXs0aP/MBqOK+K6TB8JuthtPgRUrUsi9tLD"
+}
+```
+
+## Removing Secrets
+
+If you want to delete a secret, you can simply remove it from the parameters file. In the event of a lost password, you have to remove all secrets and re-populate the parameters file via `nuke :secrets`.
diff --git a/docs/06-global-tool/03-navigation.md b/docs/06-global-tool/03-navigation.md
new file mode 100644
index 000000000..f6003e510
--- /dev/null
+++ b/docs/06-global-tool/03-navigation.md
@@ -0,0 +1,31 @@
+---
+title: Navigation
+---
+
+Over time, you might accumulate more and more projects that are built using NUKE. Some of these might even form a hierarchical structure, where one root directory contains several other root directories, and so on.
+
+## Configuration
+
+Add the following functions to your shell configuration (similar to [shell completion](00-shell-completion.md)):
+
+```
+function nuke/ { nuke :PushWithChosenRootDirectory; cd $(nuke :GetNextDirectory) }
+function nuke. { nuke :PushWithCurrentRootDirectory; cd $(nuke :GetNextDirectory) }
+function nuke.. { nuke :PushWithParentRootDirectory; cd $(nuke :GetNextDirectory) }
+function nuke- { nuke :PopDirectory; cd $(nuke :GetNextDirectory) }
+```
+
+## Usage
+
+The global tool comes with a handful of functions for improved navigation:
+
+| Command  | Function                                        |
+|:---------|:------------------------------------------------|
+| `nuke.`  | Navigates to the current root directory         |
+| `nuke..` | Navigates to the parent root directory          |
+| `nuke/`  | Lists subdirectories that are root directories  |
+| `nuke-`  | Navigates to the last root directory            |
+
+:::note
+The `nuke-` command is only supported on shells that set the `TERM_SESSION_ID` or `WT_SESSION` environment variable. As of now, this includes [iTerm](https://iterm2.com/) and the [Windows Terminal](https://github.com/microsoft/terminal).
+:::
diff --git a/docs/06-global-tool/04-cake.md b/docs/06-global-tool/04-cake.md
new file mode 100644
index 000000000..3c4afed8d
--- /dev/null
+++ b/docs/06-global-tool/04-cake.md
@@ -0,0 +1,38 @@
+---
+title: Converting from Cake
+---
+
+Over the years, the .NET community has come up with a lot of great build automation tools, including [FAKE](https://fake.build/), [Cake](https://cakebuild.net/), [FlubuCore](https://flubucore.dotnetcore.xyz/), and [BullsEye](https://github.com/adamralph/bullseye). When coming from Cake scripting, the time for converting build scripts can be greatly reduced with a best-effort approach using [Roslyn](https://github.com/dotnet/roslyn) and its [syntax transformation](https://docs.microsoft.com/en-us/dotnet/csharp/roslyn-sdk/get-started/syntax-transformation) capabilities.
+
+:::caution
+The resulting source code is **expected to contain compilation errors** since there is no direct API mapping between Cake and NUKE. Most notably, the order of `IsDependentOn` on a single target in Cake reflects the order of execution of these dependencies, whereas in NUKE the dependencies are solely defined between the individual targets.
+:::
+
+## Conversion
+
+You can start the conversion by calling:
+
+```powershell
+# terminal-command
+nuke :cake-convert
+```
+
+The global tool searches for all `*.cake` files and converts them to `*.cs` files.
During this process it transforms: + +- Target definitions +- Default targets +- Parameter declarations +- Path usages +- Globbing patterns +- Tool invocations + +Additionally – if you choose to create a build project file – it will collect NuGet packages from `#addin` and `#tool` directives, and add them as `PackageReference` and `PackageDownload` respectively. + +## Cleanup + +After you've fully verified the conversion, you can clear all `*.cake` files by calling: + +```powershell +# terminal-command +nuke :cake-clean +``` diff --git a/docs/06-global-tool/_category_.json b/docs/06-global-tool/_category_.json new file mode 100644 index 000000000..0e565a5d6 --- /dev/null +++ b/docs/06-global-tool/_category_.json @@ -0,0 +1,3 @@ +{ + "label": "Global Tool" +} diff --git a/docs/06-global-tool/nuget.png b/docs/06-global-tool/nuget.png new file mode 100644 index 000000000..7ea0e1f17 Binary files /dev/null and b/docs/06-global-tool/nuget.png differ diff --git a/docs/06-global-tool/nuget.webp b/docs/06-global-tool/nuget.webp new file mode 100644 index 000000000..ec19791bc Binary files /dev/null and b/docs/06-global-tool/nuget.webp differ diff --git a/docs/06-global-tool/secrets-macos.png b/docs/06-global-tool/secrets-macos.png new file mode 100644 index 000000000..12ba466c8 Binary files /dev/null and b/docs/06-global-tool/secrets-macos.png differ diff --git a/docs/06-global-tool/secrets-macos.webp b/docs/06-global-tool/secrets-macos.webp new file mode 100644 index 000000000..11c940ab4 Binary files /dev/null and b/docs/06-global-tool/secrets-macos.webp differ diff --git a/docs/07-ide/_category_.json b/docs/07-ide/_category_.json new file mode 100644 index 000000000..59bac6513 --- /dev/null +++ b/docs/07-ide/_category_.json @@ -0,0 +1,3 @@ +{ + "label": "IDE Support" +} diff --git a/docs/07-ide/resharper-dark.png b/docs/07-ide/resharper-dark.png new file mode 100644 index 000000000..ad306e707 Binary files /dev/null and b/docs/07-ide/resharper-dark.png differ diff --git a/docs/07-ide/resharper-dark.webp b/docs/07-ide/resharper-dark.webp new file mode 100644 index 000000000..7171272c9 Binary files /dev/null and b/docs/07-ide/resharper-dark.webp differ diff --git a/docs/07-ide/resharper-light.png b/docs/07-ide/resharper-light.png new file mode 100644 index 000000000..c4559d5fb Binary files /dev/null and b/docs/07-ide/resharper-light.png differ diff --git a/docs/07-ide/resharper-light.webp b/docs/07-ide/resharper-light.webp new file mode 100644 index 000000000..75ceee095 Binary files /dev/null and b/docs/07-ide/resharper-light.webp differ diff --git a/docs/07-ide/resharper.md b/docs/07-ide/resharper.md new file mode 100644 index 000000000..d4066ea90 --- /dev/null +++ b/docs/07-ide/resharper.md @@ -0,0 +1,16 @@ +--- +title: ReSharper +--- + +import InstallButton from '@site/src/components/InstallButton'; + + + +In [ReSharper](https://www.jetbrains.com/resharper) you can install the [NUKE Support extension](https://plugins.jetbrains.com/plugin/11804-nuke-support) to be more productive in writing, running, and debugging your builds. + +You can click the gutter icon next to your targets or hit Alt + Enter from inside their declaration to run and debug them. The top-level item starts a normal execution including all dependencies. 
From the submenu, you can debug and run/debug without dependencies: + +![ReSharper](resharper-light.webp#gh-light-mode-only) +![ReSharper](resharper-dark.webp#gh-dark-mode-only) diff --git a/docs/07-ide/rider-macos-dark.png b/docs/07-ide/rider-macos-dark.png new file mode 100644 index 000000000..ddc5abba4 Binary files /dev/null and b/docs/07-ide/rider-macos-dark.png differ diff --git a/docs/07-ide/rider-macos-dark.webp b/docs/07-ide/rider-macos-dark.webp new file mode 100644 index 000000000..55c7c3d82 Binary files /dev/null and b/docs/07-ide/rider-macos-dark.webp differ diff --git a/docs/07-ide/rider-macos-light.png b/docs/07-ide/rider-macos-light.png new file mode 100644 index 000000000..54a54a50a Binary files /dev/null and b/docs/07-ide/rider-macos-light.png differ diff --git a/docs/07-ide/rider-macos-light.webp b/docs/07-ide/rider-macos-light.webp new file mode 100644 index 000000000..e86782896 Binary files /dev/null and b/docs/07-ide/rider-macos-light.webp differ diff --git a/docs/07-ide/rider-win-dark.png b/docs/07-ide/rider-win-dark.png new file mode 100644 index 000000000..d1eeaf440 Binary files /dev/null and b/docs/07-ide/rider-win-dark.png differ diff --git a/docs/07-ide/rider-win-dark.webp b/docs/07-ide/rider-win-dark.webp new file mode 100644 index 000000000..c846a3a53 Binary files /dev/null and b/docs/07-ide/rider-win-dark.webp differ diff --git a/docs/07-ide/rider-win-light.png b/docs/07-ide/rider-win-light.png new file mode 100644 index 000000000..e85b998e6 Binary files /dev/null and b/docs/07-ide/rider-win-light.png differ diff --git a/docs/07-ide/rider-win-light.webp b/docs/07-ide/rider-win-light.webp new file mode 100644 index 000000000..721b0666a Binary files /dev/null and b/docs/07-ide/rider-win-light.webp differ diff --git a/docs/07-ide/rider.md b/docs/07-ide/rider.md new file mode 100644 index 000000000..df04b850f --- /dev/null +++ b/docs/07-ide/rider.md @@ -0,0 +1,18 @@ +--- +title: JetBrains Rider +sidebar_position: 1 +--- + +import RiderInstallButton from '@site/src/components/RiderInstallButton'; + + + +In [JetBrains Rider](https://www.jetbrains.com/rider) you can install the [NUKE Support plugin](https://plugins.jetbrains.com/plugin/10803-nuke-support) to be more productive in writing, running, and debugging your builds. + +You can click the gutter icon next to your targets or hit Alt + Enter from inside their declaration to run and debug them. The top-level item starts a normal execution including all dependencies. 
From the submenu, you can debug and run/debug without dependencies: + +![JetBrains Rider](rider-win-light.webp#gh-light-mode-only) +![JetBrains Rider](rider-win-dark.webp#gh-dark-mode-only) diff --git a/docs/07-ide/visual-studio-dark.png b/docs/07-ide/visual-studio-dark.png new file mode 100644 index 000000000..63924b420 Binary files /dev/null and b/docs/07-ide/visual-studio-dark.png differ diff --git a/docs/07-ide/visual-studio-dark.webp b/docs/07-ide/visual-studio-dark.webp new file mode 100644 index 000000000..65f093601 Binary files /dev/null and b/docs/07-ide/visual-studio-dark.webp differ diff --git a/docs/07-ide/visual-studio-light.png b/docs/07-ide/visual-studio-light.png new file mode 100644 index 000000000..ddb86f720 Binary files /dev/null and b/docs/07-ide/visual-studio-light.png differ diff --git a/docs/07-ide/visual-studio-light.webp b/docs/07-ide/visual-studio-light.webp new file mode 100644 index 000000000..f33e989f2 Binary files /dev/null and b/docs/07-ide/visual-studio-light.webp differ diff --git a/docs/07-ide/visual-studio.md b/docs/07-ide/visual-studio.md new file mode 100644 index 000000000..b447c4ac8 --- /dev/null +++ b/docs/07-ide/visual-studio.md @@ -0,0 +1,16 @@ +--- +title: Visual Studio +--- + +import InstallButton from '@site/src/components/InstallButton'; + + + +In [Visual Studio](https://visualstudio.microsoft.com/) you can install the [NUKE Support extension](https://marketplace.visualstudio.com/items?itemName=nuke.visualstudio) to be more productive in writing, running, and debugging your builds. + +From the _Task Runner Explorer_, you can double-click a target to run it. Additionally, you can use toggle buttons to attach the debugger or skip dependencies: + +![Visual Studio](visual-studio-light.webp#gh-light-mode-only) +![Visual Studio](visual-studio-dark.webp#gh-dark-mode-only) diff --git a/docs/07-ide/vscode-macos-dark.png b/docs/07-ide/vscode-macos-dark.png new file mode 100644 index 000000000..11452400d Binary files /dev/null and b/docs/07-ide/vscode-macos-dark.png differ diff --git a/docs/07-ide/vscode-macos-dark.webp b/docs/07-ide/vscode-macos-dark.webp new file mode 100644 index 000000000..0d15bc666 Binary files /dev/null and b/docs/07-ide/vscode-macos-dark.webp differ diff --git a/docs/07-ide/vscode-macos-light.png b/docs/07-ide/vscode-macos-light.png new file mode 100644 index 000000000..04ffc9f39 Binary files /dev/null and b/docs/07-ide/vscode-macos-light.png differ diff --git a/docs/07-ide/vscode-macos-light.webp b/docs/07-ide/vscode-macos-light.webp new file mode 100644 index 000000000..f43a8ae99 Binary files /dev/null and b/docs/07-ide/vscode-macos-light.webp differ diff --git a/docs/07-ide/vscode-win-dark.png b/docs/07-ide/vscode-win-dark.png new file mode 100644 index 000000000..e500b2001 Binary files /dev/null and b/docs/07-ide/vscode-win-dark.png differ diff --git a/docs/07-ide/vscode-win-dark.webp b/docs/07-ide/vscode-win-dark.webp new file mode 100644 index 000000000..1c3b1fc2a Binary files /dev/null and b/docs/07-ide/vscode-win-dark.webp differ diff --git a/docs/07-ide/vscode-win-light.png b/docs/07-ide/vscode-win-light.png new file mode 100644 index 000000000..2652f35b5 Binary files /dev/null and b/docs/07-ide/vscode-win-light.png differ diff --git a/docs/07-ide/vscode-win-light.webp b/docs/07-ide/vscode-win-light.webp new file mode 100644 index 000000000..6c00f9d50 Binary files /dev/null and b/docs/07-ide/vscode-win-light.webp differ diff --git a/docs/07-ide/vscode.md b/docs/07-ide/vscode.md new file mode 100644 index 
000000000..e70c76b7a --- /dev/null +++ b/docs/07-ide/vscode.md @@ -0,0 +1,18 @@ +--- +title: Visual Studio Code +--- + +import InstallButton from '@site/src/components/InstallButton'; +import ControlKey from '@site/src/components/ControlKey'; + + + +In [Visual Studio Code](https://code.visualstudio.com/) you can install the [NUKE Support extension](https://marketplace.visualstudio.com/items?itemName=nuke.support) to be more productive in writing, running, and debugging your builds. + +Above each target, you can click the `Run Target` or `Debug Target` CodeLens items. Additionally, you can bring up the command palette via + Shift + P and call one of the actions to run/debug with/without dependencies: + +![Visual Studio Code](vscode-win-light.webp#gh-light-mode-only) +![Visual Studio Code](vscode-win-dark.webp#gh-dark-mode-only) diff --git a/docs/_snippets/Foo.js b/docs/_snippets/Foo.js new file mode 100644 index 000000000..a612d55e9 --- /dev/null +++ b/docs/_snippets/Foo.js @@ -0,0 +1,24 @@ +import React from "react"; +import CodeBlock from '@theme/CodeBlock'; + +export default function HomepageFeatures() { + return ( + + {''}using System;{'\n'} + {''}using System.Text.RegularExpressions;{'\n'} + {''}{'\n'} + {''}// ReSharper disable All{'\n'} + {''}#pragma warning disable 67{'\n'} + {''}#pragma warning disable 169{'\n'} + {''}{'\n'} + {''}class All{'\n'} + {''}{{'\n'} + {''} /**{'\n'} + {''} * let's C# how it used to be{'\n'} + {''} */{'\n'} + {''} public delegate void EventHandler(object sender, EventArgs s);{'\n'} + {''} public event EventHandler Event;{'\n'} + {''}{'\n'} + + ); +} diff --git a/docs/introduction.md b/docs/introduction.md new file mode 100644 index 000000000..1401a8d86 --- /dev/null +++ b/docs/introduction.md @@ -0,0 +1,61 @@ +--- +title: Introduction +sidebar_position: 0 +--- + + + +πŸͺ΄ Write **automation tools and CI/CD pipelines** in plain C# and with access to all .NET libraries. + +πŸ’Έ Tedious commit cycles are expensive. Instead, **debug and test your builds locally**. + +πŸ’‘ Install one of the **support extensions** for Visual Studio, Rider, ReSharper, or VS Code. + +πŸ’₯ Ready for more? Use **advanced features** like CI/CD generation, parallel execution, and build sharing. + +## Fast Track ⏱ {#fast-track} + +**1. Install the global tool:** + +```powershell +# terminal-command +dotnet tool install Nuke.GlobalTool --global +``` + +**2. Go to a repository of your choice and set up a build:** + +```powershell +# terminal-command +nuke :setup +``` + +**3. Run the build:** + +```powershell +# terminal-command +nuke +``` + +**4. Open the build project and explore the default `Build` class.** + +## Coming from Cake? 🍰 {#coming-from-cake} + +Get a feeling how your Cake scripts would look like in NUKE. + +**1. Install the global tool:** + +```powershell +# terminal-command +dotnet tool install Nuke.GlobalTool --global +``` + +**2. Go to a repository built with Cake.** + +**3. Convert your Cake scripts:** + +```powershell +# terminal-command +nuke :cake-convert +``` + +**4. 
Inspect the outcome (errors are expected).** diff --git a/source/Nuke.Common.Tests/ChangelogTasksTest.cs b/source/Nuke.Common.Tests/ChangelogTasksTest.cs index 3f7f265d9..74f9434b7 100644 --- a/source/Nuke.Common.Tests/ChangelogTasksTest.cs +++ b/source/Nuke.Common.Tests/ChangelogTasksTest.cs @@ -13,127 +13,128 @@ using VerifyXunit; using Xunit; -namespace Nuke.Common.Tests +// ReSharper disable ReturnValueOfPureMethodIsNotUsed + +namespace Nuke.Common.Tests; + +[UsesVerify] +public class ChangelogTasksTest { - [UsesVerify] - public class ChangelogTasksTest + private static AbsolutePath RootDirectory => Constants.TryGetRootDirectoryFrom(EnvironmentInfo.WorkingDirectory).NotNull(); + + private static AbsolutePath PathToChangelogReferenceFiles => RootDirectory / "source" / "Nuke.Common.Tests" / "ChangelogReferenceFiles"; + + [Theory] + [MemberData(nameof(AllChangelogReference_1_0_0_Files))] + [MemberData(nameof(AllChangelogReference_NUKE_Files))] + public void ReadReleaseNotes_ChangelogReferenceFile_ThrowsNoExceptions(AbsolutePath file) + { + Action act = () => ChangelogTasks.ReadReleaseNotes(file); + + act.Should().NotThrow(); + } + + [Theory] + [MemberData(nameof(AllChangelogReference_1_0_0_Files))] + [MemberData(nameof(AllChangelogReference_NUKE_Files))] + public void ReadReleaseNotes_ChangelogReferenceFile_ReturnsAnyReleaseNotes(AbsolutePath file) + { + var releaseNotes = ChangelogTasks.ReadReleaseNotes(file); + + releaseNotes.Should().NotBeEmpty(); + } + + [Theory] + [MemberData(nameof(AllChangelogReference_1_0_0_Files))] + [MemberData(nameof(AllChangelogReference_NUKE_Files))] + public void ReadChangelog_ChangelogReferenceFile_ThrowsNoExceptions(AbsolutePath file) + { + Action act = () => ChangelogTasks.ReadChangelog(file); + + act.Should().NotThrow(); + } + + [Theory] + [MemberData(nameof(AllChangelogReference_1_0_0_Files))] + [MemberData(nameof(AllChangelogReference_NUKE_Files))] + public void ExtractChangelogSectionNotes_ChangelogReferenceFile_ThrowsNoExceptions(AbsolutePath file) + { + Action act = () => ChangelogTasks.ExtractChangelogSectionNotes(file); + + act.Should().NotThrow(); + } + + [Theory] + [MemberData(nameof(AllChangelogReference_1_0_0_Files))] + [MemberData(nameof(AllChangelogReference_NUKE_Files))] + public Task ReadReleaseNotes_ChangelogReferenceFile_HasParsedCorrectly(AbsolutePath file) + { + var releaseNotes = ChangelogTasks.ReadReleaseNotes(file); + + return Verifier.Verify(releaseNotes).UseDirectory(PathToChangelogReferenceFiles).UseFileName(file.NameWithoutExtension); + } + + [Fact] + public void GetReleaseSections_ChangelogReferenceFileWithoutReleaseHead_ReturnsEmpty() { - private static AbsolutePath RootDirectory => Constants.TryGetRootDirectoryFrom(EnvironmentInfo.WorkingDirectory).NotNull(); - - private static AbsolutePath PathToChangelogReferenceFiles => RootDirectory / "source" / "Nuke.Common.Tests" / "ChangelogReferenceFiles"; - - [Theory] - [MemberData(nameof(AllChangelogReference_1_0_0_Files))] - [MemberData(nameof(AllChangelogReference_NUKE_Files))] - public void ReadReleaseNotes_ChangelogReferenceFile_ThrowsNoExceptions(AbsolutePath file) - { - Action act = () => ChangelogTasks.ReadReleaseNotes(file); - - act.Should().NotThrow(); - } - - [Theory] - [MemberData(nameof(AllChangelogReference_1_0_0_Files))] - [MemberData(nameof(AllChangelogReference_NUKE_Files))] - public void ReadReleaseNotes_ChangelogReferenceFile_ReturnsAnyReleaseNotes(AbsolutePath file) - { - var releaseNotes = ChangelogTasks.ReadReleaseNotes(file); - - 
releaseNotes.Should().NotBeEmpty(); - } - - [Theory] - [MemberData(nameof(AllChangelogReference_1_0_0_Files))] - [MemberData(nameof(AllChangelogReference_NUKE_Files))] - public void ReadChangelog_ChangelogReferenceFile_ThrowsNoExceptions(AbsolutePath file) - { - Action act = () => ChangelogTasks.ReadChangelog(file); - - act.Should().NotThrow(); - } - - [Theory] - [MemberData(nameof(AllChangelogReference_1_0_0_Files))] - [MemberData(nameof(AllChangelogReference_NUKE_Files))] - public void ExtractChangelogSectionNotes_ChangelogReferenceFile_ThrowsNoExceptions(AbsolutePath file) - { - Action act = () => ChangelogTasks.ExtractChangelogSectionNotes(file); - - act.Should().NotThrow(); - } - - [Theory] - [MemberData(nameof(AllChangelogReference_1_0_0_Files))] - [MemberData(nameof(AllChangelogReference_NUKE_Files))] - public Task ReadReleaseNotes_ChangelogReferenceFile_HasParsedCorrectly(AbsolutePath file) - { - var releaseNotes = ChangelogTasks.ReadReleaseNotes(file); - - return Verifier.Verify(releaseNotes).UseDirectory(PathToChangelogReferenceFiles).UseFileName(file.NameWithoutExtension); - } - - [Fact] - public void GetReleaseSections_ChangelogReferenceFileWithoutReleaseHead_ReturnsEmpty() - { - var file = PathToChangelogReferenceFiles / "changelog_reference_invalid_variant_1.md"; - var lines = file.ReadAllLines().ToList(); - - ChangelogTasks.GetReleaseSections(lines).Should().BeEmpty(); - } - - [Theory] - [InlineData("changelog_reference_1.0.0_variant_5.md", "0.2.3")] - public Task ExtractChangelogSectionNotes_WithTag_ReturnsSectionThatMatchesProvidedTag(string fileName, string tag) - { - var changeLogFilePath = PathToChangelogReferenceFiles / fileName; - var sectionNotes = ChangelogTasks.ExtractChangelogSectionNotes(changeLogFilePath, tag); - - return Verifier.Verify(sectionNotes).UseDirectory(PathToChangelogReferenceFiles) - .UseFileName($"{changeLogFilePath.NameWithoutExtension}_section_{tag}"); - } - - [Theory] - [InlineData("changelog_reference_1.0.0_variant_5.md", "0.0.0")] - [InlineData("changelog_reference_1.0.0_variant_5.md", "9.9.9")] - public void ExtractChangelogSection_WithNonExistingTag_ThrowsInformativeException(string fileName, string tag) - { - var file = PathToChangelogReferenceFiles / fileName; - - Action act = () => ChangelogTasks.ExtractChangelogSectionNotes(file, tag); - - act.Should().Throw().WithMessage($"Could not find release section for '{tag}'."); - } - - [Theory] - [InlineData("changelog_reference_invalid_variant_2.md")] - public void ReadChangelog_ChangelogFileThatHasMultipleUnreleasedSection_ThrowsInformativeException(string fileName) - { - var file = PathToChangelogReferenceFiles / fileName; - - Action act = () => ChangelogTasks.ReadChangelog(file); - - act.Should().Throw().WithMessage("Changelog should have only one draft section"); - } - - [Theory] - [InlineData("changelog_reference_invalid_variant_1.md")] - public void ReadChangelog_EmptyChangelogFile_ThrowsInformativeException(string fileName) - { - var file = PathToChangelogReferenceFiles / fileName; - - Action act = () => ChangelogTasks.ReadChangelog(file); - - act.Should().Throw().WithMessage("Changelog should have at least one release note section"); - } - - [UsedImplicitly] - public static IEnumerable AllChangelogReference_1_0_0_Files - { - get => PathToChangelogReferenceFiles.GlobFiles("changelog_reference_1.0.0*.md").Select(x => new object[] { x }); - } - - [UsedImplicitly] - public static IEnumerable AllChangelogReference_NUKE_Files - => 
PathToChangelogReferenceFiles.GlobFiles("changelog_reference_NUKE*.md").Select(x => new object[] { x }); + var file = PathToChangelogReferenceFiles / "changelog_reference_invalid_variant_1.md"; + var lines = file.ReadAllLines().ToList(); + + ChangelogTasks.GetReleaseSections(lines).Should().BeEmpty(); + } + + [Theory] + [InlineData("changelog_reference_1.0.0_variant_5.md", "0.2.3")] + public Task ExtractChangelogSectionNotes_WithTag_ReturnsSectionThatMatchesProvidedTag(string fileName, string tag) + { + var changeLogFilePath = PathToChangelogReferenceFiles / fileName; + var sectionNotes = ChangelogTasks.ExtractChangelogSectionNotes(changeLogFilePath, tag); + + return Verifier.Verify(sectionNotes).UseDirectory(PathToChangelogReferenceFiles) + .UseFileName($"{changeLogFilePath.NameWithoutExtension}_section_{tag}"); + } + + [Theory] + [InlineData("changelog_reference_1.0.0_variant_5.md", "0.0.0")] + [InlineData("changelog_reference_1.0.0_variant_5.md", "9.9.9")] + public void ExtractChangelogSection_WithNonExistingTag_ThrowsInformativeException(string fileName, string tag) + { + var file = PathToChangelogReferenceFiles / fileName; + + Action act = () => ChangelogTasks.ExtractChangelogSectionNotes(file, tag); + + act.Should().Throw().WithMessage($"Could not find release section for '{tag}'."); + } + + [Theory] + [InlineData("changelog_reference_invalid_variant_2.md")] + public void ReadChangelog_ChangelogFileThatHasMultipleUnreleasedSection_ThrowsInformativeException(string fileName) + { + var file = PathToChangelogReferenceFiles / fileName; + + Action act = () => ChangelogTasks.ReadChangelog(file); + + act.Should().Throw().WithMessage("Changelog should have only one draft section"); + } + + [Theory] + [InlineData("changelog_reference_invalid_variant_1.md")] + public void ReadChangelog_EmptyChangelogFile_ThrowsInformativeException(string fileName) + { + var file = PathToChangelogReferenceFiles / fileName; + + Action act = () => ChangelogTasks.ReadChangelog(file); + + act.Should().Throw().WithMessage("Changelog should have at least one release note section"); + } + + [UsedImplicitly] + public static IEnumerable AllChangelogReference_1_0_0_Files + { + get => PathToChangelogReferenceFiles.GlobFiles("changelog_reference_1.0.0*.md").Select(x => new object[] { x }); } + + [UsedImplicitly] + public static IEnumerable AllChangelogReference_NUKE_Files + => PathToChangelogReferenceFiles.GlobFiles("changelog_reference_NUKE*.md").Select(x => new object[] { x }); } diff --git a/source/Nuke.Common/Tools/MSpec/MSpec.json b/source/Nuke.Common/Tools/MSpec/MSpec.json index 0d19beaa7..1bbfcbd2a 100644 --- a/source/Nuke.Common/Tools/MSpec/MSpec.json +++ b/source/Nuke.Common/Tools/MSpec/MSpec.json @@ -23,34 +23,34 @@ { "name": "Filters", "type": "List", - "format": "-f={value}", + "format": "-filters {value}", "separator": ",", "help": "Filter file specifying contexts to execute (full type name, one per line). Takes precedence over tags." }, { "name": "Includes", "type": "List", - "format": "-i={value}", + "format": "-include {value}", "separator": ",", "help": "Executes all specifications in contexts with these comma delimited tags. Ex. -i 'foo, bar, foo_bar'." }, { "name": "Excludes", "type": "List", - "format": "-x={value}", + "format": "-exclude {value}", "separator": ",", "help": "Exclude specifications in contexts with these comma delimited tags. Ex. -x 'foo, bar, foo_bar'." 
}, { "name": "HtmlOutput", "type": "string", - "format": "--html={value}", + "format": "--html {value}", "help": "Outputs the HTML report to path, one-per-assembly w/ index.html (if directory, otherwise all are in one file). Ex. --html=output/reports/" }, { "name": "XmlOutput", "type": "string", - "format": "--xml={value}", + "format": "--xml {value}", "help": "Outputs the XML report to the file referenced by the path. Ex. --xml=output/reports/MSpecResults.xml" }, { diff --git a/source/Nuke.Common/Tools/SqlPackage/SqlPackage.json b/source/Nuke.Common/Tools/SqlPackage/SqlPackage.json new file mode 100644 index 000000000..3d36101d5 --- /dev/null +++ b/source/Nuke.Common/Tools/SqlPackage/SqlPackage.json @@ -0,0 +1,3145 @@ +{ + "$schema": "https://raw.githubusercontent.com/nuke-build/nuke/master/source/Nuke.Tooling.Generator/schema.json", + "references": [ + "https://github.com/MicrosoftDocs/sql-docs/blob/live/docs/tools/sqlpackage/sqlpackage-deploy-drift-report.md", + "https://github.com/MicrosoftDocs/sql-docs/blob/live/docs/tools/sqlpackage/sqlpackage-download.md", + "https://github.com/MicrosoftDocs/sql-docs/blob/live/docs/tools/sqlpackage/sqlpackage-export.md", + "https://github.com/MicrosoftDocs/sql-docs/blob/live/docs/tools/sqlpackage/sqlpackage-extract.md", + "https://github.com/MicrosoftDocs/sql-docs/blob/live/docs/tools/sqlpackage/sqlpackage-for-azure-synapse-analytics.md", + "https://github.com/MicrosoftDocs/sql-docs/blob/live/docs/tools/sqlpackage/sqlpackage-pipelines.md", + "https://github.com/MicrosoftDocs/sql-docs/blob/live/docs/tools/sqlpackage/sqlpackage-publish.md", + "https://github.com/MicrosoftDocs/sql-docs/blob/live/docs/tools/sqlpackage/sqlpackage-script.md" + ], + "name": "SqlPackage", + "officialUrl": "https://learn.microsoft.com/en-us/sql/tools/sqlpackage/sqlpackage", + "help": "SqlPackage is a command-line utility that automates several database development tasks by exposing some of the public Data-Tier Application Framework (DacFx) APIs.", + "packageExecutable": "sqlpackage.exe", + "tasks": [ + { + "postfix": "Version", + "definiteArgument": "/Version", + "officialUrl": "https://learn.microsoft.com/en-us/sql/tools/sqlpackage/sqlpackage?view=sql-server-ver16#version", + "settingsClass": {} + }, + { + "help": "

The SqlPackage publish operation incrementally updates the schema of a target database to match the structure of a source database. Publishing a deployment package that contains user data for all or a subset of tables updates the table data in addition to the schema. Data deployment overwrites the schema and data in existing tables of the target database. Data deployment will not change existing schema or data in the target database for tables not included in the deployment package. A new database can be created by the publish action when the authenticated user has create database permissions. The required permission for the publish action on an existing database is db_owner.

", + "postfix": "Publish", + "definiteArgument": "/Action:Publish", + "officialUrl": "https://learn.microsoft.com/en-us/sql/tools/sqlpackage/sqlpackage-publish", + "settingsClass": { + "properties": [ + { + "name": "AccessToken", + "type": "string", + "format": "/AccessToken:{value}", + "help": "Specifies the token-based authentication access token to use when connect to the target database." + }, + { + "name": "AzureCloudConfig", + "type": "string", + "format": "/AzureCloudConfig:{value}", + "help": "Specifies the custom endpoints for connecting to Azure Active Directory in the format: AzureActiveDirectoryAuthority={value};DatabaseServicePrincipalName={value}\" ." + }, + { + "name": "AzureKeyVaultAuthMethod", + "type": "AzureKeyVaultAuthMethod", + "format": "/AzureKeyVaultAuthMethod:{value}", + "help": "Specifies what authentication method is used for accessing Azure KeyVault if a publish operation includes modifications to an encrypted table/column." + }, + { + "name": "ClientId", + "type": "string", + "format": "/ClientId:{value}", + "help": "Specifies the Client ID to be used in authenticating against Azure KeyVault, when necessary" + }, + { + "name": "DeployReportPath", + "type": "string", + "format": "/DeployReportPath:{value}", + "help": "Specifies an optional file path to output the deployment report xml file." + }, + { + "name": "DeployScriptPath", + "type": "string", + "format": "/DeployScriptPath:{value}", + "help": "Specifies an optional file path to output the deployment script. For Azure deployments, if there are TSQL commands to create or modify the master database, a script will be written to the same path but with \"Filename_Master.sql\" as the output file name." + }, + { + "name": "Diagnostics", + "type": "bool", + "format": "/Diagnostics:{value}", + "help": "Specifies whether diagnostic logging is output to the console. Defaults to False." + }, + { + "name": "DiagnosticsFile", + "type": "string", + "format": "/DiagnosticsFile:{value}", + "help": "Specifies a file to store diagnostic logs." + }, + { + "name": "MaxParallelism", + "type": "int", + "format": "/MaxParallelism:{value}", + "help": "Specifies the degree of parallelism for concurrent operations running against a database. The default value is 8." + }, + { + "name": "ModelFilePath", + "type": "string", + "format": "/ModelFilePath:{value}", + "help": "Specifies the file path to override the model.xml in the source file. Use of this setting may result in deployment failure and/or unintended data loss. This setting is intended only for use when troubleshooting issues with publish, import, or script generation." + }, + { + "name": "OverwriteFiles", + "type": "bool", + "format": "/OverwriteFiles:{value}", + "help": "Specifies if SqlPackage should overwrite existing files. Specifying false causes SqlPackage to abort action if an existing file is encountered. Default value is True." + }, + { + "name": "Profile", + "type": "string", + "format": "/Profile:{value}", + "help": "Specifies the file path to a DAC Publish Profile. The profile defines a collection of properties and variables to use when generating outputs." 
+ }, + { + "name": "Properties", + "type": "Dictionary<string, string>", + "format": "/p:{value}", + "itemFormat": "{key}={value}", + "help": "Specifies a name value pair for an [action-specific property](#properties-specific-to-the-publish-action);{PropertyName}={Value}.", + "delegates": [ + { + "name": "AdditionalDeploymentContributorArguments", + "type": "List<string>", + "separator": ";", + "help": "Specifies additional deployment contributor arguments for the deployment contributors. This property should be a semi-colon delimited list of values." + }, + { + "name": "AdditionalDeploymentContributorPaths", + "type": "List<string>", + "separator": ";", + "help": "Specifies paths to load additional deployment contributors. This property should be a semi-colon delimited list of values." + }, + { + "name": "AdditionalDeploymentContributors", + "type": "List<string>", + "separator": ";", + "help": "Specifies additional deployment contributors, which should run when the dacpac is deployed. This property should be a semi-colon delimited list of fully qualified build contributor names or IDs." + }, + { + "name": "AllowDropBlockingAssemblies", + "type": "bool", + "help": "This property is used by SqlClr deployment to cause any blocking assemblies to be dropped as part of the deployment plan. By default, any blocking/referencing assemblies will block an assembly update if the referencing assembly needs to be dropped." + }, + { + "name": "AllowExternalLanguagePaths", + "type": "bool", + "help": "Allows file paths, if available, to be used to generate external language statements." + }, + { + "name": "AllowExternalLibraryPaths", + "type": "bool", + "help": "Allows file paths, if available, to be used to generate external library statements." + }, + { + "name": "AllowIncompatiblePlatform", + "type": "bool", + "help": "Specifies whether to attempt the action despite incompatible SQL Server platforms." + }, + { + "name": "AllowUnsafeRowLevelSecurityDataMovement", + "type": "bool", + "help": "Do not block data motion on a table that has Row Level Security if this property is set to true. Default is false." + }, + { + "name": "AzureSharedAccessSignatureToken", + "type": "string", + "help": "Azure shared access signature (SAS) token. See [SqlPackage for Azure Synapse Analytics](sqlpackage-for-azure-synapse-analytics.md#publish-import-data)." + }, + { + "name": "AzureStorageBlobEndpoint", + "type": "string", + "help": "Azure Blob Storage endpoint, see [SqlPackage for Azure Synapse Analytics](sqlpackage-for-azure-synapse-analytics.md#publish-import-data)." + }, + { + "name": "AzureStorageContainer", + "type": "string", + "help": "Azure Blob Storage container, see [SqlPackage for Azure Synapse Analytics](sqlpackage-for-azure-synapse-analytics.md#publish-import-data)." + }, + { + "name": "AzureStorageKey", + "type": "string", + "help": "Azure storage account key, see [SqlPackage for Azure Synapse Analytics](sqlpackage-for-azure-synapse-analytics.md#publish-import-data)." + }, + { + "name": "AzureStorageRootPath", + "type": "string", + "help": "Storage root path within the container. Without this property, the path defaults to `servername/databasename/timestamp/`. See [SqlPackage for Azure Synapse Analytics](sqlpackage-for-azure-synapse-analytics.md#publish-import-data)." + }, + { + "name": "BackupDatabaseBeforeChanges", + "type": "bool", + "help": "Backs up the database before deploying any changes. This property is not applicable to Azure SQL Database."
+ }, + { + "name": "BlockOnPossibleDataLoss", + "type": "bool", + "help": "Specifies that the operation will be terminated during the schema validation step if the resulting schema changes could incur a loss of data, including due to data precision reduction or a data type change that requires a cast operation. The default (`True`) value causes the operation to terminate regardless of whether the target database contains data. An execution with a `False` value for BlockOnPossibleDataLoss can still fail during deployment plan execution if data is present on the target that cannot be converted to the new column type." + }, + { + "name": "BlockWhenDriftDetected", + "type": "bool", + "help": "Specifies whether to block updating a database whose schema no longer matches its registration or is unregistered." + }, + { + "name": "CommandTimeout", + "type": "int", + "help": "Specifies the command timeout in seconds when executing queries against SQL Server." + }, + { + "name": "CommentOutSetVarDeclarations", + "type": "bool", + "help": "Specifies whether the declaration of SETVAR variables should be commented out in the generated publish script. You might choose to do this if you plan to specify the values on the command line when you publish by using a tool such as SQLCMD.EXE." + }, + { + "name": "CompareUsingTargetCollation", + "type": "bool", + "help": "This setting dictates how the database's collation is handled during deployment; by default the target database's collation will be updated if it does not match the collation specified by the source. When this option is set, the target database's (or server's) collation should be used." + }, + { + "name": "CreateNewDatabase", + "type": "bool", + "help": "Specifies whether the target database should be updated or whether it should be dropped and re-created when you publish to a database." + }, + { + "name": "DatabaseEdition", + "type": "DatabaseEdition", + "help": "Defines the edition of an Azure SQL Database. See [Azure SQL Database service tiers](/azure/azure-sql/database/service-tiers-general-purpose-business-critical)." + }, + { + "name": "DatabaseLockTimeout", + "type": "int", + "help": "Specifies the database lock timeout in seconds when executing queries against SQL Server. Use -1 to wait indefinitely." + }, + { + "name": "DatabaseMaximumSize", + "type": "int", + "help": "Defines the maximum size in GB of an Azure SQL Database." + }, + { + "name": "DatabaseServiceObjective", + "type": "string", + "help": "Defines the performance level of an Azure SQL Database such as \"P0\" or \"S1\"." + }, + { + "name": "DeployDatabaseInSingleUserMode", + "type": "bool", + "help": "If true, the database is set to Single User Mode before deploying." + }, + { + "name": "DisableAndReenableDdlTriggers", + "type": "bool", + "help": "Specifies whether Data Definition Language (DDL) triggers are disabled at the beginning of the publish process and re-enabled at the end of the publish action." + }, + { + "name": "DisableIndexesForDataPhase", + "type": "bool", + "help": "Disable indexes before importing data into SQL Server." + }, + { + "name": "DisableParallelismForEnablingIndexes", + "type": "bool", + "help": "Do not use parallelism when rebuilding indexes while importing data into SQL Server." + }, + { + "name": "DoNotAlterChangeDataCaptureObjects", + "type": "bool", + "help": "If true, Change Data Capture objects are not altered."
+ }, + { + "name": "DoNotAlterReplicatedObjects", + "type": "bool", + "help": "Specifies whether objects that are replicated are identified during verification." + }, + { + "name": "DoNotDropDatabaseWorkloadGroups", + "type": "bool", + "help": "When false, Database WorkloadGroups in the target database that are not defined in the source will be dropped during deployment." + }, + { + "name": "DoNotDropObjectType", + "type": "string", + "help": "An object type that should not be dropped when DropObjectsNotInSource is true. Valid object type names are Aggregates, ApplicationRoles, Assemblies, AssemblyFiles, AsymmetricKeys, BrokerPriorities, Certificates, ColumnEncryptionKeys, ColumnMasterKeys, Contracts, DatabaseOptions, DatabaseRoles, DatabaseTriggers, Defaults, ExtendedProperties, ExternalDataSources, ExternalFileFormats, ExternalTables, Filegroups, Files, FileTables, FullTextCatalogs, FullTextStoplists, MessageTypes, PartitionFunctions, PartitionSchemes, Permissions, Queues, RemoteServiceBindings, RoleMembership, Rules, ScalarValuedFunctions, SearchPropertyLists, SecurityPolicies, Sequences, Services, Signatures, StoredProcedures, SymmetricKeys, Synonyms, Tables, TableValuedFunctions, UserDefinedDataTypes, UserDefinedTableTypes, ClrUserDefinedTypes, Users, Views, XmlSchemaCollections, Audits, Credentials, CryptographicProviders, DatabaseAuditSpecifications, DatabaseEncryptionKeys, DatabaseScopedCredentials, Endpoints, ErrorMessages, EventNotifications, EventSessions, LinkedServerLogins, LinkedServers, Logins, MasterKeys, Routes, ServerAuditSpecifications, ServerRoleMembership, ServerRoles, ServerTriggers, ExternalStreams, ExternalStreamingJobs, DatabaseWorkloadGroups, WorkloadClassifiers, ExternalLibraries, ExternalLanguages. This property may be specified multiple times to indicate multiple options." + }, + { + "name": "DoNotDropObjectTypes", + "type": "List<string>", + "separator": ";", + "help": "A semicolon-delimited list of object types that should not be dropped when DropObjectsNotInSource is true. Valid object type names are Aggregates, ApplicationRoles, Assemblies, AssemblyFiles, AsymmetricKeys, BrokerPriorities, Certificates, ColumnEncryptionKeys, ColumnMasterKeys, Contracts, DatabaseOptions, DatabaseRoles, DatabaseTriggers, Defaults, ExtendedProperties, ExternalDataSources, ExternalFileFormats, ExternalTables, Filegroups, Files, FileTables, FullTextCatalogs, FullTextStoplists, MessageTypes, PartitionFunctions, PartitionSchemes, Permissions, Queues, RemoteServiceBindings, RoleMembership, Rules, ScalarValuedFunctions, SearchPropertyLists, SecurityPolicies, Sequences, Services, Signatures, StoredProcedures, SymmetricKeys, Synonyms, Tables, TableValuedFunctions, UserDefinedDataTypes, UserDefinedTableTypes, ClrUserDefinedTypes, Users, Views, XmlSchemaCollections, Audits, Credentials, CryptographicProviders, DatabaseAuditSpecifications, DatabaseEncryptionKeys, DatabaseScopedCredentials, Endpoints, ErrorMessages, EventNotifications, EventSessions, LinkedServerLogins, LinkedServers, Logins, MasterKeys, Routes, ServerAuditSpecifications, ServerRoleMembership, ServerRoles, ServerTriggers, ExternalStreams, ExternalStreamingJobs, DatabaseWorkloadGroups, WorkloadClassifiers, ExternalLibraries, ExternalLanguages." + }, + { + "name": "DoNotDropWorkloadClassifiers", + "type": "bool", + "help": "When false, WorkloadClassifiers in the target database that are not defined in the source will be dropped during deployment."
+ }, + { + "name": "DoNotEvaluateSqlCmdVariables", + "type": "bool", + "help": "Specifies that SQLCMD variables should not be replaced with values." + }, + { + "name": "DropConstraintsNotInSource", + "type": "bool", + "help": "Specifies whether constraints that do not exist in the database snapshot (.dacpac) file will be dropped from the target database when you publish to a database." + }, + { + "name": "DropDmlTriggersNotInSource", + "type": "bool", + "help": "Specifies whether DML triggers that do not exist in the database snapshot (.dacpac) file will be dropped from the target database when you publish to a database." + }, + { + "name": "DropExtendedPropertiesNotInSource", + "type": "bool", + "help": "Specifies whether extended properties that do not exist in the database snapshot (.dacpac) file will be dropped from the target database when you publish to a database." + }, + { + "name": "DropIndexesNotInSource", + "type": "bool", + "help": "Specifies whether indexes that do not exist in the database snapshot (.dacpac) file will be dropped from the target database when you publish to a database." + }, + { + "name": "DropObjectsNotInSource", + "type": "bool", + "help": "Specifies whether objects that do not exist in the database snapshot (.dacpac) file will be dropped from the target database when you publish to a database. This value takes precedence over DropExtendedProperties." + }, + { + "name": "DropPermissionsNotInSource", + "type": "bool", + "help": "Specifies whether permissions that do not exist in the database snapshot (.dacpac) file will be dropped from the target database when you publish updates to a database." + }, + { + "name": "DropRoleMembersNotInSource", + "type": "bool", + "help": "Specifies whether role members that are not defined in the database snapshot (.dacpac) file will be dropped from the target database when you publish updates to a database." + }, + { + "name": "DropStatisticsNotInSource", + "type": "bool", + "help": "Specifies whether statistics that do not exist in the database snapshot (.dacpac) file will be dropped from the target database when you publish to a database." + }, + { + "name": "EnclaveAttestationProtocol", + "type": "string", + "help": "Specifies an attestation protocol to be used with enclave based Always Encrypted." + }, + { + "name": "EnclaveAttestationUrl", + "type": "string", + "help": "Specifies the enclave attestation URL (an attestation service endpoint) to be used with enclave based Always Encrypted." + }, + { + "name": "ExcludeObjectType", + "type": "string", + "help": "An object type that should be ignored during deployment.
Valid object type names are Aggregates, ApplicationRoles, Assemblies, AssemblyFiles, AsymmetricKeys, BrokerPriorities, Certificates, ColumnEncryptionKeys, ColumnMasterKeys, Contracts, DatabaseOptions, DatabaseRoles, DatabaseTriggers, Defaults, ExtendedProperties, ExternalDataSources, ExternalFileFormats, ExternalTables, Filegroups, Files, FileTables, FullTextCatalogs, FullTextStoplists, MessageTypes, PartitionFunctions, PartitionSchemes, Permissions, Queues, RemoteServiceBindings, RoleMembership, Rules, ScalarValuedFunctions, SearchPropertyLists, SecurityPolicies, Sequences, Services, Signatures, StoredProcedures, SymmetricKeys, Synonyms, Tables, TableValuedFunctions, UserDefinedDataTypes, UserDefinedTableTypes, ClrUserDefinedTypes, Users, Views, XmlSchemaCollections, Audits, Credentials, CryptographicProviders, DatabaseAuditSpecifications, DatabaseEncryptionKeys, DatabaseScopedCredentials, Endpoints, ErrorMessages, EventNotifications, EventSessions, LinkedServerLogins, LinkedServers, Logins, MasterKeys, Routes, ServerAuditSpecifications, ServerRoleMembership, ServerRoles, ServerTriggers, ExternalStreams, ExternalStreamingJobs, DatabaseWorkloadGroups, WorkloadClassifiers, ExternalLibraries, ExternalLanguages. This property may be specified multiple times to indicate multiple options." + }, + { + "name": "ExcludeObjectTypes", + "type": "List<string>", + "separator": ";", + "help": "A semicolon delimited list of object types that should be ignored during deployment. Valid object type names are Aggregates, ApplicationRoles, Assemblies, AssemblyFiles, AsymmetricKeys, BrokerPriorities, Certificates, ColumnEncryptionKeys, ColumnMasterKeys, Contracts, DatabaseOptions, DatabaseRoles, DatabaseTriggers, Defaults, ExtendedProperties, ExternalDataSources, ExternalFileFormats, ExternalTables, Filegroups, Files, FileTables, FullTextCatalogs, FullTextStoplists, MessageTypes, PartitionFunctions, PartitionSchemes, Permissions, Queues, RemoteServiceBindings, RoleMembership, Rules, ScalarValuedFunctions, SearchPropertyLists, SecurityPolicies, Sequences, Services, Signatures, StoredProcedures, SymmetricKeys, Synonyms, Tables, TableValuedFunctions, UserDefinedDataTypes, UserDefinedTableTypes, ClrUserDefinedTypes, Users, Views, XmlSchemaCollections, Audits, Credentials, CryptographicProviders, DatabaseAuditSpecifications, DatabaseEncryptionKeys, DatabaseScopedCredentials, Endpoints, ErrorMessages, EventNotifications, EventSessions, LinkedServerLogins, LinkedServers, Logins, MasterKeys, Routes, ServerAuditSpecifications, ServerRoleMembership, ServerRoles, ServerTriggers, ExternalStreams, ExternalStreamingJobs, DatabaseWorkloadGroups, WorkloadClassifiers, ExternalLibraries, ExternalLanguages." + }, + { + "name": "GenerateSmartDefaults", + "type": "bool", + "help": "Automatically provides a default value when updating a table that contains data with a column that does not allow null values." + }, + { + "name": "HashObjectNamesInLogs", + "type": "bool", + "help": "Specifies whether to replace all object names in logs with a random hash value." + }, + { + "name": "IgnoreAnsiNulls", + "type": "bool", + "help": "Specifies whether differences in the ANSI NULLS setting should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreAuthorizer", + "type": "bool", + "help": "Specifies whether differences in the Authorizer should be ignored or updated when you publish to a database."
+ }, + { + "name": "IgnoreColumnCollation", + "type": "bool", + "help": "Specifies whether differences in the column collations should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreColumnOrder", + "type": "bool", + "help": "Specifies whether differences in table column order should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreComments", + "type": "bool", + "help": "Specifies whether differences in the comments should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreCryptographicProviderFilePath", + "type": "bool", + "help": "Specifies whether differences in the file path for the cryptographic provider should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreDatabaseWorkloadGroups", + "type": "bool", + "help": "Specifies whether to exclude workload groups that exist on the target during deployment. No Database Workload Groups will be added, modified, or dropped." + }, + { + "name": "IgnoreDdlTriggerOrder", + "type": "bool", + "help": "Specifies whether differences in the order of Data Definition Language (DDL) triggers should be ignored or updated when you publish to a database or server." + }, + { + "name": "IgnoreDdlTriggerState", + "type": "bool", + "help": "Specifies whether differences in the enabled or disabled state of Data Definition Language (DDL) triggers should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreDefaultSchema", + "type": "bool", + "help": "Specifies whether differences in the default schema should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreDmlTriggerOrder", + "type": "bool", + "help": "Specifies whether differences in the order of Data Manipulation Language (DML) triggers should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreDmlTriggerState", + "type": "bool", + "help": "Specifies whether differences in the enabled or disabled state of DML triggers should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreExtendedProperties", + "type": "bool", + "help": "Specifies whether differences in the extended properties should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreFileAndLogFilePath", + "type": "bool", + "help": "Specifies whether differences in the paths for files and log files should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreFilegroupPlacement", + "type": "bool", + "help": "Specifies whether differences in the placement of objects in FILEGROUPs should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreFileSize", + "type": "bool", + "help": "Specifies whether differences in the file sizes should be ignored or whether a warning should be issued when you publish to a database." + }, + { + "name": "IgnoreFillFactor", + "type": "bool", + "help": "Specifies whether differences in the fill factor for index storage should be ignored or whether a warning should be issued when you publish to a database." + }, + { + "name": "IgnoreFullTextCatalogFilePath", + "type": "bool", + "help": "Specifies whether differences in the file path for the full-text catalog should be ignored or whether a warning should be issued when you publish to a database." 
+ }, + { + "name": "IgnoreIdentitySeed", + "type": "bool", + "help": "Specifies whether differences in the seed for an identity column should be ignored or updated when you publish updates to a database." + }, + { + "name": "IgnoreIncrement", + "type": "bool", + "help": "Specifies whether differences in the increment for an identity column should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreIndexOptions", + "type": "bool", + "help": "Specifies whether differences in the index options should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreIndexPadding", + "type": "bool", + "help": "Specifies whether differences in the index padding should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreKeywordCasing", + "type": "bool", + "help": "Specifies whether differences in the casing of keywords should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreLockHintsOnIndexes", + "type": "bool", + "help": "Specifies whether differences in the lock hints on indexes should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreLoginSids", + "type": "bool", + "help": "Specifies whether differences in the security identification number (SID) should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreNotForReplication", + "type": "bool", + "help": "Specifies whether the not for replication settings should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreObjectPlacementOnPartitionScheme", + "type": "bool", + "help": "Specifies whether an object's placement on a partition scheme should be ignored or updated when you publish to a database." + }, + { + "name": "IgnorePartitionSchemes", + "type": "bool", + "help": "Specifies whether differences in partition schemes and functions should be ignored or updated when you publish to a database." + }, + { + "name": "IgnorePermissions", + "type": "bool", + "help": "Specifies whether differences in the permissions should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreQuotedIdentifiers", + "type": "bool", + "help": "Specifies whether differences in the quoted identifiers setting should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreRoleMembership", + "type": "bool", + "help": "Specifies whether differences in the role membership of logins should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreRouteLifetime", + "type": "bool", + "help": "Specifies whether differences in the amount of time that SQL Server retains the route in the routing table should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreSemicolonBetweenStatements", + "type": "bool", + "help": "Specifies whether differences in the semi-colons between T-SQL statements will be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreSensitivityClassifications", + "type": "bool", + "help": "Specifies whether data sensitivity classifications on columns should be ignored when comparing schema models. This only works for classifications added with the ADD SENSITIVITY CLASSIFICATION syntax introduced in SQL 2019." + }, + { + "name": "IgnoreTableOptions", + "type": "bool", + "help": "Specifies whether differences in the table options will be ignored or updated when you publish to a database." 
+ }, + { + "name": "IgnoreTablePartitionOptions", + "type": "bool", + "help": "Specifies whether differences in the table partition options will be ignored or updated when you publish to a database. This option applies only to Azure Synapse Analytics dedicated SQL pool databases." + }, + { + "name": "IgnoreUserSettingsObjects", + "type": "bool", + "help": "Specifies whether differences in the user settings objects will be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreWhitespace", + "type": "bool", + "help": "Specifies whether differences in white space will be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreWithNocheckOnCheckConstraints", + "type": "bool", + "help": "Specifies whether differences in the value of the WITH NOCHECK clause for check constraints will be ignored or updated when you publish." + }, + { + "name": "IgnoreWithNocheckOnForeignKeys", + "type": "bool", + "help": "Specifies whether differences in the value of the WITH NOCHECK clause for foreign keys will be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreWorkloadClassifiers", + "type": "bool", + "help": "Specifies whether to exclude workload classifiers that exist on the target during deployment." + }, + { + "name": "IncludeCompositeObjects", + "type": "bool", + "help": "Include all composite elements with the same database as part of a single publish operation." + }, + { + "name": "IncludeTransactionalScripts", + "type": "bool", + "help": "Specifies whether transactional statements should be used where possible when you publish to a database." + }, + { + "name": "IsAlwaysEncryptedParameterizationEnabled", + "type": "bool", + "help": "Enables variable parameterization on Always Encrypted columns in pre/post deployment scripts." + }, + { + "name": "LongRunningCommandTimeout", + "type": "int", + "help": "Specifies the long running command timeout in seconds when executing queries against SQL Server. Use 0 to wait indefinitely." + }, + { + "name": "NoAlterStatementsToChangeClrTypes", + "type": "bool", + "help": "Specifies that publish should always drop and re-create an assembly if there is a difference instead of issuing an ALTER ASSEMBLY statement." + }, + { + "name": "PopulateFilesOnFileGroups", + "type": "bool", + "help": "Specifies whether a new file is also created when a new FileGroup is created in the target database." + }, + { + "name": "PreserveIdentityLastValues", + "type": "bool", + "help": "Specifies whether last values for identity columns should be preserved during deployment." + }, + { + "name": "RebuildIndexesOfflineForDataPhase", + "type": "bool", + "help": "Rebuild indexes offline after importing data." + }, + { + "name": "RegisterDataTierApplication", + "type": "bool", + "help": "Specifies whether the schema is registered with the database server." + }, + { + "name": "RestoreSequenceCurrentValue", + "type": "bool", + "help": "Specifies whether the sequence object's current value should be deployed with the dacpac file. The default value is True." + }, + { + "name": "RunDeploymentPlanExecutors", + "type": "bool", + "help": "Specifies whether DeploymentPlanExecutor contributors should be run when other operations are executed." + }, + { + "name": "ScriptDatabaseCollation", + "type": "bool", + "help": "Specifies whether differences in the database collation should be ignored or updated when you publish to a database."
+ }, + { + "name": "ScriptDatabaseCompatibility", + "type": "bool", + "help": "Specifies whether differences in the database compatibility should be ignored or updated when you publish to a database." + }, + { + "name": "ScriptDatabaseOptions", + "type": "bool", + "help": "Specifies whether target database properties should be set or updated as part of the publish action." + }, + { + "name": "ScriptDeployStateChecks", + "type": "bool", + "help": "Specifies whether statements are generated in the publish script to verify that the database name and server name match the names specified in the database project." + }, + { + "name": "ScriptFileSize", + "type": "bool", + "help": "Controls whether size is specified when adding a file to a filegroup." + }, + { + "name": "ScriptNewConstraintValidation", + "type": "bool", + "help": "At the end of publish all of the constraints will be verified as one set, avoiding data errors caused by a check or foreign key constraint in the middle of publish. If set to False, your constraints are published without checking the corresponding data." + }, + { + "name": "ScriptRefreshModule", + "type": "bool", + "help": "Include refresh statements at the end of the publish script." + }, + { + "name": "Storage", + "type": "StorageType", + "help": "Specifies how elements are stored when building the database model. For performance reasons the default is InMemory. For large databases, File backed storage may be required and is only available for the .NET Framework version of SqlPackage." + }, + { + "name": "TreatVerificationErrorsAsWarnings", + "type": "bool", + "help": "Specifies whether errors encountered during publish verification should be treated as warnings. The check is performed against the generated deployment plan before the plan is executed against your target database. Plan verification detects problems such as the loss of target-only objects (such as indexes) that must be dropped to make a change. Verification will also detect situations where dependencies (such as a table or view) exist because of a reference to a composite project, but do not exist in the target database. You might choose to do this to get a complete list of all issues, instead of having the publish action stop on the first error." + }, + { + "name": "UnmodifiableObjectWarnings", + "type": "bool", + "help": "Specifies whether warnings should be generated when differences are found in objects that cannot be modified, for example, if the file size or file paths were different for a file." + }, + { + "name": "VerifyCollationCompatibility", + "type": "bool", + "help": "Specifies whether collation compatibility is verified." + }, + { + "name": "VerifyDeployment", + "type": "bool", + "help": "Specifies whether checks should be performed before publishing that will stop the publish action if issues are present that might block successful publishing. For example, your publish action might stop if you have foreign keys on the target database that do not exist in the database project, and that causes errors when you publish." + } + ] + }, + { + "name": "Quiet", + "type": "bool", + "format": "/Quiet:{value}", + "help": "Specifies whether detailed feedback is suppressed. Defaults to False." + }, + { + "name": "ReferencePaths", + "type": "Dictionary<string, string>", + "format": "/rp:{value}", + "itemFormat": "{key}={value}", + "help": "Specifies the additional directories to search for .dacpac references."
+ }, + { + "name": "Secret", + "type": "string", + "format": "/Secret:{value}", + "help": "Specifies the Client Secret to be used in authenticating against Azure KeyVault, when necessary." + }, + { + "name": "SourceFile", + "type": "string", + "format": "/SourceFile:{value}", + "help": "Specifies a source file to be used as the source of the action instead of a database from local storage. If this name is used, no other source name shall be valid." + }, + { + "name": "SourceConnectionString", + "type": "string", + "format": "/SourceConnectionString:{value}", + "help": "Specifies a valid [SQL Server/Azure connection string](/dotnet/api/microsoft.data.sqlclient.sqlconnection.connectionstring) to the source database. If this name is specified, it shall be used exclusively of all other source names." + }, + { + "name": "SourceDatabaseName", + "type": "string", + "format": "/SourceDatabaseName:{value}", + "help": "Defines the name of the source database." + }, + { + "name": "SourceEncryptConnection", + "type": "bool", + "format": "/SourceEncryptConnection:{value}", + "help": "Specifies if SQL encryption should be used for the source database connection. Default value is True." + }, + { + "name": "SourceHostNameInCertificate", + "type": "string", + "format": "/SourceHostNameInCertificate:{value}", + "help": "Specifies the value that is used to validate the source SQL Server TLS/SSL certificate when the communication layer is encrypted by using TLS." + }, + { + "name": "SourcePassword", + "type": "string", + "format": "/SourcePassword:{value}", + "help": "For SQL Server Auth scenarios, defines the password to use to access the source database." + }, + { + "name": "SourceServerName", + "type": "string", + "format": "/SourceServerName:{value}", + "help": "Defines the name of the server hosting the source database." + }, + { + "name": "SourceTimeout", + "type": "int", + "format": "/SourceTimeout:{value}", + "help": "Specifies the timeout for establishing a connection to the source database in seconds." + }, + { + "name": "SourceTrustServerCertificate", + "type": "bool", + "format": "/SourceTrustServerCertificate:{value}", + "help": "Specifies whether to use TLS to encrypt the source database connection and bypass walking the certificate chain to validate trust. Default value is False." + }, + { + "name": "SourceUser", + "type": "string", + "format": "/SourceUser:{value}", + "help": "For SQL Server Auth scenarios, defines the SQL Server user to use to access the source database." + }, + { + "name": "TargetConnectionString", + "type": "string", + "format": "/TargetConnectionString:{value}", + "help": "Specifies a valid [SQL Server/Azure connection string](/dotnet/api/microsoft.data.sqlclient.sqlconnection.connectionstring) to the target database. If this name is specified, it shall be used exclusively of all other target names." + }, + { + "name": "TargetDatabaseName", + "type": "string", + "format": "/TargetDatabaseName:{value}", + "help": "Specifies an override for the name of the database that is the target of SqlPackage Action." + }, + { + "name": "TargetEncryptConnection", + "type": "bool", + "format": "/TargetEncryptConnection:{value}", + "help": "Specifies if SQL encryption should be used for the target database connection. Default value is True."
+ }, + { + "name": "TargetHostNameInCertificate", + "type": "string", + "format": "/TargetHostNameInCertificate:{value}", + "help": "Specifies the value that is used to validate the target SQL Server TLS/SSL certificate when the communication layer is encrypted by using TLS." + }, + { + "name": "TargetPassword", + "type": "string", + "format": "/TargetPassword:{value}", + "help": "For SQL Server Auth scenarios, defines the password to use to access the target database." + }, + { + "name": "TargetServerName", + "type": "string", + "format": "/TargetServerName:{value}", + "help": "Defines the name of the server hosting the target database." + }, + { + "name": "TargetTimeout", + "type": "int", + "format": "/TargetTimeout:{value}", + "help": "Specifies the timeout for establishing a connection to the target database in seconds. For Azure AD, it is recommended that this value be greater than or equal to 30 seconds." + }, + { + "name": "TargetTrustServerCertificate", + "type": "bool", + "format": "/TargetTrustServerCertificate:{value}", + "help": "Specifies whether to use TLS to encrypt the target database connection and bypass walking the certificate chain to validate trust. Default value is False." + }, + { + "name": "TargetUser", + "type": "string", + "format": "/TargetUser:{value}", + "help": "For SQL Server Auth scenarios, defines the SQL Server user to use to access the target database." + }, + { + "name": "TenantId", + "type": "string", + "format": "/TenantId:{value}", + "help": "Represents the Azure AD tenant ID or domain name. This option is required to support guest or imported Azure AD users as well as Microsoft accounts such as outlook.com, hotmail.com, or live.com. If this name is omitted, the default tenant ID for Azure AD will be used, assuming that the authenticated user is a native user for this AD. However, in this case any guest or imported users and/or Microsoft accounts hosted in this Azure AD are not supported and the operation will fail.
For more information about Active Directory Universal Authentication, see [Universal Authentication with SQL Database and Azure Synapse Analytics (SSMS support for MFA)](/azure/sql-database/sql-database-ssms-mfa-authentication)." + }, + { + "name": "ThreadMaxStackSize", + "type": "int", + "format": "/ThreadMaxStackSize:{value}", + "help": "Specifies the maximum size in megabytes for the thread running the SqlPackage action. This option should only be used when encountering stack overflow exceptions that occur when parsing very large TSQL statements." + }, + { + "name": "UniversalAuthentication", + "type": "bool", + "format": "/UniversalAuthentication:{value}", + "help": "Specifies if Universal Authentication should be used. When set to True, the interactive authentication protocol is activated supporting MFA. This option can also be used for Azure AD authentication without MFA, using an interactive protocol requiring the user to enter their username and password or integrated authentication (Windows credentials). When /UniversalAuthentication is set to True, no Azure AD authentication can be specified in SourceConnectionString (/scs). When /UniversalAuthentication is set to False, Azure AD authentication must be specified in SourceConnectionString (/scs).
For more information about Active Directory Universal Authentication, see [Universal Authentication with SQL Database and Azure Synapse Analytics (SSMS support for MFA)](/azure/sql-database/sql-database-ssms-mfa-authentication)." + }, + { + "name": "Variables", + "type": "Dictionary<string, string>", + "format": "/v:{value}", + "itemFormat": "{key}={value}", + "help": "Specifies a name value pair for an action-specific variable;{VariableName}={Value}. The DACPAC file contains the list of valid SQLCMD variables. An error results if a value is not provided for every variable." + } + ] + } + }, + { + "help": "The SqlPackage DeployReport action creates an XML report of the changes that would be made by a publish action.", + "postfix": "DeployReport", + "definiteArgument": "/Action:DeployReport", + "officialUrl": "https://learn.microsoft.com/en-us/sql/tools/sqlpackage/sqlpackage-deploy-drift-report", + "settingsClass": { + "properties": [ + { + "name": "AccessToken", + "type": "string", + "format": "/AccessToken:{value}", + "help": "Specifies the token-based authentication access token to use when connecting to the target database." + }, + { + "name": "Diagnostics", + "type": "bool", + "format": "/Diagnostics:{value}", + "help": "Specifies whether diagnostic logging is output to the console. Defaults to False." + }, + { + "name": "DiagnosticsFile", + "type": "string", + "format": "/DiagnosticsFile:{value}", + "help": "Specifies a file to store diagnostic logs." + }, + { + "name": "MaxParallelism", + "type": "int", + "format": "/MaxParallelism:{value}", + "help": "Specifies the degree of parallelism for concurrent operations running against a database. The default value is 8." + }, + { + "name": "OutputPath", + "type": "string", + "format": "/OutputPath:{value}", + "help": "Specifies the file path where the output files are generated." + }, + { + "name": "OverwriteFiles", + "type": "bool", + "format": "/OverwriteFiles:{value}", + "help": "Specifies if SqlPackage should overwrite existing files. Specifying false causes SqlPackage to abort action if an existing file is encountered. Default value is True." + }, + { + "name": "Profile", + "type": "string", + "format": "/Profile:{value}", + "help": "Specifies the file path to a DAC Publish Profile. The profile defines a collection of properties and variables to use when generating outputs." + }, + { + "name": "Properties", + "type": "Dictionary<string, string>", + "format": "/p:{value}", + "itemFormat": "{key}={value}", + "help": "Specifies a name value pair for an [action-specific property](#deployreport-action-properties); {key}={value}.", + "delegates": [ + { + "name": "AdditionalDeploymentContributorArguments", + "type": "List<string>", + "separator": ";", + "help": "Specifies additional deployment contributor arguments for the deployment contributors. This should be a semi-colon delimited list of values." + }, + { + "name": "AdditionalDeploymentContributors", + "type": "List<string>", + "separator": ";", + "help": "Specifies additional deployment contributors, which should run when the dacpac is deployed. This should be a semi-colon delimited list of fully qualified build contributor names or IDs." + }, + { + "name": "AdditionalDeploymentContributorPaths", + "type": "List<string>", + "separator": ";", + "help": "Specifies paths to load additional deployment contributors. This should be a semi-colon delimited list of values."
+ }, + { + "name": "AllowDropBlockingAssemblies", + "type": "bool", + "help": "This property is used by SqlClr deployment to cause any blocking assemblies to be dropped as part of the deployment plan. By default, any blocking/referencing assemblies will block an assembly update if the referencing assembly needs to be dropped." + }, + { + "name": "AllowIncompatiblePlatform", + "type": "bool", + "help": "Specifies whether to attempt the action despite incompatible SQL Server platforms." + }, + { + "name": "AllowUnsafeRowLevelSecurityDataMovement", + "type": "bool", + "help": "Do not block data motion on a table that has Row Level Security if this property is set to true. Default is false." + }, + { + "name": "BackupDatabaseBeforeChanges", + "type": "bool", + "help": "Backs up the database before deploying any changes." + }, + { + "name": "BlockOnPossibleDataLoss", + "type": "bool", + "help": "Specifies that the operation will be terminated during the schema validation step if the resulting schema changes could incur a loss of data, including due to data precision reduction or a data type change that requires a cast operation. The default (`True`) value causes the operation to terminate regardless of whether the target database contains data. An execution with a `False` value for BlockOnPossibleDataLoss can still fail during deployment plan execution if data is present on the target that cannot be converted to the new column type." + }, + { + "name": "BlockWhenDriftDetected", + "type": "bool", + "help": "Specifies whether to block updating a database whose schema no longer matches its registration or is unregistered." + }, + { + "name": "CommandTimeout", + "type": "int", + "help": "Specifies the command timeout in seconds when executing queries against SQL Server." + }, + { + "name": "CommentOutSetVarDeclarations", + "type": "bool", + "help": "Specifies whether the declaration of SETVAR variables should be commented out in the generated publish script. You might choose to do this if you plan to specify the values on the command line when you publish by using a tool such as SQLCMD.EXE." + }, + { + "name": "CompareUsingTargetCollation", + "type": "bool", + "help": "This setting dictates how the database's collation is handled during deployment; by default the target database's collation will be updated if it does not match the collation specified by the source. When this option is set, the target database's (or server's) collation should be used." + }, + { + "name": "CreateNewDatabase", + "type": "bool", + "help": "Specifies whether the target database should be updated or whether it should be dropped and re-created when you publish to a database." + }, + { + "name": "DatabaseEdition", + "type": "DatabaseEdition", + "help": "Defines the edition of an Azure SQL Database. See [Azure SQL Database service tiers](/azure/azure-sql/database/service-tiers-general-purpose-business-critical)." + }, + { + "name": "DatabaseLockTimeout", + "type": "int", + "help": "Specifies the database lock timeout in seconds when executing queries against SQL Server. Use -1 to wait indefinitely." + }, + { + "name": "DatabaseMaximumSize", + "type": "int", + "help": "Defines the maximum size in GB of an Azure SQL Database." + }, + { + "name": "DatabaseServiceObjective", + "type": "string", + "help": "Defines the performance level of an Azure SQL Database such as \"P0\" or \"S1\"." + }, + { + "name": "DeployDatabaseInSingleUserMode", + "type": "bool", + "help": "If true, the database is set to Single User Mode before deploying."
+ }, + { + "name": "DisableAndReenableDdlTriggers", + "type": "bool", + "help": "Specifies whether Data Definition Language (DDL) triggers are disabled at the beginning of the publish process and re-enabled at the end of the publish action." + }, + { + "name": "DoNotAlterChangeDataCaptureObjects", + "type": "bool", + "help": "If true, Change Data Capture objects are not altered." + }, + { + "name": "DoNotAlterReplicatedObjects", + "type": "bool", + "help": "Specifies whether objects that are replicated are identified during verification." + }, + { + "name": "DoNotDropObjectType", + "type": "string", + "help": "An object type that should not be dropped when DropObjectsNotInSource is true. Valid object type names are Aggregates, ApplicationRoles, Assemblies, AsymmetricKeys, BrokerPriorities, Certificates, ColumnEncryptionKeys, ColumnMasterKeys, Contracts, DatabaseRoles, DatabaseTriggers, Defaults, ExtendedProperties, ExternalDataSources, ExternalFileFormats, ExternalTables, Filegroups, FileTables, FullTextCatalogs, FullTextStoplists, MessageTypes, PartitionFunctions, PartitionSchemes, Permissions, Queues, RemoteServiceBindings, RoleMembership, Rules, ScalarValuedFunctions, SearchPropertyLists, SecurityPolicies, Sequences, Services, Signatures, StoredProcedures, SymmetricKeys, Synonyms, Tables, TableValuedFunctions, UserDefinedDataTypes, UserDefinedTableTypes, ClrUserDefinedTypes, Users, Views, XmlSchemaCollections, Audits, Credentials, CryptographicProviders, DatabaseAuditSpecifications, DatabaseScopedCredentials, Endpoints, ErrorMessages, EventNotifications, EventSessions, LinkedServerLogins, LinkedServers, Logins, Routes, ServerAuditSpecifications, ServerRoleMembership, ServerRoles, ServerTriggers. This property may be specified multiple times to indicate multiple options." + }, + { + "name": "DoNotDropObjectTypes", + "type": "List<string>", + "separator": ";", + "help": "A semicolon-delimited list of object types that should not be dropped when DropObjectsNotInSource is true. Valid object type names are Aggregates, ApplicationRoles, Assemblies, AsymmetricKeys, BrokerPriorities, Certificates, ColumnEncryptionKeys, ColumnMasterKeys, Contracts, DatabaseRoles, DatabaseTriggers, Defaults, ExtendedProperties, ExternalDataSources, ExternalFileFormats, ExternalTables, Filegroups, FileTables, FullTextCatalogs, FullTextStoplists, MessageTypes, PartitionFunctions, PartitionSchemes, Permissions, Queues, RemoteServiceBindings, RoleMembership, Rules, ScalarValuedFunctions, SearchPropertyLists, SecurityPolicies, Sequences, Services, Signatures, StoredProcedures, SymmetricKeys, Synonyms, Tables, TableValuedFunctions, UserDefinedDataTypes, UserDefinedTableTypes, ClrUserDefinedTypes, Users, Views, XmlSchemaCollections, Audits, Credentials, CryptographicProviders, DatabaseAuditSpecifications, DatabaseScopedCredentials, Endpoints, ErrorMessages, EventNotifications, EventSessions, LinkedServerLogins, LinkedServers, Logins, Routes, ServerAuditSpecifications, ServerRoleMembership, ServerRoles, ServerTriggers." + }, + { + "name": "DropConstraintsNotInSource", + "type": "bool", + "help": "Specifies whether constraints that do not exist in the database snapshot (.dacpac) file will be dropped from the target database when you publish to a database." + }, + { + "name": "DropDmlTriggersNotInSource", + "type": "bool", + "help": "Specifies whether DML triggers that do not exist in the database snapshot (.dacpac) file will be dropped from the target database when you publish to a database."
+ }, + { + "name": "DropExtendedPropertiesNotInSource", + "type": "bool", + "help": "Specifies whether extended properties that do not exist in the database snapshot (.dacpac) file will be dropped from the target database when you publish to a database." + }, + { + "name": "DropIndexesNotInSource", + "type": "bool", + "help": "Specifies whether indexes that do not exist in the database snapshot (.dacpac) file will be dropped from the target database when you publish to a database." + }, + { + "name": "DropObjectsNotInSource", + "type": "bool", + "help": "Specifies whether objects that do not exist in the database snapshot (.dacpac) file will be dropped from the target database when you publish to a database. This value takes precedence over DropExtendedProperties." + }, + { + "name": "DropPermissionsNotInSource", + "type": "bool", + "help": "Specifies whether permissions that do not exist in the database snapshot (.dacpac) file will be dropped from the target database when you publish updates to a database." + }, + { + "name": "DropRoleMembersNotInSource", + "type": "bool", + "help": "Specifies whether role members that are not defined in the database snapshot (.dacpac) file will be dropped from the target database when you publish updates to a database." + }, + { + "name": "DropStatisticsNotInSource", + "type": "bool", + "help": "Specifies whether statistics that do not exist in the database snapshot (.dacpac) file will be dropped from the target database when you publish to a database." + }, + { + "name": "ExcludeObjectType", + "type": "string", + "help": "An object type that should be ignored during deployment. Valid object type names are Aggregates, ApplicationRoles, Assemblies, AsymmetricKeys, BrokerPriorities, Certificates, ColumnEncryptionKeys, ColumnMasterKeys, Contracts, DatabaseRoles, DatabaseTriggers, Defaults, ExtendedProperties, ExternalDataSources, ExternalFileFormats, ExternalTables, Filegroups, FileTables, FullTextCatalogs, FullTextStoplists, MessageTypes, PartitionFunctions, PartitionSchemes, Permissions, Queues, RemoteServiceBindings, RoleMembership, Rules, ScalarValuedFunctions, SearchPropertyLists, SecurityPolicies, Sequences, Services, Signatures, StoredProcedures, SymmetricKeys, Synonyms, Tables, TableValuedFunctions, UserDefinedDataTypes, UserDefinedTableTypes, ClrUserDefinedTypes, Users, Views, XmlSchemaCollections, Audits, Credentials, CryptographicProviders, DatabaseAuditSpecifications, DatabaseScopedCredentials, Endpoints, ErrorMessages, EventNotifications, EventSessions, LinkedServerLogins, LinkedServers, Logins, Routes, ServerAuditSpecifications, ServerRoleMembership, ServerRoles, ServerTriggers." + }, + { + "name": "ExcludeObjectTypes", + "type": "List<string>", + "separator": ";", + "help": "A semicolon-delimited list of object types that should be ignored during deployment.
Valid object type names are Aggregates, ApplicationRoles, Assemblies, AsymmetricKeys, BrokerPriorities, Certificates, ColumnEncryptionKeys, ColumnMasterKeys, Contracts, DatabaseRoles, DatabaseTriggers, Defaults, ExtendedProperties, ExternalDataSources, ExternalFileFormats, ExternalTables, Filegroups, FileTables, FullTextCatalogs, FullTextStoplists, MessageTypes, PartitionFunctions, PartitionSchemes, Permissions, Queues, RemoteServiceBindings, RoleMembership, Rules, ScalarValuedFunctions, SearchPropertyLists, SecurityPolicies, Sequences, Services, Signatures, StoredProcedures, SymmetricKeys, Synonyms, Tables, TableValuedFunctions, UserDefinedDataTypes, UserDefinedTableTypes, ClrUserDefinedTypes, Users, Views, XmlSchemaCollections, Audits, Credentials, CryptographicProviders, DatabaseAuditSpecifications, DatabaseScopedCredentials, Endpoints, ErrorMessages, EventNotifications, EventSessions, LinkedServerLogins, LinkedServers, Logins, Routes, ServerAuditSpecifications, ServerRoleMembership, ServerRoles, ServerTriggers." + }, + { + "name": "GenerateSmartDefaults", + "type": "bool", + "help": "Automatically provides a default value when updating a table that contains data with a column that does not allow null values." + }, + { + "name": "IgnoreAnsiNulls", + "type": "bool", + "help": "Specifies whether differences in the ANSI NULLS setting should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreAuthorizer", + "type": "bool", + "help": "Specifies whether differences in the Authorizer should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreColumnCollation", + "type": "bool", + "help": "Specifies whether differences in the column collations should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreColumnOrder", + "type": "bool", + "help": "Specifies whether differences in table column order should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreComments", + "type": "bool", + "help": "Specifies whether differences in the comments should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreCryptographicProviderFilePath", + "type": "bool", + "help": "Specifies whether differences in the file path for the cryptographic provider should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreDdlTriggerOrder", + "type": "bool", + "help": "Specifies whether differences in the order of Data Definition Language (DDL) triggers should be ignored or updated when you publish to a database or server." + }, + { + "name": "IgnoreDdlTriggerState", + "type": "bool", + "help": "Specifies whether differences in the enabled or disabled state of Data Definition Language (DDL) triggers should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreDefaultSchema", + "type": "bool", + "help": "Specifies whether differences in the default schema should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreDmlTriggerOrder", + "type": "bool", + "help": "Specifies whether differences in the order of Data Manipulation Language (DML) triggers should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreDmlTriggerState", + "type": "bool", + "help": "Specifies whether differences in the enabled or disabled state of DML triggers should be ignored or updated when you publish to a database." 
+ },
+ {
+ "name": "IgnoreExtendedProperties",
+ "type": "bool",
+ "help": "Specifies whether differences in the extended properties should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreFileAndLogFilePath",
+ "type": "bool",
+ "help": "Specifies whether differences in the paths for files and log files should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreFilegroupPlacement",
+ "type": "bool",
+ "help": "Specifies whether differences in the placement of objects in FILEGROUPs should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreFileSize",
+ "type": "bool",
+ "help": "Specifies whether differences in the file sizes should be ignored or whether a warning should be issued when you publish to a database."
+ },
+ {
+ "name": "IgnoreFillFactor",
+ "type": "bool",
+ "help": "Specifies whether differences in the fill factor for index storage should be ignored or whether a warning should be issued when you publish to a database."
+ },
+ {
+ "name": "IgnoreFullTextCatalogFilePath",
+ "type": "bool",
+ "help": "Specifies whether differences in the file path for the full-text catalog should be ignored or whether a warning should be issued when you publish to a database."
+ },
+ {
+ "name": "IgnoreIdentitySeed",
+ "type": "bool",
+ "help": "Specifies whether differences in the seed for an identity column should be ignored or updated when you publish updates to a database."
+ },
+ {
+ "name": "IgnoreIncrement",
+ "type": "bool",
+ "help": "Specifies whether differences in the increment for an identity column should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreIndexOptions",
+ "type": "bool",
+ "help": "Specifies whether differences in the index options should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreIndexPadding",
+ "type": "bool",
+ "help": "Specifies whether differences in the index padding should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreKeywordCasing",
+ "type": "bool",
+ "help": "Specifies whether differences in the casing of keywords should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreLockHintsOnIndexes",
+ "type": "bool",
+ "help": "Specifies whether differences in the lock hints on indexes should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreLoginSids",
+ "type": "bool",
+ "help": "Specifies whether differences in the security identification number (SID) should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreNotForReplication",
+ "type": "bool",
+ "help": "Specifies whether the not for replication settings should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreObjectPlacementOnPartitionScheme",
+ "type": "bool",
+ "help": "Specifies whether an object's placement on a partition scheme should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnorePartitionSchemes",
+ "type": "bool",
+ "help": "Specifies whether differences in partition schemes and functions should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnorePermissions",
+ "type": "bool",
+ "help": "Specifies whether differences in the permissions should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreQuotedIdentifiers",
+ "type": "bool",
+ "help": "Specifies whether differences in the quoted identifiers setting should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreRoleMembership",
+ "type": "bool",
+ "help": "Specifies whether differences in the role membership of logins should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreRouteLifetime",
+ "type": "bool",
+ "help": "Specifies whether differences in the amount of time that SQL Server retains the route in the routing table should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreSemicolonBetweenStatements",
+ "type": "bool",
+ "help": "Specifies whether differences in the semi-colons between T-SQL statements will be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreTableOptions",
+ "type": "bool",
+ "help": "Specifies whether differences in the table options will be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreTablePartitionOptions",
+ "type": "bool",
+ "help": "Specifies whether differences in the table partition options will be ignored or updated when you publish to a database. This option applies only to Azure Synapse Analytics data warehouse databases."
+ },
+ {
+ "name": "IgnoreUserSettingsObjects",
+ "type": "bool",
+ "help": "Specifies whether differences in the user settings objects will be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreWhitespace",
+ "type": "bool",
+ "help": "Specifies whether differences in white space will be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreWithNocheckOnCheckConstraints",
+ "type": "bool",
+ "help": "Specifies whether differences in the value of the WITH NOCHECK clause for check constraints will be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreWithNocheckOnForeignKeys",
+ "type": "bool",
+ "help": "Specifies whether differences in the value of the WITH NOCHECK clause for foreign keys will be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IncludeCompositeObjects",
+ "type": "bool",
+ "help": "Include all composite elements with the same database as part of a single publish operation."
+ },
+ {
+ "name": "IncludeTransactionalScripts",
+ "type": "bool",
+ "help": "Specifies whether transactional statements should be used where possible when you publish to a database."
+ },
+ {
+ "name": "LongRunningCommandTimeout",
+ "type": "int",
+ "help": "Specifies the long running command timeout in seconds when executing queries against SQL Server. Use 0 to wait indefinitely."
+ },
+ {
+ "name": "NoAlterStatementsToChangeClrTypes",
+ "type": "bool",
+ "help": "Specifies that publish should always drop and re-create an assembly if there is a difference instead of issuing an ALTER ASSEMBLY statement."
+ },
+ {
+ "name": "PopulateFilesOnFileGroups",
+ "type": "bool",
+ "help": "Specifies whether a new file is also created when a new FileGroup is created in the target database."
+ },
+ {
+ "name": "RegisterDataTierApplication",
+ "type": "bool",
+ "help": "Specifies whether the schema is registered with the database server."
+ },
+ {
+ "name": "RunDeploymentPlanExecutors",
+ "type": "bool",
+ "help": "Specifies whether DeploymentPlanExecutor contributors should be run when other operations are executed."
+ }, + { + "name": "ScriptDatabaseCollation", + "type": "bool", + "help": "Specifies whether differences in the database collation should be ignored or updated when you publish to a database." + }, + { + "name": "ScriptDatabaseCompatibility", + "type": "bool", + "help": "Specifies whether differences in the database compatibility should be ignored or updated when you publish to a database." + }, + { + "name": "ScriptDatabaseOptions", + "type": "bool", + "help": "Specifies whether target database properties should be set or updated as part of the publish action." + }, + { + "name": "ScriptDeployStateChecks", + "type": "bool", + "help": "Specifies whether statements are generated in the publish script to verify that the database name and server name match the names specified in the database project." + }, + { + "name": "ScriptFileSize", + "type": "bool", + "help": "Controls whether size is specified when adding a file to a filegroup." + }, + { + "name": "ScriptNewConstraintValidation", + "type": "bool", + "help": "At the end of publish all of the constraints will be verified as one set, avoiding data errors caused by a check or foreign key constraint in the middle of publish. If set to False, your constraints are published without checking the corresponding data." + }, + { + "name": "ScriptRefreshModule", + "type": "bool", + "help": "Include refresh statements at the end of the publish script." + }, + { + "name": "Storage", + "type": "StorageType", + "help": "Specifies how elements are stored when building the database model. For performance reasons the default is InMemory. For large databases, File backed storage may be required and is only available for .NET Framework version of SqlPackage." + }, + { + "name": "TreatVerificationErrorsAsWarnings", + "type": "bool", + "help": "Specifies whether errors encountered during publish verification should be treated as warnings. The check is performed against the generated deployment plan before the plan is executed against your target database. Plan verification detects problems such as the loss of target-only objects (such as indexes) that must be dropped to make a change. Verification will also detect situations where dependencies (such as a table or view) exist because of a reference to a composite project, but do not exist in the target database. You might choose to do this to get a complete list of all issues, instead of having the publish action stop on the first error." + }, + { + "name": "UnmodifiableObjectWarnings", + "type": "bool", + "help": "Specifies whether warnings should be generated when differences are found in objects that cannot be modified, for example, if the file size or file paths were different for a file." + }, + { + "name": "VerifyCollationCompatibility", + "type": "bool", + "help": "Specifies whether collation compatibility is verified." + }, + { + "name": "VerifyDeployment", + "type": "bool", + "help": "Specifies whether checks should be performed before publishing that will stop the publish action if issues are present that might block successful publishing. For example, your publish action might stop if you have foreign keys on the target database that do not exist in the database project, and that causes errors when you publish." + } + ] + }, + { + "name": "Quiet", + "type": "bool", + "format": "/Quiet:{value}", + "help": "Specifies whether detailed feedback is suppressed. Defaults to False." 
+ },
+ {
+ "name": "SourceFile",
+ "type": "string",
+ "format": "/SourceFile:{value}",
+ "help": "Specifies a source file to be used as the source of action instead of a database. If this parameter is used, no other source parameter shall be valid."
+ },
+ {
+ "name": "SourceConnectionString",
+ "type": "string",
+ "format": "/SourceConnectionString:{value}",
+ "help": "Specifies a valid [SQL Server/Azure connection string](/dotnet/api/microsoft.data.sqlclient.sqlconnection.connectionstring) to the source database. If this parameter is specified, it shall be used exclusively of all other source parameters."
+ },
+ {
+ "name": "SourceDatabaseName",
+ "type": "string",
+ "format": "/SourceDatabaseName:{value}",
+ "help": "Defines the name of the source database."
+ },
+ {
+ "name": "SourceEncryptConnection",
+ "type": "EncryptionType",
+ "format": "/SourceEncryptConnection:{value}",
+ "help": "Specifies if SQL encryption should be used for the source database connection. Default value is True."
+ },
+ {
+ "name": "SourceHostNameInCertificate",
+ "type": "string",
+ "format": "/SourceHostNameInCertificate:{value}",
+ "help": "Specifies value that is used to validate the source SQL Server TLS/SSL certificate when the communication layer is encrypted by using TLS."
+ },
+ {
+ "name": "SourcePassword",
+ "type": "string",
+ "format": "/SourcePassword:{value}",
+ "help": "For SQL Server Auth scenarios, defines the password to use to access the source database."
+ },
+ {
+ "name": "SourceServerName",
+ "type": "string",
+ "format": "/SourceServerName:{value}",
+ "help": "Defines the name of the server hosting the source database."
+ },
+ {
+ "name": "SourceTimeout",
+ "type": "int",
+ "format": "/SourceTimeout:{value}",
+ "help": "Specifies the timeout for establishing a connection to the source database in seconds."
+ },
+ {
+ "name": "SourceTrustServerCertificate",
+ "type": "bool",
+ "format": "/SourceTrustServerCertificate:{value}",
+ "help": "Specifies whether to use TLS to encrypt the source database connection and bypass walking the certificate chain to validate trust. Default value is False."
+ },
+ {
+ "name": "SourceUser",
+ "type": "string",
+ "format": "/SourceUser:{value}",
+ "help": "For SQL Server Auth scenarios, defines the SQL Server user to use to access the source database."
+ },
+ {
+ "name": "TargetFile",
+ "type": "string",
+ "format": "/TargetFile:{value}",
+ "help": "Specifies a target file (that is, a .dacpac file) to be used as the target of action instead of a database. If this parameter is used, no other target parameter shall be valid. This parameter shall be invalid for actions that only support database targets."
+ },
+ {
+ "name": "TargetConnectionString",
+ "type": "string",
+ "format": "/TargetConnectionString:{value}",
+ "help": "Specifies a valid [SQL Server/Azure connection string](/dotnet/api/microsoft.data.sqlclient.sqlconnection.connectionstring) to the target database. If this parameter is specified, it shall be used exclusively of all other target parameters."
+ },
+ {
+ "name": "TargetDatabaseName",
+ "type": "string",
+ "format": "/TargetDatabaseName:{value}",
+ "help": "Specifies an override for the name of the database that is the target of SqlPackage Action."
+ },
+ {
+ "name": "TargetEncryptConnection",
+ "type": "EncryptionType",
+ "format": "/TargetEncryptConnection:{value}",
+ "help": "Specifies if SQL encryption should be used for the target database connection. Default value is True."
+ },
+ {
+ "name": "TargetHostNameInCertificate",
+ "type": "string",
+ "format": "/TargetHostNameInCertificate:{value}",
+ "help": "Specifies value that is used to validate the target SQL Server TLS/SSL certificate when the communication layer is encrypted by using TLS."
+ },
+ {
+ "name": "TargetPassword",
+ "type": "string",
+ "format": "/TargetPassword:{value}",
+ "help": "For SQL Server Auth scenarios, defines the password to use to access the target database."
+ },
+ {
+ "name": "TargetServerName",
+ "type": "string",
+ "format": "/TargetServerName:{value}",
+ "help": "Defines the name of the server hosting the target database."
+ },
+ {
+ "name": "TargetTimeout",
+ "type": "int",
+ "format": "/TargetTimeout:{value}",
+ "help": "Specifies the timeout for establishing a connection to the target database in seconds. For Azure AD, it is recommended that this value be greater than or equal to 30 seconds."
+ },
+ {
+ "name": "TargetTrustServerCertificate",
+ "type": "bool",
+ "format": "/TargetTrustServerCertificate:{value}",
+ "help": "Specifies whether to use TLS to encrypt the target database connection and bypass walking the certificate chain to validate trust. Default value is False."
+ },
+ {
+ "name": "TargetUser",
+ "type": "string",
+ "format": "/TargetUser:{value}",
+ "help": "For SQL Server Auth scenarios, defines the SQL Server user to use to access the target database."
+ },
+ {
+ "name": "TenantId",
+ "type": "string",
+ "format": "/TenantId:{value}",
+ "help": "Represents the Azure AD tenant ID or domain name. This option is required to support guest or imported Azure AD users as well as Microsoft accounts such as outlook.com, hotmail.com, or live.com. If this parameter is omitted, the default tenant ID for Azure AD will be used, assuming that the authenticated user is a native user for this AD. However, in this case any guest or imported users and/or Microsoft accounts hosted in this Azure AD are not supported and the operation will fail.
For more information about Active Directory Universal Authentication, see [Universal Authentication with SQL Database and Azure Synapse Analytics (SSMS support for MFA)](/azure/sql-database/sql-database-ssms-mfa-authentication)."
+ },
+ {
+ "name": "UniversalAuthentication",
+ "type": "bool",
+ "format": "/UniversalAuthentication:{value}",
+ "help": "Specifies if Universal Authentication should be used. When set to True, the interactive authentication protocol is activated supporting MFA. This option can also be used for Azure AD authentication without MFA, using an interactive protocol requiring the user to enter their username and password or integrated authentication (Windows credentials). When /UniversalAuthentication is set to True, no Azure AD authentication can be specified in SourceConnectionString (/scs). When /UniversalAuthentication is set to False, Azure AD authentication must be specified in SourceConnectionString (/scs).
For more information about Active Directory Universal Authentication, see [Universal Authentication with SQL Database and Azure Synapse Analytics (SSMS support for MFA)](/azure/sql-database/sql-database-ssms-mfa-authentication)."
+ },
+ {
+ "name": "Variables",
+ "type": "Dictionary",
+ "format": "/v:{value}",
+ "itemFormat": "{key}={value}",
+ "help": "Specifies a name value pair for an action-specific variable; {VariableName}={Value}. The DACPAC file contains the list of valid SQLCMD variables. An error results if a value is not provided for every variable."
+ }
+ ]
+ }
+ },
+ {
+ "help": "The SqlPackage DriftReport action creates an XML report of the changes that have been made to the registered database since it was last registered.",
+ "postfix": "DriftReport",
+ "definiteArgument": "/Action:DriftReport",
+ "officialUrl": "https://learn.microsoft.com/en-us/sql/tools/sqlpackage/sqlpackage-deploy-drift-report",
+ "settingsClass": {
+ "properties": [
+ {
+ "name": "AccessToken",
+ "type": "string",
+ "format": "/AccessToken:{value}",
+ "help": "Specifies the token based authentication access token to use when connecting to the target database."
+ },
+ {
+ "name": "Diagnostics",
+ "type": "bool",
+ "format": "/Diagnostics:{value}",
+ "help": "Specifies whether diagnostic logging is output to the console. Defaults to False."
+ },
+ {
+ "name": "DiagnosticsFile",
+ "type": "string",
+ "format": "/DiagnosticsFile:{value}",
+ "help": "Specifies a file to store diagnostic logs."
+ },
+ {
+ "name": "MaxParallelism",
+ "type": "int",
+ "format": "/MaxParallelism:{value}",
+ "help": "Specifies the degree of parallelism for concurrent operations running against a database. The default value is 8."
+ },
+ {
+ "name": "OutputPath",
+ "type": "string",
+ "format": "/OutputPath:{value}",
+ "help": "Specifies the file path where the output files are generated."
+ },
+ {
+ "name": "OverwriteFiles",
+ "type": "bool",
+ "format": "/OverwriteFiles:{value}",
+ "help": "Specifies if SqlPackage should overwrite existing files. Specifying false causes SqlPackage to abort action if an existing file is encountered. Default value is True."
+ },
+ {
+ "name": "Quiet",
+ "type": "bool",
+ "format": "/Quiet:{value}",
+ "help": "Specifies whether detailed feedback is suppressed. Defaults to False."
+ },
+ {
+ "name": "TargetConnectionString",
+ "type": "string",
+ "format": "/TargetConnectionString:{value}",
+ "help": "Specifies a valid SQL Server/Azure connection string to the target database. If this parameter is specified, it shall be used exclusively of all other target parameters."
+ },
+ {
+ "name": "TargetDatabaseName",
+ "type": "string",
+ "format": "/TargetDatabaseName:{value}",
+ "help": "Specifies an override for the name of the database that is the target of SqlPackage Action."
+ },
+ {
+ "name": "TargetEncryptConnection",
+ "type": "EncryptionType",
+ "format": "/TargetEncryptConnection:{value}",
+ "help": "Specifies if SQL encryption should be used for the target database connection. Default value is True."
+ },
+ {
+ "name": "TargetHostNameInCertificate",
+ "type": "string",
+ "format": "/TargetHostNameInCertificate:{value}",
+ "help": "Specifies value that is used to validate the target SQL Server TLS/SSL certificate when the communication layer is encrypted by using TLS."
+ },
+ {
+ "name": "TargetPassword",
+ "type": "string",
+ "format": "/TargetPassword:{value}",
+ "help": "For SQL Server Auth scenarios, defines the password to use to access the target database."
+ }, + { + "name": "TargetServerName", + "type": "string", + "format": "/TargetServerName:{value}", + "help": "Defines the name of the server hosting the target database." + }, + { + "name": "TargetTimeout", + "type": "int", + "format": "/TargetTimeout:{value}", + "help": "Specifies the timeout for establishing a connection to the target database in seconds. For Azure AD, it is recommended that this value be greater than or equal to 30 seconds." + }, + { + "name": "TargetTrustServerCertificate", + "type": "bool", + "format": "/TargetTrustServerCertificate:{value}", + "help": "Specifies whether to use TLS to encrypt the target database connection and bypass walking the certificate chain to validate trust. Default value is False." + }, + { + "name": "TargetUser", + "type": "string", + "format": "/TargetUser:{value}", + "help": "For SQL Server Auth scenarios, defines the SQL Server user to use to access the target database." + }, + { + "name": "TenantId", + "type": "string", + "format": "/TenantId:{value}", + "help": "Represents the Azure AD tenant ID or domain name. This option is required to support guest or imported Azure AD users as well as Microsoft accounts such as outlook.com, hotmail.com, or live.com. If this parameter is omitted, the default tenant ID for Azure AD will be used, assuming that the authenticated user is a native user for this AD. However, in this case any guest or imported users and/or Microsoft accounts hosted in this Azure AD are not supported and the operation will fail.
For more information about Active Directory Universal Authentication, see [Universal Authentication with SQL Database and Azure Synapse Analytics (SSMS support for MFA)](/azure/sql-database/sql-database-ssms-mfa-authentication)." + }, + { + "name": "UniversalAuthentication", + "type": "bool", + "format": "/UniversalAuthentication:{value}", + "help": "Specifies if Universal Authentication should be used. When set to True, the interactive authentication protocol is activated supporting MFA. This option can also be used for Azure AD authentication without MFA, using an interactive protocol requiring the user to enter their username and password or integrated authentication (Windows credentials). When /UniversalAuthentication is set to True, no Azure AD authentication can be specified in SourceConnectionString (/scs). When /UniversalAuthentication is set to False, Azure AD authentication must be specified in SourceConnectionString (/scs).
For more information about Active Directory Universal Authentication, see [Universal Authentication with SQL Database and Azure Synapse Analytics (SSMS support for MFA)](/azure/sql-database/sql-database-ssms-mfa-authentication)."
+ }
+ ]
+ }
+ },
+ {
+ "help": "The SqlPackage Script action creates a Transact-SQL incremental update script that updates the schema of a target database to match the schema of a source database.",
+ "postfix": "Script",
+ "definiteArgument": "/Action:Script",
+ "officialUrl": "https://learn.microsoft.com/en-us/sql/tools/sqlpackage/sqlpackage-script",
+ "settingsClass": {
+ "properties": [
+ {
+ "name": "AccessToken",
+ "type": "string",
+ "format": "/AccessToken:{value}",
+ "help": "Specifies the token based authentication access token to use when connecting to the target database."
+ },
+ {
+ "name": "AzureCloudConfig",
+ "type": "string",
+ "format": "/AzureCloudConfig:{value}",
+ "help": "Specifies the custom endpoints for connecting to Azure Active Directory in the format: AzureActiveDirectoryAuthority={value};DatabaseServicePrincipalName={value}."
+ },
+ {
+ "name": "DeployReportPath",
+ "type": "string",
+ "format": "/DeployReportPath:{value}",
+ "help": "Specifies an optional file path to output the deployment report xml file."
+ },
+ {
+ "name": "DeployScriptPath",
+ "type": "string",
+ "format": "/DeployScriptPath:{value}",
+ "help": "Specifies an optional file path to output the deployment script. For Azure deployments, if there are TSQL commands to create or modify the master database, a script will be written to the same path but with \"Filename_Master.sql\" as the output file name."
+ },
+ {
+ "name": "Diagnostics",
+ "type": "bool",
+ "format": "/Diagnostics:{value}",
+ "help": "Specifies whether diagnostic logging is output to the console. Defaults to False."
+ },
+ {
+ "name": "DiagnosticsFile",
+ "type": "string",
+ "format": "/DiagnosticsFile:{value}",
+ "help": "Specifies a file to store diagnostic logs."
+ },
+ {
+ "name": "MaxParallelism",
+ "type": "int",
+ "format": "/MaxParallelism:{value}",
+ "help": "Specifies the degree of parallelism for concurrent operations running against a database. The default value is 8."
+ },
+ {
+ "name": "ModelFilePath",
+ "type": "string",
+ "format": "/ModelFilePath:{value}",
+ "help": "Specifies the file path to override the model.xml in the source file. Use of this setting may result in deployment failure and/or unintended data loss. This setting is intended only for use when troubleshooting issues with publish, import, or script generation."
+ },
+ {
+ "name": "OutputPath",
+ "type": "string",
+ "format": "/OutputPath:{value}",
+ "help": "Specifies the file path where the output files are generated."
+ },
+ {
+ "name": "OverwriteFiles",
+ "type": "bool",
+ "format": "/OverwriteFiles:{value}",
+ "help": "Specifies if SqlPackage should overwrite existing files. Specifying false causes SqlPackage to abort action if an existing file is encountered. Default value is True."
+ },
+ {
+ "name": "Profile",
+ "type": "string",
+ "format": "/Profile:{value}",
+ "help": "Specifies the file path to a DAC Publish Profile. The profile defines a collection of properties and variables to use when generating outputs."
+ },
+ {
+ "name": "Properties",
+ "type": "Dictionary",
+ "format": "/p:{value}",
+ "itemFormat": "{key}={value}",
+ "help": "Specifies a name value pair for an [action-specific property](#properties-specific-to-the-script-action); {PropertyName}={Value}.",
+ "delegates": [
+ {
+ "name": "AdditionalDeploymentContributorArguments",
+ "type": "List",
+ "separator": ";",
+ "help": "Specifies additional deployment contributor arguments for the deployment contributors. This should be a semi-colon delimited list of values."
+ },
+ {
+ "name": "AdditionalDeploymentContributors",
+ "type": "List",
+ "separator": ";",
+ "help": "Specifies additional deployment contributors, which should run when the dacpac is deployed. This should be a semi-colon delimited list of fully qualified build contributor names or IDs."
+ },
+ {
+ "name": "AdditionalDeploymentContributorPaths",
+ "type": "List",
+ "separator": ";",
+ "help": "Specifies paths to load additional deployment contributors. This should be a semi-colon delimited list of values."
+ },
+ {
+ "name": "AllowDropBlockingAssemblies",
+ "type": "bool",
+ "help": "This property is used by SqlClr deployment to cause any blocking assemblies to be dropped as part of the deployment plan. By default, any blocking/referencing assemblies will block an assembly update if the referencing assembly needs to be dropped."
+ },
+ {
+ "name": "AllowExternalLanguagePaths",
+ "type": "bool",
+ "help": "Allows file paths, if available, to be used to generate external language statements."
+ },
+ {
+ "name": "AllowExternalLibraryPaths",
+ "type": "bool",
+ "help": "Allows file paths, if available, to be used to generate external library statements."
+ },
+ {
+ "name": "AllowIncompatiblePlatform",
+ "type": "bool",
+ "help": "Specifies whether to attempt the action despite incompatible SQL Server platforms."
+ },
+ {
+ "name": "AllowUnsafeRowLevelSecurityDataMovement",
+ "type": "bool",
+ "help": "Do not block data motion on a table that has Row Level Security if this property is set to true. Default is false."
+ },
+ {
+ "name": "BackupDatabaseBeforeChanges",
+ "type": "bool",
+ "help": "Backs up the database before deploying any changes."
+ },
+ {
+ "name": "BlockOnPossibleDataLoss",
+ "type": "bool",
+ "help": "Specifies that the operation will be terminated during the schema validation step if the resulting schema changes could incur a loss of data, including due to data precision reduction or a data type change that requires a cast operation. The default (`True`) value causes the operation to terminate regardless of whether the target database contains data. An execution with a `False` value for BlockOnPossibleDataLoss can still fail during deployment plan execution if data is present on the target that cannot be converted to the new column type."
+ },
+ {
+ "name": "BlockWhenDriftDetected",
+ "type": "bool",
+ "help": "Specifies whether to block updating a database whose schema no longer matches its registration or is unregistered."
+ },
+ {
+ "name": "CommandTimeout",
+ "type": "int",
+ "help": "Specifies the command timeout in seconds when executing queries against SQL Server."
+ },
+ {
+ "name": "CommentOutSetVarDeclarations",
+ "type": "bool",
+ "help": "Specifies whether the declaration of SETVAR variables should be commented out in the generated publish script. You might choose to do this if you plan to specify the values on the command line when you publish by using a tool such as SQLCMD.EXE."
+ },
+ {
+ "name": "CompareUsingTargetCollation",
+ "type": "bool",
+ "help": "This setting dictates how the database's collation is handled during deployment; by default the target database's collation will be updated if it does not match the collation specified by the source. When this option is set, the target database's (or server's) collation should be used."
+ },
+ {
+ "name": "CreateNewDatabase",
+ "type": "bool",
+ "help": "Specifies whether the target database should be updated or whether it should be dropped and re-created when you publish to a database."
+ },
+ {
+ "name": "DatabaseEdition",
+ "type": "DatabaseEdition",
+ "help": "Defines the edition of an Azure SQL Database. See [Azure SQL Database service tiers](/azure/azure-sql/database/service-tiers-general-purpose-business-critical)."
+ },
+ {
+ "name": "DatabaseLockTimeout",
+ "type": "int",
+ "help": "Specifies the database lock timeout in seconds when executing queries against SQL Server. Use -1 to wait indefinitely."
+ },
+ {
+ "name": "DatabaseMaximumSize",
+ "type": "int",
+ "help": "Defines the maximum size in GB of an Azure SQL Database."
+ },
+ {
+ "name": "DatabaseServiceObjective",
+ "type": "string",
+ "help": "Defines the performance level of an Azure SQL Database such as \"P0\" or \"S1\"."
+ },
+ {
+ "name": "DeployDatabaseInSingleUserMode",
+ "type": "bool",
+ "help": "If true, the database is set to Single User Mode before deploying."
+ },
+ {
+ "name": "DisableAndReenableDdlTriggers",
+ "type": "bool",
+ "help": "Specifies whether Data Definition Language (DDL) triggers are disabled at the beginning of the publish process and re-enabled at the end of the publish action."
+ },
+ {
+ "name": "DoNotAlterChangeDataCaptureObjects",
+ "type": "bool",
+ "help": "If true, Change Data Capture objects are not altered."
+ },
+ {
+ "name": "DoNotAlterReplicatedObjects",
+ "type": "bool",
+ "help": "Specifies whether objects that are replicated are identified during verification."
+ },
+ {
+ "name": "DoNotDropObjectType",
+ "type": "string",
+ "help": "An object type that should not be dropped when DropObjectsNotInSource is true. Valid object type names are Aggregates, ApplicationRoles, Assemblies, AssemblyFiles, AsymmetricKeys, BrokerPriorities, Certificates, ColumnEncryptionKeys, ColumnMasterKeys, Contracts, DatabaseOptions, DatabaseRoles, DatabaseTriggers, Defaults, ExtendedProperties, ExternalDataSources, ExternalFileFormats, ExternalTables, Filegroups, Files, FileTables, FullTextCatalogs, FullTextStoplists, MessageTypes, PartitionFunctions, PartitionSchemes, Permissions, Queues, RemoteServiceBindings, RoleMembership, Rules, ScalarValuedFunctions, SearchPropertyLists, SecurityPolicies, Sequences, Services, Signatures, StoredProcedures, SymmetricKeys, Synonyms, Tables, TableValuedFunctions, UserDefinedDataTypes, UserDefinedTableTypes, ClrUserDefinedTypes, Users, Views, XmlSchemaCollections, Audits, Credentials, CryptographicProviders, DatabaseAuditSpecifications, DatabaseEncryptionKeys, DatabaseScopedCredentials, Endpoints, ErrorMessages, EventNotifications, EventSessions, LinkedServerLogins, LinkedServers, Logins, MasterKeys, Routes, ServerAuditSpecifications, ServerRoleMembership, ServerRoles, ServerTriggers, ExternalStreams, ExternalStreamingJobs, DatabaseWorkloadGroups, WorkloadClassifiers, ExternalLibraries, ExternalLanguages. This property may be specified multiple times to indicate multiple options."
+ },
+ {
+ "name": "DoNotDropObjectTypes",
+ "type": "List",
+ "separator": ";",
+ "help": "A semicolon-delimited list of object types that should not be dropped when DropObjectsNotInSource is true. Valid object type names are Aggregates, ApplicationRoles, Assemblies, AssemblyFiles, AsymmetricKeys, BrokerPriorities, Certificates, ColumnEncryptionKeys, ColumnMasterKeys, Contracts, DatabaseOptions, DatabaseRoles, DatabaseTriggers, Defaults, ExtendedProperties, ExternalDataSources, ExternalFileFormats, ExternalTables, Filegroups, Files, FileTables, FullTextCatalogs, FullTextStoplists, MessageTypes, PartitionFunctions, PartitionSchemes, Permissions, Queues, RemoteServiceBindings, RoleMembership, Rules, ScalarValuedFunctions, SearchPropertyLists, SecurityPolicies, Sequences, Services, Signatures, StoredProcedures, SymmetricKeys, Synonyms, Tables, TableValuedFunctions, UserDefinedDataTypes, UserDefinedTableTypes, ClrUserDefinedTypes, Users, Views, XmlSchemaCollections, Audits, Credentials, CryptographicProviders, DatabaseAuditSpecifications, DatabaseEncryptionKeys, DatabaseScopedCredentials, Endpoints, ErrorMessages, EventNotifications, EventSessions, LinkedServerLogins, LinkedServers, Logins, MasterKeys, Routes, ServerAuditSpecifications, ServerRoleMembership, ServerRoles, ServerTriggers, ExternalStreams, ExternalStreamingJobs, DatabaseWorkloadGroups, WorkloadClassifiers, ExternalLibraries, ExternalLanguages."
+ },
+ {
+ "name": "DoNotDropWorkloadClassifiers",
+ "type": "bool",
+ "help": "When false, WorkloadClassifiers in the target database that are not defined in the source will be dropped during deployment."
+ },
+ {
+ "name": "DoNotEvaluateSqlCmdVariables",
+ "type": "bool",
+ "help": "Specifies whether SQLCMD variables should not be replaced with values."
+ },
+ {
+ "name": "DropConstraintsNotInSource",
+ "type": "bool",
+ "help": "Specifies whether constraints that do not exist in the database snapshot (.dacpac) file will be dropped from the target database when you publish to a database."
+ },
+ {
+ "name": "DropDmlTriggersNotInSource",
+ "type": "bool",
+ "help": "Specifies whether DML triggers that do not exist in the database snapshot (.dacpac) file will be dropped from the target database when you publish to a database."
+ },
+ {
+ "name": "DropExtendedPropertiesNotInSource",
+ "type": "bool",
+ "help": "Specifies whether extended properties that do not exist in the database snapshot (.dacpac) file will be dropped from the target database when you publish to a database."
+ },
+ {
+ "name": "DropIndexesNotInSource",
+ "type": "bool",
+ "help": "Specifies whether indexes that do not exist in the database snapshot (.dacpac) file will be dropped from the target database when you publish to a database."
+ },
+ {
+ "name": "DropObjectsNotInSource",
+ "type": "bool",
+ "help": "Specifies whether objects that do not exist in the database snapshot (.dacpac) file will be dropped from the target database when you publish to a database. This value takes precedence over DropExtendedProperties."
+ },
+ {
+ "name": "DropPermissionsNotInSource",
+ "type": "bool",
+ "help": "Specifies whether permissions that do not exist in the database snapshot (.dacpac) file will be dropped from the target database when you publish updates to a database."
+ },
+ {
+ "name": "DropRoleMembersNotInSource",
+ "type": "bool",
+ "help": "Specifies whether role members that are not defined in the database snapshot (.dacpac) file will be dropped from the target database when you publish updates to a database."
+ }, + { + "name": "DropStatisticsNotInSource", + "type": "bool", + "help": "Specifies whether statistics that do not exist in the database snapshot (.dacpac) file will be dropped from the target database when you publish to a database." + }, + { + "name": "EnclaveAttestationProtocol", + "type": "string", + "help": "Specifies an attestation protocol to be used with enclave based Always Encrypted." + }, + { + "name": "EnclaveAttestationUrl", + "type": "string", + "help": "Specifies the enclave attestation URL (an attestation service endpoint) to be used with enclave based Always Encrypted." + }, + { + "name": "ExcludeObjectType", + "type": "string", + "help": "An object type that should be ignored during deployment. Valid object type names are Aggregates, ApplicationRoles, Assemblies, AssemblyFiles, AsymmetricKeys, BrokerPriorities, Certificates, ColumnEncryptionKeys, ColumnMasterKeys, Contracts, DatabaseOptions, DatabaseRoles, DatabaseTriggers, Defaults, ExtendedProperties, ExternalDataSources, ExternalFileFormats, ExternalTables, Filegroups, Files, FileTables, FullTextCatalogs, FullTextStoplists, MessageTypes, PartitionFunctions, PartitionSchemes, Permissions, Queues, RemoteServiceBindings, RoleMembership, Rules, ScalarValuedFunctions, SearchPropertyLists, SecurityPolicies, Sequences, Services, Signatures, StoredProcedures, SymmetricKeys, Synonyms, Tables, TableValuedFunctions, UserDefinedDataTypes, UserDefinedTableTypes, ClrUserDefinedTypes, Users, Views, XmlSchemaCollections, Audits, Credentials, CryptographicProviders, DatabaseAuditSpecifications, DatabaseEncryptionKeys, DatabaseScopedCredentials, Endpoints, ErrorMessages, EventNotifications, EventSessions, LinkedServerLogins, LinkedServers, Logins, MasterKeys, Routes, ServerAuditSpecifications, ServerRoleMembership, ServerRoles, ServerTriggers, ExternalStreams, ExternalStreamingJobs, DatabaseWorkloadGroups, WorkloadClassifiers, ExternalLibraries, ExternalLanguages." + }, + { + "name": "ExcludeObjectTypes", + "type": "List", + "separator": ";", + "help": "A semicolon-delimited list of object types that should be ignored during deployment. Valid object type names are Aggregates, ApplicationRoles, Assemblies, AssemblyFiles, AsymmetricKeys, BrokerPriorities, Certificates, ColumnEncryptionKeys, ColumnMasterKeys, Contracts, DatabaseOptions, DatabaseRoles, DatabaseTriggers, Defaults, ExtendedProperties, ExternalDataSources, ExternalFileFormats, ExternalTables, Filegroups, Files, FileTables, FullTextCatalogs, FullTextStoplists, MessageTypes, PartitionFunctions, PartitionSchemes, Permissions, Queues, RemoteServiceBindings, RoleMembership, Rules, ScalarValuedFunctions, SearchPropertyLists, SecurityPolicies, Sequences, Services, Signatures, StoredProcedures, SymmetricKeys, Synonyms, Tables, TableValuedFunctions, UserDefinedDataTypes, UserDefinedTableTypes, ClrUserDefinedTypes, Users, Views, XmlSchemaCollections, Audits, Credentials, CryptographicProviders, DatabaseAuditSpecifications, DatabaseEncryptionKeys, DatabaseScopedCredentials, Endpoints, ErrorMessages, EventNotifications, EventSessions, LinkedServerLogins, LinkedServers, Logins, MasterKeys, Routes, ServerAuditSpecifications, ServerRoleMembership, ServerRoles, ServerTriggers, ExternalStreams, ExternalStreamingJobs, DatabaseWorkloadGroups, WorkloadClassifiers, ExternalLibraries, ExternalLanguages." 
+ }, + { + "name": "GenerateSmartDefaults", + "type": "bool", + "help": "Automatically provides a default value when updating a table that contains data with a column that does not allow null values." + }, + { + "name": "HashObjectNamesInLogs", + "type": "bool", + "help": "Specifies whether to replace all object names in logs with a random hash value." + }, + { + "name": "IgnoreAnsiNulls", + "type": "bool", + "help": "Specifies whether differences in the ANSI NULLS setting should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreAuthorizer", + "type": "bool", + "help": "Specifies whether differences in the Authorizer should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreColumnCollation", + "type": "bool", + "help": "Specifies whether differences in the column collations should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreColumnOrder", + "type": "bool", + "help": "Specifies whether differences in table column order should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreComments", + "type": "bool", + "help": "Specifies whether differences in the comments should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreCryptographicProviderFilePath", + "type": "bool", + "help": "Specifies whether differences in the file path for the cryptographic provider should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreDatabaseWorkloadGroups", + "type": "bool", + "help": "Specifies whether to exclude workload groups that exist on the target during deployment. No Database Workload Groups will be added, modified, or dropped." + }, + { + "name": "IgnoreDdlTriggerOrder", + "type": "bool", + "help": "Specifies whether differences in the order of Data Definition Language (DDL) triggers should be ignored or updated when you publish to a database or server." + }, + { + "name": "IgnoreDdlTriggerState", + "type": "bool", + "help": "Specifies whether differences in the enabled or disabled state of Data Definition Language (DDL) triggers should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreDefaultSchema", + "type": "bool", + "help": "Specifies whether differences in the default schema should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreDmlTriggerOrder", + "type": "bool", + "help": "Specifies whether differences in the order of Data Manipulation Language (DML) triggers should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreDmlTriggerState", + "type": "bool", + "help": "Specifies whether differences in the enabled or disabled state of DML triggers should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreExtendedProperties", + "type": "bool", + "help": "Specifies whether differences in the extended properties should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreFileAndLogFilePath", + "type": "bool", + "help": "Specifies whether differences in the paths for files and log files should be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreFilegroupPlacement", + "type": "bool", + "help": "Specifies whether differences in the placement of objects in FILEGROUPs should be ignored or updated when you publish to a database." 
+ },
+ {
+ "name": "IgnoreFileSize",
+ "type": "bool",
+ "help": "Specifies whether differences in the file sizes should be ignored or whether a warning should be issued when you publish to a database."
+ },
+ {
+ "name": "IgnoreFillFactor",
+ "type": "bool",
+ "help": "Specifies whether differences in the fill factor for index storage should be ignored or whether a warning should be issued when you publish."
+ },
+ {
+ "name": "IgnoreFullTextCatalogFilePath",
+ "type": "bool",
+ "help": "Specifies whether differences in the file path for the full-text catalog should be ignored or whether a warning should be issued when you publish to a database."
+ },
+ {
+ "name": "IgnoreIdentitySeed",
+ "type": "bool",
+ "help": "Specifies whether differences in the seed for an identity column should be ignored or updated when you publish updates to a database."
+ },
+ {
+ "name": "IgnoreIncrement",
+ "type": "bool",
+ "help": "Specifies whether differences in the increment for an identity column should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreIndexOptions",
+ "type": "bool",
+ "help": "Specifies whether differences in the index options should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreIndexPadding",
+ "type": "bool",
+ "help": "Specifies whether differences in the index padding should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreKeywordCasing",
+ "type": "bool",
+ "help": "Specifies whether differences in the casing of keywords should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreLockHintsOnIndexes",
+ "type": "bool",
+ "help": "Specifies whether differences in the lock hints on indexes should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreLoginSids",
+ "type": "bool",
+ "help": "Specifies whether differences in the security identification number (SID) should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreNotForReplication",
+ "type": "bool",
+ "help": "Specifies whether the not for replication settings should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreObjectPlacementOnPartitionScheme",
+ "type": "bool",
+ "help": "Specifies whether an object's placement on a partition scheme should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnorePartitionSchemes",
+ "type": "bool",
+ "help": "Specifies whether differences in partition schemes and functions should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnorePermissions",
+ "type": "bool",
+ "help": "Specifies whether differences in the permissions should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreQuotedIdentifiers",
+ "type": "bool",
+ "help": "Specifies whether differences in the quoted identifiers setting should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreRoleMembership",
+ "type": "bool",
+ "help": "Specifies whether differences in the role membership of logins should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "IgnoreRouteLifetime",
+ "type": "bool",
+ "help": "Specifies whether differences in the amount of time that SQL Server retains the route in the routing table should be ignored or updated when you publish to a database."
+ }, + { + "name": "IgnoreSemicolonBetweenStatements", + "type": "bool", + "help": "Specifies whether differences in the semi-colons between T-SQL statements will be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreSensitivityClassifications", + "type": "bool", + "help": "Specifies whether data sensitivity classifications on columns should be ignored when comparing schema models. This only works for classifications added with the ADD SENSITIVITY CLASSIFICATION syntax introduced in SQL 2019." + }, + { + "name": "IgnoreTableOptions", + "type": "bool", + "help": "Specifies whether differences in the table options will be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreTablePartitionOptions", + "type": "bool", + "help": "Specifies whether differences in the table partition options will be ignored or updated when you publish to a database. This option applies only to Azure Synapse Analytics data warehouse databases." + }, + { + "name": "IgnoreUserSettingsObjects", + "type": "bool", + "help": "Specifies whether differences in the user settings objects will be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreWhitespace", + "type": "bool", + "help": "Specifies whether differences in white space will be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreWithNocheckOnCheckConstraints", + "type": "bool", + "help": "Specifies whether differences in the value of the WITH NOCHECK clause for check constraints will be ignored or updated when you publish." + }, + { + "name": "IgnoreWithNocheckOnForeignKeys", + "type": "bool", + "help": "Specifies whether differences in the value of the WITH NOCHECK clause for foreign keys will be ignored or updated when you publish to a database." + }, + { + "name": "IgnoreWorkloadClassifiers", + "type": "bool", + "help": "Specifies whether to exclude workload classifiers that exist on the target during deployment." + }, + { + "name": "IncludeCompositeObjects", + "type": "bool", + "help": "Include all composite elements with the same database as part of a single publish operation." + }, + { + "name": "IncludeTransactionalScripts", + "type": "bool", + "help": "Specifies whether transactional statements should be used where possible when you publish to a database." + }, + { + "name": "IsAlwaysEncryptedParameterizationEnabled", + "type": "bool", + "help": "Enables variable parameterization on Always Encrypted columns in pre/post deployment scripts." + }, + { + "name": "LongRunningCommandTimeout", + "type": "int", + "help": "Specifies the long running command timeout in seconds when executing queries against SQL Server. Use 0 to wait indefinitely." + }, + { + "name": "NoAlterStatementsToChangeClrTypes", + "type": "bool", + "help": "Specifies that publish should always drop and re-create an assembly if there is a difference instead of issuing an ALTER ASSEMBLY statement." + }, + { + "name": "PopulateFilesOnFileGroups", + "type": "bool", + "help": "Specifies whether a new file is also created when a new FileGroup is created in the target database." + }, + { + "name": "PreserveIdentityLastValues", + "type": "bool", + "help": "Specifies whether last values for identity columns should be preserved during deployment." + }, + { + "name": "RegisterDataTierApplication", + "type": "bool", + "help": "Specifies whether the schema is registered with the database server." 
+ },
+ {
+ "name": "RestoreSequenceCurrentValue",
+ "type": "bool",
+ "help": "Specifies whether the sequence object's current value should be deployed with the dacpac file. The default value is True."
+ },
+ {
+ "name": "RunDeploymentPlanExecutors",
+ "type": "bool",
+ "help": "Specifies whether DeploymentPlanExecutor contributors should be run when other operations are executed."
+ },
+ {
+ "name": "ScriptDatabaseCollation",
+ "type": "bool",
+ "help": "Specifies whether differences in the database collation should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "ScriptDatabaseCompatibility",
+ "type": "bool",
+ "help": "Specifies whether differences in the database compatibility should be ignored or updated when you publish to a database."
+ },
+ {
+ "name": "ScriptDatabaseOptions",
+ "type": "bool",
+ "help": "Specifies whether target database properties should be set or updated as part of the publish action."
+ },
+ {
+ "name": "ScriptDeployStateChecks",
+ "type": "bool",
+ "help": "Specifies whether statements are generated in the publish script to verify that the database name and server name match the names specified in the database project."
+ },
+ {
+ "name": "ScriptFileSize",
+ "type": "bool",
+ "help": "Controls whether size is specified when adding a file to a filegroup."
+ },
+ {
+ "name": "ScriptNewConstraintValidation",
+ "type": "bool",
+ "help": "At the end of publish all of the constraints will be verified as one set, avoiding data errors caused by a check or foreign key constraint in the middle of publish. If set to False, your constraints are published without checking the corresponding data."
+ },
+ {
+ "name": "ScriptRefreshModule",
+ "type": "bool",
+ "help": "Include refresh statements at the end of the publish script."
+ },
+ {
+ "name": "Storage",
+ "type": "StorageType",
+ "help": "Specifies how elements are stored when building the database model. For performance reasons the default is InMemory. For large databases, File backed storage may be required and is only available for .NET Framework version of SqlPackage."
+ },
+ {
+ "name": "TreatVerificationErrorsAsWarnings",
+ "type": "bool",
+ "help": "Specifies whether errors encountered during publish verification should be treated as warnings. The check is performed against the generated deployment plan before the plan is executed against your target database. Plan verification detects problems such as the loss of target-only objects (such as indexes) that must be dropped to make a change. Verification will also detect situations where dependencies (such as a table or view) exist because of a reference to a composite project, but do not exist in the target database. You might choose to do this to get a complete list of all issues, instead of having the publish action stop on the first error."
+ },
+ {
+ "name": "UnmodifiableObjectWarnings",
+ "type": "bool",
+ "help": "Specifies whether warnings should be generated when differences are found in objects that cannot be modified, for example, if the file size or file paths were different for a file."
+ },
+ {
+ "name": "VerifyCollationCompatibility",
+ "type": "bool",
+ "help": "Specifies whether collation compatibility is verified."
+ },
+ {
+ "name": "VerifyDeployment",
+ "type": "bool",
+ "help": "Specifies whether checks should be performed before publishing that will stop the publish action if issues are present that might block successful publishing. 
For example, your publish action might stop if you have foreign keys on the target database that do not exist in the database project, and that causes errors when you publish." + } + ] + }, + { + "name": "Quiet", + "type": "bool", + "format": "/Quiet:{value}", + "help": "Specifies whether detailed feedback is suppressed. Defaults to False." + }, + { + "name": "ReferencePaths", + "type": "Dictionary", + "format": "/rp:{value}", + "itemFormat": "{key}={value}", + "help": "Specifies the additional directories to search for .dacpac references." + }, + { + "name": "SourceFile", + "type": "string", + "format": "/SourceFile:{value}", + "help": "Specifies a source file to be used as the source of action. If this parameter is used, no other source parameter shall be valid." + }, + { + "name": "SourceConnectionString", + "type": "string", + "format": "/SourceConnectionString:{value}", + "help": "Specifies a valid [SQL Server/Azure connection string](/dotnet/api/microsoft.data.sqlclient.sqlconnection.connectionstring) to the source database. If this parameter is specified, it shall be used exclusively of all other source parameters." + }, + { + "name": "SourceDatabaseName", + "type": "string", + "format": "/SourceDatabaseName:{value}", + "help": "Defines the name of the source database." + }, + { + "name": "SourceEncryptConnection", + "type": "EncryptionType", + "format": "/SourceEncryptConnection:{value}", + "help": "Specifies if SQL encryption should be used for the source database connection. Default value is True." + }, + { + "name": "SourceHostNameInCertificate", + "type": "string", + "format": "/SourceHostNameInCertificate:{value}", + "help": "Specifies value that is used to validate the source SQL Server TLS/SSL certificate when the communication layer is encrypted by using TLS." + }, + { + "name": "SourcePassword", + "type": "string", + "format": "/SourcePassword:{value}", + "help": "For SQL Server Auth scenarios, defines the password to use to access the source database." + }, + { + "name": "SourceServerName", + "type": "string", + "format": "/SourceServerName:{value}", + "help": "Defines the name of the server hosting the source database." + }, + { + "name": "SourceTimeout", + "type": "int", + "format": "/SourceTimeout:{value}", + "help": "Specifies the timeout for establishing a connection to the source database in seconds." + }, + { + "name": "SourceTrustServerCertificate", + "type": "bool", + "format": "/SourceTrustServerCertificate:{value}", + "help": "Specifies whether to use TLS to encrypt the source database connection and bypass walking the certificate chain to validate trust. Default value is False." + }, + { + "name": "SourceUser", + "type": "string", + "format": "/SourceUser:{value}", + "help": "For SQL Server Auth scenarios, defines the SQL Server user to use to access the source database." + }, + { + "name": "TargetFile", + "type": "string", + "format": "/TargetFile:{value}", + "help": "Specifies a target file (that is, a .dacpac file) to be used as the target of action instead of a database. If this parameter is used, no other target parameter shall be valid. This parameter shall be invalid for actions that only support database targets." + }, + { + "name": "TargetConnectionString", + "type": "string", + "format": "/TargetConnectionString:{value}", + "help": "Specifies a valid [SQL Server/Azure connection string](/dotnet/api/microsoft.data.sqlclient.sqlconnection.connectionstring) to the target database. 
If this parameter is specified, it shall be used exclusively of all other target parameters." + }, + { + "name": "TargetDatabaseName", + "type": "string", + "format": "/TargetDatabaseName:{value}", + "help": "Specifies an override for the name of the database that is the target of SqlPackage Action." + }, + { + "name": "TargetEncryptConnection", + "type": "EncryptionType", + "format": "/TargetEncryptConnection:{value}", + "help": "Specifies if SQL encryption should be used for the target database connection. Default value is True." + }, + { + "name": "TargetHostNameInCertificate", + "type": "string", + "format": "/TargetHostNameInCertificate:{value}", + "help": "Specifies value that is used to validate the target SQL Server TLS/SSL certificate when the communication layer is encrypted by using TLS." + }, + { + "name": "TargetPassword", + "type": "string", + "format": "/TargetPassword:{value}", + "help": "For SQL Server Auth scenarios, defines the password to use to access the target database." + }, + { + "name": "TargetServerName", + "type": "string", + "format": "/TargetServerName:{value}", + "help": "Defines the name of the server hosting the target database." + }, + { + "name": "TargetTimeout", + "type": "int", + "format": "/TargetTimeout:{value}", + "help": "Specifies the timeout for establishing a connection to the target database in seconds. For Azure AD, it is recommended that this value be greater than or equal to 30 seconds." + }, + { + "name": "TargetTrustServerCertificate", + "type": "bool", + "format": "/TargetTrustServerCertificate:{value}", + "help": "Specifies whether to use TLS to encrypt the target database connection and bypass walking the certificate chain to validate trust. Default value is False." + }, + { + "name": "TargetUser", + "type": "string", + "format": "/TargetUser:{value}", + "help": "For SQL Server Auth scenarios, defines the SQL Server user to use to access the target database." + }, + { + "name": "TenantId", + "type": "string", + "format": "/TenantId:{value}", + "help": "Represents the Azure AD tenant ID or domain name. This option is required to support guest or imported Azure AD users as well as Microsoft accounts such as outlook.com, hotmail.com, or live.com. If this parameter is omitted, the default tenant ID for Azure AD will be used, assuming that the authenticated user is a native user for this AD. However, in this case any guest or imported users and/or Microsoft accounts hosted in this Azure AD are not supported and the operation will fail.
For more information about Active Directory Universal Authentication, see [Universal Authentication with SQL Database and Azure Synapse Analytics (SSMS support for MFA)](/azure/sql-database/sql-database-ssms-mfa-authentication)." + }, + { + "name": "ThreadMaxStackSize", + "type": "int", + "format": "/ThreadMaxStackSize:{value}", + "help": "Specifies the maximum size in megabytes for the thread running the SqlPackage action. This option should only be used when encountering stack overflow exceptions that occur when parsing very large TSQL statements." + }, + { + "name": "UniversalAuthentication", + "type": "bool", + "format": "/UniversalAuthentication:{value}", + "help": "Specifies if Universal Authentication should be used. When set to True, the interactive authentication protocol is activated supporting MFA. This option can also be used for Azure AD authentication without MFA, using an interactive protocol requiring the user to enter their username and password or integrated authentication (Windows credentials). When /UniversalAuthentication is set to True, no Azure AD authentication can be specified in SourceConnectionString (/scs). When /UniversalAuthentication is set to False, Azure AD authentication must be specified in SourceConnectionString (/scs).
For more information about Active Directory Universal Authentication, see [Universal Authentication with SQL Database and Azure Synapse Analytics (SSMS support for MFA)](/azure/sql-database/sql-database-ssms-mfa-authentication)." + }, + { + "name": "Variables", + "type": "Dictionary", + "format": "/v:{value}", + "itemFormat": "{key}={value}", + "help": "Specifies a name value pair for an action-specific variable;{VariableName}={Value}. The DACPAC file contains the list of valid SQLCMD variables. An error results if a value is not provided for every variable." + } + ] + } + }, + { + "help": "The SqlPackage Import action imports the schema and table data from a BACPAC file (.bacpac) into a new or empty database in SQL Server or Azure SQL Database. At the time of the import operation to an existing database the target database cannot contain any user-defined schema objects. Alternatively, a new database can be created by the import action when the authenticated user has create database permissions.", + "postfix": "Import", + "definiteArgument": "/Action:Import", + "officialUrl": "https://learn.microsoft.com/en-us/sql/tools/sqlpackage/sqlpackage-import", + "settingsClass": { + "properties": [ + { + "name": "AccessToken", + "type": "string", + "format": "/AccessToken:{value}", + "help": "Specifies the token-based authentication access token to use when connect to the target database." + }, + { + "name": "AzureCloudConfig", + "type": "string", + "format": "/AzureCloudConfig:{value}", + "help": "Specifies the custom endpoints for connecting to Azure Active Directory in the format: AzureActiveDirectoryAuthority={value};DatabaseServicePrincipalName={value}\" ." + }, + { + "name": "Diagnostics", + "type": "bool", + "format": "/Diagnostics:{value}", + "help": "Specifies whether diagnostic logging is output to the console. Defaults to False." + }, + { + "name": "DiagnosticsFile", + "type": "string", + "format": "/DiagnosticsFile:{value}", + "help": "Specifies a file to store diagnostic logs." + }, + { + "name": "MaxParallelism", + "type": "int", + "format": "/MaxParallelism:{value}", + "help": "Specifies the degree of parallelism for concurrent operations running against a database. The default value is 8." + }, + { + "name": "ModelFilePath", + "type": "string", + "format": "/ModelFilePath:{value}", + "help": "Specifies the file path to override the model.xml in the source file. Use of this setting may result in deployment failure and/or unintended data loss. This setting is intended only for use when troubleshooting issues with publish, import, or script generation." + }, + { + "name": "Properties", + "type": "Dictionary", + "format": "/p:{value}", + "itemFormat": "{key}={value}", + "help": "Specifies a name value pair for an [action-specific property](#properties-specific-to-the-import-action); {PropertyName}={Value}.", + "delegates": [ + { + "name": "CommandTimeout", + "type": "int", + "help": "Specifies the command timeout in seconds when executing queries against SQL Server." + }, + { + "name": "DatabaseEdition", + "type": "DatabaseEdition", + "help": "Defines the edition of an Azure SQL Database. See [Azure SQL Database service tiers](/azure/azure-sql/database/service-tiers-general-purpose-business-critical)." + }, + { + "name": "DatabaseLockTimeout", + "type": "int", + "help": "Specifies the database lock timeout in seconds when executing queries against SQLServer. Use -1 to wait indefinitely." 
+ }, + { + "name": "DatabaseMaximumSize", + "type": "int", + "help": "Defines the maximum size in GB of an Azure SQL Database." + }, + { + "name": "DatabaseServiceObjective", + "type": "string", + "help": "Defines the performance level of an Azure SQL Database such as \"P0\" or \"S1\"." + }, + { + "name": "DisableIndexesForDataPhase", + "type": "bool", + "help": "When true (default), disables indexes before importing data. When false, indexes are not rebuilt." + }, + { + "name": "DisableParallelismForEnablingIndexes", + "type": "bool", + "help": "Not using parallelism when rebuilding indexes while importing data into SQL Server." + }, + { + "name": "HashObjectNamesInLogs", + "type": "bool", + "help": "Specifies whether to replace all object names in logs with a random hash value." + }, + { + "name": "ImportContributorArguments", + "type": "List", + "separator": ";", + "help": "Specifies deployment contributor arguments for the deployment contributors. This property should be a semi-colon delimited list of values." + }, + { + "name": "ImportContributorPaths", + "type": "List", + "separator": ";", + "help": "Specifies paths to load additional import contributors. This property should be a semi-colon delimited list of values." + }, + { + "name": "ImportContributors", + "type": "List", + "separator": ";", + "help": "Specifies the deployment contributors, which should run when the bacpac is imported. This property should be a semi-colon delimited list of fully qualified build contributor names or IDs." + }, + { + "name": "LongRunningCommandTimeout", + "type": "int", + "help": "Specifies the long running command timeout in seconds when executing queries against SQL Server. Use 0 to wait indefinitely." + }, + { + "name": "PreserveIdentityLastValues", + "type": "bool", + "help": "Specifies whether last values for identity columns should be preserved during deployment." + }, + { + "name": "RebuildIndexesOfflineForDataPhase", + "type": "bool", + "help": "When true, rebuilds indexes offline after importing data into SQL Server." + }, + { + "name": "Storage", + "type": "StorageType", + "help": "Specifies how elements are stored when building the database model. For performance reasons the default is InMemory. For large databases, File backed storage is required." + } + ] + }, + { + "name": "Quiet", + "type": "bool", + "format": "/Quiet:{value}", + "help": "Specifies whether detailed feedback is suppressed. Defaults to False." + }, + { + "name": "SourceFile", + "type": "string", + "format": "/SourceFile:{value}", + "help": "Specifies a source file to be used as the source of action from local storage. If this parameter is used, no other source parameter shall be valid." + }, + { + "name": "TargetConnectionString", + "type": "string", + "format": "/TargetConnectionString:{value}", + "help": "Specifies a valid [SQL Server/Azure connection string](/dotnet/api/microsoft.data.sqlclient.sqlconnection.connectionstring) to the target database. If this parameter is specified, it shall be used exclusively of all other target parameters." + }, + { + "name": "TargetDatabaseName", + "type": "string", + "format": "/TargetDatabaseName:{value}", + "help": "Specifies an override for the name of the database that is the target of SqlPackage Action." + }, + { + "name": "TargetEncryptConnection", + "type": "EncryptionType", + "format": "/TargetEncryptConnection:{value}", + "help": "Specifies if SQL encryption should be used for the target database connection. Default value is True." 
+ }, + { + "name": "TargetHostNameInCertificate", + "type": "string", + "format": "/TargetHostNameInCertificate:{value}", + "help": "Specifies value that is used to validate the target SQL Server TLS/SSL certificate when the communication layer is encrypted by using TLS." + }, + { + "name": "TargetPassword", + "type": "string", + "format": "/TargetPassword:{value}", + "help": "For SQL Server Auth scenarios, defines the password to use to access the target database." + }, + { + "name": "TargetServerName", + "type": "string", + "format": "/TargetServerName:{value}", + "help": "Defines the name of the server hosting the target database." + }, + { + "name": "TargetTimeout", + "type": "int", + "format": "/TargetTimeout:{value}", + "help": "Specifies the timeout for establishing a connection to the target database in seconds. For Azure AD, it is recommended that this value be greater than or equal to 30 seconds." + }, + { + "name": "TargetTrustServerCertificate", + "type": "bool", + "format": "/TargetTrustServerCertificate:{value}", + "help": "Specifies whether to use TLS to encrypt the target database connection and bypass walking the certificate chain to validate trust. Default value is False." + }, + { + "name": "TargetUser", + "type": "string", + "format": "/TargetUser:{value}", + "help": "For SQL Server Auth scenarios, defines the SQL Server user to use to access the target database." + }, + { + "name": "TenantId", + "type": "string", + "format": "/TenantId:{value}", + "help": "Represents the Azure AD tenant ID or domain name. This option is required to support guest or imported Azure AD users as well as Microsoft accounts such as outlook.com, hotmail.com, or live.com. If this parameter is omitted, the default tenant ID for Azure AD will be used, assuming that the authenticated user is a native user for this AD. However, in this case any guest or imported users and/or Microsoft accounts hosted in this Azure AD are not supported and the operation will fail.
For more information about Active Directory Universal Authentication, see [Universal Authentication with SQL Database and Azure Synapse Analytics (SSMS support for MFA)](/azure/sql-database/sql-database-ssms-mfa-authentication)." + }, + { + "name": "ThreadMaxStackSize", + "type": "int", + "format": "/ThreadMaxStackSize:{value}", + "help": "Specifies the maximum size in megabytes for the thread running the SqlPackage action. This option should only be used when encountering stack overflow exceptions that occur when parsing very large TSQL statements." + }, + { + "name": "UniversalAuthentication", + "type": "bool", + "format": "/UniversalAuthentication:{value}", + "help": "Specifies if Universal Authentication should be used. When set to True, the interactive authentication protocol is activated supporting MFA. This option can also be used for Azure AD authentication without MFA, using an interactive protocol requiring the user to enter their username and password or integrated authentication (Windows credentials). When /UniversalAuthentication is set to True, no Azure AD authentication can be specified in SourceConnectionString (/scs). When /UniversalAuthentication is set to False, Azure AD authentication must be specified in SourceConnectionString (/scs).
For more information about Active Directory Universal Authentication, see [Universal Authentication with SQL Database and Azure Synapse Analytics (SSMS support for MFA)](/azure/sql-database/sql-database-ssms-mfa-authentication)." + } + ] + } + }, + { + "help": "The SqlPackage Export action exports a connected database to a BACPAC file (.bacpac). By default, data for all tables will be included in the .bacpac file. Optionally, you can specify only a subset of tables for which to export data. Validation for the Export action ensures Azure SQL Database compatibility for the complete targeted database even if a subset of tables is specified for the export.", + "postfix": "Export", + "definiteArgument": "/Action:Export", + "officialUrl": "https://learn.microsoft.com/en-us/sql/tools/sqlpackage/sqlpackage-export", + "settingsClass": { + "properties": [ + { + "name": "AccessToken", + "type": "string", + "format": "/AccessToken:{value}", + "help": "Specifies the token-based authentication access token to use when connect to the target database." + }, + { + "name": "AzureCloudConfig", + "type": "string", + "format": "/AzureCloudConfig:{value}", + "help": "Specifies the custom endpoints for connecting to Azure Active Directory in the format: AzureActiveDirectoryAuthority={value};DatabaseServicePrincipalName={value}\" ." + }, + { + "name": "Diagnostics", + "type": "bool", + "format": "/Diagnostics:{value}", + "help": "Specifies whether diagnostic logging is output to the console. Defaults to False." + }, + { + "name": "DiagnosticsFile", + "type": "string", + "format": "/DiagnosticsFile:{value}", + "help": "Specifies a file to store diagnostic logs." + }, + { + "name": "MaxParallelism", + "type": "int", + "format": "/MaxParallelism:{value}", + "help": "Specifies the degree of parallelism for concurrent operations running against a database. The default value is 8." + }, + { + "name": "OverwriteFiles", + "type": "bool", + "format": "/OverwriteFiles:{value}", + "help": "Specifies if SqlPackage should overwrite existing files. Specifying false causes SqlPackage to abort action if an existing file is encountered. Default value is True." + }, + { + "name": "Properties", + "type": "Dictionary", + "format": "/p:{value}", + "itemFormat": "{key}={value}", + "help": "Specifies a name value pair for an [action-specific property](#properties-specific-to-the-export-action);{PropertyName}={Value}.", + "delegates": [ + { + "name": "CommandTimeout", + "type": "int", + "help": "Specifies the command timeout in seconds when executing queries against SQL Server." + }, + { + "name": "CompressionOption", + "type": "CompressionOption", + "help": "Specifies the type of compression." + }, + { + "name": "DatabaseLockTimeout", + "type": "int", + "help": "Specifies the database lock timeout in seconds when executing queries against SQLServer. Use -1 to wait indefinitely." + }, + { + "name": "HashObjectNamesInLogs", + "type": "bool", + "help": "Specifies whether to replace all object names in logs with a random hash value." + }, + { + "name": "IgnoreIndexesStatisticsOnEnclaveEnabledColumns", + "type": "bool", + "help": "Specifies whether indexes or statistics on columns encrypted using randomized encryption and enclave-enabled column encryption keys should be ignored (not included in the generated bacpac). By default (false) any index or a statistic on a column encrypted using randomized encryption and an enclave-enabled column encryption key will block the export action." 
+ }, + { + "name": "LongRunningCommandTimeout", + "type": "int", + "help": "Specifies the long running command timeout in seconds when executing queries against SQL Server. Use 0 to wait indefinitely." + }, + { + "name": "Storage", + "type": "StorageType", + "help": "Specifies the type of backing storage for the schema model used during extraction. 'Memory' is default for .NET Core version of SqlPackage. 'File' is only available and default for .NET Framework version of SqlPackage." + }, + { + "name": "TableData", + "type": "string", + "help": "Indicates the table from which data will be extracted. Specify the table name with or without the brackets surrounding the name parts in the following format: schema_name.table_identifier. This property may be specified multiple times to indicate multiple options." + }, + { + "name": "TargetEngineVersion", + "type": "TargetEngineVersion", + "help": "This property is deprecated and use is not recommended. Specifies the version the target engine for Azure SQL Database is expected to be." + }, + { + "name": "TempDirectoryForTableData", + "type": "string", + "help": "Specifies an alternative temporary directory used to buffer table data before being written to the package file. The space required in this location may be large and is relative to the full size of the database." + }, + { + "name": "VerifyExtraction", + "type": "bool", + "help": "Specifies whether the extracted schema model should be verified. If set to true, schema validation rules are run on the dacpac or bacpac." + }, + { + "name": "VerifyFullTextDocumentTypesSupported", + "type": "bool", + "help": "Specifies whether the supported full-text document types for Microsoft Azure SQL Database v12 should be verified." + } + ] + }, + { + "name": "Quiet", + "type": "bool", + "format": "/Quiet:{value}", + "help": "Specifies whether detailed feedback is suppressed. Defaults to False." + }, + { + "name": "SourceConnectionString", + "type": "string", + "format": "/SourceConnectionString:{value}", + "help": "Specifies a valid [SQL Server/Azure connection string](/dotnet/api/microsoft.data.sqlclient.sqlconnection.connectionstring) to the source database. If this parameter is specified, it shall be used exclusively of all other source parameters." + }, + { + "name": "SourceDatabaseName", + "type": "string", + "format": "/SourceDatabaseName:{value}", + "help": "Defines the name of the source database." + }, + { + "name": "SourceEncryptConnection", + "type": "EncryptionType", + "format": "/SourceEncryptConnection:{value}", + "help": "Specifies if SQL encryption should be used for the source database connection. Default value is True." + }, + { + "name": "SourceHostNameInCertificate", + "type": "string", + "format": "/SourceHostNameInCertificate:{value}", + "help": "Specifies value that is used to validate the source SQL Server TLS/SSL certificate when the communication layer is encrypted by using TLS." + }, + { + "name": "SourcePassword", + "type": "string", + "format": "/SourcePassword:{value}", + "help": "For SQL Server Auth scenarios, defines the password to use to access the source database." + }, + { + "name": "SourceServerName", + "type": "string", + "format": "/SourceServerName:{value}", + "help": "Defines the name of the server hosting the source database." + }, + { + "name": "SourceTimeout", + "type": "int", + "format": "/SourceTimeout:{value}", + "help": "Specifies the timeout for establishing a connection to the source database in seconds." 
+ }, + { + "name": "SourceTrustServerCertificate", + "type": "bool", + "format": "/SourceTrustServerCertificate:{value}", + "help": "Specifies whether to use TLS to encrypt the source database connection and bypass walking the certificate chain to validate trust. Default value is False." + }, + { + "name": "SourceUser", + "type": "string", + "format": "/SourceUser:{value}", + "help": "For SQL Server Auth scenarios, defines the SQL Server user to use to access the source database." + }, + { + "name": "TargetFile", + "type": "string", + "format": "/TargetFile:{value}", + "help": "Specifies a target file (that is, a .dacpac file) to be used as the target of action instead of a database. If this parameter is used, no other target parameter shall be valid. This parameter shall be invalid for actions that only support database targets." + }, + { + "name": "TenantId", + "type": "string", + "format": "/TenantId:{value}", + "help": "Represents the Azure AD tenant ID or domain name. This option is required to support guest or imported Azure AD users as well as Microsoft accounts such as outlook.com, hotmail.com, or live.com. If this parameter is omitted, the default tenant ID for Azure AD will be used, assuming that the authenticated user is a native user for this AD. However, in this case any guest or imported users and/or Microsoft accounts hosted in this Azure AD are not supported and the operation will fail.
For more information about Active Directory Universal Authentication, see [Universal Authentication with SQL Database and Azure Synapse Analytics (SSMS support for MFA)](/azure/sql-database/sql-database-ssms-mfa-authentication)." + }, + { + "name": "ThreadMaxStackSize", + "type": "int", + "format": "/ThreadMaxStackSize:{value}", + "help": "Specifies the maximum size in megabytes for the thread running the SqlPackage action. This option should only be used when encountering stack overflow exceptions that occur when parsing very large TSQL statements." + }, + { + "name": "UniversalAuthentication", + "type": "bool", + "format": "/UniversalAuthentication:{value}", + "help": "Specifies if Universal Authentication should be used. When set to True, the interactive authentication protocol is activated supporting MFA. This option can also be used for Azure AD authentication without MFA, using an interactive protocol requiring the user to enter their username and password or integrated authentication (Windows credentials). When /UniversalAuthentication is set to True, no Azure AD authentication can be specified in SourceConnectionString (/scs). When /UniversalAuthentication is set to False, Azure AD authentication must be specified in SourceConnectionString (/scs).
For more information about Active Directory Universal Authentication, see [Universal Authentication with SQL Database and Azure Synapse Analytics (SSMS support for MFA)](/azure/sql-database/sql-database-ssms-mfa-authentication)." + } + ] + } + }, + { + "help": "The SqlPackage Extract action creates a schema of a connected database in a DACPAC file (.dacpac). By default, data is not included in the .dacpac file. To include data, utilize the Export action or use the Extract properties ExtractAllTableData/TableData.", + "postfix": "Extract", + "definiteArgument": "/Action:Extract", + "officialUrl": "https://learn.microsoft.com/en-us/sql/tools/sqlpackage/sqlpackage-extract", + "settingsClass": { + "properties": [ + { + "name": "AccessToken", + "type": "string", + "format": "/AccessToken:{value}", + "help": "Specifies the token based authentication access token to use when connect to the target database." + }, + { + "name": "AzureCloudConfig", + "type": "string", + "format": "/AzureCloudConfig:{value}", + "help": "Specifies the custom endpoints for connecting to Azure Active Directory in the format: AzureActiveDirectoryAuthority={value};DatabaseServicePrincipalName={value}\" ." + }, + { + "name": "Diagnostics", + "type": "bool", + "format": "/Diagnostics:{value}", + "help": "Specifies whether diagnostic logging is output to the console. Defaults to False." + }, + { + "name": "DiagnosticsFile", + "type": "string", + "format": "/DiagnosticsFile:{value}", + "help": "Specifies a file to store diagnostic logs." + }, + { + "name": "MaxParallelism", + "type": "int", + "format": "/MaxParallelism:{value}", + "help": "Specifies the degree of parallelism for concurrent operations running against a database. The default value is 8." + }, + { + "name": "OverwriteFiles", + "type": "bool", + "format": "/OverwriteFiles:{value}", + "help": "Specifies if SqlPackage should overwrite existing files. Specifying false causes SqlPackage to abort action if an existing file is encountered. Default value is True." + }, + { + "name": "Properties", + "type": "Dictionary", + "format": "/p:{value}", + "itemFormat": "{key}={value}", + "help": "Specifies a name value pair for an [action-specific property](#properties-specific-to-the-extract-action); {PropertyName}={Value}.", + "delegates": [ + { + "name": "AzureStorageBlobEndpoint", + "type": "string", + "help": "Azure Blob Storage endpoint. See [SqlPackage for Azure Synapse Analytics](sqlpackage-for-azure-synapse-analytics.md#extract-export-data)." + }, + { + "name": "AzureStorageContainer", + "type": "string", + "help": "Azure Blob Storage container. See [SqlPackage for Azure Synapse Analytics](sqlpackage-for-azure-synapse-analytics.md#extract-export-data)." + }, + { + "name": "AzureStorageKey", + "type": "string", + "help": "Azure storage account key. See [SqlPackage for Azure Synapse Analytics](sqlpackage-for-azure-synapse-analytics.md#extract-export-data)." + }, + { + "name": "AzureStorageRootPath", + "type": "string", + "help": "Storage root path within the container. Without this property, the path defaults to `servername/databasename/timestamp/`. See [SqlPackage for Azure Synapse Analytics](sqlpackage-for-azure-synapse-analytics.md#extract-export-data)." + }, + { + "name": "CommandTimeout", + "type": "int", + "help": "Specifies the command timeout in seconds when executing queries against SQL Server." + }, + { + "name": "CompressionOption", + "type": "CompressionOption", + "help": "Specifies the type of compression." 
+ }, + { + "name": "DacApplicationDescription", + "type": "string", + "help": "Defines the Application description to be stored in the DACPAC metadata." + }, + { + "name": "DacApplicationName", + "type": "string", + "help": "Defined the Application name to be stored in the DACPAC metadata. The default value is the database name." + }, + { + "name": "DacMajorVersion", + "type": "int", + "help": "Defines the major version to be stored in the DACPAC metadata." + }, + { + "name": "DacMinorVersion", + "type": "int", + "help": "Defines the minor version to be stored in the DACPAC metadata." + }, + { + "name": "DatabaseLockTimeout", + "type": "int", + "help": "Specifies the database lock timeout in seconds when executing queries against SQLServer. Use -1 to wait indefinitely." + }, + { + "name": "ExtractAllTableData", + "type": "bool", + "help": "Indicates whether data from all user tables is extracted. If 'true', data from all user tables is extracted, and you cannot specify individual user tables for extracting data. If 'false', specify one or more user tables to extract data from." + }, + { + "name": "ExtractApplicationScopedObjectsOnly", + "type": "bool", + "help": "If true, only extract application-scoped objects for the specified source. If false, extract all objects for the specified source." + }, + { + "name": "ExtractReferencedServerScopedElements", + "type": "bool", + "help": "If true, extract login, server audit, and credential objects referenced by source database objects." + }, + { + "name": "ExtractTarget", + "type": "ExtractTarget", + "help": "Specifies alternative output formats of the database schema, default is 'DacPac' to output a `.dacpac` single file. Additional options output one or more `.sql` files organized by either 'SchemaObjectType' (files in folders for each schema and object type), 'Schema' (files in folders for each schema), 'ObjectType' (files in folders for each object type), 'Flat' (all files in the same folder), or 'File' (1 single file)." + }, + { + "name": "ExtractUsageProperties", + "type": "bool", + "help": "Specifies whether usage properties, such as table row count and index size, will be extracted from the database." + }, + { + "name": "HashObjectNamesInLogs", + "type": "bool", + "help": "Specifies whether to replace all object names in logs with a random hash value." + }, + { + "name": "IgnoreExtendedProperties", + "type": "bool", + "help": "Specifies whether extended properties should be ignored." + }, + { + "name": "IgnorePermissions", + "type": "bool", + "help": "Specifies whether permissions should be ignored." + }, + { + "name": "IgnoreUserLoginMappings", + "type": "bool", + "help": "Specifies whether relationships between users and logins are ignored." + }, + { + "name": "LongRunningCommandTimeout", + "type": "int", + "help": "Specifies the long running command timeout in seconds when executing queries against SQL Server. Use 0 to wait indefinitely." + }, + { + "name": "Storage", + "type": "StorageType", + "help": "Specifies the type of backing storage for the schema model used during extraction. 'Memory' is default for .NET Core version of SqlPackage. 'File' is only available and default for .NET Framework version of SqlPackage." + }, + { + "name": "TableData", + "type": "string", + "help": "Indicates the table from which data will be extracted. Specify the table name with or without the brackets surrounding the name parts in the following format: schema_name.table_identifier. 
This property may be specified multiple times to indicate multiple options." + }, + { + "name": "TempDirectoryForTableData", + "type": "string", + "help": "Specifies the temporary directory used to buffer table data before being written to the package file." + }, + { + "name": "VerifyExtraction", + "type": "bool", + "help": "Specifies whether the extracted schema model should be verified." + } + ] + }, + { + "name": "Quiet", + "type": "bool", + "format": "/Quiet:{value}", + "help": "Specifies whether detailed feedback is suppressed. Defaults to False." + }, + { + "name": "SourceConnectionString", + "type": "string", + "format": "/SourceConnectionString:{value}", + "help": "Specifies a valid [SQL Server/Azure connection string](/dotnet/api/microsoft.data.sqlclient.sqlconnection.connectionstring) to the source database. If this parameter is specified, it shall be used exclusively of all other source parameters." + }, + { + "name": "SourceDatabaseName", + "type": "string", + "format": "/SourceDatabaseName:{value}", + "help": "Defines the name of the source database." + }, + { + "name": "SourceEncryptConnection", + "type": "EncryptionType", + "format": "/SourceEncryptConnection:{value}", + "help": "Specifies if SQL encryption should be used for the source database connection. Default value is True." + }, + { + "name": "SourceHostNameInCertificate", + "type": "string", + "format": "/SourceHostNameInCertificate:{value}", + "help": "Specifies value that is used to validate the source SQL Server TLS/SSL certificate when the communication layer is encrypted by using TLS." + }, + { + "name": "SourcePassword", + "type": "string", + "format": "/SourcePassword:{value}", + "help": "For SQL Server Auth scenarios, defines the password to use to access the source database." + }, + { + "name": "SourceServerName", + "type": "string", + "format": "/SourceServerName:{value}", + "help": "Defines the name of the server hosting the source database." + }, + { + "name": "SourceTimeout", + "type": "int", + "format": "/SourceTimeout:{value}", + "help": "Specifies the timeout for establishing a connection to the source database in seconds." + }, + { + "name": "SourceTrustServerCertificate", + "type": "bool", + "format": "/SourceTrustServerCertificate:{value}", + "help": "Specifies whether to use TLS to encrypt the source database connection and bypass walking the certificate chain to validate trust. Default value is False." + }, + { + "name": "SourceUser", + "type": "string", + "format": "/SourceUser:{value}", + "help": "For SQL Server Auth scenarios, defines the SQL Server user to use to access the source database." + }, + { + "name": "TargetFile", + "type": "string", + "format": "/TargetFile:{value}", + "help": "Specifies a target file (that is, a .dacpac file) in local storage to be used as the target of action instead of a database. If this parameter is used, no other target parameter shall be valid. This parameter shall be invalid for actions that only support database targets." + }, + { + "name": "TenantId", + "type": "string", + "format": "/TenantId:{value}", + "help": "Represents the Azure AD tenant ID or domain name. This option is required to support guest or imported Azure AD users as well as Microsoft accounts such as outlook.com, hotmail.com, or live.com. If this parameter is omitted, the default tenant ID for Azure AD will be used, assuming that the authenticated user is a native user for this AD. 
However, in this case any guest or imported users and/or Microsoft accounts hosted in this Azure AD are not supported and the operation will fail.
For more information about Active Directory Universal Authentication, see [Universal Authentication with SQL Database and Azure Synapse Analytics (SSMS support for MFA)](/azure/sql-database/sql-database-ssms-mfa-authentication)." + }, + { + "name": "ThreadMaxStackSize", + "type": "int", + "format": "/ThreadMaxStackSize:{value}", + "help": "Specifies the maximum size in megabytes for the thread running the SqlPackage action. This option should only be used when encountering stack overflow exceptions that occur when parsing very large TSQL statements." + }, + { + "name": "UniversalAuthentication", + "type": "bool", + "format": "/UniversalAuthentication:{value}", + "help": "Specifies if Universal Authentication should be used. When set to True, the interactive authentication protocol is activated supporting MFA. This option can also be used for Azure AD authentication without MFA, using an interactive protocol requiring the user to enter their username and password or integrated authentication (Windows credentials). When /UniversalAuthentication is set to True, no Azure AD authentication can be specified in SourceConnectionString (/scs). When /UniversalAuthentication is set to False, Azure AD authentication must be specified in SourceConnectionString (/scs).
For more information about Active Directory Universal Authentication, see [Universal Authentication with SQL Database and Azure Synapse Analytics (SSMS support for MFA)](/azure/sql-database/sql-database-ssms-mfa-authentication)." + } + ] + } + } + ], + "enumerations": [ + { + "name": "AzureKeyVaultAuthMethod", + "values": [ + "Interactive", + "ClientIdSecret" + ] + }, + { + "name": "EncryptionType", + "values": [ + "Optional", + "Mandatory", + "Strict", + "True", + "False" + ] + }, + { + "name": "DatabaseEdition", + "values": [ + "Basic", + "Standard", + "Premium", + "Datawarehouse", + "GeneralPurpose", + "BusinessCritical", + "Hyperscale", + "Default" + ] + }, + { + "name": "StorageType", + "values": [ + "File", + "Memory" + ] + }, + { + "name": "CompressionOption", + "values": [ + "Normal", + "Maximum", + "Fast", + "SuperFast", + "NotCompressed" + ] + }, + { + "name": "TargetEngineVersion", + "values": [ + "Default", + "Latest", + "V11", + "V12" + ] + }, + { + "name": "ExtractTarget", + "values": [ + "DacPac", + "File", + "Flat", + "ObjectType", + "Schema", + "SchemaObjectType" + ] + } + ] +} diff --git a/source/Nuke.Common/Tools/SqlPackage/SqlPackage.md b/source/Nuke.Common/Tools/SqlPackage/SqlPackage.md new file mode 100644 index 000000000..b2caefe80 --- /dev/null +++ b/source/Nuke.Common/Tools/SqlPackage/SqlPackage.md @@ -0,0 +1,16 @@ +

+## Using SqlPackage Tool Wrapper
+
+### Overview
+
+[SqlPackage](https://learn.microsoft.com/en-us/sql/tools/sqlpackage/sqlpackage) is a command-line utility that automates several database development tasks (such as extract, publish, export, and import) by exposing some of the public Data-Tier Application Framework (DacFx) APIs.
+
+This package wraps the [SqlPackage global .NET tool](https://learn.microsoft.com/en-us/sql/tools/sqlpackage/sqlpackage-download) NuGet package into NUKE tasks.
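+As a quick illustration, extracting a schema could look like the sketch below. The names assume NUKE's standard code generation for tool specifications (one `SqlPackage{Action}` method plus a settings class per action) and should be checked against the generated `SqlPackageTasks`; see Usage below for installing the underlying package.
+
+```csharp
+using System.Collections.Generic;
+using Nuke.Common.Tooling;
+using Nuke.Common.Tools.SqlPackage;
+
+// Sketch only: SqlPackageExtract and the Set* fluent setters follow NUKE's
+// usual generation conventions and are assumptions, not verified API.
+IReadOnlyCollection<Output> output = SqlPackageTasks.SqlPackageExtract(s => s
+    .SetSourceConnectionString("Server=localhost;Database=AdventureWorks;Integrated Security=true") // sample value
+    .SetTargetFile("AdventureWorks.dacpac"));
+```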

+### Usage
+
+Install the SqlPackage global tool into the NUKE build project:
+
+    nuke :add-package Microsoft.SqlPackage --version <version>
+
+The SqlPackage tasks (`SqlPackageTasks`) resolve the entry-point executable from the Microsoft.SqlPackage package location, as shown in the sketch below.
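+A minimal publish target might then look like this. It is a sketch: the `TargetConnectionString` parameter and the `DacpacFile` path are hypothetical, and the task and setter names assume NUKE's standard code generation for the specification above.
+
+```csharp
+using Nuke.Common;
+using Nuke.Common.IO;
+using Nuke.Common.Tools.SqlPackage;
+
+partial class Build : NukeBuild
+{
+    // Hypothetical secret parameter holding the target connection string.
+    [Parameter] [Secret] readonly string TargetConnectionString;
+
+    // Hypothetical location of a previously built .dacpac artifact.
+    AbsolutePath DacpacFile => RootDirectory / "artifacts" / "Database.dacpac";
+
+    Target PublishDatabase => _ => _
+        .Executes(() =>
+        {
+            // Deploys the .dacpac to the target database (/Action:Publish).
+            SqlPackageTasks.SqlPackagePublish(s => s
+                .SetSourceFile(DacpacFile)
+                .SetTargetConnectionString(TargetConnectionString));
+        });
+}
+```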

diff --git a/source/Nuke.Common/Tools/SqlPackage/SqlPackageTasks.cs b/source/Nuke.Common/Tools/SqlPackage/SqlPackageTasks.cs new file mode 100644 index 000000000..9bf4899e7 --- /dev/null +++ b/source/Nuke.Common/Tools/SqlPackage/SqlPackageTasks.cs @@ -0,0 +1,12 @@ +using Nuke.Common.Tooling; + +namespace Nuke.Common.Tools.SqlPackage; + +public partial class SqlPackageTasks +{ + internal static string GetToolPath() + { + return NuGetToolPathResolver.GetPackageExecutable(packageId: "Microsoft.SqlPackage", packageExecutable: "sqlpackage.dll|sqlpackage.exe"); + } +} + diff --git a/source/Nuke.Common/Tools/Teams/TeamsTasks.cs b/source/Nuke.Common/Tools/Teams/TeamsTasks.cs index a1821f028..4c029e372 100644 --- a/source/Nuke.Common/Tools/Teams/TeamsTasks.cs +++ b/source/Nuke.Common/Tools/Teams/TeamsTasks.cs @@ -7,7 +7,6 @@ using System.Net.Http; using System.Threading.Tasks; using JetBrains.Annotations; -using Newtonsoft.Json; using Nuke.Common.Tooling; using Nuke.Common.Utilities.Net; @@ -24,12 +23,11 @@ public static void SendTeamsMessage(Configure<TeamsMessage> configurator, string public static async Task SendTeamsMessageAsync(Configure<TeamsMessage> configurator, string webhook) { var message = configurator(new TeamsMessage()); - var messageJson = JsonConvert.SerializeObject(message); using var client = new HttpClient(); var response = await client.CreateRequest(HttpMethod.Post, webhook) - .WithJsonContent(messageJson) + .WithJsonContent(message) .GetResponseAsync(); var responseText = await response.GetBodyAsync(); diff --git a/source/Nuke.Tooling.Tests/ArgumentStringHandlerTest.cs b/source/Nuke.Tooling.Tests/ArgumentStringHandlerTest.cs index f30a9ee54..cbcd0e3c0 100644 --- a/source/Nuke.Tooling.Tests/ArgumentStringHandlerTest.cs +++ b/source/Nuke.Tooling.Tests/ArgumentStringHandlerTest.cs @@ -7,6 +7,8 @@ using Nuke.Common.Tooling; using Xunit; +// ReSharper disable StringLiteralAsInterpolationArgument + namespace Nuke.Common.Tests; public class ArgumentStringHandlerTest @@ -80,5 +82,5 @@ public void TestSecret() filteredOutput.Should().Be("There is a [REDACTED]!"); } - string ArgsToString(ArgumentStringHandler args) => args.ToStringAndClear(); + private string ArgsToString(ArgumentStringHandler args) => args.ToStringAndClear(); } diff --git a/source/Nuke.Tooling/ProcessTasks.cs b/source/Nuke.Tooling/ProcessTasks.cs index 2b98b52b5..5a184e6f5 100644 --- a/source/Nuke.Tooling/ProcessTasks.cs +++ b/source/Nuke.Tooling/ProcessTasks.cs @@ -211,9 +211,8 @@ private static BlockingCollection<Output> GetOutputCollection( if (e.Data == null) return; - output.Add(new Output { Text = e.Data, Type = OutputType.Std }); - var filteredOutput = outputFilter(e.Data); + output.Add(new Output { Text = filteredOutput, Type = OutputType.Std }); logger?.Invoke(OutputType.Std, filteredOutput); }; process.ErrorDataReceived += (_, e) => { if (e.Data == null) return; - output.Add(new Output { Text = e.Data, Type = OutputType.Err }); - var filteredOutput = outputFilter(e.Data); + output.Add(new Output { Text = filteredOutput, Type = OutputType.Err }); logger?.Invoke(OutputType.Err, filteredOutput); };