From 9780ffe0982c1aa8df6dba8391a66b429f819fa3 Mon Sep 17 00:00:00 2001 From: Vladyslav Nikonov Date: Tue, 24 Mar 2026 17:08:23 +0200 Subject: [PATCH] feat(agent): agent self update and scheduled updates --------- Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> Co-authored-by: vnikonov-devolutions <246051166+vnikonov-devolutions@users.noreply.github.com> bugfixes Doc fixes --- .github/workflows/ci.yml | 5 + .github/workflows/package.yml | 6 +- Cargo.lock | 9 + Cargo.toml | 2 + ci/package-agent-windows.ps1 | 19 +- ci/tlk.ps1 | 7 +- crates/devolutions-agent-shared/Cargo.toml | 1 + crates/devolutions-agent-shared/src/lib.rs | 23 +- .../src/update_json.rs | 75 -- .../src/update_manifest.rs | 451 ++++++++++ .../src/update_status.rs | 188 ++++ devolutions-agent-updater/Cargo.toml | 15 + devolutions-agent-updater/src/main.rs | 247 +++++ devolutions-agent/Cargo.toml | 5 + devolutions-agent/src/config.rs | 92 +- devolutions-agent/src/updater/detect.rs | 7 +- devolutions-agent/src/updater/error.rs | 6 + devolutions-agent/src/updater/mod.rs | 846 +++++++++++++++++- devolutions-agent/src/updater/package.rs | 178 +++- devolutions-agent/src/updater/product.rs | 22 +- .../src/updater/product_actions.rs | 34 + .../src/updater/productinfo/mod.rs | 2 + devolutions-agent/src/updater/security.rs | 14 + devolutions-gateway/openapi/gateway-api.yaml | 248 ++++- devolutions-gateway/src/api/mod.rs | 9 +- devolutions-gateway/src/api/update.rs | 541 ++++++++++- devolutions-gateway/src/extract.rs | 20 + devolutions-gateway/src/openapi.rs | 14 +- devolutions-gateway/src/token.rs | 2 + .../Actions/AgentActions.cs | 3 +- package/AgentWindowsManaged/Program.cs | 13 +- 31 files changed, 2908 insertions(+), 196 deletions(-) delete mode 100644 crates/devolutions-agent-shared/src/update_json.rs create mode 100644 crates/devolutions-agent-shared/src/update_manifest.rs create mode 100644 crates/devolutions-agent-shared/src/update_status.rs create mode 100644 
devolutions-agent-updater/Cargo.toml create mode 100644 devolutions-agent-updater/src/main.rs diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 10efc2bf3..433d927eb 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -806,6 +806,9 @@ jobs: $DAgentSessionExecutable = Join-Path $TargetOutputPath "DevolutionsSession.exe" echo "dagent-session-executable=$DAgentSessionExecutable" >> $Env:GITHUB_OUTPUT + + $DAgentUpdaterExecutable = Join-Path $TargetOutputPath "DevolutionsAgentUpdater.exe" + echo "dagent-updater-executable=$DAgentUpdaterExecutable" >> $Env:GITHUB_OUTPUT } $DAgentExecutable = Join-Path $TargetOutputPath $ExecutableFileName @@ -901,6 +904,7 @@ jobs: - name: Build run: | if ($Env:RUNNER_OS -eq "Windows") { + $Env:DAGENT_UPDATER_EXECUTABLE = "${{ steps.load-variables.outputs.dagent-updater-executable }}" $Env:DAGENT_PEDM_SHELL_EXT_DLL = "${{ steps.load-variables.outputs.dagent-pedm-shell-ext-dll }}" $Env:DAGENT_PEDM_SHELL_EXT_MSIX = "${{ steps.load-variables.outputs.dagent-pedm-shell-ext-msix }}" $Env:DAGENT_SESSION_EXECUTABLE = "${{ steps.load-variables.outputs.dagent-session-executable }}" @@ -933,6 +937,7 @@ jobs: $Env:DAGENT_PACKAGE = "${{ steps.load-variables.outputs.dagent-package }}" $Env:DAGENT_DESKTOP_AGENT_PATH = $DesktopStagingPath + $Env:DAGENT_UPDATER_EXECUTABLE = "${{ steps.load-variables.outputs.dagent-updater-executable }}" $Env:DAGENT_PEDM_SHELL_EXT_DLL = "${{ steps.load-variables.outputs.dagent-pedm-shell-ext-dll }}" $Env:DAGENT_PEDM_SHELL_EXT_MSIX = "${{ steps.load-variables.outputs.dagent-pedm-shell-ext-msix }}" $Env:DAGENT_SESSION_EXECUTABLE = "${{ steps.load-variables.outputs.dagent-session-executable }}" diff --git a/.github/workflows/package.yml b/.github/workflows/package.yml index bb8c12d47..f235ddce2 100644 --- a/.github/workflows/package.yml +++ b/.github/workflows/package.yml @@ -319,7 +319,7 @@ jobs: run: | $IncludePattern = @(switch ('${{ matrix.project }}') { 'devolutions-gateway' { 
@('DevolutionsGateway_*.exe') } - 'devolutions-agent' { @('DevolutionsAgent_*.exe', 'DevolutionsPedmShellExt.dll', 'DevolutionsPedmShellExt.msix', 'DevolutionsDesktopAgent.exe') } + 'devolutions-agent' { @('DevolutionsAgent_*.exe', 'DevolutionsAgentUpdater.exe', 'DevolutionsPedmShellExt.dll', 'DevolutionsPedmShellExt.msix', 'DevolutionsDesktopAgent.exe') } 'jetsocat' { @('jetsocat_*') } }) $ExcludePattern = "*.pdb" @@ -473,7 +473,8 @@ jobs: run: | $PackageRoot = Join-Path ${{ runner.temp }} ${{ matrix.project}} - $Env:DAGENT_EXECUTABLE = Get-ChildItem -Path $PackageRoot -Recurse -Include '*DevolutionsAgent*.exe' | Select -First 1 + $Env:DAGENT_EXECUTABLE = Get-ChildItem -Path $PackageRoot -Recurse -Include '*DevolutionsAgent_*.exe' | Select -First 1 + $Env:DAGENT_UPDATER_EXECUTABLE = Get-ChildItem -Path $PackageRoot -Recurse -Include 'DevolutionsAgentUpdater.exe' | Select -First 1 $Env:DAGENT_DESKTOP_AGENT_PATH = Resolve-Path -Path "devolutions-pedm-desktop" $Env:DAGENT_PEDM_SHELL_EXT_DLL = Get-ChildItem -Path $PackageRoot -Recurse -Include 'DevolutionsPedmShellExt.dll' | Select -First 1 $Env:DAGENT_PEDM_SHELL_EXT_MSIX = Get-ChildItem -Path $PackageRoot -Recurse -Include 'DevolutionsPedmShellExt.msix' | Select -First 1 @@ -482,6 +483,7 @@ jobs: $Env:DAGENT_WINTUN_DLL = Get-ChildItem -Path $PackageRoot -Recurse -Include 'wintun.dll' | Select -First 1 Write-Host "DAGENT_EXECUTABLE = ${Env:DAGENT_EXECUTABLE}" + Write-Host "DAGENT_UPDATER_EXECUTABLE = ${Env:DAGENT_UPDATER_EXECUTABLE}" Write-Host "DAGENT_DESKTOP_AGENT_PATH = ${Env:DAGENT_DESKTOP_AGENT_PATH}" Write-Host "DAGENT_PEDM_SHELL_EXT_DLL = ${Env:DAGENT_PEDM_SHELL_EXT_DLL}" Write-Host "DAGENT_PEDM_SHELL_EXT_MSIX = ${Env:DAGENT_PEDM_SHELL_EXT_MSIX}" diff --git a/Cargo.lock b/Cargo.lock index 46b826db1..aef2d39b8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1465,6 +1465,7 @@ dependencies = [ "sha2 0.10.9", "tap", "thiserror 2.0.18", + "time", "tokio 1.49.0", "tokio-rustls", "tracing", @@ -1488,6 +1489,14 @@ 
dependencies = [ "windows-result 0.3.4", ] +[[package]] +name = "devolutions-agent-updater" +version = "2026.1.1" +dependencies = [ + "camino", + "devolutions-agent", +] + [[package]] name = "devolutions-gateway" version = "2026.1.1" diff --git a/Cargo.toml b/Cargo.toml index ab13284f2..8278867bd 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,6 +3,7 @@ resolver = "2" members = [ "crates/*", "devolutions-agent", + "devolutions-agent-updater", "devolutions-gateway", "devolutions-session", "jetsocat", @@ -11,6 +12,7 @@ members = [ ] default-members = [ "devolutions-agent", + "devolutions-agent-updater", "devolutions-gateway", "devolutions-session", "jetsocat", diff --git a/ci/package-agent-windows.ps1 b/ci/package-agent-windows.ps1 index 8931f3dd5..4a37cc512 100644 --- a/ci/package-agent-windows.ps1 +++ b/ci/package-agent-windows.ps1 @@ -3,6 +3,8 @@ param( [parameter(Mandatory = $true)] [string] $Exe, [parameter(Mandatory = $true)] + [string] $UpdaterExe, + [parameter(Mandatory = $true)] [string] $PedmDll, [parameter(Mandatory = $true)] [string] $PedmMsix, @@ -31,7 +33,7 @@ function Set-FileNameAndCopy { [string]$Path, [string]$NewName ) - + if (-Not (Test-Path $Path)) { throw "File not found: $Path" } @@ -84,6 +86,9 @@ function New-AgentMsi() { # The path to the devolutions-agent.exe file. [string] $Exe, [parameter(Mandatory = $true)] + # The path to the devolutions-agent-updater.exe file. + [string] $UpdaterExe, + [parameter(Mandatory = $true)] # The path to the devolutions_pedm_shell_ext.dll file. [string] $PedmDll, [parameter(Mandatory = $true)] @@ -111,6 +116,7 @@ function New-AgentMsi() { # Convert slashes. This does not affect function. It's just for display. 
$Exe = Convert-Path -Path $Exe + $UpdaterExe = Convert-Path -Path $UpdaterExe $PedmDll = Convert-Path -Path $PedmDll $PedmMsix = Convert-Path -Path $PedmMsix $SessionExe = Convert-Path -Path $SessionExe @@ -127,20 +133,23 @@ function New-AgentMsi() { # These file names don't matter for building, but we will clean them up anyways for consistency. The names can be seen if inspecting the MSI. # The Agent exe will get copied to `C:\Program Files\Devolutions\Agent\DevolutionsAgent.exe` after install. $myExe = Set-FileNameAndCopy -Path $Exe -NewName 'DevolutionsAgent.exe' + # The updater shim is a detached helper for installing MSI updates. + $myUpdaterExe = Set-FileNameAndCopy -Path $UpdaterExe -NewName 'DevolutionsAgentUpdater.exe' # The session is a service that gets launched on demand. $mySessionExe = Set-FileNameAndCopy -Path $SessionExe -NewName 'DevolutionsSession.exe' Write-Output "$repoDir\dotnet\DesktopAgent\bin\Release\net48\DevolutionsDesktopAgent.exe" Set-EnvVarPath 'DAGENT_EXECUTABLE' $myExe + Set-EnvVarPath 'DAGENT_UPDATER_EXECUTABLE' $myUpdaterExe Set-EnvVarPath 'DAGENT_PEDM_SHELL_EXT_DLL' $myPedmDll Set-EnvVarPath 'DAGENT_PEDM_SHELL_EXT_MSIX' $myPedmMsix Set-EnvVarPath 'DAGENT_SESSION_EXECUTABLE' $mySessionExe # The actual DevolutionsDesktopAgent.exe will be `\dotnet\DesktopAgent\bin\Release\net48\DevolutionsDesktopAgent.exe`. - # After install, the contsnts of `net48` will be copied to `C:\Program Files\Devolutions\Agent\desktop\`. + # After install, the contents of `net48` will be copied to `C:\Program Files\Devolutions\Agent\desktop\`. Set-EnvVarPath 'DAGENT_DESKTOP_AGENT_PATH' "$repoDir\dotnet\DesktopAgent\bin\Release\net48" - + $version = Get-Version Push-Location @@ -152,7 +161,7 @@ function New-AgentMsi() { if ($Generate) { # This is used by `package/WindowsManaged/Program.cs`. 
$Env:DAGENT_MSI_SOURCE_ONLY_BUILD = '1' - + foreach ($lang in Get-PackageLanguages) { $Env:DAGENT_MSI_LANG_ID = $lang.Name & 'MSBuild.exe' 'DevolutionsAgent.sln' '/t:restore,build' '/p:Configuration=Release' | Out-Host @@ -175,4 +184,4 @@ function New-AgentMsi() { Pop-Location } -New-AgentMsi -Generate:($Generate.IsPresent) -Exe $Exe -PedmDll $PedmDll -PedmMsix $PedmMsix -SessionExe $SessionExe -Architecture $Architecture -Outfile $Outfile +New-AgentMsi -Generate:($Generate.IsPresent) -Exe $Exe -UpdaterExe $UpdaterExe -PedmDll $PedmDll -PedmMsix $PedmMsix -SessionExe $SessionExe -Architecture $Architecture -Outfile $Outfile diff --git a/ci/tlk.ps1 b/ci/tlk.ps1 index 40b5ad392..cf0fc0aef 100755 --- a/ci/tlk.ps1 +++ b/ci/tlk.ps1 @@ -303,6 +303,7 @@ class TlkRecipe $agentPackages = @([TlkPackage]::new("devolutions-agent", "devolutions-agent", $false)) if ($this.Target.IsWindows()) { + $agentPackages += [TlkPackage]::new("devolutions-agent-updater", "devolutions-agent-updater", $false) $agentPackages += [TlkPackage]::new("devolutions-pedm-shell-ext", "crates/devolutions-pedm-shell-ext", $true) $agentPackages += [TlkPackage]::new("devolutions-session", "devolutions-session", $false) } @@ -387,6 +388,8 @@ class TlkRecipe "agent" { if ($CargoPackage.Name -Eq "devolutions-agent" -And (Test-Path Env:DAGENT_EXECUTABLE)) { $Env:DAGENT_EXECUTABLE + } elseif ($CargoPackage.Name -Eq "devolutions-agent-updater" -And (Test-Path Env:DAGENT_UPDATER_EXECUTABLE)) { + $Env:DAGENT_UPDATER_EXECUTABLE } elseif ($CargoPackage.Name -Eq "devolutions-pedm-shell-ext" -And (Test-Path Env:DAGENT_PEDM_SHELL_EXT_DLL)) { $Env:DAGENT_PEDM_SHELL_EXT_DLL } elseif ($CargoPackage.Name -Eq "devolutions-session" -And (Test-Path Env:DAGENT_SESSION_EXECUTABLE)) { @@ -760,7 +763,7 @@ class TlkRecipe } $DebUpstreamChangelogFile = Join-Path $OutputPath "changelog_deb_upstream" - + Merge-Tokens -TemplateFile $RulesTemplate -Tokens @{ dh_shlibdeps = $DhShLibDepsOverride upstream_changelog = 
$DebUpstreamChangelogFile @@ -799,7 +802,7 @@ class TlkRecipe # input for debian/changelog is the package-specific CHANGELOG.md $PackagingChangelogFile = Join-Path $InputPackagePath "CHANGELOG.md" - + $s = New-Changelog ` -Format 'Deb' ` -InputFile $UpstreamChangelogFile ` diff --git a/crates/devolutions-agent-shared/Cargo.toml b/crates/devolutions-agent-shared/Cargo.toml index 3f29bc996..f51135bf1 100644 --- a/crates/devolutions-agent-shared/Cargo.toml +++ b/crates/devolutions-agent-shared/Cargo.toml @@ -13,6 +13,7 @@ workspace = true camino = "1.1" cfg-if = "1" serde = { version = "1", features = ["derive"] } +serde_json = "1" thiserror = "2" [target.'cfg(windows)'.dependencies] diff --git a/crates/devolutions-agent-shared/src/lib.rs b/crates/devolutions-agent-shared/src/lib.rs index 149c45e25..23d155057 100644 --- a/crates/devolutions-agent-shared/src/lib.rs +++ b/crates/devolutions-agent-shared/src/lib.rs @@ -1,21 +1,21 @@ -#[macro_use] -extern crate serde; - #[cfg(windows)] pub mod windows; mod date_version; -mod update_json; +mod update_manifest; +mod update_status; use std::env; use camino::Utf8PathBuf; use cfg_if::cfg_if; - -#[rustfmt::skip] pub use date_version::{DateVersion, DateVersionError}; -#[rustfmt::skip] -pub use update_json::{ProductUpdateInfo, UpdateJson, VersionSpecification}; +pub use update_manifest::{ + InstalledProductUpdateInfo, ProductUpdateInfo, UPDATE_MANIFEST_V2_MINOR_VERSION, UpdateManifest, UpdateManifestV1, + UpdateManifestV2, UpdateProductKey, UpdateSchedule, VersionMajorV2, VersionSpecification, + default_schedule_window_start, detect_update_manifest_major_version, +}; +pub use update_status::{UpdateStatus, UpdateStatusV2}; cfg_if! { if #[cfg(target_os = "windows")] { @@ -77,7 +77,12 @@ pub fn get_data_dir() -> Utf8PathBuf { } } -/// Returns the path to the `update.json` file +/// Returns the path to the `update.json` file. 
pub fn get_updater_file_path() -> Utf8PathBuf { get_data_dir().join("update.json") } + +/// Returns the path to the `update_status.json` file. +pub fn get_update_status_file_path() -> Utf8PathBuf { + get_data_dir().join("update_status.json") +} diff --git a/crates/devolutions-agent-shared/src/update_json.rs b/crates/devolutions-agent-shared/src/update_json.rs deleted file mode 100644 index e8366fe54..000000000 --- a/crates/devolutions-agent-shared/src/update_json.rs +++ /dev/null @@ -1,75 +0,0 @@ -use std::fmt; - -use crate::DateVersion; - -/// Example JSON structure: -/// -/// ```json -/// { -/// "Gateway": { -/// "TargetVersion": "1.2.3.4" -/// }, -/// "HubService": { -/// "TargetVersion": "latest" -/// } -/// } -/// ``` -/// -#[derive(Debug, Default, Deserialize, Serialize)] -#[serde(rename_all = "PascalCase")] -pub struct UpdateJson { - #[serde(skip_serializing_if = "Option::is_none")] - pub gateway: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub hub_service: Option, -} - -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] -#[serde(rename_all = "snake_case")] -pub enum VersionSpecification { - Latest, - #[serde(untagged)] - Specific(DateVersion), -} - -impl fmt::Display for VersionSpecification { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - VersionSpecification::Latest => write!(f, "latest"), - VersionSpecification::Specific(version) => write!(f, "{version}"), - } - } -} - -#[derive(Debug, Clone, Deserialize, Serialize)] -#[serde(rename_all = "PascalCase")] -pub struct ProductUpdateInfo { - /// The version of the product to update to. 
- pub target_version: VersionSpecification, -} - -#[cfg(test)] -mod tests { - #![allow(clippy::unwrap_used, reason = "test code can panic on errors")] - - use super::*; - - #[test] - fn version_specification_roundtrip() { - let cases: &[(&'static str, VersionSpecification)] = &[ - ( - "2022.2.24.0", - VersionSpecification::Specific("2022.2.24.0".parse().unwrap()), - ), - ("latest", VersionSpecification::Latest), - ]; - - for (serialized, deserialized) in cases { - let parsed = serde_json::from_str::(&format!("\"{serialized}\"")).unwrap(); - assert_eq!(parsed, *deserialized); - - let reserialized = serde_json::to_string(&parsed).unwrap(); - assert_eq!(reserialized, format!("\"{serialized}\"")); - } - } -} diff --git a/crates/devolutions-agent-shared/src/update_manifest.rs b/crates/devolutions-agent-shared/src/update_manifest.rs new file mode 100644 index 000000000..1ab43dfc4 --- /dev/null +++ b/crates/devolutions-agent-shared/src/update_manifest.rs @@ -0,0 +1,451 @@ +use std::collections::HashMap; +use std::fmt; + +use serde::{Deserialize, Deserializer, Serialize, Serializer}; + +use crate::DateVersion; + +/// Old gateway-written manifest format (v2026.1.0 and prior), supported for backward compatibility. 
+/// +/// Example V1 JSON structure: +/// +/// ```json +/// { +/// "Gateway": { "Version": "1.2.3.4" }, +/// "HubService": { "Version": "latest" } +/// } +/// ``` +#[derive(Debug, Default, Deserialize, Serialize)] +#[serde(rename_all = "PascalCase")] +pub struct UpdateManifestV1 { + #[serde(skip_serializing_if = "Option::is_none")] + pub gateway: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub hub_service: Option, +} + +// ── Shared value types ──────────────────────────────────────────────────────── + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum VersionSpecification { + Latest, + #[serde(untagged)] + Specific(DateVersion), +} + +impl fmt::Display for VersionSpecification { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + VersionSpecification::Latest => write!(f, "latest"), + VersionSpecification::Specific(version) => write!(f, "{version}"), + } + } +} + +impl std::str::FromStr for VersionSpecification { + type Err = crate::DateVersionError; + + fn from_str(s: &str) -> Result { + if s.eq_ignore_ascii_case("latest") { + Ok(Self::Latest) + } else { + Ok(Self::Specific(s.parse()?)) + } + } +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct ProductUpdateInfo { + /// The version of the product to update to. + #[serde(rename = "Version")] + pub target_version: VersionSpecification, +} + +/// Currently installed version of a product, as reported in `update_status.json`. +/// +/// Distinguished from [`ProductUpdateInfo`] (which carries a *requested* target version +/// for `update.json`) so that the JSON key name clearly reflects what the field means: +/// `"Version"` is the version that is on disk right now. +#[derive(Debug, Clone, Deserialize, Serialize)] +#[serde(rename_all = "PascalCase")] +pub struct InstalledProductUpdateInfo { + /// Currently installed version of the product. 
+ pub version: VersionSpecification, +} + +/// Minor version of the V2 manifest format written by the current build of the agent. +/// +/// Increment this value when adding new fields to [`UpdateManifestV2`] or making other +/// backwards-compatible changes that the gateway should be aware of. +pub const UPDATE_MANIFEST_V2_MINOR_VERSION: u32 = 1; + +pub fn default_schedule_window_start() -> u32 { + 7_200 +} + +/// Auto-update schedule for the Devolutions Agent, embedded in [`UpdateManifestV2`]. +/// +/// Written by the gateway via `POST /jet/update/schedule` and consumed by the agent, +/// which validates the values, applies them to the running scheduling loop, and persists them +/// to `agent.json`. +/// +/// Additionally, Agent writes the current scheduler recorded in `agent.json` +/// so gateway can retrieve it back via `GET /jet/update/schedule` without needing to introduce +/// knowledge of agent's configuration file format on the gateway side. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)] +#[serde(rename_all = "PascalCase")] +pub struct UpdateSchedule { + /// Enable periodic Devolutions Agent self-update checks. + pub enabled: bool, + + /// Minimum interval between update checks, in seconds. + /// + /// 0 value has a special meaning of "only check once at `update_window_start`. + #[serde(default)] + pub interval: u64, + + /// Start of the maintenance window as seconds past midnight, local time. + #[serde(default = "default_schedule_window_start")] + pub update_window_start: u32, + + /// End of the maintenance window as seconds past midnight, local time, exclusive. + /// + /// `None` means no upper bound. + /// When end < start the window crosses midnight. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub update_window_end: Option, + + #[serde(default)] + /// Products for which the agent autonomously polls for new versions. 
+ pub products: Vec, +} + +/// Marker type that always serializes/deserializes as the number `2`. +/// +/// Embedded as the `VersionMajor` field in [`UpdateManifestV2`] so that the +/// untagged [`UpdateManifest`] enum can distinguish V2 from legacy V1 payloads: +/// if `VersionMajor` is absent or not `"2"`, `ManifestV2` deserialization fails +/// and the `Legacy` variant is tried next. When a V3 format is introduced, a new +/// marker type and `ManifestV3` variant are added in a similar way. +#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)] +pub struct VersionMajorV2; + +impl Serialize for VersionMajorV2 { + fn serialize(&self, s: S) -> Result { + s.serialize_u32(2) + } +} + +impl<'de> Deserialize<'de> for VersionMajorV2 { + fn deserialize>(d: D) -> Result { + struct V; + impl serde::de::Visitor<'_> for V { + type Value = VersionMajorV2; + fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "update manifest major version 2") + } + fn visit_u64(self, n: u64) -> Result { + if n == 2 { + Ok(VersionMajorV2) + } else { + Err(E::invalid_value(serde::de::Unexpected::Unsigned(n), &self)) + } + } + } + d.deserialize_u64(V) + } +} + +/// Version 2 of the update manifest format, written by agent/gateway >=2026.2.0. +/// Includes product update list and auto-update schedule. Adding a product name should always +/// increase the minor version, to allow the gateway API caller to know supported products list. 
+/// +/// Example (full V2 file): +/// ```json +/// { +/// "VersionMajor": 2, +/// "VersionMinor": 1, +/// "Schedule": { "Enabled": false, "Interval": 86400, "UpdateWindowStart": 7200, "UpdateWindowEnd": 14400 }, +/// "Products": { +/// "Gateway": { "Version": "2026.1.0" }, +/// "Agent": { "Version": "latest" } +/// } +/// } +/// ``` +#[derive(Debug, Deserialize, Serialize)] +#[serde(rename_all = "PascalCase")] +pub struct UpdateManifestV2 { + /// Always `2` — the presence and value of this field let the untagged + /// [`UpdateManifest`] distinguish V2 from legacy V1 payloads and prevent further parsing + /// attempt of V2 structure + pub version_major: VersionMajorV2, + /// Feature-set version within V2. + pub version_minor: u32, + /// Auto-update schedule set by the gateway. Agent persists it to `agent.json`. + #[serde(skip_serializing_if = "Option::is_none")] + pub schedule: Option, + /// Map of product name → update info. Empty when the file is a bare V2 stub. + #[serde(default, skip_serializing_if = "HashMap::is_empty")] + pub products: HashMap, +} + +impl Default for UpdateManifestV2 { + fn default() -> Self { + Self { + version_major: VersionMajorV2, + version_minor: UPDATE_MANIFEST_V2_MINOR_VERSION, + schedule: None, + products: HashMap::new(), + } + } +} + +/// A parsed update manifest: either a V2 file or a legacy V1 file. +/// +/// New agents initialise `update.json` with `{"VersionMajor": "2", "VersionMinor": 0}`; +/// old agents write `{}`. The gateway reads the existing file before writing to determine +/// which format to use. +/// +/// Serde variant order is significant: `ManifestV2` is tried first; its `VersionMajor` +/// field causes deserialization to fail when absent or not `2`, allowing the untagged +/// enum to fall through to `Legacy`. When V3 is introduced, a `ManifestV3` variant is +/// inserted before `ManifestV2`. 
+#[derive(Debug, Deserialize, Serialize)] +#[serde(untagged)] +pub enum UpdateManifest { + /// V2 format: contains `"VersionMajor": 2`. + ManifestV2(UpdateManifestV2), + /// Legacy V1 format: no `"VersionMajor"` field. + Legacy(UpdateManifestV1), +} + +pub(crate) fn strip_bom(data: &[u8]) -> &[u8] { + data.strip_prefix(b"\xEF\xBB\xBF").unwrap_or(data) +} + +impl UpdateManifest { + /// Parse `update.json` bytes, automatically detecting the format. + /// + /// Strips a UTF-8 BOM if present before parsing. + pub fn parse(data: &[u8]) -> serde_json::Result { + serde_json::from_slice(strip_bom(data)) + } + + /// Normalise the manifest into a flat product map for uniform processing. + /// + /// - V2 `products` is used directly. + /// - V1 named fields are mapped to their [`UpdateProductKey`] equivalents. + /// - V1 `other` entries are best-effort converted; entries that do not match + /// [`ProductUpdateInfo`]'s schema are silently dropped. + pub fn into_products(self) -> HashMap { + match self { + Self::ManifestV2(v2) => v2.products, + Self::Legacy(v1) => { + let mut map = HashMap::new(); + if let Some(gw) = v1.gateway { + map.insert(UpdateProductKey::Gateway, gw); + } + if let Some(hs) = v1.hub_service { + map.insert(UpdateProductKey::HubService, hs); + } + map + } + } + } +} + +/// Detect the `VersionMajor` of an `update.json` payload without fully parsing the manifest. +/// +/// Returns `1` for legacy V1 files (no `VersionMajor` field) or the numeric major version +/// for V2+ files. Both the numeric form and the legacy string form (written by 2026.1 +/// agents) are accepted for robustness during format transitions. +/// +/// Returns a [`serde_json::Error`] when `data` is not valid JSON. 
+pub fn detect_update_manifest_major_version(data: &[u8]) -> serde_json::Result { + let value = serde_json::from_slice::(strip_bom(data))?; + let Some(v) = value.get("VersionMajor") else { + return Ok(1); + }; + Ok(v.as_u64() + .and_then(|n| u32::try_from(n).ok()) + .or_else(|| v.as_str().and_then(|s| s.parse().ok())) + .unwrap_or(1)) +} + +// ── Product key ────────────────────────────────────────────────────────────── + +/// Product key used in the V2 update manifest `Products` map. +/// +/// Known variants correspond to products this version of the agent understands. +/// `Other` captures any product name that is not yet known and preserves it so +/// that a future agent version can act on it. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum UpdateProductKey { + Gateway, + HubService, + Agent, + /// Any product name not recognised by this version of the agent. + Other(String), +} + +impl UpdateProductKey { + pub fn as_str(&self) -> &str { + match self { + Self::Gateway => "Gateway", + Self::HubService => "HubService", + Self::Agent => "Agent", + Self::Other(s) => s.as_str(), + } + } +} + +impl fmt::Display for UpdateProductKey { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(self.as_str()) + } +} + +impl Serialize for UpdateProductKey { + fn serialize(&self, s: S) -> Result { + s.serialize_str(self.as_str()) + } +} + +impl<'de> Deserialize<'de> for UpdateProductKey { + fn deserialize>(d: D) -> Result { + struct KeyVisitor; + + impl serde::de::Visitor<'_> for KeyVisitor { + type Value = UpdateProductKey; + + fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "a product name string") + } + + fn visit_str(self, v: &str) -> Result { + Ok(match v { + "Gateway" => UpdateProductKey::Gateway, + "HubService" => UpdateProductKey::HubService, + "Agent" => UpdateProductKey::Agent, + other => UpdateProductKey::Other(other.to_owned()), + }) + } + } + + d.deserialize_str(KeyVisitor) + } +} + +// ── Tests 
───────────────────────────────────────────────────────────────────── + +#[cfg(test)] +mod tests { + #![allow(clippy::unwrap_used, reason = "test code can panic on errors")] + + use super::*; + + #[test] + fn version_specification_roundtrip() { + let cases: &[(&'static str, VersionSpecification)] = &[ + ( + "2022.2.24.0", + VersionSpecification::Specific("2022.2.24.0".parse().unwrap()), + ), + ("latest", VersionSpecification::Latest), + ]; + + for (serialized, deserialized) in cases { + let parsed = serde_json::from_str::(&format!("\"{serialized}\"")).unwrap(); + assert_eq!(parsed, *deserialized); + + let reserialized = serde_json::to_string(&parsed).unwrap(); + assert_eq!(reserialized, format!("\"{serialized}\"")); + } + } + + #[test] + fn empty_v1_parses_as_legacy() { + let manifest = UpdateManifest::parse(b"{}").unwrap(); + assert!(matches!(manifest, UpdateManifest::Legacy(_))); + assert!(manifest.into_products().is_empty()); + } + + #[test] + fn empty_v2_stub_parses_as_manifest() { + let manifest = UpdateManifest::parse(br#"{"VersionMajor":2,"VersionMinor":1}"#).unwrap(); + assert!(matches!(manifest, UpdateManifest::ManifestV2(_))); + assert!(manifest.into_products().is_empty()); + } + + #[test] + fn v2_with_products_roundtrip() { + let json = r#"{"VersionMajor":2,"VersionMinor":0,"Products":{"Agent":{"Version":"latest"},"Gateway":{"Version":"2026.1.0"}}}"#; + let manifest = UpdateManifest::parse(json.as_bytes()).unwrap(); + assert!(matches!(manifest, UpdateManifest::ManifestV2(_))); + let products = manifest.into_products(); + assert_eq!(products.len(), 2); + assert!(matches!( + products[&UpdateProductKey::Agent].target_version, + VersionSpecification::Latest + )); + } + + #[test] + fn v1_with_products_into_products() { + let json = r#"{"Gateway":{"Version":"2026.1.0"},"HubService":{"Version":"latest"}}"#; + let manifest = UpdateManifest::parse(json.as_bytes()).unwrap(); + assert!(matches!(manifest, UpdateManifest::Legacy(_))); + let products = 
manifest.into_products(); + assert_eq!(products.len(), 2); + assert!(matches!( + products[&UpdateProductKey::Gateway].target_version, + VersionSpecification::Specific(_) + )); + } + + #[test] + fn bom_is_stripped() { + // UTF-8 BOM prefix + let mut data = vec![0xEF, 0xBB, 0xBF]; + data.extend_from_slice(b"{}"); + let manifest = UpdateManifest::parse(&data).unwrap(); + assert!(matches!(manifest, UpdateManifest::Legacy(_))); + } + + #[test] + fn v2_stub_serialise_roundtrip() { + let stub = UpdateManifest::ManifestV2(UpdateManifestV2::default()); + let serialized = serde_json::to_string(&stub).unwrap(); + assert_eq!(serialized, r#"{"VersionMajor":2,"VersionMinor":1}"#); + let back = UpdateManifest::parse(serialized.as_bytes()).unwrap(); + assert!(matches!(back, UpdateManifest::ManifestV2(_))); + } + + #[test] + fn detect_version_legacy_no_field() { + assert_eq!(detect_update_manifest_major_version(b"{}").unwrap(), 1); + } + + #[test] + fn detect_version_v2_numeric() { + assert_eq!( + detect_update_manifest_major_version(br#"{"VersionMajor":2}"#).unwrap(), + 2 + ); + } + + #[test] + fn detect_version_v2_legacy_string_form() { + // Backward compat: 2026.1 agents wrote VersionMajor as a string. 
+ assert_eq!( + detect_update_manifest_major_version(br#"{"VersionMajor":"2"}"#).unwrap(), + 2 + ); + } + + #[test] + fn detect_version_unparsable_returns_error() { + assert!(detect_update_manifest_major_version(b"not json").is_err()); + } +} diff --git a/crates/devolutions-agent-shared/src/update_status.rs b/crates/devolutions-agent-shared/src/update_status.rs new file mode 100644 index 000000000..be2d28367 --- /dev/null +++ b/crates/devolutions-agent-shared/src/update_status.rs @@ -0,0 +1,188 @@ +use std::collections::HashMap; + +use serde::{Deserialize, Serialize}; + +use crate::update_manifest::strip_bom; +use crate::{ + InstalledProductUpdateInfo, UPDATE_MANIFEST_V2_MINOR_VERSION, UpdateProductKey, UpdateSchedule, VersionMajorV2, +}; + +/// Version 2 of the agent status format, written by agent >=2026.2.0. +/// +/// Uses the same major version marker ([`VersionMajorV2`]) as [`crate::UpdateManifestV2`] +/// so both files share the minor-version constant and version numbering scheme. +/// +/// Example: +/// ```json +/// { +/// "VersionMajor": 2, +/// "VersionMinor": 1, +/// "Schedule": { "Enabled": true, "Interval": 86400, "UpdateWindowStart": 7200 }, +/// "Products": { "Agent": { "Version": "2026.2.0" } } +/// } +/// ``` +/// +/// Agent runtime status written to `update_status.json` on agent start and refreshed +/// after each updater run or auto-update schedule change. +/// +/// The gateway reads this file for `GET /jet/update` and `GET /jet/update/schedule` so +/// that it can surface current agent state without needing knowledge of the agent's +/// internal `agent.json` configuration format. +/// +/// Unlike [`crate::UpdateManifest`] (`update.json`), this file is **read-only** for +/// the Gateway service: its DACL grants NETWORK SERVICE read access but **no write +/// access**. The agent is the sole writer. 
+/// +/// Note: if the agent itself is being updated, `update_status.json` will be +/// automatically refreshed when the agent restarts after the update completes. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "PascalCase")] +pub struct UpdateStatusV2 { + /// Always `2` — reuses [`VersionMajorV2`] so the version numbering is consistent + /// with [`crate::UpdateManifestV2`]. + pub version_major: VersionMajorV2, + /// Feature-set version within V2. + pub version_minor: u32, + /// Current auto-update schedule configured for this agent. + #[serde(skip_serializing_if = "Option::is_none")] + pub schedule: Option, + /// Map of product name → currently **installed** version. + /// + /// Products that are not installed are omitted. + #[serde(default, skip_serializing_if = "HashMap::is_empty")] + pub products: HashMap, +} + +impl Default for UpdateStatusV2 { + fn default() -> Self { + Self { + version_major: VersionMajorV2, + version_minor: UPDATE_MANIFEST_V2_MINOR_VERSION, + schedule: None, + products: HashMap::new(), + } + } +} + +/// A parsed agent status file: currently only V2 is defined. +/// +/// Serde variant order is significant: `StatusV2` is tried first; its `VersionMajor` +/// field causes deserialization to fail when the value is not `2`, allowing the untagged +/// enum to fall through to future variants. When V3 is introduced, a `StatusV3` +/// variant is inserted before `StatusV2`. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(untagged)] +pub enum UpdateStatus { + /// V2 format: contains `"VersionMajor": 2`. + StatusV2(UpdateStatusV2), +} + +impl UpdateStatus { + /// Parse `update_status.json` bytes. + /// + /// Strips a UTF-8 BOM if present before parsing. + pub fn parse(data: &[u8]) -> serde_json::Result { + serde_json::from_slice(strip_bom(data)) + } + + /// Return the format version as a `"major.minor"` string (e.g. `"2.1"`). 
+ pub fn version_string(&self) -> String { + match self { + Self::StatusV2(v2) => format!("2.{}", v2.version_minor), + } + } + + /// Borrow the schedule from whichever version is present. + pub fn schedule(&self) -> Option<&UpdateSchedule> { + match self { + Self::StatusV2(v2) => v2.schedule.as_ref(), + } + } + + /// Consume the status and return the product map from whichever version is present. + pub fn into_products(self) -> HashMap { + match self { + Self::StatusV2(v2) => v2.products, + } + } +} + +impl Default for UpdateStatus { + fn default() -> Self { + Self::StatusV2(UpdateStatusV2::default()) + } +} + +#[cfg(test)] +mod tests { + #![allow(clippy::unwrap_used, reason = "test code can panic on errors")] + + use super::*; + use crate::VersionSpecification; + + #[test] + fn bom_is_stripped() { + // UTF-8 BOM prefix + let mut data = vec![0xEF, 0xBB, 0xBF]; + data.extend_from_slice(br#"{"VersionMajor":2,"VersionMinor":1}"#); + let status = UpdateStatus::parse(&data).unwrap(); + assert!(matches!(status, UpdateStatus::StatusV2(_))); + } + + #[test] + fn v2_minimal_parses() { + let status = UpdateStatus::parse(br#"{"VersionMajor":2,"VersionMinor":1}"#).unwrap(); + assert!(matches!(status, UpdateStatus::StatusV2(_))); + assert!(status.schedule().is_none()); + assert!(status.into_products().is_empty()); + } + + #[test] + fn wrong_major_fails() { + assert!(UpdateStatus::parse(br#"{"VersionMajor":1,"VersionMinor":1}"#).is_err()); + assert!(UpdateStatus::parse(br#"{"VersionMajor":3,"VersionMinor":0}"#).is_err()); + } + + #[test] + fn v2_with_schedule_roundtrip() { + let json = r#"{"VersionMajor":2,"VersionMinor":1,"Schedule":{"Enabled":true,"Interval":86400,"UpdateWindowStart":7200,"Products":[]}}"#; + let status = UpdateStatus::parse(json.as_bytes()).unwrap(); + let schedule = status.schedule().unwrap(); + assert!(schedule.enabled); + assert_eq!(schedule.interval, 86400); + assert_eq!(schedule.update_window_start, 7200); + let reserialized = 
serde_json::to_string(&status).unwrap(); + assert_eq!(reserialized, json); + } + + #[test] + fn v2_with_products_roundtrip() { + let json = r#"{"VersionMajor":2,"VersionMinor":1,"Products":{"Agent":{"Version":"2026.2.0"},"Gateway":{"Version":"latest"}}}"#; + let status = UpdateStatus::parse(json.as_bytes()).unwrap(); + let products = status.into_products(); + assert_eq!(products.len(), 2); + assert!(matches!( + products[&UpdateProductKey::Gateway].version, + VersionSpecification::Latest + )); + assert!(matches!( + products[&UpdateProductKey::Agent].version, + VersionSpecification::Specific(_) + )); + } + + #[test] + fn version_string_format() { + let status = UpdateStatus::parse(br#"{"VersionMajor":2,"VersionMinor":3}"#).unwrap(); + assert_eq!(status.version_string(), "2.3"); + } + + #[test] + fn v2_stub_serialise_roundtrip() { + let stub = UpdateStatus::StatusV2(UpdateStatusV2::default()); + let serialized = serde_json::to_string(&stub).unwrap(); + assert_eq!(serialized, r#"{"VersionMajor":2,"VersionMinor":1}"#); + let back = UpdateStatus::parse(serialized.as_bytes()).unwrap(); + assert!(matches!(back, UpdateStatus::StatusV2(_))); + } +} diff --git a/devolutions-agent-updater/Cargo.toml b/devolutions-agent-updater/Cargo.toml new file mode 100644 index 000000000..f294f6372 --- /dev/null +++ b/devolutions-agent-updater/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "devolutions-agent-updater" +version.workspace = true +edition = "2024" +license = "MIT/Apache-2.0" +authors = ["Devolutions Inc. "] +description = "Updater shim for Devolutions Agent" +publish = false + +[lints] +workspace = true + +[dependencies] +camino = "1.1" +devolutions-agent = { path = "../devolutions-agent" } diff --git a/devolutions-agent-updater/src/main.rs b/devolutions-agent-updater/src/main.rs new file mode 100644 index 000000000..ff33629da --- /dev/null +++ b/devolutions-agent-updater/src/main.rs @@ -0,0 +1,247 @@ +//! Devolutions Agent Updater shim. +//! +//! 
This minimal executable is launched as a detached process by the Devolutions Agent service +//! to perform a silent MSI update of Devolutions Agent itself. +//! +//! Running as a detached process is necessary because the MSI installer stops and restarts +//! the Devolutions Agent Windows service during installation. If the agent tried to call +//! msiexec directly and wait for it, the agent would be killed mid-update. By launching +//! this shim as a detached process, the shim survives the agent service restart and +//! ensures the MSI installation completes successfully. +//! +//! # Usage +//! +//! ```text +//! devolutions-agent-updater [-x ] +//! ``` +//! +//! When `-x ` is provided (a braced GUID such as +//! `{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx}`), the shim first runs +//! `msiexec /x` to uninstall the currently installed version and then runs +//! `msiexec /i` to install the target version. This is required for downgrades +//! because MSI upgrade conditions prevent installing an older version on top of +//! a newer one. + +// Suppress the console window in release builds. In debug builds, we keep the console for +// visibility when running from a terminal during development. +#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] + +#[cfg(windows)] +use devolutions_agent::updater::{self, AgentServiceState}; + +fn main() { + #[cfg(not(windows))] + { + use std::io::Write as _; + let _ = writeln!( + std::io::stderr(), + "devolutions-agent-updater is only supported on Windows" + ); + std::process::exit(1); + } + + #[cfg(windows)] + windows_main(); +} + +#[cfg(windows)] +fn windows_main() { + let args: Vec = std::env::args().collect(); + + if args.len() < 2 { + let _ = write_to_stderr("Usage: devolutions-agent-updater [-x ] "); + std::process::exit(1); + } + + // Parse optional -x flag before the positional MSI path. 
+ let (uninstall_product_code, msi_path) = { + let mut iter = args.iter().skip(1).peekable(); + let product_code = if iter.peek().map(|s| s.as_str()) == Some("-x") { + iter.next(); // consume "-x" + let code = iter.next().map(String::as_str); + if code.is_none() { + let _ = write_to_stderr("Error: -x requires a product code argument"); + std::process::exit(1); + } + code + } else { + None + }; + let msi = match iter.next() { + Some(s) => s.as_str(), + None => { + let _ = write_to_stderr("Usage: devolutions-agent-updater [-x ] "); + std::process::exit(1); + } + }; + (product_code, msi) + }; + + // Derive paths from the MSI path. + // The shim log uses a separate extension so it doesn't conflict with the msiexec log. + let shim_log_path = format!("{msi_path}.shim.log"); + let install_log_path = format!("{msi_path}.install.log"); + + write_log(&shim_log_path, "devolutions-agent-updater: starting"); + write_log(&shim_log_path, &format!(" MSI path: {msi_path}")); + write_log(&shim_log_path, &format!(" Install log: {install_log_path}")); + + // Capture agent service state before the update so we can restore it afterwards. + let service_state = match updater::query_agent_service_state() { + Ok(state) => { + write_log( + &shim_log_path, + &format!( + "Agent service state: running={}, automatic_startup={}", + state.was_running, state.startup_was_automatic + ), + ); + Some(state) + } + Err(e) => { + write_log(&shim_log_path, &format!("Failed to query agent service state: {e:#}")); + None + } + }; + + let exit_code = run_update( + uninstall_product_code, + msi_path, + &shim_log_path, + &install_log_path, + service_state.as_ref(), + ); + + // Always mark the shim log for deletion on the next reboot (best-effort). + let _ = updater::remove_file_on_reboot(camino::Utf8Path::new(&shim_log_path)); + + if exit_code != 0 { + std::process::exit(exit_code); + } +} + +/// Run the optional uninstall followed by the MSI install. 
+/// +/// Returns 0 on success or a non-zero msiexec exit code on failure. +#[cfg(windows)] +fn run_update( + uninstall_product_code: Option<&str>, + msi_path: &str, + shim_log_path: &str, + install_log_path: &str, + service_state: Option<&AgentServiceState>, +) -> i32 { + // For downgrades, uninstall the currently installed version first. + if let Some(product_code) = uninstall_product_code { + write_log(shim_log_path, &format!(" Uninstalling product code: {product_code}")); + + let uninstall_log_path = format!("{msi_path}.uninstall.log"); + let status = std::process::Command::new("msiexec") + .args([ + "/x", + product_code, + "/quiet", + "/norestart", + "/l*v", + uninstall_log_path.as_str(), + ]) + .status(); + + // Mark the uninstall log for deletion on reboot regardless of the msiexec result. + let _ = updater::remove_file_on_reboot(camino::Utf8Path::new(&uninstall_log_path)); + + match status { + Ok(exit_status) => { + let code = exit_status.code().unwrap_or(-1); + match code { + 0 | 3010 | 1641 => { + write_log( + shim_log_path, + &format!("devolutions-agent-updater: uninstall completed with code {code} (success)"), + ); + } + _ => { + write_log( + shim_log_path, + &format!("devolutions-agent-updater: uninstall failed with exit code {code}"), + ); + return code; + } + } + } + Err(err) => { + write_log( + shim_log_path, + &format!("devolutions-agent-updater: failed to launch msiexec for uninstall: {err}"), + ); + return 1; + } + } + } + + let status = std::process::Command::new("msiexec") + .args(["/i", msi_path, "/quiet", "/norestart", "/l*v", install_log_path]) + .status(); + + // Mark the install log for deletion on reboot regardless of the msiexec result. 
+ let _ = updater::remove_file_on_reboot(camino::Utf8Path::new(install_log_path)); + + match status { + Ok(exit_status) => { + let code = exit_status.code().unwrap_or(-1); + + // MSI exit codes: + // 0 = Success + // 3010 = Success (reboot required, but our installers shouldn't need a reboot) + // 1641 = Success (reboot initiated) + match code { + 0 | 3010 | 1641 => { + write_log( + shim_log_path, + &format!("devolutions-agent-updater: msiexec completed with code {code} (success)"), + ); + // Post-update: restore service running state when startup mode is manual. + if let Some(state) = service_state { + match updater::start_agent_service_if_needed(state) { + Ok(true) => write_log(shim_log_path, "Agent service started successfully"), + Ok(false) => {} + Err(e) => write_log(shim_log_path, &format!("Failed to start agent service: {e:#}")), + } + } + 0 + } + _ => { + write_log( + shim_log_path, + &format!("devolutions-agent-updater: msiexec failed with exit code {code}"), + ); + code + } + } + } + Err(err) => { + write_log( + shim_log_path, + &format!("devolutions-agent-updater: failed to launch msiexec: {err}"), + ); + 1 + } + } +} + +/// Append a line to a log file, ignoring errors (best-effort logging). 
+#[cfg(windows)] +fn write_log(path: &str, msg: &str) { + use std::fs::OpenOptions; + use std::io::Write as _; + + if let Ok(mut file) = OpenOptions::new().create(true).append(true).open(path) { + let _ = writeln!(file, "{msg}"); + } +} + +#[cfg(windows)] +fn write_to_stderr(msg: &str) -> std::io::Result<()> { + use std::io::Write as _; + writeln!(std::io::stderr(), "{msg}") +} diff --git a/devolutions-agent/Cargo.toml b/devolutions-agent/Cargo.toml index a93df50c8..37e366744 100644 --- a/devolutions-agent/Cargo.toml +++ b/devolutions-agent/Cargo.toml @@ -8,6 +8,10 @@ description = "Agent companion service for Devolutions Gateway" build = "build.rs" publish = false +[[bin]] +name = "devolutions-agent" +path = "src/main.rs" + [lints] workspace = true @@ -57,6 +61,7 @@ features = [ [target.'cfg(windows)'.dependencies] aws-lc-rs = "1.15" +time = { version = "0.3", features = ["local-offset", "macros", "parsing"] } devolutions-pedm = { path = "../crates/devolutions-pedm" } hex = "0.4" notify-debouncer-mini = "0.6" diff --git a/devolutions-agent/src/config.rs b/devolutions-agent/src/config.rs index 1632fcc60..b095b594f 100644 --- a/devolutions-agent/src/config.rs +++ b/devolutions-agent/src/config.rs @@ -5,7 +5,7 @@ use std::sync::Arc; use anyhow::{Context, bail}; use camino::{Utf8Path, Utf8PathBuf}; -use devolutions_agent_shared::get_data_dir; +use devolutions_agent_shared::{default_schedule_window_start, get_data_dir}; use serde::{Deserialize, Serialize}; use tap::prelude::*; use url::Url; @@ -141,6 +141,17 @@ impl ConfHandle { pub fn get_conf_file(&self) -> Arc { self.inner.conf_file.read().clone() } + + /// Persists a new auto-update schedule to `agent.json` and updates the in-memory state. 
+ pub fn save_updater_schedule(&self, schedule: &dto::UpdaterSchedule) -> anyhow::Result<()> { + let mut conf_file = (*self.inner.conf_file.read()).as_ref().clone(); + conf_file.updater.get_or_insert_with(dto::UpdaterConf::default).schedule = Some(schedule.clone()); + let conf = Conf::from_conf_file(&conf_file).context("invalid configuration")?; + save_config(&conf_file).context("failed to save configuration")?; + *self.inner.conf.write() = Arc::new(conf); + *self.inner.conf_file.write() = Arc::new(conf_file); + Ok(()) + } } fn save_config(conf: &dto::ConfFile) -> anyhow::Result<()> { @@ -191,19 +202,89 @@ pub fn load_conf_file_or_generate_new() -> anyhow::Result { } pub mod dto { + use devolutions_agent_shared::UpdateProductKey; + use super::*; + /// Mirrors [`devolutions_agent_shared::UpdateSchedule`] + #[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize, Default)] + #[serde(rename_all = "PascalCase")] + pub struct UpdaterSchedule { + /// Enable periodic Devolutions Agent self-update checks. + pub enabled: bool, + + /// Minimum interval between update checks, in seconds. + /// + /// 0 value has a special meaning of "only check once at `update_window_start`. + #[serde(default)] + pub interval: u64, + + /// Start of the maintenance window as seconds past midnight, local time. + #[serde(default = "default_schedule_window_start")] + pub update_window_start: u32, + + /// End of the maintenance window as seconds past midnight, local time, exclusive. + /// + /// `None` means no upper bound (only single update check at update_window_start). + /// When end < start the window crosses midnight. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub update_window_end: Option, + + /// Products for which the agent autonomously polls for new versions. 
+ #[serde(default)] + pub products: Vec, + } + + impl From for UpdaterSchedule { + fn from(s: devolutions_agent_shared::UpdateSchedule) -> Self { + Self { + enabled: s.enabled, + interval: s.interval, + update_window_start: s.update_window_start, + update_window_end: s.update_window_end, + products: s.products, + } + } + } + + impl From for devolutions_agent_shared::UpdateSchedule { + fn from(s: UpdaterSchedule) -> Self { + Self { + enabled: s.enabled, + interval: s.interval, + update_window_start: s.update_window_start, + update_window_end: s.update_window_end, + products: s.products, + } + } + } + + // ── UpdaterConf ────────────────────────────────────────────────────────── + #[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)] #[serde(rename_all = "PascalCase")] pub struct UpdaterConf { - /// Enable updater module + /// Enable updater module. pub enabled: bool, + /// Periodic Devolutions Agent self-update schedule. + /// + /// When present and `Enabled` is `true`, the agent automatically checks for a new + /// version of itself at the configured interval and triggers a silent MSI update + /// during the configured maintenance window. + /// + /// This setting can be managed remotely via the Devolutions Gateway API + /// (`GET`/`POST /jet/update/schedule`) or set directly in this file. + #[serde(skip_serializing_if = "Option::is_none")] + pub schedule: Option, } #[allow(clippy::derivable_impls)] // Just to be explicit about the default values of the config. 
impl Default for UpdaterConf { fn default() -> Self { - Self { enabled: false } + Self { + enabled: false, + schedule: None, + } } } @@ -324,7 +405,10 @@ pub mod dto { Self { verbosity_profile: None, log_file: None, - updater: Some(UpdaterConf { enabled: true }), + updater: Some(UpdaterConf { + enabled: true, + schedule: None, + }), remote_desktop: None, pedm: None, proxy: None, diff --git a/devolutions-agent/src/updater/detect.rs b/devolutions-agent/src/updater/detect.rs index eaab03eaf..e739d122c 100644 --- a/devolutions-agent/src/updater/detect.rs +++ b/devolutions-agent/src/updater/detect.rs @@ -1,6 +1,6 @@ //! Module which provides logic to detect installed products and their versions. use devolutions_agent_shared::DateVersion; -use devolutions_agent_shared::windows::{GATEWAY_UPDATE_CODE, HUB_SERVICE_UPDATE_CODE, registry}; +use devolutions_agent_shared::windows::{AGENT_UPDATE_CODE, GATEWAY_UPDATE_CODE, HUB_SERVICE_UPDATE_CODE, registry}; use uuid::Uuid; use crate::updater::{Product, UpdaterError}; @@ -16,6 +16,10 @@ pub(crate) fn get_installed_product_version(product: Product) -> Result { + registry::get_installed_product_version(AGENT_UPDATE_CODE, registry::ProductVersionEncoding::Agent) + .map_err(UpdaterError::WindowsRegistry) + } } } @@ -25,5 +29,6 @@ pub(crate) fn get_product_code(product: Product) -> Result, Updater Product::HubService => { registry::get_product_code(HUB_SERVICE_UPDATE_CODE).map_err(UpdaterError::WindowsRegistry) } + Product::Agent => registry::get_product_code(AGENT_UPDATE_CODE).map_err(UpdaterError::WindowsRegistry), } } diff --git a/devolutions-agent/src/updater/error.rs b/devolutions-agent/src/updater/error.rs index 44ee84fc2..df2af0144 100644 --- a/devolutions-agent/src/updater/error.rs +++ b/devolutions-agent/src/updater/error.rs @@ -58,4 +58,10 @@ pub(crate) enum UpdaterError { QueryServiceState { product: Product, source: anyhow::Error }, #[error("failed to start service for `{product}`")] StartService { product: Product, 
source: anyhow::Error }, + #[error("agent updater shim not found at expected path: `{path}`")] + AgentUpdaterShimNotFound { path: Utf8PathBuf }, + #[error("failed to launch agent updater shim")] + AgentShimLaunch { source: std::io::Error }, + #[error("an agent update is already in progress; skipping concurrent update request")] + AgentUpdateAlreadyInProgress, } diff --git a/devolutions-agent/src/updater/mod.rs b/devolutions-agent/src/updater/mod.rs index 994a1c08c..1db65e7a2 100644 --- a/devolutions-agent/src/updater/mod.rs +++ b/devolutions-agent/src/updater/mod.rs @@ -8,17 +8,32 @@ mod product_actions; mod productinfo; mod security; +/// Schedule a file for deletion on the next system reboot (best-effort). +/// +/// Wraps the internal reboot-removal logic with an [`anyhow::Error`] return type for use +/// outside this crate. +pub fn remove_file_on_reboot(file_path: &Utf8Path) -> anyhow::Result<()> { + io::remove_file_on_reboot(file_path).map_err(anyhow::Error::from) +} + +use std::collections::HashMap; use std::sync::Arc; +use std::sync::atomic::{AtomicBool, Ordering}; use std::time::Duration; use anyhow::{Context, anyhow}; use async_trait::async_trait; use camino::{Utf8Path, Utf8PathBuf}; -use devolutions_agent_shared::{DateVersion, UpdateJson, VersionSpecification, get_updater_file_path}; +use devolutions_agent_shared::{ + DateVersion, InstalledProductUpdateInfo, ProductUpdateInfo, UpdateManifest, UpdateManifestV2, UpdateProductKey, + UpdateSchedule, UpdateStatus, UpdateStatusV2, VersionSpecification, get_update_status_file_path, + get_updater_file_path, +}; use devolutions_gateway_task::{ShutdownSignal, Task}; use notify_debouncer_mini::notify::RecursiveMode; use tokio::fs; use uuid::Uuid; +use win_api_wrappers::service::{ServiceManager, ServiceStartupMode}; use self::detect::get_product_code; pub(crate) use self::error::UpdaterError; @@ -30,12 +45,76 @@ use self::product_actions::{ProductUpdateActions, build_product_actions}; use 
self::productinfo::DEVOLUTIONS_PRODUCTINFO_URL; use self::security::set_file_dacl; use crate::config::ConfHandle; +use crate::config::dto::UpdaterSchedule; use crate::updater::productinfo::ProductInfoDb; +/// Windows service name for Devolutions Agent. +pub const AGENT_SERVICE_NAME: &str = "DevolutionsAgent"; + +/// Service state captured before the MSI update begins, used to restore state afterwards. +pub struct AgentServiceState { + pub was_running: bool, + pub startup_was_automatic: bool, +} + +/// Query the Devolutions Agent service state before the MSI update begins. +/// +/// Called while the agent service is still running so startup mode and running state +/// reflect the pre-update configuration. +pub fn query_agent_service_state() -> anyhow::Result { + let sm = ServiceManager::open_read()?; + let svc = sm.open_service_read(AGENT_SERVICE_NAME)?; + Ok(AgentServiceState { + startup_was_automatic: svc.startup_mode()? == ServiceStartupMode::Automatic, + was_running: svc.is_running()?, + }) +} + +/// Start the Devolutions Agent service after a successful update if its startup mode is manual. +/// +/// Services configured for automatic startup are restarted by the Windows SCM after the MSI +/// completes. Services with manual startup must be started explicitly. +/// +/// Returns `true` if the service was started, `false` if a start was not needed. +pub fn start_agent_service_if_needed(state: &AgentServiceState) -> anyhow::Result { + // Automatic-startup services restart themselves via the SCM; no action needed. 
+ if state.startup_was_automatic || !state.was_running { + return Ok(false); + } + let sm = ServiceManager::open_all_access()?; + let svc = sm.open_service_all_access(AGENT_SERVICE_NAME)?; + svc.start()?; + Ok(true) +} + const UPDATE_JSON_WATCH_INTERVAL: Duration = Duration::from_secs(3); -// List of updateable products could be extended in future -const PRODUCTS: &[Product] = &[Product::Gateway, Product::HubService]; +/// Delay before the first unconditional `update_status.json` refresh after agent start. +/// +/// 30 seconds gives the MSI installer enough time to finish after an agent self-update +/// so the registry reflects the newly installed version when we re-probe it. +const STATUS_REFRESH_INITIAL_DELAY: Duration = Duration::from_secs(30); + +/// Interval between subsequent unconditional `update_status.json` refreshes. +/// +/// This catches manual re-installations or any other external change that the +/// update-triggered refresh would miss. +const STATUS_REFRESH_INTERVAL: Duration = Duration::from_secs(5 * 60); + +// List of updateable products could be extended in future. +const PRODUCTS: &[Product] = &[Product::Gateway, Product::HubService, Product::Agent]; + +// The first agent version with self-update support is 2026.2. 
+const AGENT_MIN_SELF_UPDATE_VERSION: DateVersion = DateVersion { + year: 2026, + month: 2, + day: 0, + revision: 0, +}; + +fn is_agent_self_update_supported(version: DateVersion) -> bool { + version >= AGENT_MIN_SELF_UPDATE_VERSION +} /// Load productinfo source from configured URL or file path async fn load_productinfo_source(conf: &ConfHandle) -> Result { @@ -78,10 +157,14 @@ fn validate_download_url(ctx: &UpdaterCtx, url: &str) -> Result<(), UpdaterError } /// Context for updater task -struct UpdaterCtx { +pub(crate) struct UpdaterCtx { product: Product, actions: Box, conf: ConfHandle, + shutdown_signal: ShutdownSignal, + /// For agent self-update downgrades: the product code of the currently installed version + /// to be uninstalled by the shim before installing the target version. + downgrade_product_code: Option, } struct DowngradeInfo { @@ -96,6 +179,13 @@ struct UpdateOrder { hash: Option, } +/// Set to `true` while the agent self-update shim is running. +/// +/// Used as a lightweight guard to prevent overlapping agent updates and to block any +/// other product update from starting while the agent MSI is being installed (the MSI +/// may restart dependent services). +static AGENT_UPDATE_IN_PROGRESS: AtomicBool = AtomicBool::new(false); + pub struct UpdaterTask { conf_handle: ConfHandle, } @@ -115,9 +205,25 @@ impl Task for UpdaterTask { async fn run(self, mut shutdown_signal: ShutdownSignal) -> anyhow::Result<()> { let conf = self.conf_handle.clone(); - // Initialize update.json file if does not exist + // Derive the initial auto-update schedule from agent.json. + let initial_schedule: Option = { + let conf_data = conf.get_conf(); + conf_data.updater.schedule.clone() + }; let update_file_path = init_update_json().await?; + let mut current_schedule: Option = initial_schedule.map(UpdateSchedule::from); + + // Write update_status.json with the current schedule and installed product versions. 
+ // The gateway reads this file for GET /jet/update/schedule and GET /jet/update. + init_update_status_json(current_schedule.as_ref()).await?; + + // Unconditional status refresh: fires 30 s after start (catches self-update where the + // agent is re-launched before the MSI finishes writing to the registry), then every 5 + // minutes (catches manual re-installations and any other external change). + let status_refresh = tokio::time::sleep(STATUS_REFRESH_INITIAL_DELAY); + tokio::pin!(status_refresh); + let file_change_notification = Arc::new(tokio::sync::Notify::new()); let file_change_tx = Arc::clone(&file_change_notification); @@ -140,14 +246,90 @@ impl Task for UpdaterTask { // Trigger initial check during task startup file_change_notification.notify_waiters(); + // Absolute local timestamp of the last auto-update check, used to compute + // true elapsed time even when the interval spans multiple days. + let mut last_check_at_local: Option = None; + loop { + // Compute the delay to the next auto-update check slot. + // Schedule is re-read every iteration so external changes (e.g. written by the + // gateway via update.json) take effect without restarting the agent. + let auto_update_sleep = match current_schedule.as_ref().filter(|s| s.enabled) { + None => tokio::time::sleep(Duration::MAX), + Some(schedule) => { + let now: time::OffsetDateTime = local_now(); + + // How many seconds have elapsed since the last check? + let last_check_ago = elapsed_since_last_check_secs(now, last_check_at_local); + + let delay = next_poll_delay(seconds_since_midnight(now), last_check_ago, schedule); + trace!(delay_secs = delay.as_secs(), "Next auto-update check scheduled"); + tokio::time::sleep(delay) + } + }; + tokio::select! { + _ = &mut status_refresh => { + // Trace instead of Info since logging each 5 minutes produces + // a lot of noise in logs. 
+ trace!("Refreshing update_status.json (periodic status check)"); + + refresh_update_status_json(current_schedule.as_ref()).await; + status_refresh.as_mut().reset(tokio::time::Instant::now() + STATUS_REFRESH_INTERVAL); + } + _ = auto_update_sleep => { + let Some(ref schedule) = current_schedule else { continue }; + + if !schedule.enabled { + continue; + } + + // Confirm we are inside the window at the actual wake-up instant. + // The sleep duration is computed from wall-clock seconds, so minor + // clock drift or a very short interval could cause us to wake up + // fractionally early or outside the window. + let now = local_now(); + let now_secs = seconds_since_midnight(now); + + if !is_in_update_window( + now_secs, + u64::from(schedule.update_window_start), + schedule.update_window_end.map(u64::from), + ) { + // Not yet in the window; loop to recompute the exact delay. + continue; + } + + info!("Agent scheduled auto-update: maintenance window active, checking for new version"); + last_check_at_local = Some(now); + + // Build the product map from the schedule's product list, requesting + // the latest version for each. An empty list means no products are + // configured for auto-update; still record the check timestamp so the + // scheduler advances normally. + let scheduled_products: HashMap = schedule + .products + .iter() + .map(|key| { + ( + key.clone(), + ProductUpdateInfo { target_version: VersionSpecification::Latest }, + ) + }) + .collect(); + + if scheduled_products.is_empty() { + info!("Agent scheduled auto-update: no products configured, skipping"); + } else if run_product_updates(&scheduled_products, &conf, shutdown_signal.clone()).await { + // Update status needs updating. 
+ refresh_update_status_json(current_schedule.as_ref()).await; + } + } _ = file_change_notification.notified() => { info!("update.json file changed, checking for updates..."); - - let update_json = match read_update_json(&update_file_path).await { - Ok(update_json) => update_json, + let manifest = match read_update_json(&update_file_path).await { + Ok(manifest) => manifest, Err(error) => { error!(%error, "Failed to parse `update.json`"); // Allow this error to be non-critical, as this file could be @@ -156,30 +338,40 @@ impl Task for UpdaterTask { } }; - let mut update_orders = vec![]; - for product in PRODUCTS { - let update_order = match check_for_updates(*product, &update_json, &conf).await { - Ok(order) => order, - Err(error) => { - error!(%product, error = format!("{error:#}"), "Failed to check for updates for a product"); - continue; - } - }; - - if let Some(order) = update_order { - update_orders.push((*product, order)); + // Apply schedule changes when the gateway writes a new Schedule field. + // If the manifest has no Schedule field, leave the current schedule unchanged. + let mut status_needs_update = if let UpdateManifest::ManifestV2(ref v2) = manifest + && let Some(new_schedule) = v2.schedule.clone() + && current_schedule.as_ref() != Some(&new_schedule) + { + info!("Auto-update schedule changed via update.json; persisting to agent.json"); + let persisted = UpdaterSchedule::from(new_schedule.clone()); + if let Err(error) = conf.save_updater_schedule(&persisted) { + error!(%error, "Failed to persist auto-update schedule to agent.json"); } - } + current_schedule = Some(new_schedule); + // Rebase scheduler state to the newly applied schedule so checks are + // computed from the new window/interval policy. 
+ last_check_at_local = None; + true + } else { + false + }; + + let products_map = manifest.into_products(); - if update_orders.is_empty() { - info!("No updates available for any product"); + // If update.json has no Products field, do not trigger any update. + if products_map.is_empty() { + info!("update.json has no Products field, skipping update check"); + } else { + status_needs_update |= + run_product_updates(&products_map, &conf, shutdown_signal.clone()).await; } - for (product, order) in update_orders { - if let Err(error) = update_product(conf.clone(), product, order).await { - error!(%product, %error, "Failed to update product"); - } + // Refresh status after we applied all changes from the manifest. + if status_needs_update { + refresh_update_status_json(current_schedule.as_ref()).await; } } _ = shutdown_signal.wait() => { @@ -192,7 +384,77 @@ impl Task for UpdaterTask { } } -async fn update_product(conf: ConfHandle, product: Product, order: UpdateOrder) -> anyhow::Result<()> { +/// Check for and run updates for all products present in `products_map`. +/// +/// Iterates [`PRODUCTS`] in definition order, collects those that have an available update, +/// sorts them so the Agent update runs last (its MSI stops the agent service, which would +/// abort any subsequent product update), then installs each one. +/// +/// Returns `true` when `update_status.json` should be refreshed after this call. 
+async fn run_product_updates( + products_map: &HashMap, + conf: &ConfHandle, + shutdown_signal: ShutdownSignal, +) -> bool { + let mut update_orders: Vec<(Product, UpdateOrder)> = vec![]; + + for &product in PRODUCTS { + let update_order = match check_for_updates(product, products_map, conf).await { + Ok(order) => order, + Err(error) => { + error!(%product, error = format!("{error:#}"), "Failed to check for updates for a product"); + continue; + } + }; + + if let Some(order) = update_order { + update_orders.push((product, order)); + } + } + + if update_orders.is_empty() { + info!("No updates available for any product"); + return false; + } + + // Agent self-update must go last: its MSI stops the agent service, + // which would prevent any subsequent products from being updated. + update_orders.sort_by_key(|(product, _)| *product == Product::Agent); + + let mut agent_updated = false; + let mut update_successful = false; + + for (product, order) in update_orders { + match update_product(conf.clone(), product, order, shutdown_signal.clone()).await { + Ok(()) => { + if product == Product::Agent { + agent_updated = true; + } + + update_successful = true; + } + Err(error) => { + error!(%product, %error, "Failed to update product"); + } + } + } + + // If the agent was successfully updated a restart is imminent; status refreshes on next start. + update_successful & (!agent_updated) +} + +async fn update_product( + conf: ConfHandle, + product: Product, + order: UpdateOrder, + shutdown_signal: ShutdownSignal, +) -> anyhow::Result<()> { + // Block any product update while the agent shim is running in the background. + // The agent MSI restarts dependent services and must complete uninterrupted. 
+ if AGENT_UPDATE_IN_PROGRESS.load(Ordering::Acquire) { + anyhow::bail!("skipping {product} update: agent update is in progress"); + } + let target_version = order.target_version; let hash = order.hash; @@ -200,6 +462,12 @@ async fn update_product(conf: ConfHandle, product: Product, order: UpdateOrder) product, actions: build_product_actions(product), conf, + shutdown_signal, + downgrade_product_code: order.downgrade.as_ref().and_then(|d| { + // For Agent, the shim handles uninstall + install in sequence; pass the product + // code so it can run `msiexec /x` before `msiexec /i`. + (product == Product::Agent).then_some(d.product_code) + }), }; validate_download_url(&ctx, &order.package_url)?; @@ -238,7 +506,10 @@ async fn update_product(conf: ConfHandle, product: Product, order: UpdateOrder) let uninstall_log_path = package_path.with_extension("uninstall.log"); // NOTE: An uninstall/reinstall will lose any custom feature selection or other options in the existing installation - uninstall_package(&ctx, downgrade.product_code, &uninstall_log_path).await?; + // For Product::Agent the shim handles uninstall; skip the in-process step. + if product != Product::Agent { + uninstall_package(&ctx, downgrade.product_code, &uninstall_log_path).await?; + } } let log_path = package_path.with_extension("log"); @@ -254,30 +525,48 @@ async fn update_product(conf: ConfHandle, product: Product, order: UpdateOrder) Ok(()) } -async fn read_update_json(update_file_path: &Utf8Path) -> anyhow::Result { - let update_json_data = fs::read(update_file_path) +/// Read and parse `update.json` asynchronously. +/// +/// Transparently upgrades a legacy V1 file to a V2 manifest in memory so the rest of the +/// updater task never needs to handle the old format. The file on disk is left unchanged; +/// the next write will persist the upgraded format. 
+async fn read_update_json(update_file_path: &Utf8Path) -> anyhow::Result { + let data = fs::read(update_file_path) .await .context("failed to read update.json file")?; - // Strip UTF-8 BOM if present (some editors add it) - let data_without_bom = if update_json_data.starts_with(&[0xEF, 0xBB, 0xBF]) { - &update_json_data[3..] - } else { - &update_json_data - }; + let manifest = UpdateManifest::parse(&data).context("failed to parse update.json file")?; - let update_json: UpdateJson = - serde_json::from_slice(data_without_bom).context("failed to parse update.json file")?; + // Transparently upgrade V1 → V2 in memory. + let upgraded = match manifest { + UpdateManifest::ManifestV2(_) => manifest, + UpdateManifest::Legacy(v1) => { + let mut products = HashMap::new(); + if let Some(gw) = v1.gateway { + products.insert(UpdateProductKey::Gateway, gw); + } + if let Some(hs) = v1.hub_service { + products.insert(UpdateProductKey::HubService, hs); + } + UpdateManifest::ManifestV2(UpdateManifestV2 { + products, + ..UpdateManifestV2::default() + }) + } + }; - Ok(update_json) + Ok(upgraded) } async fn check_for_updates( product: Product, - update_json: &UpdateJson, + products: &HashMap, conf: &ConfHandle, ) -> anyhow::Result> { - let target_version = match product.get_update_info(update_json).map(|info| info.target_version) { + let target_version = match products + .get(&product.as_update_product_key()) + .map(|info| info.target_version.clone()) + { Some(version) => version, None => { trace!(%product, "No target version specified in update.json, skipping update check"); @@ -343,6 +632,16 @@ async fn check_for_updates( return Ok(None); } + if product == Product::Agent && !is_agent_self_update_supported(remote_version) { + warn!( + %product, + target_version = %remote_version, + min_version = %AGENT_MIN_SELF_UPDATE_VERSION, + "Latest version does not support agent self-update; skipping to avoid breaking auto-update" + ); + return Ok(None); + } + Ok(Some(UpdateOrder { 
target_version: remote_version, downgrade: None, @@ -400,6 +699,16 @@ async fn check_for_updates( } // Target MSI found, proceed with update. + if product == Product::Agent && !is_agent_self_update_supported(version) { + warn!( + %product, + %version, + min_version = %AGENT_MIN_SELF_UPDATE_VERSION, + "Target version does not support agent self-update; skipping to avoid breaking auto-update" + ); + return Ok(None); + } + // For the downgrade, we remove the installed product and install the target // version. This is the simplest and more reliable way to handle downgrades. (WiX // downgrade is not used). @@ -424,11 +733,107 @@ async fn check_for_updates( } } +/// Collect the currently installed version of every known product. +/// +/// Products that are not installed or whose version cannot be detected are silently +/// omitted from the returned map. +fn collect_installed_products() -> HashMap { + let mut products = HashMap::new(); + for &product in PRODUCTS { + match detect::get_installed_product_version(product) { + Ok(Some(version)) => { + products.insert( + product.as_update_product_key(), + InstalledProductUpdateInfo { + version: VersionSpecification::Specific(version), + }, + ); + } + Ok(None) => { + trace!(%product, "Product not installed, omitting from update_status.json"); + } + Err(error) => { + warn!(%product, %error, "Failed to detect installed product version for update_status.json"); + } + } + } + products +} + +/// Create `update_status.json` at startup, populate it with the current schedule and +/// installed product versions, and apply the DACL that restricts the Gateway service +/// to read-only access. 
+async fn init_update_status_json(schedule: Option<&UpdateSchedule>) -> anyhow::Result<()> { + let status_file_path = get_update_status_file_path(); + + let status = UpdateStatus::StatusV2(UpdateStatusV2 { + schedule: schedule.cloned(), + products: collect_installed_products(), + ..UpdateStatusV2::default() + }); + + let json = serde_json::to_string_pretty(&status).context("failed to serialize update_status.json")?; + fs::write(&status_file_path, json) + .await + .context("failed to write update_status.json")?; + + match set_file_dacl(&status_file_path, security::UPDATE_STATUS_JSON_DACL) { + Ok(_) => { + info!("Created `update_status.json` and set permissions successfully"); + } + Err(err) => { + std::fs::remove_file(status_file_path.as_std_path()).unwrap_or_else( + |error| warn!(%error, "Failed to remove update_status.json after failed permissions set"), + ); + return Err(anyhow!(err).context("failed to set update_status.json file permissions")); + } + } + + Ok(()) +} + +/// Refresh `update_status.json` with the latest schedule and re-detected installed +/// product versions. +/// +/// Called after each updater run (even when some product updates fail — the file is +/// always updated to reflect the current on-disk state) and after a schedule change. +/// +/// Note: if the agent itself is being updated, `update_status.json` will be automatically +/// refreshed when the agent restarts after the update completes. +/// +/// Errors are logged but treated as non-fatal so a failed write never aborts the updater. 
+async fn refresh_update_status_json(schedule: Option<&UpdateSchedule>) { + let status_file_path = get_update_status_file_path(); + + let status = UpdateStatus::StatusV2(UpdateStatusV2 { + schedule: schedule.cloned(), + products: collect_installed_products(), + ..UpdateStatusV2::default() + }); + + match serde_json::to_string_pretty(&status) { + Ok(json) => { + if let Err(error) = fs::write(&status_file_path, json).await { + error!(%error, "Failed to write update_status.json"); + } + } + Err(error) => { + error!(%error, "Failed to serialize update_status.json"); + } + } +} + async fn init_update_json() -> anyhow::Result { let update_file_path = get_updater_file_path(); + // update.json is the gateway->agent command channel. + // Do not mirror agent runtime state into this file; schedule and installed products + // are published by the agent through update_status.json. + let v2 = UpdateManifestV2::default(); + + let initial_manifest = UpdateManifest::ManifestV2(v2); let default_update_json = - serde_json::to_string_pretty(&UpdateJson::default()).context("failed to serialize default update.json")?; + serde_json::to_string_pretty(&initial_manifest).context("failed to serialize default update.json")?; fs::write(&update_file_path, default_update_json) .await @@ -476,12 +881,339 @@ fn try_modify_product_url_version( Ok(new_url) } +const SECS_PER_DAY: u64 = 86_400; + +/// Returns `true` when `now` falls within the configured maintenance window. +/// +/// `now`, `window_start`, and `window_end` are seconds past local midnight. +/// When `window_end` is `None`, the window spans an implicit full 24 h period starting at +/// `window_start`, therefore every local time is in-window. When `window_end` is `Some` +/// and `end < start`, midnight crossing is assumed +/// (e.g. `79200`–`10800` covers `[22:00, midnight) ∪ [midnight, 03:00)`). 
+fn is_in_update_window(now: u64, window_start: u64, window_end: Option) -> bool { + match window_end { + None => true, + Some(end) => { + if end < window_start { + // Window crosses midnight: [start, midnight) ∪ [midnight, end) + now >= window_start || now < end + } else { + // Normal window: [start, end) + now >= window_start && now < end + } + } + } +} + +fn local_now() -> time::OffsetDateTime { + time::OffsetDateTime::now_local().unwrap_or_else(|_| time::OffsetDateTime::now_utc()) +} + +fn seconds_since_midnight(now: time::OffsetDateTime) -> u64 { + u64::from(now.hour()) * 3_600 + u64::from(now.minute()) * 60 + u64::from(now.second()) +} + +fn elapsed_since_last_check_secs( + now_local: time::OffsetDateTime, + last_check_at_local: Option, +) -> Option { + last_check_at_local.map(|last| u64::try_from((now_local - last).whole_seconds()).unwrap_or(0)) +} + +/// Compute how long to sleep before the next auto-update check. +/// +/// The function is pure (takes explicit `now_secs`) so it can be unit-tested without +/// mocking the system clock. +/// +/// # Rules +/// +/// The window rolls over every 24 h. When `window_end` is `None` the window spans exactly +/// one full day starting at `window_start` (no upper bound restriction within that day). +/// +/// * **Outside window** — sleep until the next window start, then also check that the +/// cross-day rule since the last check is respected. If the interval is longer than +/// 24 h, the next check slot may be more than one day away. +/// * **Inside window, `interval == 0`** — single check per window; sleep until the +/// *next* window start (i.e. fire once, then skip to tomorrow's window). +/// * **Inside window, `interval > 0`** — checks land on multiples of `interval` counted +/// from `window_start`. Return the delay to the next such slot that lies inside the +/// window. If no further slot fits in the current window, sleep until the next window +/// start (tomorrow). 
+/// +/// For intervals greater than 24 h, an additional cross-day rule is enforced while +/// in-window: if the previous check happened less than `interval` seconds ago, the +/// returned delay is increased to at least the remaining interval. +/// +/// # Arguments +/// +/// * `now_since_midnight`— seconds past local midnight, in `[0, 86400)`. +/// * `last_check_ago` — elapsed seconds since the previous successful check +/// (`None` means no check has fired yet). +/// * `schedule` — the current [`UpdateSchedule`]. +/// +/// Returns a positive [`Duration`] (never zero, minimum 1 s) to avoid busy-loops. +fn next_poll_delay(now_since_midnight: u64, last_check_ago: Option, schedule: &UpdateSchedule) -> Duration { + let window_start = u64::from(schedule.update_window_start); + // None → no end bound; treat the window as spanning the full 24 h from window_start. + let window_end = schedule.update_window_end.map(u64::from); + // interval == 0 is treated as a single daily check (fire once at window start). + let interval = if schedule.interval == 0 { + SECS_PER_DAY + } else { + schedule.interval + }; + + // How many seconds until the next window start (wrapping around midnight)? + let secs_until_window_start = if now_since_midnight < window_start { + window_start - now_since_midnight + } else { + SECS_PER_DAY - now_since_midnight + window_start + }; + + // Is `now` inside the window? + let in_window = { + let end = window_end.unwrap_or(window_start + SECS_PER_DAY); + if end <= window_start { + // Midnight-crossing window: [start, 24h) ∪ [0, end) + now_since_midnight >= window_start || now_since_midnight < end + } else { + now_since_midnight >= window_start && now_since_midnight < end + } + }; + + if !in_window { + // Outside the window. Check whether the cross-day rule would push us past the + // next window start; if so, honour the interval instead. 
+ let delay = if let Some(last_ago) = last_check_ago { + if last_ago < interval { + // Interval not yet elapsed since last check; wait the remaining interval + // time but no longer than until the window re-opens. + let remaining_interval = interval - last_ago; + remaining_interval.max(secs_until_window_start) + } else { + secs_until_window_start + } + } else { + secs_until_window_start + }; + + return Duration::from_secs(delay.max(1)); + } + + // Inside the window. Find how far past window_start we are (may need to wrap around + // midnight for crossing windows). + let secs_past_start = if now_since_midnight >= window_start { + now_since_midnight - window_start + } else { + // We are before midnight but inside a crossing window (now_secs < window_start + // and we are in the [0, end) portion). + SECS_PER_DAY - window_start + now_since_midnight + }; + + // Next slot index (from window_start) is ceil(secs_past_start / interval). + let next_slot_offset = { + let elapsed_slots = secs_past_start / interval; + // If we're exactly on a slot boundary, still move to next slot (the current + // slot either just fired or is about to; either way don't re-fire immediately). + (elapsed_slots + 1) * interval + }; + + // Does that slot still fall inside the window? + let window_size = match window_end { + Some(end) if end > window_start => end - window_start, + Some(end) => SECS_PER_DAY - window_start + end, // crossing + None => SECS_PER_DAY, + }; + + let mut delay_secs = if next_slot_offset < window_size { + // Next check fires inside this window. + next_slot_offset - secs_past_start + } else { + // No more slots in this window; sleep until the next window start. + secs_until_window_start + }; + + // Enforce the cross-day rule only for intervals longer than one day. For shorter + // intervals, keep the legacy in-window slot semantics unchanged. 
+ if let Some(last_ago) = last_check_ago + && interval > SECS_PER_DAY + && last_ago < interval + { + delay_secs = delay_secs.max(interval - last_ago); + } + + // Enforce a minimum of 30 s to prevent unnecessarily fast polling loops even if the + // schedule is configured with a very small interval. + const MIN_POLL_SECS: u64 = 30; + Duration::from_secs(delay_secs.max(MIN_POLL_SECS)) +} + #[cfg(test)] mod tests { use super::*; + fn t(h: u64, m: u64) -> u64 { + h * 3_600 + m * 60 + } + + fn sched(window_start: u64, window_end: Option, interval: u64) -> UpdateSchedule { + UpdateSchedule { + enabled: true, + interval, + update_window_start: u32::try_from(window_start).expect("window start within a day"), + update_window_end: window_end.map(|end| u32::try_from(end).expect("window end within a day")), + products: vec![], + } + } + + fn at(ts: i64) -> time::OffsetDateTime { + time::OffsetDateTime::from_unix_timestamp(ts).expect("valid unix timestamp") + } + + fn at_u64(ts: u64) -> time::OffsetDateTime { + at(i64::try_from(ts).expect("test timestamp fits in i64")) + } + + #[test] + fn maintenance_window_bounded_cases() { + let start = t(2, 0); + let end = Some(t(4, 0)); + + for (time, expected) in [ + (t(1, 59), false), + (t(2, 0), true), + (t(3, 0), true), + (t(4, 0), false), + (t(4, 1), false), + ] { + assert_eq!(is_in_update_window(time, start, end), expected, "time={time}"); + } + } + + #[test] + fn maintenance_window_open_ended_cases() { + let start = t(2, 0); + + for (time, expected) in [(t(1, 59), true), (t(2, 0), true), (t(23, 59), true)] { + assert_eq!(is_in_update_window(time, start, None), expected, "time={time}"); + } + } + + #[test] + fn maintenance_window_midnight_crossing_cases() { + let start = t(22, 0); + let end = Some(t(3, 0)); + + for (time, expected) in [ + (t(22, 0), true), + (t(23, 0), true), + (t(1, 0), true), + (t(3, 0), false), + (t(10, 0), false), + ] { + assert_eq!(is_in_update_window(time, start, end), expected, "time={time}"); + } + } + + 
#[test] + fn next_poll_delay_outside_window_cases() { + for (now_time, last_check_ago, schedule, expected_delay) in [ + (t(0, 0), None, sched(t(2, 0), Some(t(4, 0)), t(1, 0)), t(2, 0)), + ( + t(5, 0), + Some(t(0, 10)), + sched(t(2, 0), Some(t(4, 0)), t(1, 0)), + t(21, 0), + ), + (t(5, 0), Some(t(2, 0)), sched(t(2, 0), Some(t(4, 0)), t(1, 0)), t(21, 0)), + (t(0, 0), None, sched(t(2, 0), Some(t(4, 0)), t(0, 0)), t(2, 0)), + ] { + assert_eq!( + next_poll_delay(now_time, last_check_ago, &schedule).as_secs(), + expected_delay + ); + } + } + + #[test] + fn next_poll_delay_inside_window_slot_cases() { + for (now_time, last_check_ago, schedule, expected_delay) in [ + (t(0, 0), None, sched(t(0, 0), Some(t(8, 0)), t(2, 0)), t(2, 0)), + (t(1, 30), None, sched(t(0, 0), Some(t(8, 0)), t(2, 0)), t(0, 30)), + (t(2, 0), None, sched(t(0, 0), Some(t(8, 0)), t(2, 0)), t(2, 0)), + (t(2, 30), None, sched(t(0, 0), Some(t(3, 0)), t(2, 0)), t(21, 30)), + ( + t(1, 30), + Some(t(0, 15)), + sched(t(0, 0), Some(t(8, 0)), t(2, 0)), + t(0, 30), + ), + ] { + assert_eq!( + next_poll_delay(now_time, last_check_ago, &schedule).as_secs(), + expected_delay + ); + } + } + + #[test] + fn next_poll_delay_special_window_cases() { + for (now_time, schedule, expected_delay) in [ + (t(2, 0), sched(t(2, 0), Some(t(4, 0)), t(0, 0)), SECS_PER_DAY), + (t(3, 0), sched(t(2, 0), None, t(4, 0)), t(3, 0)), + (t(1, 0), sched(t(22, 0), Some(t(3, 0)), t(2, 0)), t(1, 0)), + ] { + assert_eq!(next_poll_delay(now_time, None, &schedule).as_secs(), expected_delay); + } + } + + #[test] + fn next_poll_delay_long_interval_respects_cross_day_rule_inside_window() { + // interval = 36h, last check was 1h ago, now is inside window. + // Remaining interval must win over "next window start" delay. 
+ let schedule = sched(t(2, 0), Some(t(4, 0)), t(36, 0)); + assert_eq!(next_poll_delay(t(2, 30), Some(t(1, 0)), &schedule).as_secs(), t(35, 0)); + } + + #[test] + fn next_poll_delay_long_interval_outside_window_keeps_remaining_interval() { + // interval = 36h, last check was 2h ago, now outside window. + // Remaining 34h is longer than waiting for next window start and must be used. + let schedule = sched(t(2, 0), Some(t(4, 0)), t(36, 0)); + assert_eq!(next_poll_delay(t(5, 0), Some(t(2, 0)), &schedule).as_secs(), t(34, 0)); + } + + #[test] + fn next_poll_delay_long_interval_without_last_check_inside_window() { + // interval = 36h, no previous check, now is inside window. + // The first eligible slot inside the current window should be used. + let schedule = sched(t(2, 0), Some(t(4, 0)), t(36, 0)); + assert_eq!(next_poll_delay(t(2, 30), None, &schedule).as_secs(), t(23, 30)); + } + + #[test] + fn next_poll_delay_long_interval_without_last_check_outside_window() { + // interval = 36h, no previous check, now is outside window. + // With no cross-day rule to honor yet, wait only until the next window start. 
+ let schedule = sched(t(2, 0), Some(t(4, 0)), t(36, 0)); + assert_eq!(next_poll_delay(t(5, 0), None, &schedule).as_secs(), t(21, 0)); + } + + #[test] + fn elapsed_since_last_check_secs_supports_multi_day_runtime_intervals() { + assert_eq!( + elapsed_since_last_check_secs(at_u64(t(72, 0) + t(1, 0)), Some(at_u64(t(1, 0)))), + Some(t(72, 0)) + ); + } + #[test] - fn test_try_modify_product_url_version() { + fn elapsed_since_last_check_secs_without_previous_check_is_none() { + assert_eq!(elapsed_since_last_check_secs(at_u64(t(1, 0)), None), None); + } + + #[test] + fn try_modify_product_url_version_replaces_embedded_version() { let url = "https://cdn.devolutions.net/download/DevolutionsGateway-x86_64-2024.3.3.0.msi"; let original_version = DateVersion { year: 2024, @@ -503,4 +1235,30 @@ mod tests { "https://cdn.devolutions.net/download/DevolutionsGateway-x86_64-2024.4.0.0.msi" ); } + + #[test] + fn agent_self_update_support_boundary() { + let before_boundary = DateVersion { + year: 2026, + month: 1, + day: 0, + revision: 0, + }; + let boundary = DateVersion { + year: 2026, + month: 2, + day: 0, + revision: 0, + }; + let after_boundary = DateVersion { + year: 2026, + month: 2, + day: 1, + revision: 0, + }; + + assert!(!is_agent_self_update_supported(before_boundary)); + assert!(is_agent_self_update_supported(boundary)); + assert!(is_agent_self_update_supported(after_boundary)); + } } diff --git a/devolutions-agent/src/updater/package.rs b/devolutions-agent/src/updater/package.rs index fd82be7f0..319eeb212 100644 --- a/devolutions-agent/src/updater/package.rs +++ b/devolutions-agent/src/updater/package.rs @@ -2,12 +2,12 @@ use std::ops::DerefMut; -use camino::Utf8Path; +use camino::{Utf8Path, Utf8PathBuf}; use uuid::Uuid; use win_api_wrappers::utils::WideString; use crate::updater::io::remove_file_on_reboot; -use crate::updater::{Product, UpdaterCtx, UpdaterError}; +use crate::updater::{AGENT_UPDATE_IN_PROGRESS, Product, UpdaterCtx, UpdaterError}; /// List of allowed 
thumbprints for Devolutions code signing certificates const DEVOLUTIONS_CERT_THUMBPRINTS: &[&str] = &[ @@ -16,6 +16,9 @@ const DEVOLUTIONS_CERT_THUMBPRINTS: &[&str] = &[ "50f753333811ff11f1920274afde3ffd4468b210", ]; +/// Filename of the updater shim executable installed alongside the agent. +const AGENT_UPDATER_SHIM_NAME: &str = "DevolutionsAgentUpdater.exe"; + pub(crate) async fn install_package( ctx: &UpdaterCtx, path: &Utf8Path, @@ -23,6 +26,7 @@ pub(crate) async fn install_package( ) -> Result<(), UpdaterError> { match ctx.product { Product::Gateway | Product::HubService => install_msi(ctx, path, log_path).await, + Product::Agent => install_agent_via_shim(ctx, path).await, } } @@ -33,7 +37,175 @@ pub(crate) async fn uninstall_package( ) -> Result<(), UpdaterError> { match ctx.product { Product::Gateway | Product::HubService => uninstall_msi(ctx, product_code, log_path).await, + // For agent self-update the shim handles uninstall + install in sequence; the + // in-process uninstall step is skipped to avoid stopping the service prematurely. + Product::Agent => Ok(()), + } +} + +/// Install a new version of Devolutions Agent by launching the updater shim as a detached process. +/// +/// The shim (`devolutions-agent-updater.exe`) is copied to a temp location before being launched +/// so that the MSI installer can freely overwrite the agent installation directory. The shim +/// then runs `msiexec` silently, which stops the agent service, replaces its files, and +/// restarts it. Since the shim is detached from the agent service, it survives the service +/// restart and ensures the installation completes. +/// +/// When `downgrade_product_code` is `Some` the shim will first run `msiexec /x` to uninstall +/// the currently installed version before running `msiexec /i` for the target version. 
+async fn install_agent_via_shim(ctx: &UpdaterCtx, msi_path: &Utf8Path) -> Result<(), UpdaterError> { + let shim_path = find_agent_updater_shim()?; + + // Copy the shim to a temp location so it survives the MSI replacing the installation dir. + let temp_shim_path = copy_shim_to_temp(&shim_path).await?; + info!(%msi_path, %temp_shim_path, "Launching agent updater shim as detached process"); + + // Schedule the temp shim copy for deletion at the next system reboot. + if let Err(error) = remove_file_on_reboot(&temp_shim_path) { + error!(%error, "Failed to schedule temp shim for deletion on reboot"); + } + + launch_updater_shim_detached(ctx, &temp_shim_path, msi_path, ctx.downgrade_product_code).await?; + + if ctx.downgrade_product_code.is_some() { + info!("Agent updater shim launched; agent will be uninstalled then reinstalled at the target version"); + } else { + info!("Agent updater shim launched; agent service will be updated and restarted shortly"); + } + + Ok(()) +} + +/// Locate the agent updater shim executable next to the running agent binary. +fn find_agent_updater_shim() -> Result { + let exe_path = std::env::current_exe().map_err(UpdaterError::Io)?; + + let exe_path = Utf8PathBuf::from_path_buf(exe_path) + .map_err(|_| UpdaterError::Io(std::io::Error::other("agent executable path contains invalid UTF-8")))?; + + let exe_dir = exe_path + .parent() + .ok_or_else(|| UpdaterError::Io(std::io::Error::other("cannot determine agent executable directory")))?; + + let shim_path = exe_dir.join(AGENT_UPDATER_SHIM_NAME); + + if !shim_path.exists() { + return Err(UpdaterError::AgentUpdaterShimNotFound { path: shim_path }); } + + Ok(shim_path) +} + +/// Copy the shim executable to a temporary path (UUID-named) so it can run independently of +/// the installation directory. 
+async fn copy_shim_to_temp(shim_path: &Utf8Path) -> Result { + let temp_shim_path = Utf8PathBuf::from_path_buf(std::env::temp_dir()) + .expect("BUG: OS should always return valid UTF-8 temp path") + .join(format!("{}-devolutions-agent-updater.exe", Uuid::new_v4())); + + tokio::fs::copy(shim_path, &temp_shim_path) + .await + .map_err(UpdaterError::Io)?; + + Ok(temp_shim_path) +} + +/// Launch the updater shim and wait for it to finish, a shutdown signal, or a timeout. +/// +/// Sets [`AGENT_UPDATE_IN_PROGRESS`] for the duration so any concurrent update attempts +/// are rejected. Clears the flag on timeout or unexpected shim exit, but NOT on shutdown: +/// when a shutdown signal is received the MSI is assumed to be making progress (it will +/// stop and restart the agent service), so the flag is left set until the process exits. +/// +/// `DETACHED_PROCESS` disassociates the child from the parent's console. +/// `CREATE_NEW_PROCESS_GROUP` creates a new process group so that Ctrl+C signals from the +/// parent do not propagate to the child. +/// `CREATE_BREAKAWAY_FROM_JOB` removes the shim (and its children, including msiexec) from +/// the service's Windows Job Object. Without this flag the shim inherits the per-service +/// Job Object that the SCM assigns to every service process. When the MSI installer stops +/// the DevolutionsAgent service the SCM terminates that job, which kills the shim and its +/// msiexec child mid-installation, causing MSI rollback with errors 1923 / 1920. The agent +/// runs as LocalSystem, which holds SeTcbPrivilege; that allows breakaway from any job +/// regardless of whether the job has JOB_OBJECT_LIMIT_BREAKAWAY_OK set. +/// +/// When `downgrade_product_code` is `Some`, it is passed to the shim as `-x ` +/// (before the MSI path) so it can uninstall the old version before installing the new one. 
+async fn launch_updater_shim_detached( + ctx: &UpdaterCtx, + shim_path: &Utf8Path, + msi_path: &Utf8Path, + downgrade_product_code: Option, +) -> Result<(), UpdaterError> { + use std::sync::atomic::Ordering; + + // Flags reference: https://learn.microsoft.com/en-us/windows/win32/procthread/process-creation-flags + const DETACHED_PROCESS: u32 = 0x0000_0008; + const CREATE_NEW_PROCESS_GROUP: u32 = 0x0000_0200; + const CREATE_BREAKAWAY_FROM_JOB: u32 = 0x0100_0000; + const SHIM_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10 * 60); + + // Reject concurrent agent updates. + if AGENT_UPDATE_IN_PROGRESS + .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire) + .is_err() + { + return Err(UpdaterError::AgentUpdateAlreadyInProgress); + } + + // The shim derives its log path from the MSI path (see `devolutions-agent-updater/src/main.rs`). + let shim_log_path = format!("{}.shim.log", msi_path); + + let mut cmd = tokio::process::Command::new(shim_path.as_str()); + if let Some(code) = downgrade_product_code { + cmd.args(["-x", &code.braced().to_string()]); + } + cmd.arg(msi_path.as_str()); + let mut child = cmd + .stdin(std::process::Stdio::null()) + .stdout(std::process::Stdio::null()) + .stderr(std::process::Stdio::null()) + .creation_flags(DETACHED_PROCESS | CREATE_NEW_PROCESS_GROUP | CREATE_BREAKAWAY_FROM_JOB) + .spawn() + .map_err(|source| UpdaterError::AgentShimLaunch { source })?; + + info!(%shim_log_path, "Waiting for agent updater shim to complete (or service shutdown)"); + + let mut shutdown = ctx.shutdown_signal.clone(); + + tokio::select! { + result = child.wait() => { + // The shim exited before the agent service was stopped by the MSI. + // This is unexpected: the MSI should stop the service (killing us) before the + // shim finishes. Treat any exit — successful or not — as a failure. 
+ let code = result.ok().and_then(|s| s.code()).unwrap_or(-1); + AGENT_UPDATE_IN_PROGRESS.store(false, Ordering::Release); + error!( + %shim_log_path, + exit_code = code, + "Agent updater shim exited unexpectedly before the service was restarted; \ + the update may not have completed. Check the shim log for details.", + ); + } + _ = tokio::time::sleep(SHIM_TIMEOUT) => { + // Shim has been running for too long; something is wrong. + AGENT_UPDATE_IN_PROGRESS.store(false, Ordering::Release); + error!( + %shim_log_path, + timeout_secs = SHIM_TIMEOUT.as_secs(), + "Agent updater shim timed out; the update may not have completed. \ + Check the shim log for details.", + ); + } + _ = shutdown.wait() => { + // The service is being stopped — most likely by the MSI installer as part of the + // update process. Assume the update is proceeding correctly and exit cleanly. + // AGENT_UPDATE_IN_PROGRESS is intentionally left `true`; the next agent instance + // starts fresh and resets it via the static initialiser. 
+ info!("Shutdown signal received while waiting for updater shim; assuming MSI update is in progress"); + } + } + + Ok(()) } async fn install_msi(ctx: &UpdaterCtx, path: &Utf8Path, log_path: &Utf8Path) -> Result<(), UpdaterError> { @@ -224,7 +396,7 @@ fn ensure_enough_rights() -> Result<(), UpdaterError> { pub(crate) fn validate_package(ctx: &UpdaterCtx, path: &Utf8Path) -> Result<(), UpdaterError> { match ctx.product { - Product::Gateway | Product::HubService => validate_msi(ctx, path), + Product::Gateway | Product::HubService | Product::Agent => validate_msi(ctx, path), } } diff --git a/devolutions-agent/src/updater/product.rs b/devolutions-agent/src/updater/product.rs index d6a1644df..ed0874e64 100644 --- a/devolutions-agent/src/updater/product.rs +++ b/devolutions-agent/src/updater/product.rs @@ -1,15 +1,19 @@ use std::fmt; use std::str::FromStr; -use devolutions_agent_shared::{ProductUpdateInfo, UpdateJson}; +use devolutions_agent_shared::UpdateProductKey; -use crate::updater::productinfo::{GATEWAY_PRODUCT_ID, HUB_SERVICE_PRODUCT_ID}; +use crate::updater::productinfo::{AGENT_PRODUCT_ID, GATEWAY_PRODUCT_ID, HUB_SERVICE_PRODUCT_ID}; /// Product IDs to track updates for #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub(crate) enum Product { + /// Devolutions Gateway service Gateway, + /// Devolutions Hub Service HubService, + /// Devolutions Agent service (self-update) + Agent, } impl fmt::Display for Product { @@ -17,6 +21,7 @@ impl fmt::Display for Product { match self { Product::Gateway => write!(f, "Gateway"), Product::HubService => write!(f, "HubService"), + Product::Agent => write!(f, "Agent"), } } } @@ -28,16 +33,19 @@ impl FromStr for Product { match s { "Gateway" => Ok(Product::Gateway), "HubService" => Ok(Product::HubService), + "Agent" => Ok(Product::Agent), _ => Err(()), } } } impl Product { - pub(crate) fn get_update_info(self, update_json: &UpdateJson) -> Option { + /// Convert to the corresponding [`UpdateProductKey`] for looking up update info in a 
products map. + pub(crate) fn as_update_product_key(self) -> UpdateProductKey { match self { - Product::Gateway => update_json.gateway.clone(), - Product::HubService => update_json.hub_service.clone(), + Product::Gateway => UpdateProductKey::Gateway, + Product::HubService => UpdateProductKey::HubService, + Product::Agent => UpdateProductKey::Agent, } } @@ -45,13 +53,13 @@ impl Product { match self { Product::Gateway => GATEWAY_PRODUCT_ID, Product::HubService => HUB_SERVICE_PRODUCT_ID, + Product::Agent => AGENT_PRODUCT_ID, } } pub(crate) const fn get_package_extension(self) -> &'static str { match self { - Product::Gateway => "msi", - Product::HubService => "msi", + Product::Gateway | Product::HubService | Product::Agent => "msi", } } } diff --git a/devolutions-agent/src/updater/product_actions.rs b/devolutions-agent/src/updater/product_actions.rs index 3b745eafd..5de7767c2 100644 --- a/devolutions-agent/src/updater/product_actions.rs +++ b/devolutions-agent/src/updater/product_actions.rs @@ -112,6 +112,9 @@ impl ServiceUpdateActions { let should_start = match self.product { Product::Gateway => !state.startup_was_automatic && state.was_running, Product::HubService => state.was_running, + // INVARIANT: AgentSelfUpdateActions is used for Product::Agent, not + // ServiceUpdateActions; this branch is unreachable. + Product::Agent => unreachable!("ServiceUpdateActions is never used for Product::Agent"), }; if should_start { @@ -185,6 +188,9 @@ impl ProductUpdateActions for ServiceUpdateActions { warn!("No Hub Service features detected, installer may use defaults"); } } + // INVARIANT: AgentSelfUpdateActions is used for Product::Agent, not + // ServiceUpdateActions; this branch is unreachable. 
+ Product::Agent => unreachable!("ServiceUpdateActions is never used for Product::Agent"), } Vec::new() @@ -208,5 +214,33 @@ pub(crate) fn build_product_actions(product: Product) -> Box Box::new(AgentSelfUpdateActions), + } +} + +/// Product update actions for Devolutions Agent self-update. +/// +/// The agent service lifecycle (stop/start) is fully managed by the MSI installer, so no +/// explicit service manipulation is needed here. The install step launches a detached shim +/// process that runs msiexec, allowing the installer to stop and restart the agent service +/// without interrupting the updater shim. +struct AgentSelfUpdateActions; + +impl ProductUpdateActions for AgentSelfUpdateActions { + fn pre_update(&mut self) -> Result<(), UpdaterError> { + // The MSI installer manages the agent service lifecycle. + Ok(()) + } + + fn get_msiexec_install_params(&self) -> Vec { + // No extra msiexec parameters are needed for the agent self-update. + // The updater shim launches msiexec directly with default parameters. + Vec::new() + } + + fn post_update(&mut self) -> Result<(), UpdaterError> { + // The MSI installer manages the agent service lifecycle. + // The new agent version will start automatically after the MSI completes. 
+ Ok(()) } } diff --git a/devolutions-agent/src/updater/productinfo/mod.rs b/devolutions-agent/src/updater/productinfo/mod.rs index e9d20966d..5f3a600e7 100644 --- a/devolutions-agent/src/updater/productinfo/mod.rs +++ b/devolutions-agent/src/updater/productinfo/mod.rs @@ -6,4 +6,6 @@ pub(crate) const GATEWAY_PRODUCT_ID: &str = "Gateway"; pub(crate) const HUB_SERVICE_PRODUCT_ID: &str = "HubServices"; +pub(crate) const AGENT_PRODUCT_ID: &str = "Agent"; + pub(crate) use db::{ProductInfoDb, get_target_arch}; diff --git a/devolutions-agent/src/updater/security.rs b/devolutions-agent/src/updater/security.rs index 43fd367fb..c692189c0 100644 --- a/devolutions-agent/src/updater/security.rs +++ b/devolutions-agent/src/updater/security.rs @@ -15,6 +15,20 @@ use crate::updater::UpdaterError; /// - Users: Read pub(crate) const UPDATE_JSON_DACL: &str = "D:PAI(A;;FA;;;SY)(A;;0x1201bf;;;NS)(A;;FA;;;BA)(A;;FR;;;BU)"; +/// DACL for the update_status.json file: +/// Owner: SYSTEM +/// Group: SYSTEM +/// Access: +/// - SYSTEM: Full control +/// - Administrators: Full control +/// - Users: Read (covers NETWORK SERVICE via membership in Authenticated Users) +/// +/// Unlike `UPDATE_JSON_DACL`, NETWORK SERVICE does not receive write access — the agent is +/// the sole writer of this file. An explicit NS entry is not needed because the built-in +/// Users group (BU) already grants read access to all authenticated users, including +/// the NETWORK SERVICE account. +pub(crate) const UPDATE_STATUS_JSON_DACL: &str = "D:PAI(A;;FA;;;SY)(A;;FA;;;BA)(A;;FR;;;BU)"; + /// Set DACL (Discretionary Access Control List) on a specified file. 
pub(crate) fn set_file_dacl(file_path: &Utf8Path, acl: &str) -> Result<(), UpdaterError> { use windows::Win32::Foundation::{ERROR_SUCCESS, FALSE, HLOCAL, LocalFree}; diff --git a/devolutions-gateway/openapi/gateway-api.yaml b/devolutions-gateway/openapi/gateway-api.yaml index 8d0947232..15aaa66b6 100644 --- a/devolutions-gateway/openapi/gateway-api.yaml +++ b/devolutions-gateway/openapi/gateway-api.yaml @@ -7,7 +7,7 @@ info: email: infos@devolutions.net license: name: MIT/Apache-2.0 - version: 2025.3.2 + version: 2026.1.1 paths: /jet/config: patch: @@ -595,25 +595,70 @@ paths: - scope_token: - gateway.traffic.claim /jet/update: + get: + tags: + - Update + summary: Retrieve the currently installed version of each Devolutions product. + description: |- + Reads `update_status.json`, which is written by the Devolutions Agent on startup and + refreshed after every update run. When the file does not exist (agent not installed + or is an older version), returns an empty product map. + operationId: GetUpdateProducts + responses: + '200': + description: Installed product versions + content: + application/json: + schema: + $ref: '#/components/schemas/GetUpdateProductsResponse' + '401': + description: Invalid or missing authorization token + '403': + description: Insufficient permissions + '500': + description: Failed to read agent status file + '503': + description: Agent updater service is unavailable + security: + - scope_token: + - gateway.update.read post: tags: - Update - summary: Triggers Devolutions Gateway update process. + summary: Trigger an update for one or more Devolutions products. description: |- - This is done via updating `Agent/update.json` file, which is then read by Devolutions Agent - when changes are detected. If the version written to `update.json` is indeed higher than the - currently installed version, Devolutions Agent will proceed with the update process. 
+ Writes the requested version(s) into `Agent/update.json`, which is watched by Devolutions + Agent. When a requested version is higher than the installed version the agent proceeds + with the update. + + **Body form** (preferred): pass a JSON body with a `Products` map. + + **Query-param form** (legacy, gateway-only): `POST /jet/update?version=latest`. + This form updates only the Gateway product and is kept for backward compatibility. + + Both forms cannot be used simultaneously; doing so returns HTTP 400. operationId: TriggerUpdate parameters: - name: version in: query - description: The version to install; use 'latest' for the latest version, or 'w.x.y.z' for a specific version - required: true + description: Gateway-only target version; use the request body for multi-product updates + required: false + deprecated: true schema: type: string + nullable: true + requestBody: + description: Products and target versions to update + content: + application/json: + schema: + allOf: + - $ref: '#/components/schemas/UpdateRequestSchema' + nullable: true + required: false responses: '200': - description: Update request has been processed successfully + description: Update request accepted content: application/json: schema: @@ -625,7 +670,72 @@ paths: '403': description: Insufficient permissions '500': - description: Agent updater service is malfunctioning + description: Failed to write update manifest + '503': + description: Agent updater service is unavailable + security: + - scope_token: + - gateway.update + /jet/update/schedule: + get: + tags: + - Update + summary: Retrieve the current Devolutions Agent auto-update schedule. + description: |- + Reads the `Schedule` field from `update_status.json`. When the field is absent the response + contains zeroed defaults (`Enabled: false`, interval `0`, window start `0`, no products).
+ operationId: GetUpdateSchedule + responses: + '200': + description: Current auto-update schedule + content: + application/json: + schema: + $ref: '#/components/schemas/GetUpdateScheduleResponse' + '401': + description: Invalid or missing authorization token + '403': + description: Insufficient permissions + '500': + description: Failed to read agent status file + '503': + description: Agent updater service is unavailable + security: + - scope_token: + - gateway.update.read + post: + tags: + - Update + summary: Set the Devolutions Agent auto-update schedule. + description: |- + Writes the `Schedule` field into `update.json`. The agent watches this file and + applies the new schedule immediately, then persists it to `agent.json`. + + All other fields in `update.json` are preserved; the `VersionMinor` field is reset to + the minor version this gateway build understands so the agent does not see an unknown + future version. + operationId: SetUpdateSchedule + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/SetUpdateScheduleRequest' + required: true + responses: + '200': + description: Auto-update schedule applied + content: + application/json: + schema: + $ref: '#/components/schemas/SetUpdateScheduleResponse' + '400': + description: Bad request + '401': + description: Invalid or missing authorization token + '403': + description: Insufficient permissions + '500': + description: Failed to write update manifest '503': description: Agent updater service is unavailable security: @@ -708,6 +818,7 @@ components: - gateway.recording.delete - gateway.recordings.read - gateway.update + - gateway.update.read - gateway.preflight - gateway.traffic.claim - gateway.traffic.ack @@ -885,6 +996,61 @@ components: - connect_failure - normal_termination - abnormal_termination + GetUpdateProductsResponse: + type: object + description: Installed version of each product, as reported by Devolutions Agent. 
+ required: + - ManifestVersion + properties: + ManifestVersion: + type: string + description: Version of the `update_status.json` format in `"major.minor"` form (e.g. `"1.1"`). + Products: + type: object + description: Map of product name to API-specific product info. + additionalProperties: + $ref: '#/components/schemas/UpdateProductInfo' + GetUpdateScheduleResponse: + type: object + description: Current auto-update schedule for Devolutions Agent. + required: + - ManifestVersion + - Enabled + - Interval + - UpdateWindowStart + properties: + Enabled: + type: boolean + description: Enable periodic Devolutions Agent self-update checks. + Interval: + type: integer + format: int64 + description: |- + Minimum interval between auto-update checks, in seconds. + + `0` means check once at `UpdateWindowStart`. + minimum: 0 + ManifestVersion: + type: string + description: Version of the `update_status.json` format in `"major.minor"` form (e.g. `"1.1"`). + Products: + type: array + items: + $ref: '#/components/schemas/UpdateProduct' + description: Products the agent autonomously polls for new versions. + UpdateWindowEnd: + type: integer + format: int32 + description: |- + End of the maintenance window as seconds past midnight (local time, exclusive). + `None` means no upper bound (single check at `UpdateWindowStart`). + nullable: true + minimum: 0 + UpdateWindowStart: + type: integer + format: int32 + description: Start of the maintenance window as seconds past midnight (local time). + minimum: 0 Heartbeat: type: object required: @@ -1137,7 +1303,7 @@ components: type: integer format: int32 description: |- - Minimum persistance duration in seconds for the data provisioned via this operation. + Minimum persistence duration in seconds for the data provisioned via this operation. Optional parameter for "provision-token" and "provision-credentials" kinds. 
nullable: true @@ -1361,6 +1527,45 @@ components: $ref: '#/components/schemas/MonitorDefinitionProbeTypeError' description: An optional list of probes that this server could not parse. nullable: true + SetUpdateScheduleRequest: + type: object + description: Desired auto-update schedule to apply to Devolutions Agent. + required: + - Enabled + properties: + Enabled: + type: boolean + description: Enable periodic Devolutions Agent self-update checks. + Interval: + type: integer + format: int64 + description: |- + Minimum interval between auto-update checks, in seconds. + + `0` means check once at `UpdateWindowStart` (default). + minimum: 0 + Products: + type: array + items: + $ref: '#/components/schemas/UpdateProduct' + description: 'Products the agent autonomously polls for new versions (default: empty).' + UpdateWindowEnd: + type: integer + format: int32 + description: |- + End of the maintenance window as seconds past midnight in local time, exclusive. + + `null` (default) means no upper bound - a single check fires at `UpdateWindowStart`. + When end < start the window crosses midnight. + nullable: true + minimum: 0 + UpdateWindowStart: + type: integer + format: int32 + description: 'Start of the maintenance window as seconds past midnight in local time (default: `7200` = 02:00).' + minimum: 0 + SetUpdateScheduleResponse: + type: object SubProvisionerKey: type: object required: @@ -1455,8 +1660,31 @@ components: enum: - tcp - udp + UpdateProduct: + type: string + description: |- + Product names accepted by the update endpoint. + + Known values are `Gateway`, `HubService`, and `Agent`. Any other product name is also accepted and forwarded to the agent unchanged so future product types are supported transparently. + UpdateProductInfo: + type: object + description: Per-product update information. 
+ required: + - Version + properties: + Version: + $ref: '#/components/schemas/VersionSpecification' + UpdateRequest: + type: object + description: |- + OpenAPI schema for the update request body. + + The API accepts a map from product name to update information. + additionalProperties: + $ref: '#/components/schemas/UpdateProductInfo' UpdateResponse: type: object + description: Response returned by the update endpoint. securitySchemes: jrec_token: type: http diff --git a/devolutions-gateway/src/api/mod.rs b/devolutions-gateway/src/api/mod.rs index a5cbbc643..3b0d1343f 100644 --- a/devolutions-gateway/src/api/mod.rs +++ b/devolutions-gateway/src/api/mod.rs @@ -35,7 +35,14 @@ pub fn make_router(state: crate::DgwState) -> axum::Router { .nest("/jet/webapp", webapp::make_router(state.clone())) .nest("/jet/net", net::make_router(state.clone())) .nest("/jet/traffic", traffic::make_router(state.clone())) - .route("/jet/update", axum::routing::post(update::trigger_update_check)); + .route( + "/jet/update", + axum::routing::get(update::get_update_products).post(update::trigger_update_check), + ) + .route( + "/jet/update/schedule", + axum::routing::get(update::get_update_schedule).post(update::set_update_schedule), + ); if state.conf_handle.get_conf().web_app.enabled { router = router.route( diff --git a/devolutions-gateway/src/api/update.rs b/devolutions-gateway/src/api/update.rs index 4a1139dc1..8b0bec6cc 100644 --- a/devolutions-gateway/src/api/update.rs +++ b/devolutions-gateway/src/api/update.rs @@ -1,74 +1,559 @@ +use std::collections::HashMap; + use axum::Json; -use axum::extract::Query; -use devolutions_agent_shared::{ProductUpdateInfo, UpdateJson, VersionSpecification, get_updater_file_path}; +use axum::body::Bytes; +use devolutions_agent_shared::{ + ProductUpdateInfo, UPDATE_MANIFEST_V2_MINOR_VERSION, UpdateManifest, UpdateManifestV2, UpdateProductKey, + UpdateSchedule, UpdateStatus, VersionSpecification, default_schedule_window_start, get_update_status_file_path, 
+ get_updater_file_path, +}; use hyper::StatusCode; +use tokio::fs; -use crate::extract::UpdateScope; +use crate::extract::{UpdateReadScope, UpdateScope}; use crate::http::{HttpError, HttpErrorBuilder}; -#[derive(Deserialize)] -#[serde(rename_all = "camelCase")] -pub(crate) struct UpdateQueryParam { - version: VersionSpecification, +// ── Shared async file I/O ───────────────────────────────────────────────────── + +/// Read and parse `update.json` asynchronously. +/// +/// Returns `(manifest, was_v2)` where `was_v2` indicates whether the file on disk was +/// already in V2 format. Returns `503` when the file doesn't exist (agent not installed) +/// and `500` on any other I/O or parse error. A legacy V1 file is transparently upgraded +/// to a V2 manifest in memory; the file on disk is rewritten on the next write. +async fn read_manifest() -> Result<(UpdateManifestV2, bool), HttpError> { + let path = get_updater_file_path(); + let data = match fs::read(&path).await { + Ok(d) => d, + Err(e) if e.kind() == std::io::ErrorKind::NotFound => { + return Err( + HttpErrorBuilder::new(StatusCode::SERVICE_UNAVAILABLE).msg("failed to open update manifest file") + ); + } + Err(e) => { + return Err(HttpError::internal() + .with_msg("failed to read update manifest file") + .build(e)); + } + }; + match UpdateManifest::parse(&data).map_err( + HttpError::internal() + .with_msg("update manifest file contains invalid JSON") + .err(), + )? { + UpdateManifest::ManifestV2(v2) => Ok((v2, true)), + // V1 → V2 upgrade: carry products forward, leave Schedule as None. + UpdateManifest::Legacy(v1) => { + let mut products = HashMap::new(); + if let Some(gw) = v1.gateway { + products.insert(UpdateProductKey::Gateway, gw); + } + if let Some(hs) = v1.hub_service { + products.insert(UpdateProductKey::HubService, hs); + } + Ok(( + UpdateManifestV2 { + products, + ..UpdateManifestV2::default() + }, + false, + )) + } + } +} + +/// Serialise `manifest` and write it back to `update.json` asynchronously. 
+/// +/// Always resets `VersionMinor` to [`UPDATE_MANIFEST_V2_MINOR_VERSION`] so the agent +/// never sees a minor version it wasn't built against. +async fn write_manifest(mut manifest: UpdateManifestV2) -> Result<(), HttpError> { + manifest.version_minor = UPDATE_MANIFEST_V2_MINOR_VERSION; + let serialized = serde_json::to_string(&UpdateManifest::ManifestV2(manifest)).map_err( + HttpError::internal() + .with_msg("failed to serialize update manifest") + .err(), + )?; + fs::write(get_updater_file_path(), serialized).await.map_err( + HttpError::internal() + .with_msg("failed to write update manifest file") + .err(), + ) +} + +// ── OpenAPI request / response types ───────────────────────────────────────── + +/// Per-product update information. +#[cfg_attr(feature = "openapi", derive(utoipa::ToSchema))] +#[derive(Debug, Clone, Deserialize, Serialize)] +#[serde(rename_all = "PascalCase")] +pub(crate) struct UpdateProductInfo { + /// Requested or installed version: `"latest"` or `"YYYY.M.D"` / `"YYYY.M.D.R"`. + pub version: VersionSpecification, +} + +/// Known product names accepted by the update endpoint. +/// +/// `Other` captures any product name not yet known to this gateway version; +/// it is forwarded to the agent unchanged so future agents can act on it. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub(crate) enum UpdateProduct { + Gateway, + HubService, + Agent, + /// A product name not recognised by this gateway version. + Other(String), +} + +#[cfg(feature = "openapi")] +impl<'__s> utoipa::ToSchema<'__s> for UpdateProduct { + fn schema() -> (&'__s str, utoipa::openapi::RefOr) { + use utoipa::openapi::RefOr; + use utoipa::openapi::schema::{ObjectBuilder, SchemaType}; + ( + "UpdateProduct", + RefOr::T( + ObjectBuilder::new() + .schema_type(SchemaType::String) + .description(Some( + "Product names accepted by the update endpoint.\n\n\ + Known values are `Gateway`, `HubService`, and `Agent`. 
\ + Any other product name is also accepted and forwarded to the agent \ + unchanged so future product types are supported transparently.", + )) + .build() + .into(), + ), + ) + } +} + +impl<'de> serde::Deserialize<'de> for UpdateProduct { + fn deserialize>(d: D) -> Result { + struct V; + impl serde::de::Visitor<'_> for V { + type Value = UpdateProduct; + fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "a product name string") + } + fn visit_str(self, s: &str) -> Result { + Ok(match s { + "Gateway" => UpdateProduct::Gateway, + "HubService" => UpdateProduct::HubService, + "Agent" => UpdateProduct::Agent, + other => UpdateProduct::Other(other.to_owned()), + }) + } + } + d.deserialize_str(V) + } } +impl serde::Serialize for UpdateProduct { + fn serialize(&self, s: S) -> Result { + s.serialize_str(match self { + Self::Gateway => "Gateway", + Self::HubService => "HubService", + Self::Agent => "Agent", + Self::Other(name) => name.as_str(), + }) + } +} + +/// Request body for the unified update endpoint. +/// +/// Every key in `Products` is a product name. Known products (`Gateway`, `Agent`, +/// `HubService`) are processed natively; any other name is forwarded as-is to the +/// agent so future product types are supported transparently. +#[derive(Debug, Default, Deserialize)] +#[serde(rename_all = "PascalCase")] +pub(crate) struct UpdateRequest { + /// Map of product name to API-specific product info. + #[serde(default)] + pub products: HashMap, +} + +/// OpenAPI schema for the update request body. +/// +/// The API accepts a map from product name to update information. +#[cfg(feature = "openapi")] +#[derive(Serialize, utoipa::ToSchema)] +#[schema(as = UpdateRequest)] +#[serde(transparent)] +pub(crate) struct UpdateRequestSchema(pub HashMap); + +/// Response returned by the update endpoint. 
#[cfg_attr(feature = "openapi", derive(utoipa::ToSchema))] #[derive(Serialize)] pub(crate) struct UpdateResponse {} -/// Triggers Devolutions Gateway update process. +// ── Conversion: API types → shared manifest types ───────────────────────────── + +impl From for UpdateProductKey { + fn from(p: UpdateProduct) -> Self { + match p { + UpdateProduct::Gateway => UpdateProductKey::Gateway, + UpdateProduct::HubService => UpdateProductKey::HubService, + UpdateProduct::Agent => UpdateProductKey::Agent, + UpdateProduct::Other(s) => UpdateProductKey::Other(s), + } + } +} + +impl From for UpdateProduct { + fn from(k: UpdateProductKey) -> Self { + match k { + UpdateProductKey::Gateway => UpdateProduct::Gateway, + UpdateProductKey::HubService => UpdateProduct::HubService, + UpdateProductKey::Agent => UpdateProduct::Agent, + UpdateProductKey::Other(s) => UpdateProduct::Other(s), + } + } +} + +/// Apply updated products from the API request to `manifest`. /// -/// This is done via updating `Agent/update.json` file, which is then read by Devolutions Agent -/// when changes are detected. If the version written to `update.json` is indeed higher than the -/// currently installed version, Devolutions Agent will proceed with the update process. +/// - **V2 on disk** (`was_v2 = true`): all products accepted. +/// - **V1 on disk** (`was_v2 = false`): only `Gateway` and `HubService` accepted; any other +/// product returns `400 Bad Request`. +fn apply_products(req: UpdateRequest, manifest: &mut UpdateManifestV2, was_v2: bool) -> Result<(), HttpError> { + if was_v2 { + manifest.products = req + .products + .into_iter() + .map(|(k, v)| { + ( + UpdateProductKey::from(k), + ProductUpdateInfo { + target_version: v.version, + }, + ) + }) + .collect(); + } else { + // Legacy path: only Gateway and HubService are valid in V1. 
+ manifest.products.clear(); + for (product, info) in req.products { + let pi = ProductUpdateInfo { + target_version: info.version, + }; + match product { + UpdateProduct::Gateway => { + manifest.products.insert(UpdateProductKey::Gateway, pi); + } + UpdateProduct::HubService => { + manifest.products.insert(UpdateProductKey::HubService, pi); + } + UpdateProduct::Agent => { + return Err(HttpErrorBuilder::new(StatusCode::BAD_REQUEST) + .msg("Agent updates require a V2-capable agent; upgrade the installed agent first")); + } + UpdateProduct::Other(name) => { + return Err(HttpError::bad_request() + .with_msg("product is not supported by the installed legacy agent") + .build(format!("product `{name}` requires a V2-capable agent"))); + } + } + } + } + Ok(()) +} + +/// Trigger an update for one or more Devolutions products. +/// +/// Writes the requested version(s) into `Agent/update.json`, which is watched by Devolutions +/// Agent. When a requested version is higher than the installed version the agent proceeds +/// with the update. +/// +/// **Body form** (preferred): pass a JSON body with a `Products` map. +/// +/// **Query-param form** (legacy, gateway-only): `POST /jet/update?version=latest`. +/// This form updates only the Gateway product and is kept for backward compatibility. +/// +/// Both forms cannot be used simultaneously; doing so returns HTTP 400. 
#[cfg_attr(feature = "openapi", utoipa::path( post, operation_id = "TriggerUpdate", tag = "Update", path = "/jet/update", params( - ("version" = String, Query, description = "The version to install; use 'latest' for the latest version, or 'w.x.y.z' for a specific version"), + ("version" = Option, Query, deprecated, description = "Gateway-only target version; use the request body for multi-product updates"), ), + request_body(content = Option, description = "Products and target versions to update", content_type = "application/json"), responses( - (status = 200, description = "Update request has been processed successfully", body = UpdateResponse), + (status = 200, description = "Update request accepted", body = UpdateResponse), (status = 400, description = "Bad request"), (status = 401, description = "Invalid or missing authorization token"), (status = 403, description = "Insufficient permissions"), - (status = 500, description = "Agent updater service is malfunctioning"), + (status = 500, description = "Failed to write update manifest"), (status = 503, description = "Agent updater service is unavailable"), ), security(("scope_token" = ["gateway.update"])), ))] pub(super) async fn trigger_update_check( - Query(query): Query, + uri: axum::http::Uri, _scope: UpdateScope, + body: Bytes, ) -> Result, HttpError> { - let target_version = query.version; + // Extract optional legacy `?version=` query param (gateway-only path). + let query_version: Option = uri.query().and_then(|q| { + q.split('&').find_map(|kv| { + kv.split_once('=') + .filter(|(k, _)| *k == "version") + .map(|(_, v)| v.to_owned()) + }) + }); - let updater_file_path = get_updater_file_path(); + // Parse the JSON body; an absent or empty body is treated as an empty product map. + let mut request: UpdateRequest = if body.is_empty() { + UpdateRequest::default() + } else { + serde_json::from_slice(&body).map_err(HttpError::bad_request().with_msg("invalid request body").err())? 
+ }; + + // Legacy query param: conflicts with an explicit body that already lists products. + if let Some(v) = query_version { + if !request.products.is_empty() { + return Err(HttpErrorBuilder::new(StatusCode::BAD_REQUEST) + .msg("cannot specify both query parameter and request body; use one or the other")); + } + // Build a Gateway-only update from the (deprecated) query param. + let version: VersionSpecification = v.parse().map_err( + HttpError::bad_request() + .with_msg("invalid version in query parameter") + .err(), + )?; + request + .products + .insert(UpdateProduct::Gateway, UpdateProductInfo { version }); + } - if !updater_file_path.exists() { + // Reject requests that specify no products at all; an empty write would clear any + // pending update requests already written into update.json. + if request.products.is_empty() { return Err( - HttpErrorBuilder::new(StatusCode::SERVICE_UNAVAILABLE).msg("Agent updater service is not installed") + HttpErrorBuilder::new(StatusCode::BAD_REQUEST).msg("request must specify at least one product to update") ); } - let update_json = UpdateJson { - gateway: Some(ProductUpdateInfo { target_version }), - hub_service: None, - }; + // Read the existing manifest (503 when agent is not installed). + // `was_v2` tells us if the file on disk was V2; determines which products are accepted. + let (mut manifest, was_v2) = read_manifest().await?; + apply_products(request, &mut manifest, was_v2)?; + write_manifest(manifest).await?; + + Ok(Json(UpdateResponse {})) +} - let update_json = serde_json::to_string(&update_json).map_err( +/// Installed version of each product, as reported by Devolutions Agent. +#[cfg_attr(feature = "openapi", derive(utoipa::ToSchema))] +#[derive(Serialize)] +#[serde(rename_all = "PascalCase")] +pub(crate) struct GetUpdateProductsResponse { + /// Version of the `update_status.json` format in `"major.minor"` form (e.g. `"1.1"`). 
+ pub manifest_version: String, + /// Map of product name to API-specific product info. + #[serde(default)] + pub products: HashMap, +} + +/// Retrieve the currently installed version of each Devolutions product. +/// +/// Reads `update_status.json`, which is written by the Devolutions Agent on startup and +/// refreshed after every update run. When the file does not exist (agent not installed +/// or is an older version), returns an empty product map. +#[cfg_attr(feature = "openapi", utoipa::path( + get, + operation_id = "GetUpdateProducts", + tag = "Update", + path = "/jet/update", + responses( + (status = 200, description = "Installed product versions", body = GetUpdateProductsResponse), + (status = 401, description = "Invalid or missing authorization token"), + (status = 403, description = "Insufficient permissions"), + (status = 500, description = "Failed to read agent status file"), + (status = 503, description = "Agent updater service is unavailable"), + ), + security(("scope_token" = ["gateway.update.read"])), +))] +pub(super) async fn get_update_products(_scope: UpdateReadScope) -> Result, HttpError> { + let path = get_update_status_file_path(); + let data = match fs::read(&path).await { + Ok(d) => d, + Err(e) if e.kind() == std::io::ErrorKind::NotFound => { + return Err(HttpErrorBuilder::new(StatusCode::SERVICE_UNAVAILABLE).msg("failed to open agent status file")); + } + Err(e) => { + return Err(HttpError::internal() + .with_msg("failed to read agent status file") + .build(e)); + } + }; + let status = UpdateStatus::parse(&data).map_err( HttpError::internal() - .with_msg("failed to serialize the update manifest") + .with_msg("agent status file contains invalid JSON") .err(), )?; + let manifest_version = status.version_string(); + let products = status + .into_products() + .into_iter() + .map(|(k, v)| (UpdateProduct::from(k), UpdateProductInfo { version: v.version })) + .collect(); + Ok(Json(GetUpdateProductsResponse { + manifest_version, + products, + })) 
+} + +// ── Update schedule: types and handlers ────────────────────────────────────── + +/// Current auto-update schedule for Devolutions Agent. +#[cfg_attr(feature = "openapi", derive(utoipa::ToSchema))] +#[derive(Serialize)] +pub(crate) struct GetUpdateScheduleResponse { + /// Version of the `update_status.json` format in `"major.minor"` form (e.g. `"1.1"`). + #[serde(rename = "ManifestVersion")] + pub manifest_version: String, + /// Enable periodic Devolutions Agent self-update checks. + #[serde(rename = "Enabled")] + pub enabled: bool, + /// Minimum interval between auto-update checks, in seconds. + /// + /// `0` means check once at `UpdateWindowStart`. + #[serde(rename = "Interval")] + pub interval: u64, + /// Start of the maintenance window as seconds past midnight (local time). + #[serde(rename = "UpdateWindowStart")] + pub update_window_start: u32, + /// End of the maintenance window as seconds past midnight (local time, exclusive). + /// `None` means no upper bound (single check at `UpdateWindowStart`). + #[serde(rename = "UpdateWindowEnd", skip_serializing_if = "Option::is_none")] + pub update_window_end: Option, + /// Products the agent autonomously polls for new versions. + #[serde(rename = "Products", default)] + pub products: Vec, +} + +/// Desired auto-update schedule to apply to Devolutions Agent. +#[cfg_attr(feature = "openapi", derive(utoipa::ToSchema))] +#[derive(Deserialize)] +pub(crate) struct SetUpdateScheduleRequest { + /// Enable periodic Devolutions Agent self-update checks. + #[serde(rename = "Enabled")] + pub enabled: bool, + /// Minimum interval between auto-update checks, in seconds. + /// + /// `0` means check once at `UpdateWindowStart` (default). + #[serde(rename = "Interval", default)] + pub interval: u64, + /// Start of the maintenance window as seconds past midnight in local time (default: `7200` = 02:00). 
+    #[serde(rename = "UpdateWindowStart", default = "default_schedule_window_start")]
+    pub update_window_start: u32,
+    /// End of the maintenance window as seconds past midnight in local time, exclusive.
+    ///
+    /// `null` (default) means no upper bound - a single check fires at `UpdateWindowStart`.
+    /// When end < start the window crosses midnight.
+    #[serde(rename = "UpdateWindowEnd", default)]
+    pub update_window_end: Option<u32>,
+    /// Products the agent autonomously polls for new versions (default: empty).
+    #[serde(rename = "Products", default)]
+    pub products: Vec<UpdateProduct>,
+}
-    std::fs::write(updater_file_path, update_json).map_err(
+impl From<SetUpdateScheduleRequest> for UpdateSchedule {
+    fn from(r: SetUpdateScheduleRequest) -> Self {
+        Self {
+            enabled: r.enabled,
+            interval: r.interval,
+            update_window_start: r.update_window_start,
+            update_window_end: r.update_window_end,
+            products: r.products.into_iter().map(UpdateProductKey::from).collect(),
+        }
+    }
+}
+
+#[cfg_attr(feature = "openapi", derive(utoipa::ToSchema))]
+#[derive(Serialize)]
+pub(crate) struct SetUpdateScheduleResponse {}
+
+/// Retrieve the current Devolutions Agent auto-update schedule.
+///
+/// Reads the `Schedule` field from `update_status.json`. When the field is absent the response
+/// contains zeroed defaults (`Enabled: false`, interval `0`, window start `0`, no products).
+#[cfg_attr(feature = "openapi", utoipa::path(
+    get,
+    operation_id = "GetUpdateSchedule",
+    tag = "Update",
+    path = "/jet/update/schedule",
+    responses(
+        (status = 200, description = "Current auto-update schedule", body = GetUpdateScheduleResponse),
+        (status = 401, description = "Invalid or missing authorization token"),
+        (status = 403, description = "Insufficient permissions"),
+        (status = 500, description = "Failed to read agent status file"),
+        (status = 503, description = "Agent updater service is unavailable"),
+    ),
+    security(("scope_token" = ["gateway.update.read"])),
+))]
+pub(super) async fn get_update_schedule(_scope: UpdateReadScope) -> Result<Json<GetUpdateScheduleResponse>, HttpError> {
+    let path = get_update_status_file_path();
+    let data = match fs::read(&path).await {
+        Ok(d) => d,
+        Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
+            return Err(HttpErrorBuilder::new(StatusCode::SERVICE_UNAVAILABLE).msg("failed to open agent status file"));
+        }
+        Err(e) => {
+            return Err(HttpError::internal()
+                .with_msg("failed to read agent status file")
+                .build(e));
+        }
+    };
+    let status = UpdateStatus::parse(&data).map_err(
         HttpError::internal()
-            .with_msg("failed to write the new `update.json` manifest on disk")
+            .with_msg("agent status file contains invalid JSON")
             .err(),
     )?;
+    let manifest_version = status.version_string();
+    let schedule = status.schedule().cloned().unwrap_or_default();
+    Ok(Json(GetUpdateScheduleResponse {
+        manifest_version,
+        enabled: schedule.enabled,
+        interval: schedule.interval,
+        update_window_start: schedule.update_window_start,
+        update_window_end: schedule.update_window_end,
+        products: schedule.products.into_iter().map(UpdateProduct::from).collect(),
+    }))
+}
-    Ok(Json(UpdateResponse {}))
+/// Set the Devolutions Agent auto-update schedule.
+///
+/// Writes the `Schedule` field into `update.json`. The agent watches this file and
+/// applies the new schedule immediately, then persists it to `agent.json`.
+///
+/// All other fields in `update.json` are preserved; the `VersionMinor` field is reset to
+/// the minor version this gateway build understands so the agent does not see an unknown
+/// future version.
+#[cfg_attr(feature = "openapi", utoipa::path(
+    post,
+    operation_id = "SetUpdateSchedule",
+    tag = "Update",
+    path = "/jet/update/schedule",
+    request_body = SetUpdateScheduleRequest,
+    responses(
+        (status = 200, description = "Auto-update schedule applied", body = SetUpdateScheduleResponse),
+        (status = 400, description = "Bad request"),
+        (status = 401, description = "Invalid or missing authorization token"),
+        (status = 403, description = "Insufficient permissions"),
+        (status = 500, description = "Failed to write update manifest"),
+        (status = 503, description = "Agent updater service is unavailable"),
+    ),
+    security(("scope_token" = ["gateway.update"])),
+))]
+pub(super) async fn set_update_schedule(
+    _scope: UpdateScope,
+    Json(body): Json<SetUpdateScheduleRequest>,
+) -> Result<Json<SetUpdateScheduleResponse>, HttpError> {
+    let (mut manifest, _) = read_manifest().await?;
+    manifest.schedule = Some(UpdateSchedule::from(body));
+    write_manifest(manifest).await?;
+    Ok(Json(SetUpdateScheduleResponse {}))
 }
diff --git a/devolutions-gateway/src/extract.rs b/devolutions-gateway/src/extract.rs
index 9f2450854..6e5cf3539 100644
--- a/devolutions-gateway/src/extract.rs
+++ b/devolutions-gateway/src/extract.rs
@@ -296,6 +296,26 @@ where
     }
 }
 
+#[derive(Clone, Copy)]
+pub struct UpdateReadScope;
+
+impl<S> FromRequestParts<S> for UpdateReadScope
+where
+    S: Send + Sync,
+{
+    type Rejection = HttpError;
+
+    async fn from_request_parts(parts: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
+        match ScopeToken::from_request_parts(parts, state).await?.0.scope {
+            AccessScope::Wildcard => Ok(Self),
+            // The full write scope also grants read access.
+ AccessScope::Update => Ok(Self), + AccessScope::UpdateRead => Ok(Self), + _ => Err(HttpError::forbidden().msg("invalid scope for route")), + } + } +} + #[derive(Clone, Copy)] pub struct PreflightScope; diff --git a/devolutions-gateway/src/openapi.rs b/devolutions-gateway/src/openapi.rs index fd46afd74..5ec30f40e 100644 --- a/devolutions-gateway/src/openapi.rs +++ b/devolutions-gateway/src/openapi.rs @@ -25,7 +25,10 @@ use crate::config::dto::{DataEncoding, PubKeyFormat, Subscriber}; crate::api::jrec::pull_recording_file, crate::api::webapp::sign_app_token, crate::api::webapp::sign_session_token, + crate::api::update::get_update_products, crate::api::update::trigger_update_check, + crate::api::update::get_update_schedule, + crate::api::update::set_update_schedule, crate::api::preflight::post_preflight, crate::api::net::get_net_config, crate::api::monitoring::handle_set_monitoring_config, @@ -51,7 +54,14 @@ use crate::config::dto::{DataEncoding, PubKeyFormat, Subscriber}; crate::token::AccessScope, crate::api::webapp::AppTokenSignRequest, crate::api::webapp::AppTokenContentType, + crate::api::update::GetUpdateProductsResponse, + crate::api::update::UpdateRequestSchema, + crate::api::update::UpdateProduct, + crate::api::update::UpdateProductInfo, crate::api::update::UpdateResponse, + crate::api::update::GetUpdateScheduleResponse, + crate::api::update::SetUpdateScheduleRequest, + crate::api::update::SetUpdateScheduleResponse, PreflightOperation, PreflightOperationKind, AppCredential, @@ -233,7 +243,7 @@ enum SubscriberMessageKind { struct SubscriberMessage { /// Name of the event type associated to this message. /// - /// Presence or absence of additionnal fields depends on the value of this field. + /// Presence or absence of additional fields depends on the value of this field. kind: SubscriberMessageKind, /// Date and time this message was produced. 
#[serde(with = "time::serde::rfc3339")] @@ -365,7 +375,7 @@ struct PreflightOperation { /// /// Required for "resolve-host" kind. host_to_resolve: Option, - /// Minimum persistance duration in seconds for the data provisioned via this operation. + /// Minimum persistence duration in seconds for the data provisioned via this operation. /// /// Optional parameter for "provision-token" and "provision-credentials" kinds. time_to_live: Option, diff --git a/devolutions-gateway/src/token.rs b/devolutions-gateway/src/token.rs index 75b7d112e..00199bbf1 100644 --- a/devolutions-gateway/src/token.rs +++ b/devolutions-gateway/src/token.rs @@ -454,6 +454,8 @@ pub enum AccessScope { RecordingsRead, #[serde(rename = "gateway.update")] Update, + #[serde(rename = "gateway.update.read")] + UpdateRead, #[serde(rename = "gateway.preflight")] Preflight, #[serde(rename = "gateway.traffic.claim")] diff --git a/package/AgentWindowsManaged/Actions/AgentActions.cs b/package/AgentWindowsManaged/Actions/AgentActions.cs index 407b8bebf..daba52198 100644 --- a/package/AgentWindowsManaged/Actions/AgentActions.cs +++ b/package/AgentWindowsManaged/Actions/AgentActions.cs @@ -254,7 +254,8 @@ internal static class AgentActions Sequence = Sequence.InstallExecuteSequence, Return = Return.check, Step = Step.StartServices, - When = When.Before + When = When.Before, + Condition = Condition.NOT_BeingRemoved & new Condition("(UILevel >= 3 OR WIXSHARP_MANAGED_UI_HANDLE <> \"\")") }; private static readonly ElevatedManagedAction registerExplorerCommand = new( diff --git a/package/AgentWindowsManaged/Program.cs b/package/AgentWindowsManaged/Program.cs index 883b274f3..47338d33a 100644 --- a/package/AgentWindowsManaged/Program.cs +++ b/package/AgentWindowsManaged/Program.cs @@ -87,6 +87,8 @@ private static string DevolutionsDesktopAgentPath private static string DevolutionsPedmShellExtMsix => ResolveArtifact("DAGENT_PEDM_SHELL_EXT_MSIX", "..\\..\\target\\debug\\DevolutionsPedmShellExt.msix"); + private static 
string DevolutionsAgentUpdaterExePath => ResolveArtifact("DAGENT_UPDATER_EXECUTABLE", "..\\..\\target\\debug\\devolutions-agent-updater.exe"); + private static string DevolutionsSession => ResolveArtifact("DAGENT_SESSION_EXECUTABLE", "..\\..\\target\\debug\\devolutions-session.exe"); private static string DevolutionsTun2SocksExe => ResolveArtifact("DAGENT_TUN2SOCKS_EXE", "..\\..\\tun2socks.exe"); @@ -284,6 +286,13 @@ static void Main() }, }, new (Features.SESSION_FEATURE, DevolutionsSession) + { + TargetFileName = "DevolutionsSession.exe" + }, + new (Features.AGENT_UPDATER_FEATURE, DevolutionsAgentUpdaterExePath) + { + TargetFileName = "DevolutionsAgentUpdater.exe" + } }, Dirs = new[] { @@ -291,8 +300,8 @@ static void Main() new Dir(Features.PEDM_FEATURE, "ShellExt", new File(Features.PEDM_FEATURE, DevolutionsPedmShellExtDll), new File(Features.PEDM_FEATURE, DevolutionsPedmShellExtMsix)), - new Dir(Features.AGENT_FEATURE, "tun2socks", - new File(Features.AGENT_FEATURE, DevolutionsTun2SocksExe), + new Dir(Features.AGENT_FEATURE, "tun2socks", + new File(Features.AGENT_FEATURE, DevolutionsTun2SocksExe), new File(Features.AGENT_FEATURE, DevolutionsWintunDll)) } })),