diff --git a/api/pom.xml b/api/pom.xml index c80c35593451..4cdb57b6414c 100644 --- a/api/pom.xml +++ b/api/pom.xml @@ -71,6 +71,11 @@ cloud-framework-direct-download ${project.version} + + org.apache.cloudstack + cloud-framework-kms + ${project.version} + diff --git a/api/src/main/java/com/cloud/event/EventTypes.java b/api/src/main/java/com/cloud/event/EventTypes.java index 889e821a0905..efb4227a0a2a 100644 --- a/api/src/main/java/com/cloud/event/EventTypes.java +++ b/api/src/main/java/com/cloud/event/EventTypes.java @@ -36,6 +36,8 @@ import org.apache.cloudstack.gpu.GpuDevice; import org.apache.cloudstack.gpu.VgpuProfile; import org.apache.cloudstack.ha.HAConfig; +import org.apache.cloudstack.kms.HSMProfile; +import org.apache.cloudstack.kms.KMSKey; import org.apache.cloudstack.network.BgpPeer; import org.apache.cloudstack.network.Ipv4GuestSubnetNetworkMap; import org.apache.cloudstack.quota.QuotaTariff; @@ -271,6 +273,20 @@ public class EventTypes { public static final String EVENT_CA_CERTIFICATE_REVOKE = "CA.CERTIFICATE.REVOKE"; public static final String EVENT_CA_CERTIFICATE_PROVISION = "CA.CERTIFICATE.PROVISION"; + // KMS (Key Management Service) events + public static final String EVENT_KMS_KEY_WRAP = "KMS.KEY.WRAP"; + public static final String EVENT_KMS_KEY_UNWRAP = "KMS.KEY.UNWRAP"; + public static final String EVENT_KMS_KEY_CREATE = "KMS.KEY.CREATE"; + public static final String EVENT_KMS_KEY_UPDATE = "KMS.KEY.UPDATE"; + public static final String EVENT_KMS_KEY_ROTATE = "KMS.KEY.ROTATE"; + public static final String EVENT_KMS_KEY_DELETE = "KMS.KEY.DELETE"; + public static final String EVENT_VOLUME_MIGRATE_TO_KMS = "VOLUME.MIGRATE.TO.KMS"; + + // HSM Profile events + public static final String EVENT_HSM_PROFILE_CREATE = "HSM.PROFILE.CREATE"; + public static final String EVENT_HSM_PROFILE_UPDATE = "HSM.PROFILE.UPDATE"; + public static final String EVENT_HSM_PROFILE_DELETE = "HSM.PROFILE.DELETE"; + // Account events public static final String 
EVENT_ACCOUNT_ENABLE = "ACCOUNT.ENABLE"; public static final String EVENT_ACCOUNT_DISABLE = "ACCOUNT.DISABLE"; @@ -1009,6 +1025,19 @@ public class EventTypes { entityEventDetails.put(EVENT_VOLUME_RECOVER, Volume.class); entityEventDetails.put(EVENT_VOLUME_CHANGE_DISK_OFFERING, Volume.class); + // KMS Key Events + entityEventDetails.put(EVENT_KMS_KEY_CREATE, KMSKey.class); + entityEventDetails.put(EVENT_KMS_KEY_UPDATE, KMSKey.class); + entityEventDetails.put(EVENT_KMS_KEY_UNWRAP, KMSKey.class); + entityEventDetails.put(EVENT_KMS_KEY_WRAP, KMSKey.class); + entityEventDetails.put(EVENT_KMS_KEY_DELETE, KMSKey.class); + entityEventDetails.put(EVENT_KMS_KEY_ROTATE, KMSKey.class); + + // HSM Profile Events + entityEventDetails.put(EVENT_HSM_PROFILE_CREATE, HSMProfile.class); + entityEventDetails.put(EVENT_HSM_PROFILE_UPDATE, HSMProfile.class); + entityEventDetails.put(EVENT_HSM_PROFILE_DELETE, HSMProfile.class); + // Domains entityEventDetails.put(EVENT_DOMAIN_CREATE, Domain.class); entityEventDetails.put(EVENT_DOMAIN_DELETE, Domain.class); diff --git a/api/src/main/java/com/cloud/offering/DiskOfferingInfo.java b/api/src/main/java/com/cloud/offering/DiskOfferingInfo.java index 12dcf423e34f..197565a1fccb 100644 --- a/api/src/main/java/com/cloud/offering/DiskOfferingInfo.java +++ b/api/src/main/java/com/cloud/offering/DiskOfferingInfo.java @@ -23,6 +23,7 @@ public class DiskOfferingInfo { private Long _size; private Long _minIops; private Long _maxIops; + private Long _kmsKeyId; public DiskOfferingInfo() { } @@ -38,6 +39,14 @@ public DiskOfferingInfo(DiskOffering diskOffering, Long size, Long minIops, Long _maxIops = maxIops; } + public DiskOfferingInfo(DiskOffering diskOffering, Long size, Long minIops, Long maxIops, Long kmsKeyId) { + _diskOffering = diskOffering; + _size = size; + _minIops = minIops; + _maxIops = maxIops; + _kmsKeyId = kmsKeyId; + } + public void setDiskOffering(DiskOffering diskOffering) { _diskOffering = diskOffering; } @@ -69,4 +78,12 @@ public void 
setMaxIops(Long maxIops) { public Long getMaxIops() { return _maxIops; } + + public void setKmsKeyId(Long kmsKeyId) { + _kmsKeyId = kmsKeyId; + } + + public Long getKmsKeyId() { + return _kmsKeyId; + } } diff --git a/api/src/main/java/com/cloud/storage/Volume.java b/api/src/main/java/com/cloud/storage/Volume.java index c7fbdb0a5445..c7a13d5780d0 100644 --- a/api/src/main/java/com/cloud/storage/Volume.java +++ b/api/src/main/java/com/cloud/storage/Volume.java @@ -275,6 +275,14 @@ enum Event { void setPassphraseId(Long id); + Long getKmsKeyId(); + + void setKmsKeyId(Long id); + + Long getKmsWrappedKeyId(); + + void setKmsWrappedKeyId(Long id); + String getEncryptFormat(); void setEncryptFormat(String encryptFormat); diff --git a/api/src/main/java/com/cloud/vm/UserVmService.java b/api/src/main/java/com/cloud/vm/UserVmService.java index 01f11b73cd41..917008ca4902 100644 --- a/api/src/main/java/com/cloud/vm/UserVmService.java +++ b/api/src/main/java/com/cloud/vm/UserVmService.java @@ -226,7 +226,7 @@ UserVm createBasicSecurityGroupVirtualMachine(DataCenter zone, ServiceOffering s String userData, Long userDataId, String userDataDetails, List sshKeyPairs, Map requestedIps, IpAddresses defaultIp, Boolean displayVm, String keyboard, List affinityGroupIdList, Map customParameter, String customId, Map> dhcpOptionMap, Map dataDiskTemplateToDiskOfferingMap, - Map userVmOVFProperties, boolean dynamicScalingEnabled, Long overrideDiskOfferingId, Volume volume, Snapshot snapshot) throws InsufficientCapacityException, + Map userVmOVFProperties, boolean dynamicScalingEnabled, Long overrideDiskOfferingId, Long rootDiskKmsKeyId, Volume volume, Snapshot snapshot) throws InsufficientCapacityException, ConcurrentOperationException, ResourceUnavailableException, StorageUnavailableException, ResourceAllocationException; /** @@ -302,7 +302,7 @@ UserVm createAdvancedSecurityGroupVirtualMachine(DataCenter zone, ServiceOfferin List securityGroupIdList, Account owner, String hostName, String 
displayName, Long diskOfferingId, Long diskSize, List dataDiskInfoList, String group, HypervisorType hypervisor, HTTPMethod httpmethod, String userData, Long userDataId, String userDataDetails, List sshKeyPairs, Map requestedIps, IpAddresses defaultIps, Boolean displayVm, String keyboard, List affinityGroupIdList, Map customParameters, String customId, Map> dhcpOptionMap, - Map dataDiskTemplateToDiskOfferingMap, Map userVmOVFProperties, boolean dynamicScalingEnabled, Long overrideDiskOfferingId, String vmType, Volume volume, Snapshot snapshot) throws InsufficientCapacityException, ConcurrentOperationException, ResourceUnavailableException, StorageUnavailableException, ResourceAllocationException; + Map dataDiskTemplateToDiskOfferingMap, Map userVmOVFProperties, boolean dynamicScalingEnabled, Long overrideDiskOfferingId, Long rootDiskKmsKeyId, String vmType, Volume volume, Snapshot snapshot) throws InsufficientCapacityException, ConcurrentOperationException, ResourceUnavailableException, StorageUnavailableException, ResourceAllocationException; /** * Creates a User VM in Advanced Zone (Security Group feature is disabled) @@ -374,7 +374,7 @@ UserVm createAdvancedVirtualMachine(DataCenter zone, ServiceOffering serviceOffe String hostName, String displayName, Long diskOfferingId, Long diskSize, List dataDiskInfoList, String group, HypervisorType hypervisor, HTTPMethod httpmethod, String userData, Long userDataId, String userDataDetails, List sshKeyPairs, Map requestedIps, IpAddresses defaultIps, Boolean displayVm, String keyboard, List affinityGroupIdList, Map customParameters, String customId, Map> dhcpOptionMap, Map dataDiskTemplateToDiskOfferingMap, - Map templateOvfPropertiesMap, boolean dynamicScalingEnabled, String vmType, Long overrideDiskOfferingId, Volume volume, Snapshot snapshot) + Map templateOvfPropertiesMap, boolean dynamicScalingEnabled, String vmType, Long overrideDiskOfferingId, Long rootDiskKmsKeyId, Volume volume, Snapshot snapshot) throws 
InsufficientCapacityException, ConcurrentOperationException, ResourceUnavailableException, StorageUnavailableException, ResourceAllocationException; diff --git a/api/src/main/java/com/cloud/vm/VmDiskInfo.java b/api/src/main/java/com/cloud/vm/VmDiskInfo.java index b8779a8d77c6..97683e8397fa 100644 --- a/api/src/main/java/com/cloud/vm/VmDiskInfo.java +++ b/api/src/main/java/com/cloud/vm/VmDiskInfo.java @@ -33,6 +33,11 @@ public VmDiskInfo(DiskOffering diskOffering, Long size, Long minIops, Long maxIo _deviceId = deviceId; } + public VmDiskInfo(DiskOffering diskOffering, Long size, Long minIops, Long maxIops, Long deviceId, Long kmsKeyId) { + super(diskOffering, size, minIops, maxIops, kmsKeyId); + _deviceId = deviceId; + } + public Long getDeviceId() { return _deviceId; } diff --git a/api/src/main/java/org/apache/cloudstack/api/ApiCommandResourceType.java b/api/src/main/java/org/apache/cloudstack/api/ApiCommandResourceType.java index 4d33ba859a5b..91e702df33da 100644 --- a/api/src/main/java/org/apache/cloudstack/api/ApiCommandResourceType.java +++ b/api/src/main/java/org/apache/cloudstack/api/ApiCommandResourceType.java @@ -89,7 +89,9 @@ public enum ApiCommandResourceType { KubernetesSupportedVersion(null), SharedFS(org.apache.cloudstack.storage.sharedfs.SharedFS.class), Extension(org.apache.cloudstack.extension.Extension.class), - ExtensionCustomAction(org.apache.cloudstack.extension.ExtensionCustomAction.class); + ExtensionCustomAction(org.apache.cloudstack.extension.ExtensionCustomAction.class), + KmsKey(org.apache.cloudstack.kms.KMSKey.class), + HsmProfile(org.apache.cloudstack.kms.HSMProfile.class); private final Class clazz; diff --git a/api/src/main/java/org/apache/cloudstack/api/ApiConstants.java b/api/src/main/java/org/apache/cloudstack/api/ApiConstants.java index 9a8913da5b04..8b7983d9f358 100644 --- a/api/src/main/java/org/apache/cloudstack/api/ApiConstants.java +++ b/api/src/main/java/org/apache/cloudstack/api/ApiConstants.java @@ -194,6 +194,7 @@ public 
class ApiConstants { public static final String UTILIZATION = "utilization"; public static final String DRIVER = "driver"; public static final String ROOT_DISK_SIZE = "rootdisksize"; + public static final String ROOT_DISK_KMS_KEY_ID = "rootdiskkmskeyid"; public static final String DHCP_OPTIONS_NETWORK_LIST = "dhcpoptionsnetworklist"; public static final String DHCP_OPTIONS = "dhcpoptions"; public static final String DHCP_PREFIX = "dhcp:"; @@ -861,7 +862,14 @@ public class ApiConstants { public static final String ITERATIONS = "iterations"; public static final String SORT_BY = "sortby"; public static final String CHANGE_CIDR = "changecidr"; + public static final String HSM_PROFILE = "hsmprofile"; + public static final String HSM_PROFILE_ID = "hsmprofileid"; public static final String PURPOSE = "purpose"; + public static final String KMS_KEY = "kmskey"; + public static final String KMS_KEY_ID = "kmskeyid"; + public static final String KMS_KEY_VERSION = "kmskeyversion"; + public static final String KEK_LABEL = "keklabel"; + public static final String KEY_BITS = "keybits"; public static final String IS_TAGGED = "istagged"; public static final String INSTANCE_NAME = "instancename"; public static final String CONSIDER_LAST_HOST = "considerlasthost"; diff --git a/api/src/main/java/org/apache/cloudstack/api/ResponseGenerator.java b/api/src/main/java/org/apache/cloudstack/api/ResponseGenerator.java index 8e92e877f5ca..b7c54e2aef26 100644 --- a/api/src/main/java/org/apache/cloudstack/api/ResponseGenerator.java +++ b/api/src/main/java/org/apache/cloudstack/api/ResponseGenerator.java @@ -277,7 +277,8 @@ public interface ResponseGenerator { List createUserVmResponse(ResponseView view, String objectName, UserVm... userVms); - List createUserVmResponse(ResponseView view, String objectName, EnumSet details, UserVm... userVms); + List createUserVmResponse(ResponseView view, String objectName, EnumSet details, + UserVm... 
userVms); SystemVmResponse createSystemVmResponse(VirtualMachine systemVM); @@ -303,11 +304,13 @@ public interface ResponseGenerator { LoadBalancerResponse createLoadBalancerResponse(LoadBalancer loadBalancer); - LBStickinessResponse createLBStickinessPolicyResponse(List stickinessPolicies, LoadBalancer lb); + LBStickinessResponse createLBStickinessPolicyResponse(List stickinessPolicies, + LoadBalancer lb); LBStickinessResponse createLBStickinessPolicyResponse(StickinessPolicy stickinessPolicy, LoadBalancer lb); - LBHealthCheckResponse createLBHealthCheckPolicyResponse(List healthcheckPolicies, LoadBalancer lb); + LBHealthCheckResponse createLBHealthCheckPolicyResponse(List healthcheckPolicies, + LoadBalancer lb); LBHealthCheckResponse createLBHealthCheckPolicyResponse(HealthCheckPolicy healthcheckPolicy, LoadBalancer lb); @@ -315,7 +318,8 @@ public interface ResponseGenerator { PodResponse createMinimalPodResponse(Pod pod); - ZoneResponse createZoneResponse(ResponseView view, DataCenter dataCenter, Boolean showCapacities, Boolean showResourceIcon); + ZoneResponse createZoneResponse(ResponseView view, DataCenter dataCenter, Boolean showCapacities, + Boolean showResourceIcon); DataCenterGuestIpv6PrefixResponse createDataCenterGuestIpv6PrefixResponse(DataCenterGuestIpv6Prefix prefix); @@ -355,7 +359,8 @@ public interface ResponseGenerator { List createTemplateResponses(ResponseView view, long templateId, Long zoneId, boolean readyOnly); - List createTemplateResponses(ResponseView view, long templateId, Long snapshotId, Long volumeId, boolean readyOnly); + List createTemplateResponses(ResponseView view, long templateId, Long snapshotId, Long volumeId, + boolean readyOnly); SecurityGroupResponse createSecurityGroupResponseFromSecurityGroupRule(List securityRules); @@ -374,14 +379,15 @@ public interface ResponseGenerator { TemplateResponse createTemplateUpdateResponse(ResponseView view, VirtualMachineTemplate result); List createTemplateResponses(ResponseView view, 
VirtualMachineTemplate result, - Long zoneId, boolean readyOnly); + Long zoneId, boolean readyOnly); List createTemplateResponses(ResponseView view, VirtualMachineTemplate result, - List zoneIds, boolean readyOnly); + List zoneIds, boolean readyOnly); List createCapacityResponse(List result, DecimalFormat format); - TemplatePermissionsResponse createTemplatePermissionsResponse(ResponseView view, List accountNames, Long id); + TemplatePermissionsResponse createTemplatePermissionsResponse(ResponseView view, List accountNames, + Long id); AsyncJobResponse queryJobResult(QueryAsyncJobResultCmd cmd); @@ -395,7 +401,8 @@ List createTemplateResponses(ResponseView view, VirtualMachine Long getSecurityGroupId(String groupName, long accountId); - List createIsoResponses(ResponseView view, VirtualMachineTemplate iso, Long zoneId, boolean readyOnly); + List createIsoResponses(ResponseView view, VirtualMachineTemplate iso, Long zoneId, + boolean readyOnly); ProjectResponse createProjectResponse(Project project); @@ -496,13 +503,15 @@ List createTemplateResponses(ResponseView view, VirtualMachine GuestOsMappingResponse createGuestOSMappingResponse(GuestOSHypervisor osHypervisor); - HypervisorGuestOsNamesResponse createHypervisorGuestOSNamesResponse(List> hypervisorGuestOsNames); + HypervisorGuestOsNamesResponse createHypervisorGuestOSNamesResponse( + List> hypervisorGuestOsNames); SnapshotScheduleResponse createSnapshotScheduleResponse(SnapshotSchedule sched); UsageRecordResponse createUsageResponse(Usage usageRecord); - UsageRecordResponse createUsageResponse(Usage usageRecord, Map> resourceTagResponseMap, boolean oldFormat); + UsageRecordResponse createUsageResponse(Usage usageRecord, + Map> resourceTagResponseMap, boolean oldFormat); public Map> getUsageResourceTags(); @@ -514,7 +523,8 @@ List createTemplateResponses(ResponseView view, VirtualMachine public NicResponse createNicResponse(Nic result); - ApplicationLoadBalancerResponse 
createLoadBalancerContainerReponse(ApplicationLoadBalancerRule lb, Map lbInstances); + ApplicationLoadBalancerResponse createLoadBalancerContainerReponse(ApplicationLoadBalancerRule lb, + Map lbInstances); AffinityGroupResponse createAffinityGroupResponse(AffinityGroup group); @@ -540,9 +550,12 @@ List createTemplateResponses(ResponseView view, VirtualMachine ManagementServerResponse createManagementResponse(ManagementServerHost mgmt); - List createHealthCheckResponse(VirtualMachine router, List healthCheckResults); + List createHealthCheckResponse(VirtualMachine router, + List healthCheckResults); - RollingMaintenanceResponse createRollingMaintenanceResponse(Boolean success, String details, List hostsUpdated, List hostsSkipped); + RollingMaintenanceResponse createRollingMaintenanceResponse(Boolean success, String details, + List hostsUpdated, + List hostsSkipped); ResourceIconResponse createResourceIconResponse(ResourceIcon resourceIcon); @@ -552,11 +565,14 @@ List createTemplateResponses(ResponseView view, VirtualMachine DirectDownloadCertificateResponse createDirectDownloadCertificateResponse(DirectDownloadCertificate certificate); - List createDirectDownloadCertificateHostMapResponse(List hostMappings); + List createDirectDownloadCertificateHostMapResponse( + List hostMappings); - DirectDownloadCertificateHostStatusResponse createDirectDownloadCertificateHostStatusResponse(DirectDownloadManager.HostCertificateStatus status); + DirectDownloadCertificateHostStatusResponse createDirectDownloadCertificateHostStatusResponse( + DirectDownloadManager.HostCertificateStatus status); - DirectDownloadCertificateHostStatusResponse createDirectDownloadCertificateProvisionResponse(Long certificateId, Long hostId, Pair result); + DirectDownloadCertificateHostStatusResponse createDirectDownloadCertificateProvisionResponse(Long certificateId, + Long hostId, Pair result); FirewallResponse createIpv6FirewallRuleResponse(FirewallRule acl); diff --git 
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.api.command.admin.kms;

import java.util.List;

import javax.inject.Inject;

import org.apache.cloudstack.acl.RoleType;
import org.apache.cloudstack.api.APICommand;
import org.apache.cloudstack.api.ApiCommandResourceType;
import org.apache.cloudstack.api.ApiConstants;
import org.apache.cloudstack.api.ApiErrorCode;
import org.apache.cloudstack.api.BaseAsyncCmd;
import org.apache.cloudstack.api.Parameter;
import org.apache.cloudstack.api.ServerApiException;
import org.apache.cloudstack.api.response.AsyncJobResponse;
import org.apache.cloudstack.api.response.DomainResponse;
import org.apache.cloudstack.api.response.KMSKeyResponse;
import org.apache.cloudstack.api.response.VolumeResponse;
import org.apache.cloudstack.api.response.ZoneResponse;
import org.apache.cloudstack.context.CallContext;
import org.apache.cloudstack.framework.kms.KMSException;
import org.apache.cloudstack.kms.KMSKey;
import org.apache.cloudstack.kms.KMSManager;

import com.cloud.dc.DataCenter;
import com.cloud.event.EventTypes;

/**
 * Admin-only async command that migrates passphrase-encrypted volumes to a
 * KMS-managed key. Migration scope can be narrowed by zone, account/domain,
 * or an explicit list of volume IDs; only the target KMS key is mandatory.
 */
@APICommand(name = "migrateVolumesToKMS",
        description = "Migrates passphrase-based volumes to KMS (admin only)",
        responseObject = AsyncJobResponse.class,
        since = "4.23.0",
        authorized = {RoleType.Admin},
        requestHasSensitiveInfo = false,
        responseHasSensitiveInfo = false)
public class MigrateVolumesToKMSCmd extends BaseAsyncCmd {
    private static final String s_name = "migratevolumestokmsresponse";

    @Inject
    private KMSManager kmsManager;

    // Optional: when omitted, the scope is taken from account/domain/volume filters.
    @Parameter(name = ApiConstants.ZONE_ID,
            type = CommandType.UUID,
            entityType = ZoneResponse.class,
            description = "Zone ID")
    private Long zoneId;

    @Parameter(name = ApiConstants.ACCOUNT,
            type = CommandType.STRING,
            description = "Migrate volumes for specific account")
    private String accountName;

    @Parameter(name = ApiConstants.DOMAIN_ID,
            type = CommandType.UUID,
            entityType = DomainResponse.class,
            description = "Domain ID")
    private Long domainId;

    // FIX: was a raw List; UUID list parameters resolve to internal Long ids.
    @Parameter(name = ApiConstants.VOLUME_IDS,
            type = CommandType.LIST,
            collectionType = CommandType.UUID,
            entityType = VolumeResponse.class,
            description = "List of volume IDs to migrate")
    private List<Long> volumeIds;

    @Parameter(name = ApiConstants.KMS_KEY_ID,
            required = true,
            type = CommandType.UUID,
            entityType = KMSKeyResponse.class,
            description = "KMS Key ID to use for migrating volumes")
    private Long kmsKeyId;

    public Long getZoneId() {
        return zoneId;
    }

    public String getAccountName() {
        return accountName;
    }

    public Long getDomainId() {
        return domainId;
    }

    public List<Long> getVolumeIds() {
        return volumeIds;
    }

    public Long getKmsKeyId() {
        return kmsKeyId;
    }

    @Override
    public void execute() {
        try {
            kmsManager.migrateVolumesToKMS(this);
        } catch (KMSException e) {
            throw new ServerApiException(ApiErrorCode.INTERNAL_ERROR,
                    "Failed to migrate volumes to KMS: " + e.getMessage());
        }
    }

    @Override
    public String getCommandName() {
        return s_name;
    }

    @Override
    public long getEntityOwnerId() {
        // Attribute the operation to the key's owner when the key resolves;
        // otherwise fall back to the caller.
        KMSKey key = _entityMgr.findById(KMSKey.class, kmsKeyId);
        if (key != null) {
            return key.getAccountId();
        }
        return CallContext.current().getCallingAccount().getId();
    }

    @Override
    public String getEventType() {
        return EventTypes.EVENT_VOLUME_MIGRATE_TO_KMS;
    }

    @Override
    public String getEventDescription() {
        // FIX: zoneId is optional; the original unconditionally resolved its
        // UUID and could NPE (or log a bogus zone) for account/volume-scoped
        // migrations.
        if (zoneId != null) {
            return "Migrating volumes to KMS for zone: " + _uuidMgr.getUuid(DataCenter.class, zoneId);
        }
        return "Migrating volumes to KMS";
    }

    @Override
    public ApiCommandResourceType getApiResourceType() {
        return ApiCommandResourceType.Zone;
    }

    @Override
    public Long getApiResourceId() {
        return zoneId;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.cloudstack.api.command.user.kms;

import java.util.Set;

import javax.inject.Inject;

import org.apache.cloudstack.acl.RoleType;
import org.apache.cloudstack.api.APICommand;
import org.apache.cloudstack.api.ApiCommandResourceType;
import org.apache.cloudstack.api.ApiConstants;
import org.apache.cloudstack.api.ApiErrorCode;
import org.apache.cloudstack.api.BaseCmd;
import org.apache.cloudstack.api.Parameter;
import org.apache.cloudstack.api.ServerApiException;
import org.apache.cloudstack.api.command.user.UserCmd;
import org.apache.cloudstack.api.response.DomainResponse;
import org.apache.cloudstack.api.response.HSMProfileResponse;
import org.apache.cloudstack.api.response.KMSKeyResponse;
import org.apache.cloudstack.api.response.ProjectResponse;
import org.apache.cloudstack.api.response.ZoneResponse;
import org.apache.cloudstack.context.CallContext;
import org.apache.cloudstack.framework.kms.KMSException;
import org.apache.cloudstack.kms.KMSManager;

import com.cloud.exception.ResourceAllocationException;

/**
 * Creates a new KMS key (Key Encryption Key) inside the given HSM profile,
 * scoped to a zone and owned by the resolved account/project.
 */
@APICommand(name = "createKMSKey",
        description = "Creates a new KMS key (Key Encryption Key) for encryption",
        responseObject = KMSKeyResponse.class,
        since = "4.23.0",
        authorized = {RoleType.Admin, RoleType.ResourceAdmin, RoleType.DomainAdmin, RoleType.User},
        requestHasSensitiveInfo = false,
        responseHasSensitiveInfo = false)
public class CreateKMSKeyCmd extends BaseCmd implements UserCmd {

    // AES key sizes advertised by the keybits parameter documentation.
    private static final Set<Integer> SUPPORTED_KEY_BITS = Set.of(128, 192, 256);
    private static final int DEFAULT_KEY_BITS = 256;

    @Inject
    private KMSManager kmsManager;

    @Parameter(name = ApiConstants.NAME,
            required = true,
            type = CommandType.STRING,
            description = "Name of the KMS key")
    private String name;

    @Parameter(name = ApiConstants.DESCRIPTION,
            type = CommandType.STRING,
            description = "Description of the KMS key")
    private String description;

    @Parameter(name = ApiConstants.PURPOSE,
            type = CommandType.STRING,
            description = "Purpose of the key: volume, tls. (default: volume)")
    private String purpose;

    @Parameter(name = ApiConstants.ZONE_ID,
            required = true,
            type = CommandType.UUID,
            entityType = ZoneResponse.class,
            description = "Zone ID where the key will be valid")
    private Long zoneId;

    @Parameter(name = ApiConstants.ACCOUNT,
            type = CommandType.STRING,
            description = "Account name (for creating keys for child accounts - requires domain admin or admin)")
    private String accountName;

    @Parameter(name = ApiConstants.DOMAIN_ID,
            type = CommandType.UUID,
            entityType = DomainResponse.class,
            description = "Domain ID (for creating keys for child accounts - requires domain admin or admin)")
    private Long domainId;

    @Parameter(name = ApiConstants.PROJECT_ID,
            type = CommandType.UUID,
            entityType = ProjectResponse.class,
            description = "ID of the project to create the KMS key for")
    private Long projectId;

    @Parameter(name = ApiConstants.KEY_BITS,
            type = CommandType.INTEGER,
            description = "Key size in bits: 128, 192, or 256 (default: 256)")
    private Integer keyBits;

    @Parameter(name = ApiConstants.HSM_PROFILE_ID,
            type = CommandType.UUID,
            entityType = HSMProfileResponse.class,
            required = true,
            description = "ID of HSM profile to create key in")
    private Long hsmProfileId;

    public String getName() {
        return name;
    }

    public String getDescription() {
        return description;
    }

    public String getPurpose() {
        return purpose == null ? "volume" : purpose;
    }

    public Long getZoneId() {
        return zoneId;
    }

    public String getAccountName() {
        return accountName;
    }

    public Long getDomainId() {
        return domainId;
    }

    public Long getProjectId() {
        return projectId;
    }

    public Integer getKeyBits() {
        return keyBits != null ? keyBits : DEFAULT_KEY_BITS;
    }

    public Long getHsmProfileId() {
        return hsmProfileId;
    }

    @Override
    public void execute() throws ResourceAllocationException {
        // FIX: keybits was documented as 128/192/256 but never validated; fail
        // fast with a parameter error instead of passing any integer through.
        final Integer bits = getKeyBits();
        if (!SUPPORTED_KEY_BITS.contains(bits)) {
            throw new ServerApiException(ApiErrorCode.PARAM_ERROR,
                    "Invalid keybits value: " + bits + "; supported values are 128, 192 and 256");
        }
        try {
            KMSKeyResponse response = kmsManager.createKMSKey(this);
            response.setResponseName(getCommandName());
            setResponseObject(response);
        } catch (KMSException e) {
            throw new ServerApiException(ApiErrorCode.INTERNAL_ERROR,
                    "Failed to create KMS key: " + e.getMessage());
        }
    }

    @Override
    public long getEntityOwnerId() {
        // Resolve the effective owner (account/domain/project); fall back to
        // the caller when resolution yields nothing.
        Long accountId = _accountService.finalizeAccountId(accountName, domainId, projectId, true);
        if (accountId != null) {
            return accountId;
        }
        return CallContext.current().getCallingAccount().getId();
    }

    @Override
    public ApiCommandResourceType getApiResourceType() {
        return ApiCommandResourceType.KmsKey;
    }
}
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.cloudstack.api.command.user.kms; + +import com.cloud.event.EventTypes; +import org.apache.cloudstack.acl.RoleType; +import org.apache.cloudstack.api.APICommand; +import org.apache.cloudstack.api.ApiCommandResourceType; +import org.apache.cloudstack.api.ApiConstants; +import org.apache.cloudstack.api.ApiErrorCode; +import org.apache.cloudstack.api.BaseAsyncCmd; +import org.apache.cloudstack.api.Parameter; +import org.apache.cloudstack.api.ServerApiException; +import org.apache.cloudstack.api.command.user.UserCmd; +import org.apache.cloudstack.api.response.KMSKeyResponse; +import org.apache.cloudstack.api.response.SuccessResponse; +import org.apache.cloudstack.context.CallContext; +import org.apache.cloudstack.framework.kms.KMSException; +import org.apache.cloudstack.kms.KMSKey; +import org.apache.cloudstack.kms.KMSManager; + +import javax.inject.Inject; + +@APICommand(name = "deleteKMSKey", + description = "Deletes a KMS key (only if not in use)", + responseObject = SuccessResponse.class, + since = "4.23.0", + authorized = {RoleType.Admin, RoleType.ResourceAdmin, RoleType.DomainAdmin, RoleType.User}, + requestHasSensitiveInfo = false, + responseHasSensitiveInfo = false) +public class DeleteKMSKeyCmd extends BaseAsyncCmd implements UserCmd { + + 
@Inject + private KMSManager kmsManager; + + @Parameter(name = ApiConstants.ID, + required = true, + type = CommandType.UUID, + entityType = KMSKeyResponse.class, + description = "The UUID of the KMS key to delete") + private Long id; + + @Override + public void execute() { + try { + SuccessResponse response = kmsManager.deleteKMSKey(this); + response.setResponseName(getCommandName()); + setResponseObject(response); + } catch (KMSException e) { + throw new ServerApiException(ApiErrorCode.INTERNAL_ERROR, + "Failed to delete KMS key: " + e.getMessage()); + } + } + + @Override + public long getEntityOwnerId() { + KMSKey key = _entityMgr.findById(KMSKey.class, id); + if (key != null) { + return key.getAccountId(); + } + return CallContext.current().getCallingAccount().getId(); + } + + @Override + public ApiCommandResourceType getApiResourceType() { + return ApiCommandResourceType.KmsKey; + } + + @Override + public String getEventType() { + return EventTypes.EVENT_KMS_KEY_DELETE; + } + + @Override + public String getEventDescription() { + return "deleting KMS key: " + _uuidMgr.getUuid(KMSKey.class, getId()); + } + + public Long getId() { + return id; + } +} diff --git a/api/src/main/java/org/apache/cloudstack/api/command/user/kms/ListKMSKeysCmd.java b/api/src/main/java/org/apache/cloudstack/api/command/user/kms/ListKMSKeysCmd.java new file mode 100644 index 000000000000..3856cc9fed55 --- /dev/null +++ b/api/src/main/java/org/apache/cloudstack/api/command/user/kms/ListKMSKeysCmd.java @@ -0,0 +1,110 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.cloudstack.api.command.user.kms; + +import org.apache.cloudstack.acl.RoleType; +import org.apache.cloudstack.api.APICommand; +import org.apache.cloudstack.api.ApiConstants; +import org.apache.cloudstack.api.BaseListProjectAndAccountResourcesCmd; +import org.apache.cloudstack.api.Parameter; +import org.apache.cloudstack.api.ResponseObject.ResponseView; +import org.apache.cloudstack.api.command.user.UserCmd; +import org.apache.cloudstack.api.response.HSMProfileResponse; +import org.apache.cloudstack.api.response.KMSKeyResponse; +import org.apache.cloudstack.api.response.ListResponse; +import org.apache.cloudstack.api.response.ZoneResponse; +import org.apache.cloudstack.kms.KMSManager; + +import javax.inject.Inject; + +@APICommand(name = "listKMSKeys", + description = "Lists KMS keys available to the caller", + responseObject = KMSKeyResponse.class, + responseView = ResponseView.Restricted, + since = "4.23.0", + authorized = {RoleType.Admin, RoleType.ResourceAdmin, RoleType.DomainAdmin, RoleType.User}, + requestHasSensitiveInfo = false, + responseHasSensitiveInfo = false) +public class ListKMSKeysCmd extends BaseListProjectAndAccountResourcesCmd implements UserCmd { + private static final String s_name = "listkmskeysresponse"; + + @Inject + private KMSManager kmsManager; + + @Parameter(name = ApiConstants.ID, + type = CommandType.UUID, + entityType = KMSKeyResponse.class, + description = "List KMS key by UUID") + private Long id; + + @Parameter(name = ApiConstants.PURPOSE, + type = CommandType.STRING, + description = "Filter 
by purpose: volume, tls") + private String purpose; + + @Parameter(name = ApiConstants.ZONE_ID, + type = CommandType.UUID, + entityType = ZoneResponse.class, + description = "Filter by zone ID") + private Long zoneId; + + @Parameter(name = ApiConstants.ENABLED, + type = CommandType.BOOLEAN, + description = "Filter by enabled status") + private Boolean enabled; + + @Parameter(name = ApiConstants.HSM_PROFILE_ID, + type = CommandType.UUID, + entityType = HSMProfileResponse.class, + description = "Filter by HSM profile ID") + private Long hsmProfileId; + + public Long getId() { + return id; + } + + public String getPurpose() { + return purpose; + } + + public Long getZoneId() { + return zoneId; + } + + public Boolean getEnabled() { + return enabled; + } + + public Long getHsmProfileId() { + return hsmProfileId; + } + + @Override + public void execute() { + ListResponse<KMSKeyResponse> listResponse = kmsManager.listKMSKeys(this); + listResponse.setResponseName(getCommandName()); + setResponseObject(listResponse); + } + + @Override + public String getCommandName() { + return s_name; + } +} diff --git a/api/src/main/java/org/apache/cloudstack/api/command/user/kms/RotateKMSKeyCmd.java b/api/src/main/java/org/apache/cloudstack/api/command/user/kms/RotateKMSKeyCmd.java new file mode 100644 index 000000000000..a370a1d5d59e --- /dev/null +++ b/api/src/main/java/org/apache/cloudstack/api/command/user/kms/RotateKMSKeyCmd.java @@ -0,0 +1,128 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +package org.apache.cloudstack.api.command.user.kms; + +import org.apache.cloudstack.acl.RoleType; +import org.apache.cloudstack.api.APICommand; +import org.apache.cloudstack.api.ApiCommandResourceType; +import org.apache.cloudstack.api.ApiConstants; +import org.apache.cloudstack.api.ApiErrorCode; +import org.apache.cloudstack.api.BaseAsyncCmd; +import org.apache.cloudstack.api.Parameter; +import org.apache.cloudstack.api.ServerApiException; +import org.apache.cloudstack.api.response.AsyncJobResponse; +import org.apache.cloudstack.api.response.HSMProfileResponse; +import org.apache.cloudstack.api.response.KMSKeyResponse; +import org.apache.cloudstack.api.response.SuccessResponse; +import org.apache.cloudstack.context.CallContext; +import org.apache.cloudstack.framework.kms.KMSException; +import org.apache.cloudstack.kms.KMSKey; +import org.apache.cloudstack.kms.KMSManager; + +import javax.inject.Inject; + +@APICommand(name = "rotateKMSKey", + description = "Rotates KEK by creating new version and scheduling gradual re-encryption", + responseObject = SuccessResponse.class, + since = "4.23.0", + authorized = {RoleType.Admin, RoleType.ResourceAdmin, RoleType.DomainAdmin, RoleType.User}, + requestHasSensitiveInfo = false, + responseHasSensitiveInfo = false) +public class RotateKMSKeyCmd extends BaseAsyncCmd { + private static final String s_name = "rotatekmskeyresponse"; + + @Inject + private KMSManager kmsManager; + + @Parameter(name = ApiConstants.ID, + required = true, + type = CommandType.UUID, + entityType = KMSKeyResponse.class, + description = 
"KMS Key UUID to rotate") + private Long id; + + @Parameter(name = ApiConstants.KEY_BITS, + type = CommandType.INTEGER, + description = "Key size for new KEK (default: same as current)") + private Integer keyBits; + + @Parameter(name = ApiConstants.HSM_PROFILE_ID, + type = CommandType.UUID, + entityType = HSMProfileResponse.class, + description = "The target HSM profile ID for the new KEK version. If provided, migrates the key to " + + "this HSM.") + private Long hsmProfileId; + + public Long getId() { + return id; + } + + public Integer getKeyBits() { + return keyBits; + } + + public Long getHsmProfileId() { + return hsmProfileId; + } + + @Override + public void execute() { + try { + kmsManager.rotateKMSKey(this); + SuccessResponse response = new SuccessResponse(); + response.setResponseName(getCommandName()); + setResponseObject(response); + } catch (KMSException e) { + throw new ServerApiException(ApiErrorCode.INTERNAL_ERROR, + "Failed to rotate KMS key: " + e.getMessage()); + } + } + + @Override + public String getCommandName() { + return s_name; + } + + @Override + public long getEntityOwnerId() { + KMSKey key = _entityMgr.findById(KMSKey.class, id); + if (key != null) { + return key.getAccountId(); + } + return CallContext.current().getCallingAccount().getId(); + } + + @Override + public String getEventType() { + return com.cloud.event.EventTypes.EVENT_KMS_KEY_ROTATE; + } + + @Override + public String getEventDescription() { + return "Rotating KMS key: " + _uuidMgr.getUuid(KMSKey.class, id); + } + + @Override + public ApiCommandResourceType getApiResourceType() { + return ApiCommandResourceType.KmsKey; + } + + @Override + public Long getApiResourceId() { + return id; + } +} diff --git a/api/src/main/java/org/apache/cloudstack/api/command/user/kms/UpdateKMSKeyCmd.java b/api/src/main/java/org/apache/cloudstack/api/command/user/kms/UpdateKMSKeyCmd.java new file mode 100644 index 000000000000..4eba649ecb48 --- /dev/null +++ 
b/api/src/main/java/org/apache/cloudstack/api/command/user/kms/UpdateKMSKeyCmd.java @@ -0,0 +1,120 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.cloudstack.api.command.user.kms; + +import com.cloud.event.EventTypes; +import org.apache.cloudstack.acl.RoleType; +import org.apache.cloudstack.api.APICommand; +import org.apache.cloudstack.api.ApiCommandResourceType; +import org.apache.cloudstack.api.ApiConstants; +import org.apache.cloudstack.api.ApiErrorCode; +import org.apache.cloudstack.api.BaseAsyncCmd; +import org.apache.cloudstack.api.Parameter; +import org.apache.cloudstack.api.ServerApiException; +import org.apache.cloudstack.api.command.user.UserCmd; +import org.apache.cloudstack.api.response.KMSKeyResponse; +import org.apache.cloudstack.context.CallContext; +import org.apache.cloudstack.framework.kms.KMSException; +import org.apache.cloudstack.kms.KMSManager; + +import javax.inject.Inject; + +@APICommand(name = "updateKMSKey", + description = "Updates KMS key name, description, or enabled status", + responseObject = KMSKeyResponse.class, + since = "4.23.0", + authorized = {RoleType.Admin, RoleType.ResourceAdmin, RoleType.DomainAdmin, RoleType.User}, + requestHasSensitiveInfo = 
false, + responseHasSensitiveInfo = false) +public class UpdateKMSKeyCmd extends BaseAsyncCmd implements UserCmd { + + @Inject + private KMSManager kmsManager; + + @Parameter(name = ApiConstants.ID, + required = true, + type = CommandType.UUID, + entityType = KMSKeyResponse.class, + description = "The UUID of the KMS key to update") + private Long id; + + @Parameter(name = ApiConstants.NAME, + type = CommandType.STRING, + description = "New name for the key") + private String name; + + @Parameter(name = ApiConstants.DESCRIPTION, + type = CommandType.STRING, + description = "New description for the key") + private String description; + + @Parameter(name = ApiConstants.ENABLED, + type = CommandType.BOOLEAN, + description = "whether the key should be enabled") + private Boolean enabled; + + public String getName() { + return name; + } + + public String getDescription() { + return description; + } + + public Boolean getEnabled() { + return enabled; + } + + @Override + public void execute() { + try { + KMSKeyResponse response = kmsManager.updateKMSKey(this); + response.setResponseName(getCommandName()); + setResponseObject(response); + } catch (KMSException e) { + throw new ServerApiException(ApiErrorCode.INTERNAL_ERROR, + "Failed to update KMS key: " + e.getMessage()); + } + } + + @Override + public long getEntityOwnerId() { + return CallContext.current().getCallingAccount().getId(); + } + + @Override + public ApiCommandResourceType getApiResourceType() { + return ApiCommandResourceType.KmsKey; + } + + @Override + public String getEventType() { + return EventTypes.EVENT_KMS_KEY_UPDATE; + } + + @Override + public String getEventDescription() { + return "updating KMS key: " + getId(); + } + + public Long getId() { + return id; + } +} diff --git a/api/src/main/java/org/apache/cloudstack/api/command/user/kms/hsm/AddHSMProfileCmd.java b/api/src/main/java/org/apache/cloudstack/api/command/user/kms/hsm/AddHSMProfileCmd.java new file mode 100644 index 
000000000000..b7410e4cfea9 --- /dev/null +++ b/api/src/main/java/org/apache/cloudstack/api/command/user/kms/hsm/AddHSMProfileCmd.java @@ -0,0 +1,162 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.cloudstack.api.command.user.kms.hsm; + +import com.cloud.exception.ConcurrentOperationException; +import com.cloud.exception.InsufficientCapacityException; +import com.cloud.exception.NetworkRuleConflictException; +import com.cloud.exception.ResourceAllocationException; +import com.cloud.exception.ResourceUnavailableException; +import com.cloud.utils.StringUtils; +import org.apache.cloudstack.acl.RoleType; +import org.apache.cloudstack.api.APICommand; +import org.apache.cloudstack.api.ApiConstants; +import org.apache.cloudstack.api.ApiErrorCode; +import org.apache.cloudstack.api.BaseCmd; +import org.apache.cloudstack.api.Parameter; +import org.apache.cloudstack.api.ServerApiException; +import org.apache.cloudstack.api.response.DomainResponse; +import org.apache.cloudstack.api.response.HSMProfileResponse; +import org.apache.cloudstack.api.response.ProjectResponse; +import org.apache.cloudstack.api.response.ZoneResponse; +import org.apache.cloudstack.context.CallContext; +import 
org.apache.cloudstack.framework.kms.KMSException; +import org.apache.cloudstack.kms.HSMProfile; +import org.apache.cloudstack.kms.KMSManager; +import org.apache.commons.collections.MapUtils; + +import javax.inject.Inject; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; + +@APICommand(name = "addHSMProfile", description = "Adds a new HSM profile", responseObject = HSMProfileResponse.class, + requestHasSensitiveInfo = true, responseHasSensitiveInfo = true, since = "4.23.0", + authorized = { RoleType.Admin }) +public class AddHSMProfileCmd extends BaseCmd { + + @Inject + private KMSManager kmsManager; + + @Parameter(name = ApiConstants.NAME, type = CommandType.STRING, required = true, + description = "the name of the HSM profile") + private String name; + + @Parameter(name = ApiConstants.PROTOCOL, type = CommandType.STRING, + description = "the protocol of the HSM profile (PKCS11, KMIP, etc.). Default is 'pkcs11'") + private String protocol; + + @Parameter(name = ApiConstants.ZONE_ID, type = CommandType.UUID, entityType = ZoneResponse.class, + description = "the zone ID where the HSM profile is available. If null, global scope (for admin only)") + private Long zoneId; + + @Parameter(name = ApiConstants.DOMAIN_ID, type = CommandType.UUID, entityType = DomainResponse.class, + description = "the domain ID where the HSM profile is available") + private Long domainId; + + @Parameter(name = ApiConstants.ACCOUNT, type = CommandType.STRING, + description = "the account name of the HSM profile owner. Must be used with domainId.") + private String accountName; + + @Parameter(name = ApiConstants.PROJECT_ID, type = CommandType.UUID, entityType = ProjectResponse.class, + description = "the ID of the project to add the HSM profile for") + private Long projectId; + + @Parameter(name = "system", type = CommandType.BOOLEAN, + description = "whether this is a system HSM profile available to all users globally (root admin only). 
" + + "Default is false") + private Boolean system; + + @Parameter(name = ApiConstants.VENDOR_NAME, type = CommandType.STRING, description = "the vendor name of the HSM") + private String vendorName; + + @Parameter(name = ApiConstants.DETAILS, type = CommandType.MAP, + description = "HSM configuration details (protocol specific)") + private Map details; + + public String getName() { + return name; + } + + public String getProtocol() { + if (StringUtils.isBlank(protocol)) { + return "pkcs11"; + } + return protocol; + } + + public Long getZoneId() { + return zoneId; + } + + public Long getDomainId() { + return domainId; + } + + public String getAccountName() { + return accountName; + } + + public Long getProjectId() { + return projectId; + } + + public Boolean isSystem() { + return system != null && system; + } + + public String getVendorName() { + return vendorName; + } + + public Map getDetails() { + Map detailsMap = new HashMap<>(); + if (MapUtils.isNotEmpty(details)) { + Collection props = details.values(); + for (Object prop : props) { + HashMap detail = (HashMap) prop; + for (Map.Entry entry : detail.entrySet()) { + detailsMap.put(entry.getKey(), entry.getValue()); + } + } + } + return detailsMap; + } + + @Override + public void execute() throws ResourceUnavailableException, InsufficientCapacityException, ServerApiException, + ConcurrentOperationException, ResourceAllocationException, NetworkRuleConflictException { + try { + HSMProfile profile = kmsManager.addHSMProfile(this); + HSMProfileResponse response = kmsManager.createHSMProfileResponse(profile); + response.setResponseName(getCommandName()); + setResponseObject(response); + } catch (KMSException e) { + throw new ServerApiException(ApiErrorCode.INTERNAL_ERROR, e.getMessage()); + } + } + + @Override + public long getEntityOwnerId() { + Long accountId = _accountService.finalizeAccountId(accountName, domainId, projectId, true); + if (accountId != null) { + return accountId; + } + return 
CallContext.current().getCallingAccount().getId(); + } +} diff --git a/api/src/main/java/org/apache/cloudstack/api/command/user/kms/hsm/DeleteHSMProfileCmd.java b/api/src/main/java/org/apache/cloudstack/api/command/user/kms/hsm/DeleteHSMProfileCmd.java new file mode 100644 index 000000000000..5f70eb7a899a --- /dev/null +++ b/api/src/main/java/org/apache/cloudstack/api/command/user/kms/hsm/DeleteHSMProfileCmd.java @@ -0,0 +1,81 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.cloudstack.api.command.user.kms.hsm; + +import com.cloud.exception.ConcurrentOperationException; +import com.cloud.exception.InsufficientCapacityException; +import com.cloud.exception.NetworkRuleConflictException; +import com.cloud.exception.ResourceAllocationException; +import com.cloud.exception.ResourceUnavailableException; +import org.apache.cloudstack.acl.RoleType; +import org.apache.cloudstack.api.APICommand; +import org.apache.cloudstack.api.ApiConstants; +import org.apache.cloudstack.api.ApiErrorCode; +import org.apache.cloudstack.api.BaseCmd; +import org.apache.cloudstack.api.Parameter; +import org.apache.cloudstack.api.ServerApiException; +import org.apache.cloudstack.api.response.HSMProfileResponse; +import org.apache.cloudstack.api.response.SuccessResponse; +import org.apache.cloudstack.context.CallContext; +import org.apache.cloudstack.framework.kms.KMSException; +import org.apache.cloudstack.kms.HSMProfile; +import org.apache.cloudstack.kms.KMSManager; + +import javax.inject.Inject; + +@APICommand(name = "deleteHSMProfile", description = "Deletes an HSM profile", responseObject = SuccessResponse.class, + requestHasSensitiveInfo = false, responseHasSensitiveInfo = false, since = "4.23.0", + authorized = { RoleType.Admin }) +public class DeleteHSMProfileCmd extends BaseCmd { + + @Inject + private KMSManager kmsManager; + + @Parameter(name = ApiConstants.ID, type = CommandType.UUID, entityType = HSMProfileResponse.class, required = true, + description = "the ID of the HSM profile") + private Long id; + + public Long getId() { + return id; + } + + @Override + public void execute() throws ResourceUnavailableException, InsufficientCapacityException, ServerApiException, + ConcurrentOperationException, ResourceAllocationException, NetworkRuleConflictException { + try { + boolean result = kmsManager.deleteHSMProfile(this); + if (result) { + SuccessResponse response = new SuccessResponse(getCommandName()); + setResponseObject(response); + 
} else { + throw new ServerApiException(ApiErrorCode.INTERNAL_ERROR, "Failed to delete HSM profile"); + } + } catch (KMSException e) { + throw new ServerApiException(ApiErrorCode.INTERNAL_ERROR, e.getMessage()); + } + } + + @Override + public long getEntityOwnerId() { + HSMProfile profile = _entityMgr.findById(HSMProfile.class, id); + if (profile != null && profile.getAccountId() > 0) { + return profile.getAccountId(); + } + return CallContext.current().getCallingAccount().getId(); + } +} diff --git a/api/src/main/java/org/apache/cloudstack/api/command/user/kms/hsm/ListHSMProfilesCmd.java b/api/src/main/java/org/apache/cloudstack/api/command/user/kms/hsm/ListHSMProfilesCmd.java new file mode 100644 index 000000000000..d42d701c2f2f --- /dev/null +++ b/api/src/main/java/org/apache/cloudstack/api/command/user/kms/hsm/ListHSMProfilesCmd.java @@ -0,0 +1,85 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.cloudstack.api.command.user.kms.hsm; + +import org.apache.cloudstack.acl.RoleType; +import org.apache.cloudstack.api.APICommand; +import org.apache.cloudstack.api.ApiConstants; +import org.apache.cloudstack.api.BaseListProjectAndAccountResourcesCmd; +import org.apache.cloudstack.api.Parameter; +import org.apache.cloudstack.api.response.HSMProfileResponse; +import org.apache.cloudstack.api.response.ListResponse; +import org.apache.cloudstack.api.response.ZoneResponse; +import org.apache.cloudstack.kms.KMSManager; + +import javax.inject.Inject; + +@APICommand(name = "listHSMProfiles", description = "Lists HSM profiles", responseObject = HSMProfileResponse.class, + requestHasSensitiveInfo = false, responseHasSensitiveInfo = true, since = "4.23.0", + authorized = {RoleType.Admin, RoleType.ResourceAdmin, RoleType.DomainAdmin, RoleType.User}) +public class ListHSMProfilesCmd extends BaseListProjectAndAccountResourcesCmd { + + @Inject + private KMSManager kmsManager; + + @Parameter(name = ApiConstants.ID, type = CommandType.UUID, entityType = HSMProfileResponse.class, + description = "the HSM profile ID") + private Long id; + + @Parameter(name = ApiConstants.ZONE_ID, type = CommandType.UUID, entityType = ZoneResponse.class, + description = "the zone ID") + private Long zoneId; + + @Parameter(name = ApiConstants.PROTOCOL, type = CommandType.STRING, description = "the protocol of the HSM profile") + private String protocol; + + @Parameter(name = ApiConstants.ENABLED, type = CommandType.BOOLEAN, description = "list only enabled profiles") + private Boolean enabled; + + @Parameter(name = ApiConstants.IS_SYSTEM, + type = CommandType.BOOLEAN, + description = "when true, non-admin users see only system (global) profiles") + private Boolean isSystem; + + public Long getId() { + return id; + } + + public Long getZoneId() { + return zoneId; + } + + public String getProtocol() { + return protocol; + } + + public Boolean getEnabled() { + return enabled; + } + + 
public Boolean getIsSystem() { + return isSystem; + } + + @Override + public void execute() { + ListResponse<HSMProfileResponse> response = kmsManager.listHSMProfiles(this); + response.setResponseName(getCommandName()); + setResponseObject(response); + } +} diff --git a/api/src/main/java/org/apache/cloudstack/api/command/user/kms/hsm/UpdateHSMProfileCmd.java b/api/src/main/java/org/apache/cloudstack/api/command/user/kms/hsm/UpdateHSMProfileCmd.java new file mode 100644 index 000000000000..a408c967362d --- /dev/null +++ b/api/src/main/java/org/apache/cloudstack/api/command/user/kms/hsm/UpdateHSMProfileCmd.java @@ -0,0 +1,93 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.cloudstack.api.command.user.kms.hsm; + +import com.cloud.exception.ConcurrentOperationException; +import com.cloud.exception.InsufficientCapacityException; +import com.cloud.exception.NetworkRuleConflictException; +import com.cloud.exception.ResourceAllocationException; +import com.cloud.exception.ResourceUnavailableException; +import org.apache.cloudstack.acl.RoleType; +import org.apache.cloudstack.api.APICommand; +import org.apache.cloudstack.api.ApiConstants; +import org.apache.cloudstack.api.ApiErrorCode; +import org.apache.cloudstack.api.BaseCmd; +import org.apache.cloudstack.api.Parameter; +import org.apache.cloudstack.api.ServerApiException; +import org.apache.cloudstack.api.response.HSMProfileResponse; +import org.apache.cloudstack.context.CallContext; +import org.apache.cloudstack.framework.kms.KMSException; +import org.apache.cloudstack.kms.HSMProfile; +import org.apache.cloudstack.kms.KMSManager; + +import javax.inject.Inject; + +@APICommand(name = "updateHSMProfile", description = "Updates an HSM profile", + responseObject = HSMProfileResponse.class, + requestHasSensitiveInfo = true, responseHasSensitiveInfo = true, since = "4.23.0", + authorized = { RoleType.Admin }) +public class UpdateHSMProfileCmd extends BaseCmd { + + @Inject + private KMSManager kmsManager; + + @Parameter(name = ApiConstants.ID, type = CommandType.UUID, entityType = HSMProfileResponse.class, required = true, + description = "the ID of the HSM profile") + private Long id; + + @Parameter(name = ApiConstants.NAME, type = CommandType.STRING, description = "the name of the HSM profile") + private String name; + + @Parameter(name = ApiConstants.ENABLED, type = CommandType.BOOLEAN, + description = "whether the HSM profile is enabled") + private Boolean enabled; + + public Long getId() { + return id; + } + + public String getName() { + return name; + } + + public Boolean getEnabled() { + return enabled; + } + + @Override + public void execute() throws 
ResourceUnavailableException, InsufficientCapacityException, ServerApiException, + ConcurrentOperationException, ResourceAllocationException, NetworkRuleConflictException { + try { + HSMProfile profile = kmsManager.updateHSMProfile(this); + HSMProfileResponse response = kmsManager.createHSMProfileResponse(profile); + response.setResponseName(getCommandName()); + setResponseObject(response); + } catch (KMSException e) { + throw new ServerApiException(ApiErrorCode.INTERNAL_ERROR, e.getMessage()); + } + } + + @Override + public long getEntityOwnerId() { + HSMProfile profile = _entityMgr.findById(HSMProfile.class, id); + if (profile != null && profile.getAccountId() > 0) { + return profile.getAccountId(); + } + return CallContext.current().getCallingAccount().getId(); + } +} diff --git a/api/src/main/java/org/apache/cloudstack/api/command/user/vm/BaseDeployVMCmd.java b/api/src/main/java/org/apache/cloudstack/api/command/user/vm/BaseDeployVMCmd.java index 8c29d7338b85..f45e1a54f604 100644 --- a/api/src/main/java/org/apache/cloudstack/api/command/user/vm/BaseDeployVMCmd.java +++ b/api/src/main/java/org/apache/cloudstack/api/command/user/vm/BaseDeployVMCmd.java @@ -40,12 +40,14 @@ import org.apache.cloudstack.api.response.DiskOfferingResponse; import org.apache.cloudstack.api.response.DomainResponse; import org.apache.cloudstack.api.response.HostResponse; +import org.apache.cloudstack.api.response.KMSKeyResponse; import org.apache.cloudstack.api.response.NetworkResponse; import org.apache.cloudstack.api.response.ProjectResponse; import org.apache.cloudstack.api.response.SecurityGroupResponse; import org.apache.cloudstack.api.response.UserDataResponse; import org.apache.cloudstack.api.response.ZoneResponse; import org.apache.cloudstack.context.CallContext; +import org.apache.cloudstack.kms.KMSKey; import org.apache.cloudstack.vm.lease.VMLeaseManager; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.MapUtils; @@ -126,11 +128,19 @@ 
public abstract class BaseDeployVMCmd extends BaseAsyncCreateCustomIdCmd impleme since = "4.4") private Long rootdisksize; + @ACL + @Parameter(name = ApiConstants.ROOT_DISK_KMS_KEY_ID, + type = CommandType.UUID, + entityType = KMSKeyResponse.class, + description = "ID of the KMS Key to use for root disk encryption", + since = "4.23.0") + private Long rootDiskKmsKeyId; + @Parameter(name = ApiConstants.DATADISKS_DETAILS, type = CommandType.MAP, since = "4.21.0", description = "Disk offering details for creating multiple data volumes. Mutually exclusive with diskOfferingId." + - " Example: datadisksdetails[0].diskofferingid=a2a73a84-19db-4852-8930-dfddef053341&datadisksdetails[0].size=10&datadisksdetails[0].miniops=100&datadisksdetails[0].maxiops=200") + " Example: datadisksdetails[0].diskofferingid=a2a73a84-19db-4852-8930-dfddef053341&datadisksdetails[0].size=10&datadisksdetails[0].miniops=100&datadisksdetails[0].maxiops=200&datadisksdetails[0].kmskeyid=") private Map dataDisksDetails; @Parameter(name = ApiConstants.GROUP, type = CommandType.STRING, description = "an optional group for the virtual machine") @@ -300,6 +310,10 @@ public Long getDiskOfferingId() { return diskOfferingId; } + public Long getRootDiskKmsKeyId() { + return rootDiskKmsKeyId; + } + public String getDeploymentPlanner() { return deploymentPlanner; } @@ -581,7 +595,19 @@ public List getDataDiskInfoList() { minIops = Long.parseLong(dataDisk.get(ApiConstants.MIN_IOPS)); maxIops = Long.parseLong(dataDisk.get(ApiConstants.MAX_IOPS)); } - VmDiskInfo vmDiskInfo = new VmDiskInfo(diskOffering, size, minIops, maxIops, deviceId); + + // Extract KMS key ID if provided + Long kmsKeyId = null; + String kmsKeyUuid = dataDisk.get(ApiConstants.KMS_KEY_ID); + if (kmsKeyUuid != null) { + KMSKey kmsKey = _entityMgr.findByUuid(org.apache.cloudstack.kms.KMSKey.class, kmsKeyUuid); + if (kmsKey == null) { + throw new InvalidParameterValueException("Unable to find KMS key " + kmsKeyUuid); + } + kmsKeyId = 
kmsKey.getId(); + } + + VmDiskInfo vmDiskInfo = new VmDiskInfo(diskOffering, size, minIops, maxIops, deviceId, kmsKeyId); vmDiskInfoList.add(vmDiskInfo); } this.dataDiskInfoList = vmDiskInfoList; diff --git a/api/src/main/java/org/apache/cloudstack/api/command/user/volume/CreateVolumeCmd.java b/api/src/main/java/org/apache/cloudstack/api/command/user/volume/CreateVolumeCmd.java index 5bcf3a141178..cb9253f11db5 100644 --- a/api/src/main/java/org/apache/cloudstack/api/command/user/volume/CreateVolumeCmd.java +++ b/api/src/main/java/org/apache/cloudstack/api/command/user/volume/CreateVolumeCmd.java @@ -30,6 +30,7 @@ import org.apache.cloudstack.api.command.user.UserCmd; import org.apache.cloudstack.api.response.DiskOfferingResponse; import org.apache.cloudstack.api.response.DomainResponse; +import org.apache.cloudstack.api.response.KMSKeyResponse; import org.apache.cloudstack.api.response.ProjectResponse; import org.apache.cloudstack.api.response.SnapshotResponse; import org.apache.cloudstack.api.response.UserVmResponse; @@ -109,6 +110,13 @@ public class CreateVolumeCmd extends BaseAsyncCreateCustomIdCmd implements UserC description = "The ID of the Instance; to be used with snapshot Id, Instance to which the volume gets attached after creation") private Long virtualMachineId; + @Parameter(name = ApiConstants.KMS_KEY_ID, + type = CommandType.UUID, + entityType = KMSKeyResponse.class, + description = "ID of the KMS Key for volume encryption (required if encryption enabled for zone)", + since = "4.23.0") + private Long kmsKeyId; + ///////////////////////////////////////////////////// /////////////////// Accessors /////////////////////// ///////////////////////////////////////////////////// @@ -169,6 +177,10 @@ public Long getVirtualMachineId() { return virtualMachineId; } + public Long getKmsKeyId() { + return kmsKeyId; + } + ///////////////////////////////////////////////////// /////////////// API Implementation/////////////////// 
///////////////////////////////////////////////////// diff --git a/api/src/main/java/org/apache/cloudstack/api/command/user/volume/ListVolumesCmd.java b/api/src/main/java/org/apache/cloudstack/api/command/user/volume/ListVolumesCmd.java index a4cd299dae9c..88f87e941e78 100644 --- a/api/src/main/java/org/apache/cloudstack/api/command/user/volume/ListVolumesCmd.java +++ b/api/src/main/java/org/apache/cloudstack/api/command/user/volume/ListVolumesCmd.java @@ -29,6 +29,7 @@ import org.apache.cloudstack.api.response.ClusterResponse; import org.apache.cloudstack.api.response.DiskOfferingResponse; import org.apache.cloudstack.api.response.HostResponse; +import org.apache.cloudstack.api.response.KMSKeyResponse; import org.apache.cloudstack.api.response.ListResponse; import org.apache.cloudstack.api.response.PodResponse; import org.apache.cloudstack.api.response.ServiceOfferingResponse; @@ -90,6 +91,9 @@ public class ListVolumesCmd extends BaseListRetrieveOnlyResourceCountCmd impleme @Parameter(name = ApiConstants.DISK_OFFERING_ID, type = CommandType.UUID, entityType = DiskOfferingResponse.class, description = "List volumes by disk offering", since = "4.4") private Long diskOfferingId; + @Parameter(name = ApiConstants.KMS_KEY_ID, type = CommandType.UUID, entityType = KMSKeyResponse.class, description = "List volumes by KMS Key", since = "4.23.0") + private Long kmsKeyId; + @Parameter(name = ApiConstants.DISPLAY_VOLUME, type = CommandType.BOOLEAN, description = "List resources by display flag; only ROOT admin is eligible to pass this parameter", since = "4.4", authorized = { RoleType.Admin}) private Boolean display; @@ -136,6 +140,10 @@ public Long getDiskOfferingId() { return diskOfferingId; } + public Long getKmsKeyId() { + return kmsKeyId; + } + public String getType() { return type; } diff --git a/api/src/main/java/org/apache/cloudstack/api/response/HSMProfileResponse.java b/api/src/main/java/org/apache/cloudstack/api/response/HSMProfileResponse.java new file mode 100644 
index 000000000000..607d1f0c379b --- /dev/null +++ b/api/src/main/java/org/apache/cloudstack/api/response/HSMProfileResponse.java @@ -0,0 +1,182 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.cloudstack.api.response; + +import java.util.Date; +import java.util.Map; + +import org.apache.cloudstack.api.ApiConstants; +import org.apache.cloudstack.api.BaseResponse; +import org.apache.cloudstack.api.EntityReference; +import org.apache.cloudstack.kms.HSMProfile; + +import com.cloud.serializer.Param; +import com.google.gson.annotations.SerializedName; + +@EntityReference(value = HSMProfile.class) +public class HSMProfileResponse extends BaseResponse implements ControlledViewEntityResponse { + @SerializedName(ApiConstants.ID) + @Param(description = "the ID of the HSM profile") + private String id; + + @SerializedName(ApiConstants.NAME) + @Param(description = "the name of the HSM profile") + private String name; + + @SerializedName(ApiConstants.PROTOCOL) + @Param(description = "the protocol of the HSM profile") + private String protocol; + + @SerializedName(ApiConstants.ACCOUNT_ID) + @Param(description = "the account ID of the HSM profile owner") + private String accountId; + + 
@SerializedName(ApiConstants.ACCOUNT) + @Param(description = "the account name of the HSM profile owner") + private String accountName; + + @SerializedName(ApiConstants.DOMAIN_ID) + @Param(description = "the domain ID of the HSM profile owner") + private String domainId; + + @SerializedName(ApiConstants.DOMAIN) + @Param(description = "the domain name of the HSM profile owner") + private String domainName; + + @SerializedName(ApiConstants.DOMAIN_PATH) + @Param(description = "the domain path of the HSM profile owner") + private String domainPath; + + @SerializedName(ApiConstants.PROJECT_ID) + @Param(description = "the project ID of the HSM profile owner") + private String projectId; + + @SerializedName(ApiConstants.PROJECT) + @Param(description = "the project name of the HSM profile owner") + private String projectName; + + @SerializedName(ApiConstants.ZONE_ID) + @Param(description = "the zone ID where the HSM profile is available") + private String zoneId; + + @SerializedName(ApiConstants.ZONE_NAME) + @Param(description = "the zone name where the HSM profile is available") + private String zoneName; + + @SerializedName("vendor") + @Param(description = "the vendor name of the HSM profile") + private String vendorName; + + @SerializedName(ApiConstants.STATE) + @Param(description = "the state of the HSM profile") + private String state; + + @SerializedName(ApiConstants.ENABLED) + @Param(description = "whether the HSM profile is enabled") + private Boolean enabled; + + @SerializedName("system") + @Param(description = "whether this is a system HSM profile available to all users globally") + private Boolean system; + + @SerializedName(ApiConstants.CREATED) + @Param(description = "the date the HSM profile was created") + private Date created; + + @SerializedName(ApiConstants.DETAILS) + @Param(description = "HSM configuration details (sensitive values are encrypted)") + private Map details; + + public void setId(String id) { + this.id = id; + } + + public void 
setName(String name) { + this.name = name; + } + + public void setProtocol(String protocol) { + this.protocol = protocol; + } + + public void setAccountId(String accountId) { + this.accountId = accountId; + } + + @Override + public void setAccountName(String accountName) { + this.accountName = accountName; + } + + @Override + public void setDomainId(String domainId) { + this.domainId = domainId; + } + + @Override + public void setDomainName(String domainName) { + this.domainName = domainName; + } + + @Override + public void setDomainPath(String domainPath) { + this.domainPath = domainPath; + } + + @Override + public void setProjectId(String projectId) { + this.projectId = projectId; + } + + @Override + public void setProjectName(String projectName) { + this.projectName = projectName; + } + + public void setZoneId(String zoneId) { + this.zoneId = zoneId; + } + + public void setZoneName(String zoneName) { + this.zoneName = zoneName; + } + + public void setVendorName(String vendorName) { + this.vendorName = vendorName; + } + + public void setState(String state) { + this.state = state; + } + + public void setEnabled(Boolean enabled) { + this.enabled = enabled; + } + + public void setSystem(Boolean system) { + this.system = system; + } + + public void setCreated(Date created) { + this.created = created; + } + + public void setDetails(Map details) { + this.details = details; + } +} diff --git a/api/src/main/java/org/apache/cloudstack/api/response/KMSKeyResponse.java b/api/src/main/java/org/apache/cloudstack/api/response/KMSKeyResponse.java new file mode 100644 index 000000000000..b9fdb1644ceb --- /dev/null +++ b/api/src/main/java/org/apache/cloudstack/api/response/KMSKeyResponse.java @@ -0,0 +1,284 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.cloudstack.api.response; + +import com.cloud.serializer.Param; +import com.google.gson.annotations.SerializedName; +import org.apache.cloudstack.acl.RoleType; +import org.apache.cloudstack.api.ApiConstants; +import org.apache.cloudstack.api.BaseResponse; +import org.apache.cloudstack.api.EntityReference; +import org.apache.cloudstack.kms.KMSKey; + +import java.util.Date; + +@EntityReference(value = KMSKey.class) +public class KMSKeyResponse extends BaseResponse implements ControlledViewEntityResponse { + + @SerializedName(ApiConstants.ID) + @Param(description = "the UUID of the key") + private String id; + + @SerializedName(ApiConstants.NAME) + @Param(description = "the name of the key") + private String name; + + @SerializedName(ApiConstants.DESCRIPTION) + @Param(description = "the description of the key") + private String description; + + @SerializedName(ApiConstants.PURPOSE) + @Param(description = "the purpose of the key (VOLUME_ENCRYPTION, TLS_CERT)") + private String purpose; + + @SerializedName(ApiConstants.ACCOUNT) + @Param(description = "the account owning the key") + private String accountName; + + @SerializedName(ApiConstants.ACCOUNT_ID) + @Param(description = "the account ID owning the key") + private String accountId; + + @SerializedName(ApiConstants.DOMAIN_ID) + @Param(description = "the domain ID of the key") + private String domainId; + + 
@SerializedName(ApiConstants.DOMAIN) + @Param(description = "the domain name of the key") + private String domainName; + + @SerializedName(ApiConstants.DOMAIN_PATH) + @Param(description = "the domain path of the key") + private String domainPath; + + @SerializedName(ApiConstants.ZONE_ID) + @Param(description = "the zone ID where the key is valid") + private String zoneId; + + @SerializedName(ApiConstants.ZONE_NAME) + @Param(description = "the zone name where the key is valid") + private String zoneName; + + @SerializedName(ApiConstants.HSM_PROFILE_ID) + @Param(description = "the ID of the HSM profile backing the key") + private String hsmProfileId; + + @SerializedName(ApiConstants.HSM_PROFILE) + @Param(description = "the name of the HSM profile backing the key") + private String hsmProfileName; + + @SerializedName(ApiConstants.ALGORITHM) + @Param(description = "the encryption algorithm") + private String algorithm; + + @SerializedName(ApiConstants.KEY_BITS) + @Param(description = "the key size in bits") + private Integer keyBits; + + @SerializedName(ApiConstants.VERSION) + @Param(description = "the version number of the key") + private Integer version; + + @SerializedName(ApiConstants.ENABLED) + @Param(description = "whether the key is enabled") + private Boolean enabled; + + @SerializedName(ApiConstants.CREATED) + @Param(description = "the creation timestamp") + private Date created; + + @SerializedName(ApiConstants.PROJECT_ID) + @Param(description = "the project ID of the key") + private String projectId; + + @SerializedName(ApiConstants.PROJECT) + @Param(description = "the project name of the key") + private String projectName; + + @SerializedName(ApiConstants.KEK_LABEL) + @Param(description = "the provider-specific KEK label (admin only)", authorized = { RoleType.Admin }) + private String kekLabel; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { 
+ this.name = name; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getPurpose() { + return purpose; + } + + public void setPurpose(String purpose) { + this.purpose = purpose; + } + + public String getAccountName() { + return accountName; + } + + @Override + public void setAccountName(String accountName) { + this.accountName = accountName; + } + + @Override + public void setProjectId(String projectId) { + this.projectId = projectId; + } + + @Override + public void setProjectName(String projectName) { + this.projectName = projectName; + } + + public String getAccountId() { + return accountId; + } + + public void setAccountId(String accountId) { + this.accountId = accountId; + } + + public String getDomainId() { + return domainId; + } + + @Override + public void setDomainId(String domainId) { + this.domainId = domainId; + } + + public String getDomainName() { + return domainName; + } + + @Override + public void setDomainName(String domainName) { + this.domainName = domainName; + } + + public String getDomainPath() { + return domainPath; + } + + @Override + public void setDomainPath(String domainPath) { + this.domainPath = domainPath; + } + + public String getZoneId() { + return zoneId; + } + + public void setZoneId(String zoneId) { + this.zoneId = zoneId; + } + + public String getZoneName() { + return zoneName; + } + + public void setZoneName(String zoneName) { + this.zoneName = zoneName; + } + + public String getHsmProfileId() { + return hsmProfileId; + } + + public void setHsmProfileId(String hsmProfileId) { + this.hsmProfileId = hsmProfileId; + } + + public String getHsmProfileName() { + return hsmProfileName; + } + + public void setHsmProfileName(String hsmProfileName) { + this.hsmProfileName = hsmProfileName; + } + + public String getAlgorithm() { + return algorithm; + } + + public void setAlgorithm(String algorithm) { + this.algorithm 
= algorithm; + } + + public Integer getKeyBits() { + return keyBits; + } + + public void setKeyBits(Integer keyBits) { + this.keyBits = keyBits; + } + + public Integer getVersion() { + return version; + } + + public void setVersion(Integer version) { + this.version = version; + } + + public Boolean getEnabled() { + return enabled; + } + + public void setEnabled(Boolean enabled) { + this.enabled = enabled; + } + + public Date getCreated() { + return created; + } + + public void setCreated(Date created) { + this.created = created; + } + + public String getKekLabel() { + return kekLabel; + } + + public void setKekLabel(String kekLabel) { + this.kekLabel = kekLabel; + } +} diff --git a/api/src/main/java/org/apache/cloudstack/api/response/VolumeResponse.java b/api/src/main/java/org/apache/cloudstack/api/response/VolumeResponse.java index 058ea50f991e..fd0c8ca10f5e 100644 --- a/api/src/main/java/org/apache/cloudstack/api/response/VolumeResponse.java +++ b/api/src/main/java/org/apache/cloudstack/api/response/VolumeResponse.java @@ -309,6 +309,18 @@ public class VolumeResponse extends BaseResponseWithTagInformation implements Co @Param(description = "the format of the disk encryption if applicable", since = "4.19.1") private String encryptionFormat; + @SerializedName(ApiConstants.KMS_KEY) + @Param(description = "KMS key name of the volume", since = "4.23.0") + private String kmsKey; + + @SerializedName(ApiConstants.KMS_KEY_ID) + @Param(description = "KMS key id of the volume", since = "4.23.0") + private String kmsKeyId; + + @SerializedName(ApiConstants.KMS_KEY_VERSION) + @Param(description = "Version number of the KMS key used for disk encryption if applicable", since = "4.23.0") + private Integer kmsKeyVersion; + public String getPath() { return path; } @@ -871,4 +883,28 @@ public void setVolumeRepairResult(Map volumeRepairResult) { public void setEncryptionFormat(String encryptionFormat) { this.encryptionFormat = encryptionFormat; } + + public String getKmsKey() { + 
return kmsKey; + } + + public void setKmsKey(String kmsKey) { + this.kmsKey = kmsKey; + } + + public String getKmsKeyId() { + return kmsKeyId; + } + + public void setKmsKeyId(String kmsKeyId) { + this.kmsKeyId = kmsKeyId; + } + + public Integer getKmsKeyVersion() { + return kmsKeyVersion; + } + + public void setKmsKeyVersion(Integer kmsKeyVersion) { + this.kmsKeyVersion = kmsKeyVersion; + } } diff --git a/api/src/main/java/org/apache/cloudstack/kms/HSMProfile.java b/api/src/main/java/org/apache/cloudstack/kms/HSMProfile.java new file mode 100644 index 000000000000..97a38f16ba94 --- /dev/null +++ b/api/src/main/java/org/apache/cloudstack/kms/HSMProfile.java @@ -0,0 +1,46 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.cloudstack.kms; + +import org.apache.cloudstack.acl.ControlledEntity; +import org.apache.cloudstack.api.Identity; +import org.apache.cloudstack.api.InternalIdentity; + +import java.util.Date; + +public interface HSMProfile extends Identity, InternalIdentity, ControlledEntity { + String getName(); + + String getProtocol(); + + long getAccountId(); + + long getDomainId(); + + Long getZoneId(); + + String getVendorName(); + + boolean isEnabled(); + + boolean isSystem(); + + Date getCreated(); + + Date getRemoved(); +} diff --git a/api/src/main/java/org/apache/cloudstack/kms/KMSKey.java b/api/src/main/java/org/apache/cloudstack/kms/KMSKey.java new file mode 100644 index 000000000000..c956a1ec66d8 --- /dev/null +++ b/api/src/main/java/org/apache/cloudstack/kms/KMSKey.java @@ -0,0 +1,60 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.cloudstack.kms; + +import org.apache.cloudstack.acl.ControlledEntity; +import org.apache.cloudstack.api.Identity; +import org.apache.cloudstack.api.InternalIdentity; +import org.apache.cloudstack.framework.kms.KeyPurpose; + +import java.util.Date; + +/** + * KMS Key (Key Encryption Key) metadata. 
+ * Represents a KEK that can be used to wrap/unwrap Data Encryption Keys (DEKs). + * KEKs are account-scoped and used for envelope encryption. + */ +public interface KMSKey extends Identity, InternalIdentity, ControlledEntity { + + String getName(); + + String getDescription(); + + /** + * Provider-specific KEK label/ID (internal identifier used by the KMS provider) + */ + String getKekLabel(); + + KeyPurpose getPurpose(); + + Long getZoneId(); + + String getAlgorithm(); + + Integer getKeyBits(); + + boolean isEnabled(); + + Date getCreated(); + + Date getRemoved(); + + Long getHsmProfileId(); +} diff --git a/api/src/main/java/org/apache/cloudstack/kms/KMSManager.java b/api/src/main/java/org/apache/cloudstack/kms/KMSManager.java new file mode 100644 index 000000000000..21f5f074c1f4 --- /dev/null +++ b/api/src/main/java/org/apache/cloudstack/kms/KMSManager.java @@ -0,0 +1,280 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.cloudstack.kms; + +import com.cloud.user.Account; +import com.cloud.utils.component.Manager; +import org.apache.cloudstack.api.command.admin.kms.MigrateVolumesToKMSCmd; +import org.apache.cloudstack.api.command.user.kms.RotateKMSKeyCmd; +import org.apache.cloudstack.api.command.user.kms.CreateKMSKeyCmd; +import org.apache.cloudstack.api.command.user.kms.DeleteKMSKeyCmd; +import org.apache.cloudstack.api.command.user.kms.ListKMSKeysCmd; +import org.apache.cloudstack.api.command.user.kms.UpdateKMSKeyCmd; +import org.apache.cloudstack.api.command.user.kms.hsm.AddHSMProfileCmd; +import org.apache.cloudstack.api.command.user.kms.hsm.DeleteHSMProfileCmd; +import org.apache.cloudstack.api.command.user.kms.hsm.ListHSMProfilesCmd; +import org.apache.cloudstack.api.command.user.kms.hsm.UpdateHSMProfileCmd; +import org.apache.cloudstack.api.response.HSMProfileResponse; +import org.apache.cloudstack.api.response.KMSKeyResponse; +import org.apache.cloudstack.api.response.ListResponse; +import org.apache.cloudstack.api.response.SuccessResponse; +import org.apache.cloudstack.framework.config.ConfigKey; +import org.apache.cloudstack.framework.config.Configurable; +import org.apache.cloudstack.framework.kms.KMSException; +import org.apache.cloudstack.framework.kms.KMSProvider; +import org.apache.cloudstack.framework.kms.WrappedKey; + +import java.util.List; + +public interface KMSManager extends Manager, Configurable { + + ConfigKey KMSDekSizeBits = new ConfigKey<>( + "Advanced", + Integer.class, + "kms.dek.size.bits", + "256", + "The size of Data Encryption Keys (DEK) in bits (128, 192, or 256)", + true, + ConfigKey.Scope.Global + ); + + ConfigKey KMSRetryCount = new ConfigKey<>( + "Advanced", + Integer.class, + "kms.retry.count", + "3", + "Number of retry attempts for transient KMS failures", + true, + ConfigKey.Scope.Global + ); + + ConfigKey KMSRetryDelayMs = new ConfigKey<>( + "Advanced", + Integer.class, + "kms.retry.delay.ms", + "1000", + "Delay in 
milliseconds between KMS retry attempts (exponential backoff)", + true, + ConfigKey.Scope.Global + ); + + ConfigKey KMSOperationTimeoutSec = new ConfigKey<>( + "Advanced", + Integer.class, + "kms.operation.timeout.sec", + "30", + "Timeout in seconds for KMS cryptographic operations", + true, + ConfigKey.Scope.Global + ); + + ConfigKey KMSRewrapBatchSize = new ConfigKey<>( + "Advanced", + Integer.class, + "kms.rewrap.batch.size", + "50", + "Number of wrapped keys to rewrap per batch in background job", + true, + ConfigKey.Scope.Global + ); + + ConfigKey KMSRewrapIntervalMs = new ConfigKey<>( + "Advanced", + Long.class, + "kms.rewrap.interval.ms", + "300000", + "Interval in milliseconds between background rewrap job executions (default: 5 minutes)", + true, + ConfigKey.Scope.Global + ); + + /** + * List all registered KMS providers + * + * @return list of available providers + */ + List listKMSProviders(); + + /** + * Get a specific KMS provider by name + * + * @param name provider name + * @return the provider, or null if not found + */ + KMSProvider getKMSProvider(String name); + + /** + * Unwrap a DEK from a wrapped key + * SECURITY: Caller must zeroize returned byte array after use! + * + * @param wrappedKey the wrapped key from database + * @param zoneId the zone ID + * @return plaintext DEK (caller must zeroize!) + * @throws KMSException if unwrap fails + */ + byte[] unwrapVolumeKey(WrappedKey wrappedKey, Long zoneId) throws KMSException; + + /** + * Check if caller has permission to use a KMS key + * + * @param callerAccountId the caller's account ID + * @param key the KMS key + * @return true if caller has permission + */ + boolean hasPermission(Long callerAccountId, KMSKey key); + + /** + * Validates that the KMS key can be used for volume encryption: key exists, not deleted, + * caller has access, key state is Enabled, and key purpose is VOLUME_ENCRYPTION. + * No-op if kmsKeyId is null. 
+ * + * @param caller the caller's account + * @param kmsKeyId the KMS key database ID + * @param zoneId the zone ID of the target resource (volume/VM) + * @throws InvalidParameterValueException if key not found, deleted, disabled, wrong purpose, or zone mismatch + * @throws PermissionDeniedException if caller lacks access + */ + void checkKmsKeyForVolumeEncryption(Account caller, Long kmsKeyId, Long zoneId); + + /** + * Unwrap a DEK by wrapped key ID, trying multiple KEK versions if needed + * + * @param wrappedKeyId the wrapped key database ID + * @return plaintext DEK (caller must zeroize!) + * @throws KMSException if unwrap fails + */ + byte[] unwrapKey(Long wrappedKeyId) throws KMSException; + + /** + * Generate and wrap a DEK using a specific KMS key UUID + * + * @param kmsKey the KMS key + * @param callerAccountId the caller's account ID + * @return wrapped key ready for database storage + * @throws KMSException if operation fails + */ + WrappedKey generateVolumeKeyWithKek(KMSKey kmsKey, Long callerAccountId) throws KMSException; + + /** + * Create a KMS key and return the response object. + * Handles validation, account resolution, and permission checks. + * + * @param cmd the create command with all parameters + * @return KMSKeyResponse + * @throws KMSException if creation fails + */ + KMSKeyResponse createKMSKey(CreateKMSKeyCmd cmd) throws KMSException; + + /** + * List KMS keys and return the response object. + * Handles validation and permission checks. + * + * @param cmd the list command with all parameters + * @return ListResponse with KMSKeyResponse objects + */ + ListResponse listKMSKeys(ListKMSKeysCmd cmd); + + /** + * Update a KMS key and return the response object. + * Handles validation and permission checks. 
+ * + * @param cmd the update command with all parameters + * @return KMSKeyResponse + * @throws KMSException if update fails + */ + KMSKeyResponse updateKMSKey(UpdateKMSKeyCmd cmd) throws KMSException; + + /** + * Delete a KMS key and return the response object. + * Handles validation and permission checks. + * + * @param cmd the delete command with all parameters + * @return SuccessResponse + * @throws KMSException if deletion fails + */ + SuccessResponse deleteKMSKey(DeleteKMSKeyCmd cmd) throws KMSException; + + /** + * Rotate KEK by creating new version and scheduling gradual re-encryption + * + * @param cmd the rotate command with all parameters + * @return New KEK version UUID + * @throws KMSException if rotation fails + */ + String rotateKMSKey(RotateKMSKeyCmd cmd) throws KMSException; + + /** + * Migrate passphrase-based volumes to KMS encryption + * + * @param cmd the migrate command with all parameters + * @return Number of volumes successfully migrated + * @throws KMSException if migration fails + */ + int migrateVolumesToKMS(MigrateVolumesToKMSCmd cmd) throws KMSException; + + /** + * Delete all KMS keys owned by an account (called during account cleanup) + * + * @param accountId the account ID + * @return true if all keys were successfully deleted + */ + boolean deleteKMSKeysByAccountId(Long accountId); + + /** + * Add a new HSM profile + * + * @param cmd the add command + * @return the created HSM profile + * @throws KMSException if addition fails + */ + HSMProfile addHSMProfile(AddHSMProfileCmd cmd) throws KMSException; + + /** + * List HSM profiles + * + * @param cmd the list command + * @return list of HSM profiles + */ + ListResponse listHSMProfiles(ListHSMProfilesCmd cmd); + + /** + * Delete an HSM profile + * + * @param cmd the delete command + * @return true if deletion was successful + * @throws KMSException if deletion fails + */ + boolean deleteHSMProfile(DeleteHSMProfileCmd cmd) throws KMSException; + + /** + * Update an HSM profile + * + * 
@param cmd the update command + * @return the updated HSM profile + * @throws KMSException if update fails + */ + HSMProfile updateHSMProfile(UpdateHSMProfileCmd cmd) throws KMSException; + + /** + * Create a response object for an HSM profile + * + * @param profile the HSM profile + * @return the response object + */ + HSMProfileResponse createHSMProfileResponse(HSMProfile profile); +} diff --git a/client/pom.xml b/client/pom.xml index b8dffe65d4fb..979a60682077 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -251,6 +251,16 @@ cloud-plugin-metrics ${project.version} + + org.apache.cloudstack + cloud-plugin-kms-database + ${project.version} + + + org.apache.cloudstack + cloud-plugin-kms-pkcs11 + ${project.version} + org.apache.cloudstack cloud-plugin-network-nvp diff --git a/core/src/main/resources/META-INF/cloudstack/core/spring-core-registry-core-context.xml b/core/src/main/resources/META-INF/cloudstack/core/spring-core-registry-core-context.xml index 01c568d78916..15465c22f8fc 100644 --- a/core/src/main/resources/META-INF/cloudstack/core/spring-core-registry-core-context.xml +++ b/core/src/main/resources/META-INF/cloudstack/core/spring-core-registry-core-context.xml @@ -366,4 +366,7 @@ + + + diff --git a/core/src/main/resources/META-INF/cloudstack/kms/module.properties b/core/src/main/resources/META-INF/cloudstack/kms/module.properties new file mode 100644 index 000000000000..98e38d7cd8f6 --- /dev/null +++ b/core/src/main/resources/META-INF/cloudstack/kms/module.properties @@ -0,0 +1,21 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +name=kms +parent=core diff --git a/core/src/main/resources/META-INF/cloudstack/kms/spring-core-lifecycle-kms-context-inheritable.xml b/core/src/main/resources/META-INF/cloudstack/kms/spring-core-lifecycle-kms-context-inheritable.xml new file mode 100644 index 000000000000..9226eef8fc1a --- /dev/null +++ b/core/src/main/resources/META-INF/cloudstack/kms/spring-core-lifecycle-kms-context-inheritable.xml @@ -0,0 +1,29 @@ + + + + + + + + diff --git a/engine/api/src/main/java/org/apache/cloudstack/engine/orchestration/service/VolumeOrchestrationService.java b/engine/api/src/main/java/org/apache/cloudstack/engine/orchestration/service/VolumeOrchestrationService.java index 6f8c46304567..55e9023407c5 100644 --- a/engine/api/src/main/java/org/apache/cloudstack/engine/orchestration/service/VolumeOrchestrationService.java +++ b/engine/api/src/main/java/org/apache/cloudstack/engine/orchestration/service/VolumeOrchestrationService.java @@ -120,7 +120,7 @@ VolumeInfo moveVolume(VolumeInfo volume, long destPoolDcId, Long destPoolPodId, void destroyVolume(Volume volume); DiskProfile allocateRawVolume(Type type, String name, DiskOffering offering, Long size, Long minIops, Long maxIops, VirtualMachine vm, VirtualMachineTemplate template, - Account owner, Long deviceId); + Account owner, Long deviceId, Long kmsKeyId); VolumeInfo createVolumeOnPrimaryStorage(VirtualMachine vm, VolumeInfo volume, HypervisorType rootDiskHyperType, StoragePool storagePool) throws NoTransitionException; @@ -150,7 +150,7 @@ DiskProfile allocateRawVolume(Type type, String name, DiskOffering 
offering, Lon * Allocate a volume or multiple volumes in case of template is registered with the 'deploy-as-is' option, allowing multiple disks */ List allocateTemplatedVolumes(Type type, String name, DiskOffering offering, Long rootDisksize, Long minIops, Long maxIops, VirtualMachineTemplate template, VirtualMachine vm, - Account owner, Volume volume, Snapshot snapshot); + Account owner, Long kmsKeyId, Volume volume, Snapshot snapshot); String getVmNameFromVolumeId(long volumeId); diff --git a/engine/api/src/main/java/org/apache/cloudstack/engine/service/api/OrchestrationService.java b/engine/api/src/main/java/org/apache/cloudstack/engine/service/api/OrchestrationService.java index 6be71b3cb250..887aeaef0736 100644 --- a/engine/api/src/main/java/org/apache/cloudstack/engine/service/api/OrchestrationService.java +++ b/engine/api/src/main/java/org/apache/cloudstack/engine/service/api/OrchestrationService.java @@ -71,7 +71,7 @@ VirtualMachineEntity createVirtualMachine(@QueryParam("id") String id, @QueryPar @QueryParam("network-nic-map") Map> networkNicMap, @QueryParam("deploymentplan") DeploymentPlan plan, @QueryParam("root-disk-size") Long rootDiskSize, @QueryParam("extra-dhcp-option-map") Map> extraDhcpOptionMap, @QueryParam("datadisktemplate-diskoffering-map") Map datadiskTemplateToDiskOfferingMap, @QueryParam("disk-offering-id") Long diskOfferingId, - @QueryParam("root-disk-offering-id") Long rootDiskOfferingId, List dataDiskInfoList, Volume volume, Snapshot snapshot) throws InsufficientCapacityException; + @QueryParam("root-disk-offering-id") Long rootDiskOfferingId, @QueryParam("root-disk-kms-key-id") Long rootDiskKmsKeyId, List dataDiskInfoList, Volume volume, Snapshot snapshot) throws InsufficientCapacityException; @POST VirtualMachineEntity createVirtualMachineFromScratch(@QueryParam("id") String id, @QueryParam("owner") String owner, @QueryParam("iso-id") String isoId, @@ -80,7 +80,7 @@ VirtualMachineEntity createVirtualMachineFromScratch(@QueryParam("id") 
String id @QueryParam("compute-tags") List computeTags, @QueryParam("root-disk-tags") List rootDiskTags, @QueryParam("network-nic-map") Map> networkNicMap, @QueryParam("deploymentplan") DeploymentPlan plan, @QueryParam("extra-dhcp-option-map") Map> extraDhcpOptionMap, @QueryParam("disk-offering-id") Long diskOfferingId, - @QueryParam("data-disks-offering-info") List dataDiskInfoList, Volume volume, Snapshot snapshot) throws InsufficientCapacityException; + @QueryParam("root-disk-kms-key-id") Long rootDiskKmsKeyId, @QueryParam("data-disks-offering-info") List dataDiskInfoList, Volume volume, Snapshot snapshot) throws InsufficientCapacityException; @POST NetworkEntity createNetwork(String id, String name, String domainName, String cidr, String gateway); diff --git a/engine/orchestration/src/main/java/com/cloud/vm/VirtualMachineManagerImpl.java b/engine/orchestration/src/main/java/com/cloud/vm/VirtualMachineManagerImpl.java index e8796fb02529..b3223320142f 100755 --- a/engine/orchestration/src/main/java/com/cloud/vm/VirtualMachineManagerImpl.java +++ b/engine/orchestration/src/main/java/com/cloud/vm/VirtualMachineManagerImpl.java @@ -585,7 +585,7 @@ public void allocate(final String vmInstanceName, final VirtualMachineTemplate t Long deviceId = dataDiskDeviceIds.get(index++); String volumeName = deviceId == null ? 
"DATA-" + persistedVm.getId() : "DATA-" + persistedVm.getId() + "-" + String.valueOf(deviceId); volumeMgr.allocateRawVolume(Type.DATADISK, volumeName, dataDiskOfferingInfo.getDiskOffering(), dataDiskOfferingInfo.getSize(), - dataDiskOfferingInfo.getMinIops(), dataDiskOfferingInfo.getMaxIops(), persistedVm, template, owner, deviceId); + dataDiskOfferingInfo.getMinIops(), dataDiskOfferingInfo.getMaxIops(), persistedVm, template, owner, deviceId, dataDiskOfferingInfo.getKmsKeyId()); } } if (datadiskTemplateToDiskOfferingMap != null && !datadiskTemplateToDiskOfferingMap.isEmpty()) { @@ -595,7 +595,7 @@ public void allocate(final String vmInstanceName, final VirtualMachineTemplate t long diskOfferingSize = diskOffering.getDiskSize() / (1024 * 1024 * 1024); VMTemplateVO dataDiskTemplate = _templateDao.findById(dataDiskTemplateToDiskOfferingMap.getKey()); volumeMgr.allocateRawVolume(Type.DATADISK, "DATA-" + persistedVm.getId() + "-" + String.valueOf( diskNumber), diskOffering, diskOfferingSize, null, null, - persistedVm, dataDiskTemplate, owner, diskNumber); + persistedVm, dataDiskTemplate, owner, diskNumber, null); diskNumber++; } } @@ -625,12 +625,12 @@ private void allocateRootVolume(VMInstanceVO vm, VirtualMachineTemplate template String rootVolumeName = String.format("ROOT-%s", vm.getId()); if (template.getFormat() == ImageFormat.ISO) { volumeMgr.allocateRawVolume(Type.ROOT, rootVolumeName, rootDiskOfferingInfo.getDiskOffering(), rootDiskOfferingInfo.getSize(), - rootDiskOfferingInfo.getMinIops(), rootDiskOfferingInfo.getMaxIops(), vm, template, owner, null); + rootDiskOfferingInfo.getMinIops(), rootDiskOfferingInfo.getMaxIops(), vm, template, owner, null, rootDiskOfferingInfo.getKmsKeyId()); } else if (Arrays.asList(ImageFormat.BAREMETAL, ImageFormat.EXTERNAL).contains(template.getFormat())) { logger.debug("{} has format [{}]. 
Skipping ROOT volume [{}] allocation.", template, template.getFormat(), rootVolumeName); } else { volumeMgr.allocateTemplatedVolumes(Type.ROOT, rootVolumeName, rootDiskOfferingInfo.getDiskOffering(), rootDiskSizeFinal, - rootDiskOfferingInfo.getMinIops(), rootDiskOfferingInfo.getMaxIops(), template, vm, owner, volume, snapshot); + rootDiskOfferingInfo.getMinIops(), rootDiskOfferingInfo.getMaxIops(), template, vm, owner, rootDiskOfferingInfo.getKmsKeyId(), volume, snapshot); } } finally { // Remove volumeContext and pop vmContext back diff --git a/engine/orchestration/src/main/java/org/apache/cloudstack/engine/orchestration/CloudOrchestrator.java b/engine/orchestration/src/main/java/org/apache/cloudstack/engine/orchestration/CloudOrchestrator.java index 8639f006383f..eb2a8828d979 100644 --- a/engine/orchestration/src/main/java/org/apache/cloudstack/engine/orchestration/CloudOrchestrator.java +++ b/engine/orchestration/src/main/java/org/apache/cloudstack/engine/orchestration/CloudOrchestrator.java @@ -164,7 +164,7 @@ public void destroyVolume(String volumeEntity) { public VirtualMachineEntity createVirtualMachine(String id, String owner, String templateId, String hostName, String displayName, String hypervisor, int cpu, int speed, long memory, Long diskSize, List computeTags, List rootDiskTags, Map> networkNicMap, DeploymentPlan plan, Long rootDiskSize, Map> extraDhcpOptionMap, Map dataDiskTemplateToDiskOfferingMap, Long dataDiskOfferingId, Long rootDiskOfferingId, - List dataDiskInfoList, Volume volume, Snapshot snapshot) throws InsufficientCapacityException { + Long rootDiskKmsKeyId, List dataDiskInfoList, Volume volume, Snapshot snapshot) throws InsufficientCapacityException { // VirtualMachineEntityImpl vmEntity = new VirtualMachineEntityImpl(id, owner, hostName, displayName, cpu, speed, memory, computeTags, rootDiskTags, networks, // vmEntityManager); @@ -198,6 +198,7 @@ public VirtualMachineEntity createVirtualMachine(String id, String owner, String } 
rootDiskOfferingInfo.setDiskOffering(rootDiskOffering); rootDiskOfferingInfo.setSize(rootDiskSize); + rootDiskOfferingInfo.setKmsKeyId(rootDiskKmsKeyId); if (rootDiskOffering.isCustomizedIops() != null && rootDiskOffering.isCustomizedIops()) { Map userVmDetails = _vmInstanceDetailsDao.listDetailsKeyPairs(vm.getId()); @@ -280,7 +281,7 @@ public VirtualMachineEntity createVirtualMachine(String id, String owner, String @Override public VirtualMachineEntity createVirtualMachineFromScratch(String id, String owner, String isoId, String hostName, String displayName, String hypervisor, String os, int cpu, int speed, long memory, Long diskSize, List computeTags, List rootDiskTags, Map> networkNicMap, DeploymentPlan plan, - Map> extraDhcpOptionMap, Long diskOfferingId, List dataDiskInfoList, Volume volume, Snapshot snapshot) + Map> extraDhcpOptionMap, Long diskOfferingId, Long rootDiskKmsKeyId, List dataDiskInfoList, Volume volume, Snapshot snapshot) throws InsufficientCapacityException { // VirtualMachineEntityImpl vmEntity = new VirtualMachineEntityImpl(id, owner, hostName, displayName, cpu, speed, memory, computeTags, rootDiskTags, networks, vmEntityManager); @@ -314,6 +315,7 @@ public VirtualMachineEntity createVirtualMachineFromScratch(String id, String ow rootDiskOfferingInfo.setDiskOffering(diskOffering); rootDiskOfferingInfo.setSize(size); + rootDiskOfferingInfo.setKmsKeyId(rootDiskKmsKeyId); if (diskOffering.isCustomizedIops() != null && diskOffering.isCustomizedIops()) { Map userVmDetails = _vmInstanceDetailsDao.listDetailsKeyPairs(vm.getId()); diff --git a/engine/orchestration/src/main/java/org/apache/cloudstack/engine/orchestration/VolumeOrchestrator.java b/engine/orchestration/src/main/java/org/apache/cloudstack/engine/orchestration/VolumeOrchestrator.java index e8c75afa81c5..c69ac85a95b1 100644 --- a/engine/orchestration/src/main/java/org/apache/cloudstack/engine/orchestration/VolumeOrchestrator.java +++ 
b/engine/orchestration/src/main/java/org/apache/cloudstack/engine/orchestration/VolumeOrchestrator.java @@ -85,6 +85,13 @@ import org.apache.cloudstack.secret.PassphraseVO; import org.apache.cloudstack.secret.dao.PassphraseDao; import org.apache.cloudstack.snapshot.SnapshotHelper; +import org.apache.cloudstack.kms.KMSManager; +import org.apache.cloudstack.kms.KMSKeyVO; +import org.apache.cloudstack.kms.KMSWrappedKeyVO; +import org.apache.cloudstack.kms.dao.KMSKeyDao; +import org.apache.cloudstack.kms.dao.KMSWrappedKeyDao; +import org.apache.cloudstack.framework.kms.KMSException; +import org.apache.cloudstack.framework.kms.WrappedKey; import org.apache.cloudstack.storage.command.CommandResult; import org.apache.cloudstack.storage.datastore.db.ImageStoreDao; import org.apache.cloudstack.storage.datastore.db.PrimaryDataStoreDao; @@ -279,6 +286,12 @@ public enum UserVmCloneType { @Inject private DataStoreProviderManager dataStoreProviderMgr; + @Inject + private KMSManager kmsManager; + @Inject + private KMSKeyDao kmsKeyDao; + @Inject + private KMSWrappedKeyDao kmsWrappedKeyDao; private final StateMachine2 _volStateMachine; protected List _storagePoolAllocators; @@ -507,7 +520,9 @@ public VolumeInfo createVolumeFromSnapshot(Volume volume, Snapshot snapshot, Use DiskOffering diskOffering = _entityMgr.findById(DiskOffering.class, volume.getDiskOfferingId()); if (diskOffering.getEncrypt()) { VolumeVO vol = (VolumeVO) volume; - volume = setPassphraseForVolumeEncryption(vol); + // Retrieve KMS key from volume's kmsKeyId if provided + KMSKeyVO kmsKey = getKmsKeyFromVolume(vol); + volume = setPassphraseForVolumeEncryption(vol, kmsKey, volume.getAccountId()); } DataCenter dc = _entityMgr.findById(DataCenter.class, volume.getDataCenterId()); DiskProfile dskCh = new DiskProfile(volume, diskOffering, snapshot.getHypervisorType()); @@ -724,7 +739,9 @@ public VolumeInfo createVolume(VolumeInfo volumeInfo, VirtualMachine vm, Virtual if (diskOffering.getEncrypt()) { VolumeVO vol = 
_volsDao.findById(volumeInfo.getId()); - setPassphraseForVolumeEncryption(vol); + // Retrieve KMS key from volume's kmsKeyId if provided + KMSKeyVO kmsKey = getKmsKeyFromVolume(vol); + setPassphraseForVolumeEncryption(vol, kmsKey, vol.getAccountId()); volumeInfo = volFactory.getVolume(volumeInfo.getId()); } } @@ -862,7 +879,7 @@ protected DiskProfile toDiskProfile(Volume vol, DiskOffering offering) { @ActionEvent(eventType = EventTypes.EVENT_VOLUME_CREATE, eventDescription = "creating volume", create = true) @Override public DiskProfile allocateRawVolume(Type type, String name, DiskOffering offering, Long size, Long minIops, Long maxIops, VirtualMachine vm, VirtualMachineTemplate template, Account owner, - Long deviceId) { + Long deviceId, Long kmsKeyId) { if (size == null) { size = offering.getDiskSize(); } else { @@ -895,6 +912,11 @@ public DiskProfile allocateRawVolume(Type type, String name, DiskOffering offeri vol.setDisplayVolume(userVm.isDisplayVm()); } + // Set KMS key ID if provided + if (kmsKeyId != null) { + vol.setKmsKeyId(kmsKeyId); + } + vol.setFormat(getSupportedImageFormatForCluster(vm.getHypervisorType())); vol = _volsDao.persist(vol); @@ -914,7 +936,7 @@ public DiskProfile allocateRawVolume(Type type, String name, DiskOffering offeri } private DiskProfile allocateTemplatedVolume(Type type, String name, DiskOffering offering, Long rootDisksize, Long minIops, Long maxIops, VirtualMachineTemplate template, VirtualMachine vm, - Account owner, long deviceId, String configurationId, Volume volume, Snapshot snapshot) { + Account owner, long deviceId, String configurationId, Long kmsKeyId, Volume volume, Snapshot snapshot) { assert (template.getFormat() != ImageFormat.ISO) : "ISO is not a template."; if (volume != null) { @@ -964,6 +986,11 @@ private DiskProfile allocateTemplatedVolume(Type type, String name, DiskOffering vol.setDisplayVolume(userVm.isDisplayVm()); } + // Set KMS key ID if provided + if (kmsKeyId != null) { + vol.setKmsKeyId(kmsKeyId); + 
} + vol = _volsDao.persist(vol); saveVolumeDetails(offering.getId(), vol.getId()); @@ -1053,7 +1080,7 @@ public void saveVolumeDetails(Long diskOfferingId, Long volumeId) { @ActionEvent(eventType = EventTypes.EVENT_VOLUME_CREATE, eventDescription = "creating ROOT volume", create = true) @Override public List allocateTemplatedVolumes(Type type, String name, DiskOffering offering, Long rootDisksize, Long minIops, Long maxIops, VirtualMachineTemplate template, VirtualMachine vm, - Account owner, Volume volume, Snapshot snapshot) { + Account owner, Long kmsKeyId, Volume volume, Snapshot snapshot) { String templateToString = getReflectOnlySelectedFields(template); int volumesNumber = 1; @@ -1100,7 +1127,7 @@ public List allocateTemplatedVolumes(Type type, String name, DiskOf } logger.info("Adding disk object [{}] to VM [{}]", volumeName, vm); DiskProfile diskProfile = allocateTemplatedVolume(type, volumeName, offering, volumeSize, minIops, maxIops, - template, vm, owner, deviceId, configurationId, volume, snapshot); + template, vm, owner, deviceId, configurationId, kmsKeyId, volume, snapshot); profiles.add(diskProfile); } @@ -1775,7 +1802,9 @@ private Pair recreateVolume(VolumeVO vol, VirtualMachinePro if (vol.getState() == Volume.State.Allocated || vol.getState() == Volume.State.Creating) { DiskOffering diskOffering = _entityMgr.findById(DiskOffering.class, vol.getDiskOfferingId()); if (diskOffering.getEncrypt()) { - vol = setPassphraseForVolumeEncryption(vol); + // Retrieve KMS key from volume's kmsKeyId if provided + KMSKeyVO kmsKey = getKmsKeyFromVolume(vol); + vol = setPassphraseForVolumeEncryption(vol, kmsKey, vol.getAccountId()); } newVol = vol; } else { @@ -1898,10 +1927,65 @@ private Pair recreateVolume(VolumeVO vol, VirtualMachinePro return new Pair<>(newVol, destPool); } + /** + * Helper method to retrieve KMS key from volume's kmsKeyId + */ + private KMSKeyVO getKmsKeyFromVolume(VolumeVO volume) { + if (volume.getKmsKeyId() == null) { + return null; + } + 
return kmsKeyDao.findById(volume.getKmsKeyId()); + } + private VolumeVO setPassphraseForVolumeEncryption(VolumeVO volume) { - if (volume.getPassphraseId() != null) { + return setPassphraseForVolumeEncryption(volume, null, null); + } + + private VolumeVO setPassphraseForVolumeEncryption(VolumeVO volume, KMSKeyVO kmsKey, Long callerAccountId) { + // If volume already has encryption set up, return it + if (volume.getKmsWrappedKeyId() != null || volume.getPassphraseId() != null) { return volume; } + + if (kmsKey != null) { + // Determine caller account ID if not provided + if (callerAccountId == null) { + callerAccountId = volume.getAccountId(); + } + + // Validate permission + if (!kmsManager.hasPermission(callerAccountId, kmsKey)) { + throw new CloudRuntimeException("No permission to use KMS key: " + kmsKey); + } + + try { + logger.debug("Generating and wrapping DEK for volume {} using KMS key {}", volume.getName(), kmsKey.getUuid()); + long startTime = System.currentTimeMillis(); + + // Generate and wrap DEK using active KEK version + WrappedKey wrappedKey = kmsManager.generateVolumeKeyWithKek(kmsKey, callerAccountId); + + // The wrapped key is already persisted by generateVolumeKeyWithKek, get its ID + KMSWrappedKeyVO wrappedKeyVO = kmsWrappedKeyDao.findByUuid(wrappedKey.getUuid()); + if (wrappedKeyVO == null) { + throw new CloudRuntimeException("Failed to find persisted wrapped key: " + wrappedKey.getUuid()); + } + + // Set the wrapped key ID on the volume + volume.setKmsWrappedKeyId(wrappedKeyVO.getId()); + + long finishTime = System.currentTimeMillis(); + logger.debug("Generating and persisting wrapped key took {} ms for volume: {}", + (finishTime - startTime), volume.getName()); + + return _volsDao.persist(volume); + + } catch (KMSException e) { + throw new CloudRuntimeException("KMS failure while setting up volume encryption: " + e.getMessage(), e); + } + } + + // Legacy: passphrase-based encryption (fallback when KMS not enabled or KMS key not specified) 
logger.debug("Creating passphrase for the volume: " + volume.getName()); long startTime = System.currentTimeMillis(); PassphraseVO passphrase = passphraseDao.persist(new PassphraseVO(true)); diff --git a/engine/schema/pom.xml b/engine/schema/pom.xml index 654cd14a25d3..664d4909e677 100644 --- a/engine/schema/pom.xml +++ b/engine/schema/pom.xml @@ -48,6 +48,11 @@ cloud-framework-db ${project.version} + + org.apache.cloudstack + cloud-framework-kms + ${project.version} + com.mysql mysql-connector-j diff --git a/engine/schema/src/main/java/com/cloud/storage/VolumeVO.java b/engine/schema/src/main/java/com/cloud/storage/VolumeVO.java index 653be54a9109..3815125b3487 100644 --- a/engine/schema/src/main/java/com/cloud/storage/VolumeVO.java +++ b/engine/schema/src/main/java/com/cloud/storage/VolumeVO.java @@ -182,6 +182,12 @@ public class VolumeVO implements Volume { @Column(name = "passphrase_id") private Long passphraseId; + @Column(name = "kms_key_id") + private Long kmsKeyId; + + @Column(name = "kms_wrapped_key_id") + private Long kmsWrappedKeyId; + @Column(name = "encrypt_format") private String encryptFormat; @@ -683,6 +689,14 @@ public void setExternalUuid(String externalUuid) { public void setPassphraseId(Long id) { this.passphraseId = id; } + public Long getKmsKeyId() { return kmsKeyId; } + + public void setKmsKeyId(Long id) { this.kmsKeyId = id; } + + public Long getKmsWrappedKeyId() { return kmsWrappedKeyId; } + + public void setKmsWrappedKeyId(Long id) { this.kmsWrappedKeyId = id; } + public String getEncryptFormat() { return encryptFormat; } public void setEncryptFormat(String encryptFormat) { this.encryptFormat = encryptFormat; } diff --git a/engine/schema/src/main/java/com/cloud/storage/dao/VolumeDao.java b/engine/schema/src/main/java/com/cloud/storage/dao/VolumeDao.java index a03b94faa797..4945d08a54f0 100644 --- a/engine/schema/src/main/java/com/cloud/storage/dao/VolumeDao.java +++ b/engine/schema/src/main/java/com/cloud/storage/dao/VolumeDao.java @@ 
-109,6 +109,17 @@ public interface VolumeDao extends GenericDao, StateDao listVolumesByPassphraseId(long passphraseId); + /** + * List volumes with passphrase_id for migration to KMS + * + * @param zoneId Zone ID (required) + * @param accountId Account ID filter (optional, null for all accounts) + * @param domainId Domain ID filter (optional, null for all domains) + * @param limit Maximum number of volumes to return + * @return list of volumes that need migration + */ + Pair, Integer> listVolumesForKMSMigration(Long zoneId, Long accountId, Long domainId, Integer limit); + /** * Gets the Total Primary Storage space allocated for an account * @@ -166,4 +177,6 @@ public interface VolumeDao extends GenericDao, StateDao implements Vol protected GenericSearchBuilder secondaryStorageSearch; private final SearchBuilder poolAndPathSearch; final GenericSearchBuilder CountByOfferingId; + private final SearchBuilder kmsMigrationSearch; @Inject ReservationDao reservationDao; @Inject ResourceTagDao tagsDao; + @Inject + KMSWrappedKeyDao kmsWrappedKeyDao; // need to account for zone-wide primary storage where storage_pool has // null-value pod and cluster, where hypervisor information is stored in @@ -512,6 +516,13 @@ public VolumeDaoImpl() { CountByOfferingId.select(null, Func.COUNT, CountByOfferingId.entity().getId()); CountByOfferingId.and("diskOfferingId", CountByOfferingId.entity().getDiskOfferingId(), Op.EQ); CountByOfferingId.done(); + + kmsMigrationSearch = createSearchBuilder(); + kmsMigrationSearch.and("passphraseId", kmsMigrationSearch.entity().getPassphraseId(), Op.NNULL); + kmsMigrationSearch.and("zoneId", kmsMigrationSearch.entity().getDataCenterId(), Op.EQ); + kmsMigrationSearch.and("accountId", kmsMigrationSearch.entity().getAccountId(), Op.EQ); + kmsMigrationSearch.and("domainId", kmsMigrationSearch.entity().getDomainId(), Op.EQ); + kmsMigrationSearch.done(); } @Override @@ -732,6 +743,21 @@ public List listVolumesByPassphraseId(long passphraseId) { return 
listBy(sc); } + @Override + public Pair, Integer> listVolumesForKMSMigration(Long zoneId, Long accountId, Long domainId, Integer limit) { + SearchCriteria sc = kmsMigrationSearch.create(); + + Filter filter = new Filter(limit); + sc.setParameters("zoneId", zoneId); + if (accountId != null) { + sc.setParameters("accountId", accountId); + } + if (domainId != null) { + sc.setParameters("domainId", domainId); + } + return searchAndCount(sc, filter); + } + @Override @DB public boolean remove(Long id) { @@ -740,6 +766,17 @@ public boolean remove(Long id) { logger.debug(String.format("Removing volume %s from DB", id)); VolumeVO entry = findById(id); if (entry != null) { + // Clean up KMS wrapped key if volume was encrypted with KMS + if (entry.getKmsWrappedKeyId() != null) { + try { + kmsWrappedKeyDao.remove(entry.getKmsWrappedKeyId()); + logger.debug("Removed KMS wrapped key [id={}] for volume [id={}, uuid={}]", + entry.getKmsWrappedKeyId(), id, entry.getUuid()); + } catch (Exception e) { + logger.warn("Failed to remove KMS wrapped key [id={}] for volume [id={}, uuid={}]: {}", + entry.getKmsWrappedKeyId(), id, entry.getUuid(), e.getMessage(), e); + } + } tagsDao.removeByIdAndType(id, ResourceObjectType.Volume); } boolean result = super.remove(id); @@ -934,4 +971,12 @@ public VolumeVO findByLastIdAndState(long lastVolumeId, State ...states) { sc.and(sc.entity().getState(), SearchCriteria.Op.IN, (Object[]) states); return sc.find(); } + + @Override + public boolean existsWithKmsKey(long kmsKeyId) { + SearchCriteria sc = AllFieldsSearch.create(); + sc.setParameters("kmsKeyId", kmsKeyId); + sc.setParameters("notDestroyed", Volume.State.Expunged, Volume.State.Destroy); + return findOneBy(sc) != null; + } } diff --git a/engine/schema/src/main/java/org/apache/cloudstack/kms/HSMProfileDetailsVO.java b/engine/schema/src/main/java/org/apache/cloudstack/kms/HSMProfileDetailsVO.java new file mode 100644 index 000000000000..cd20b8a74fe5 --- /dev/null +++ 
b/engine/schema/src/main/java/org/apache/cloudstack/kms/HSMProfileDetailsVO.java @@ -0,0 +1,84 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.cloudstack.kms; + +import org.apache.cloudstack.api.ResourceDetail; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.Table; + +@Entity +@Table(name = "kms_hsm_profile_details") +public class HSMProfileDetailsVO implements ResourceDetail { + + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + @Column(name = "id") + private long id; + + @Column(name = "profile_id") + private long resourceId; + + @Column(name = "name") + private String name; + + @Column(name = "value") + private String value; + + public HSMProfileDetailsVO() { + } + + public HSMProfileDetailsVO(long profileId, String name, String value) { + this.resourceId = profileId; + this.name = name; + this.value = value; + } + + @Override + public long getId() { + return id; + } + + @Override + public long getResourceId() { + return resourceId; + } + + @Override + public String getName() { + return name; + } + + @Override + public 
String getValue() { + return value; + } + + @Override + public boolean isDisplay() { + return true; + } + + public void setValue(String value) { + this.value = value; + } +} diff --git a/engine/schema/src/main/java/org/apache/cloudstack/kms/HSMProfileVO.java b/engine/schema/src/main/java/org/apache/cloudstack/kms/HSMProfileVO.java new file mode 100644 index 000000000000..fdf1c78c693d --- /dev/null +++ b/engine/schema/src/main/java/org/apache/cloudstack/kms/HSMProfileVO.java @@ -0,0 +1,183 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
package org.apache.cloudstack.kms;

import org.apache.cloudstack.utils.reflectiontostringbuilderutils.ReflectionToStringBuilderUtils;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import java.util.Date;
import java.util.UUID;

/**
 * Database entity for an HSM (Hardware Security Module) profile.
 * Records the vendor, protocol and scope (account/domain/zone) of an HSM
 * backend that KMS keys can be bound to.
 */
@Entity
@Table(name = "kms_hsm_profiles")
public class HSMProfileVO implements HSMProfile {

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Column(name = "id")
    private long id;

    @Column(name = "uuid")
    private String uuid;

    @Column(name = "name")
    private String name;

    @Column(name = "protocol")
    private String protocol;

    // Nullable: a profile may be scoped wider than a single account.
    @Column(name = "account_id")
    private Long accountId;

    @Column(name = "domain_id")
    private Long domainId;

    @Column(name = "zone_id")
    private Long zoneId;

    @Column(name = "vendor_name")
    private String vendorName;

    @Column(name = "enabled")
    private boolean enabled;

    @Column(name = "system")
    private boolean system;

    @Column(name = "created")
    private Date created;

    @Column(name = "removed")
    private Date removed;

    /**
     * No-arg constructor used by the persistence layer; assigns a fresh UUID
     * and creation timestamp. Note: {@code enabled} is left false here,
     * unlike the full constructor below.
     */
    public HSMProfileVO() {
        this.uuid = UUID.randomUUID().toString();
        this.created = new Date();
        this.system = false;
    }

    /**
     * Creates an enabled, non-system profile with the given scope.
     *
     * @param name       display name of the profile
     * @param protocol   HSM access protocol identifier
     * @param accountId  owning account id, or null if not account-scoped
     * @param domainId   owning domain id, or null if not domain-scoped
     * @param zoneId     zone id, or null if zone-agnostic
     * @param vendorName HSM vendor identifier
     */
    public HSMProfileVO(String name, String protocol, Long accountId, Long domainId, Long zoneId, String vendorName) {
        this.uuid = UUID.randomUUID().toString();
        this.name = name;
        this.protocol = protocol;
        this.accountId = accountId;
        this.domainId = domainId;
        this.zoneId = zoneId;
        this.vendorName = vendorName;
        this.enabled = true;
        this.system = false;
        this.created = new Date();
    }

    @Override
    public String toString() {
        return String.format("HSMProfileVO %s",
                ReflectionToStringBuilderUtils.reflectOnlySelectedFields(
                        this, "id", "uuid", "name", "protocol", "system", "enabled"));
    }

    @Override
    public long getId() {
        return id;
    }

    @Override
    public String getUuid() {
        return uuid;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public String getProtocol() {
        return protocol;
    }

    /** Returns -1 when the profile is not account-scoped. */
    @Override
    public long getAccountId() {
        return accountId == null ? -1 : accountId;
    }

    /** Returns -1 when the profile is not domain-scoped. */
    @Override
    public long getDomainId() {
        return domainId == null ? -1 : domainId;
    }

    @Override
    public Long getZoneId() {
        return zoneId;
    }

    @Override
    public String getVendorName() {
        return vendorName;
    }

    @Override
    public boolean isEnabled() {
        return enabled;
    }

    @Override
    public Date getCreated() {
        return created;
    }

    @Override
    public Date getRemoved() {
        return removed;
    }

    // Parameterized return restored (raw Class appeared to be extraction-stripped);
    // Class<?> is return-type-substitutable for callers expecting the raw type.
    @Override
    public Class<?> getEntityType() {
        return HSMProfile.class;
    }

    public void setRemoved(Date removed) {
        this.removed = removed;
    }

    public void setEnabled(boolean enabled) {
        this.enabled = enabled;
    }

    public void setVendorName(String vendorName) {
        this.vendorName = vendorName;
    }

    public void setName(String name) {
        this.name = name;
    }

    @Override
    public boolean isSystem() {
        return system;
    }

    public void setSystem(boolean system) {
        this.system = system;
    }
}
package org.apache.cloudstack.kms;

import com.cloud.utils.db.GenericDao;
import org.apache.cloudstack.utils.reflectiontostringbuilderutils.ReflectionToStringBuilderUtils;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import java.util.Date;
import java.util.UUID;

/**
 * Database entity tracking a single KEK (Key Encryption Key) version.
 * A KMS key may have several versions so that rotation can proceed gradually:
 * the freshly rotated version is Active (used for new wraps), older ones stay
 * Previous (decrypt-only) until re-wrapping completes, then become Archived.
 */
@Entity
@Table(name = "kms_kek_versions")
public class KMSKekVersionVO {

    /** Lifecycle state of a KEK version. */
    public enum Status {
        /** Used for new encryption operations. */
        Active,
        /** Still usable for decryption during key rotation. */
        Previous,
        /** No longer used; all wrapped keys have been re-encrypted. */
        Archived
    }

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Column(name = "id")
    private Long id;

    @Column(name = "uuid", nullable = false)
    private String uuid;

    @Column(name = "kms_key_id", nullable = false)
    private Long kmsKeyId;

    @Column(name = "version_number", nullable = false)
    private Integer versionNumber;

    @Column(name = "kek_label", nullable = false)
    private String kekLabel;

    @Enumerated(EnumType.STRING)
    @Column(name = "status", nullable = false, length = 32)
    private Status status;

    @Column(name = "hsm_profile_id")
    private Long hsmProfileId;

    @Column(name = "hsm_key_label")
    private String hsmKeyLabel;

    @Temporal(TemporalType.TIMESTAMP)
    @Column(name = GenericDao.CREATED_COLUMN, nullable = false)
    private Date created;

    @Temporal(TemporalType.TIMESTAMP)
    @Column(name = GenericDao.REMOVED_COLUMN)
    private Date removed;

    /**
     * No-arg constructor for the persistence layer; assigns a fresh UUID,
     * a creation timestamp and defaults the status to {@link Status#Active}.
     */
    public KMSKekVersionVO() {
        this.uuid = UUID.randomUUID().toString();
        this.created = new Date();
        this.status = Status.Active;
    }

    /**
     * Creates a version row for the given KMS key.
     *
     * @param kmsKeyId      owning KMS key id
     * @param versionNumber monotonically increasing version number
     * @param kekLabel      label identifying the KEK material
     * @param status        initial lifecycle state
     */
    public KMSKekVersionVO(Long kmsKeyId, Integer versionNumber, String kekLabel, Status status) {
        this();
        this.kmsKeyId = kmsKeyId;
        this.versionNumber = versionNumber;
        this.kekLabel = kekLabel;
        this.status = status;
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getUuid() {
        return uuid;
    }

    public void setUuid(String uuid) {
        this.uuid = uuid;
    }

    public Long getKmsKeyId() {
        return kmsKeyId;
    }

    public void setKmsKeyId(Long kmsKeyId) {
        this.kmsKeyId = kmsKeyId;
    }

    public Integer getVersionNumber() {
        return versionNumber;
    }

    public void setVersionNumber(Integer versionNumber) {
        this.versionNumber = versionNumber;
    }

    public String getKekLabel() {
        return kekLabel;
    }

    public void setKekLabel(String kekLabel) {
        this.kekLabel = kekLabel;
    }

    public Status getStatus() {
        return status;
    }

    public void setStatus(Status status) {
        this.status = status;
    }

    public Long getHsmProfileId() {
        return hsmProfileId;
    }

    public void setHsmProfileId(Long hsmProfileId) {
        this.hsmProfileId = hsmProfileId;
    }

    public String getHsmKeyLabel() {
        return hsmKeyLabel;
    }

    public void setHsmKeyLabel(String hsmKeyLabel) {
        this.hsmKeyLabel = hsmKeyLabel;
    }

    public Date getCreated() {
        return created;
    }

    public void setCreated(Date created) {
        this.created = created;
    }

    public Date getRemoved() {
        return removed;
    }

    public void setRemoved(Date removed) {
        this.removed = removed;
    }

    @Override
    public String toString() {
        return String.format("KMSKekVersion %s",
                ReflectionToStringBuilderUtils.reflectOnlySelectedFields(
                        this, "id", "uuid", "kmsKeyId", "versionNumber", "status", "kekLabel"));
    }
}
package org.apache.cloudstack.kms;

import com.cloud.utils.db.GenericDao;
import org.apache.cloudstack.framework.kms.KeyPurpose;
import org.apache.cloudstack.utils.reflectiontostringbuilderutils.ReflectionToStringBuilderUtils;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import java.util.Date;
import java.util.UUID;

/**
 * Database entity for KMS Key (Key Encryption Key) metadata.
 * Tracks ownership, purpose, and lifecycle of KEKs used in envelope encryption.
 */
@Entity
@Table(name = "kms_keys")
public class KMSKeyVO implements KMSKey {

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Column(name = "id")
    private Long id;

    @Column(name = "uuid", nullable = false)
    private String uuid;

    @Column(name = "name", nullable = false)
    private String name;

    @Column(name = "description", length = 1024)
    private String description;

    @Column(name = "kek_label", nullable = false)
    private String kekLabel;

    @Column(name = "purpose", nullable = false, length = 32)
    @Enumerated(EnumType.STRING)
    private KeyPurpose purpose;

    @Column(name = "account_id", nullable = false)
    private Long accountId;

    @Column(name = "domain_id", nullable = false)
    private Long domainId;

    @Column(name = "zone_id", nullable = false)
    private Long zoneId;

    @Column(name = "algorithm", nullable = false, length = 64)
    private String algorithm;

    @Column(name = "key_bits", nullable = false)
    private Integer keyBits;

    @Column(name = "enabled", nullable = false)
    private boolean enabled;

    @Column(name = "hsm_profile_id")
    private Long hsmProfileId;

    @Column(name = GenericDao.CREATED_COLUMN, nullable = false)
    @Temporal(TemporalType.TIMESTAMP)
    private Date created;

    @Column(name = GenericDao.REMOVED_COLUMN)
    @Temporal(TemporalType.TIMESTAMP)
    private Date removed;

    /**
     * Creates an enabled key with the given metadata.
     *
     * @param name        display name of the key
     * @param description optional free-text description
     * @param kekLabel    label identifying the KEK material
     * @param purpose     what the key is used for
     * @param accountId   owning account id
     * @param domainId    owning domain id
     * @param zoneId      zone the key belongs to
     * @param algorithm   cryptographic algorithm name
     * @param keyBits     key size in bits
     */
    public KMSKeyVO(String name, String description, String kekLabel, KeyPurpose purpose,
                    Long accountId, Long domainId, Long zoneId,
                    String algorithm, Integer keyBits) {
        this();
        this.name = name;
        this.description = description;
        this.kekLabel = kekLabel;
        this.purpose = purpose;
        this.accountId = accountId;
        this.domainId = domainId;
        this.zoneId = zoneId;
        this.algorithm = algorithm;
        this.keyBits = keyBits;
    }

    /**
     * No-arg constructor for the persistence layer; assigns a fresh UUID,
     * creation timestamp and enables the key by default.
     */
    public KMSKeyVO() {
        this.uuid = UUID.randomUUID().toString();
        this.created = new Date();
        this.enabled = true;
    }

    // NOTE(review): unboxes a nullable Long; id is only null before the row
    // is persisted, at which point callers should not ask for it.
    @Override
    public long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    @Override
    public String getUuid() {
        return uuid;
    }

    public void setUuid(String uuid) {
        this.uuid = uuid;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public String getDescription() {
        return description;
    }

    @Override
    public String getKekLabel() {
        return kekLabel;
    }

    @Override
    public KeyPurpose getPurpose() {
        return purpose;
    }

    @Override
    public Long getZoneId() {
        return zoneId;
    }

    @Override
    public String getAlgorithm() {
        return algorithm;
    }

    @Override
    public Integer getKeyBits() {
        return keyBits;
    }

    @Override
    public boolean isEnabled() {
        return enabled;
    }

    @Override
    public Date getCreated() {
        return created;
    }

    @Override
    public Date getRemoved() {
        return removed;
    }

    @Override
    public Long getHsmProfileId() {
        return hsmProfileId;
    }

    public void setHsmProfileId(Long hsmProfileId) {
        this.hsmProfileId = hsmProfileId;
    }

    public void setRemoved(Date removed) {
        this.removed = removed;
    }

    public void setCreated(Date created) {
        this.created = created;
    }

    public void setEnabled(boolean enabled) {
        this.enabled = enabled;
    }

    public void setKeyBits(Integer keyBits) {
        this.keyBits = keyBits;
    }

    public void setAlgorithm(String algorithm) {
        this.algorithm = algorithm;
    }

    public void setZoneId(Long zoneId) {
        this.zoneId = zoneId;
    }

    public void setPurpose(KeyPurpose purpose) {
        this.purpose = purpose;
    }

    public void setKekLabel(String kekLabel) {
        this.kekLabel = kekLabel;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public void setName(String name) {
        this.name = name;
    }

    @Override
    public long getAccountId() {
        return accountId;
    }

    public void setAccountId(Long accountId) {
        this.accountId = accountId;
    }

    @Override
    public long getDomainId() {
        return domainId;
    }

    public void setDomainId(Long domainId) {
        this.domainId = domainId;
    }

    // Parameterized return restored (raw Class appeared to be extraction-stripped);
    // Class<?> is return-type-substitutable for callers expecting the raw type.
    @Override
    public Class<?> getEntityType() {
        return KMSKey.class;
    }

    @Override
    public String toString() {
        return String.format("KMSKey %s",
                ReflectionToStringBuilderUtils.reflectOnlySelectedFields(this, "id", "uuid", "name", "purpose",
                        "accountId", "zoneId", "enabled"));
    }
}
package org.apache.cloudstack.kms;

import com.cloud.utils.db.GenericDao;
import org.apache.cloudstack.utils.reflectiontostringbuilderutils.ReflectionToStringBuilderUtils;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import java.util.Arrays;
import java.util.Date;
import java.util.UUID;

/**
 * Database entity for storing wrapped (encrypted) Data Encryption Keys.
 * Each entry represents a DEK that has been encrypted by a Key Encryption Key
 * (KEK); KEK metadata lives in the kms_keys table, referenced via kms_key_id.
 * The wrapped blob is defensively copied on every read and write so internal
 * state cannot be mutated through a caller-held array reference.
 */
@Entity
@Table(name = "kms_wrapped_key")
public class KMSWrappedKeyVO {

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Column(name = "id")
    private Long id;

    @Column(name = "uuid", nullable = false)
    private String uuid;

    @Column(name = "kms_key_id")
    private Long kmsKeyId;

    @Column(name = "kek_version_id")
    private Long kekVersionId;

    @Column(name = "zone_id", nullable = false)
    private Long zoneId;

    @Column(name = "wrapped_blob", nullable = false)
    private byte[] wrappedBlob;

    @Temporal(TemporalType.TIMESTAMP)
    @Column(name = GenericDao.CREATED_COLUMN, nullable = false)
    private Date created;

    @Temporal(TemporalType.TIMESTAMP)
    @Column(name = GenericDao.REMOVED_COLUMN)
    private Date removed;

    /** Null-safe defensive copy shared by all blob reads/writes. */
    private static byte[] defensiveCopy(byte[] blob) {
        return blob == null ? null : Arrays.copyOf(blob, blob.length);
    }

    /**
     * No-arg constructor for the persistence layer; assigns a fresh UUID and
     * creation timestamp.
     */
    public KMSWrappedKeyVO() {
        this.uuid = UUID.randomUUID().toString();
        this.created = new Date();
    }

    /** Wraps a DEK under the given KMS key, inheriting the key's zone. */
    public KMSWrappedKeyVO(KMSKeyVO kmsKey, byte[] wrappedBlob) {
        this();
        this.kmsKeyId = kmsKey.getId();
        this.zoneId = kmsKey.getZoneId();
        this.wrappedBlob = defensiveCopy(wrappedBlob);
    }

    /** Wraps a DEK under the given KMS key and a specific KEK version. */
    public KMSWrappedKeyVO(KMSKeyVO kmsKey, Long kekVersionId, byte[] wrappedBlob) {
        this();
        this.kmsKeyId = kmsKey.getId();
        this.kekVersionId = kekVersionId;
        this.zoneId = kmsKey.getZoneId();
        this.wrappedBlob = defensiveCopy(wrappedBlob);
    }

    /** Wraps a DEK using raw ids rather than an entity reference. */
    public KMSWrappedKeyVO(Long kmsKeyId, Long zoneId, byte[] wrappedBlob) {
        this();
        this.kmsKeyId = kmsKeyId;
        this.zoneId = zoneId;
        this.wrappedBlob = defensiveCopy(wrappedBlob);
    }

    /** Wraps a DEK using raw ids and a specific KEK version. */
    public KMSWrappedKeyVO(Long kmsKeyId, Long kekVersionId, Long zoneId, byte[] wrappedBlob) {
        this();
        this.kmsKeyId = kmsKeyId;
        this.kekVersionId = kekVersionId;
        this.zoneId = zoneId;
        this.wrappedBlob = defensiveCopy(wrappedBlob);
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getUuid() {
        return uuid;
    }

    public void setUuid(String uuid) {
        this.uuid = uuid;
    }

    public Long getKmsKeyId() {
        return kmsKeyId;
    }

    public void setKmsKeyId(Long kmsKeyId) {
        this.kmsKeyId = kmsKeyId;
    }

    public Long getKekVersionId() {
        return kekVersionId;
    }

    public void setKekVersionId(Long kekVersionId) {
        this.kekVersionId = kekVersionId;
    }

    public Long getZoneId() {
        return zoneId;
    }

    public void setZoneId(Long zoneId) {
        this.zoneId = zoneId;
    }

    /** Returns a copy of the wrapped blob, or null when unset. */
    public byte[] getWrappedBlob() {
        return defensiveCopy(wrappedBlob);
    }

    public void setWrappedBlob(byte[] wrappedBlob) {
        this.wrappedBlob = defensiveCopy(wrappedBlob);
    }

    public Date getCreated() {
        return created;
    }

    public void setCreated(Date created) {
        this.created = created;
    }

    public Date getRemoved() {
        return removed;
    }

    public void setRemoved(Date removed) {
        this.removed = removed;
    }

    @Override
    public String toString() {
        return String.format("KMSWrappedKey %s", ReflectionToStringBuilderUtils.reflectOnlySelectedFields(
                this, "id", "uuid", "kmsKeyId", "kekVersionId", "zoneId", "created", "removed"));
    }
}
+ +package org.apache.cloudstack.kms.dao; + +import com.cloud.utils.db.GenericDao; +import org.apache.cloudstack.kms.HSMProfileVO; + +public interface HSMProfileDao extends GenericDao { +} diff --git a/engine/schema/src/main/java/org/apache/cloudstack/kms/dao/HSMProfileDaoImpl.java b/engine/schema/src/main/java/org/apache/cloudstack/kms/dao/HSMProfileDaoImpl.java new file mode 100644 index 000000000000..b063d2cfe749 --- /dev/null +++ b/engine/schema/src/main/java/org/apache/cloudstack/kms/dao/HSMProfileDaoImpl.java @@ -0,0 +1,29 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.cloudstack.kms.dao; + +import com.cloud.utils.db.GenericDaoBase; +import org.apache.cloudstack.kms.HSMProfileVO; +import org.springframework.stereotype.Component; + +@Component +public class HSMProfileDaoImpl extends GenericDaoBase implements HSMProfileDao { + public HSMProfileDaoImpl() { + super(); + } +} diff --git a/engine/schema/src/main/java/org/apache/cloudstack/kms/dao/HSMProfileDetailsDao.java b/engine/schema/src/main/java/org/apache/cloudstack/kms/dao/HSMProfileDetailsDao.java new file mode 100644 index 000000000000..0d5c71b9e5f0 --- /dev/null +++ b/engine/schema/src/main/java/org/apache/cloudstack/kms/dao/HSMProfileDetailsDao.java @@ -0,0 +1,33 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
package org.apache.cloudstack.kms.dao;

import com.cloud.utils.db.GenericDao;
import org.apache.cloudstack.kms.HSMProfileDetailsVO;

import java.util.List;

/**
 * Data-access interface for per-profile detail (name/value) rows.
 * Generic type parameters restored on GenericDao and List (the raw forms
 * were extraction-stripped).
 */
public interface HSMProfileDetailsDao extends GenericDao<HSMProfileDetailsVO, Long> {

    /** Lists all detail rows attached to the given profile. */
    List<HSMProfileDetailsVO> listByProfileId(long profileId);

    /** Persists a single name/value detail for the given profile. */
    void persist(long profileId, String name, String value);

    /** Finds one detail by profile id and name, or null if absent. */
    HSMProfileDetailsVO findDetail(long profileId, String name);

    /** Removes all detail rows for the given profile. */
    void deleteDetails(long profileId);
}
package org.apache.cloudstack.kms.dao;

import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.SearchCriteria.Op;
import org.apache.cloudstack.kms.HSMProfileDetailsVO;
import org.springframework.stereotype.Component;

import java.util.List;

/**
 * Default {@link HSMProfileDetailsDao} implementation.
 * Generic type parameters restored on GenericDaoBase, SearchBuilder,
 * SearchCriteria and List (the raw forms were extraction-stripped).
 */
@Component
public class HSMProfileDetailsDaoImpl extends GenericDaoBase<HSMProfileDetailsVO, Long> implements HSMProfileDetailsDao {

    // Matches all details of one profile.
    protected SearchBuilder<HSMProfileDetailsVO> ProfileSearch;
    // Matches a single detail by profile id and name.
    protected SearchBuilder<HSMProfileDetailsVO> DetailSearch;

    public HSMProfileDetailsDaoImpl() {
        super();

        ProfileSearch = createSearchBuilder();
        ProfileSearch.and("profileId", ProfileSearch.entity().getResourceId(), Op.EQ);
        ProfileSearch.done();

        DetailSearch = createSearchBuilder();
        DetailSearch.and("profileId", DetailSearch.entity().getResourceId(), Op.EQ);
        DetailSearch.and("name", DetailSearch.entity().getName(), Op.EQ);
        DetailSearch.done();
    }

    @Override
    public List<HSMProfileDetailsVO> listByProfileId(long profileId) {
        SearchCriteria<HSMProfileDetailsVO> sc = ProfileSearch.create();
        sc.setParameters("profileId", profileId);
        return listBy(sc);
    }

    @Override
    public void persist(long profileId, String name, String value) {
        HSMProfileDetailsVO vo = new HSMProfileDetailsVO(profileId, name, value);
        persist(vo);
    }

    @Override
    public HSMProfileDetailsVO findDetail(long profileId, String name) {
        SearchCriteria<HSMProfileDetailsVO> sc = DetailSearch.create();
        sc.setParameters("profileId", profileId);
        sc.setParameters("name", name);
        return findOneBy(sc);
    }

    @Override
    public void deleteDetails(long profileId) {
        SearchCriteria<HSMProfileDetailsVO> sc = ProfileSearch.create();
        sc.setParameters("profileId", profileId);
        remove(sc);
    }
}
package org.apache.cloudstack.kms.dao;

import com.cloud.utils.db.GenericDao;
import org.apache.cloudstack.kms.KMSKekVersionVO;

import java.util.List;

/**
 * Data-access interface for KEK version rows.
 * Generic type parameters restored on GenericDao and List (the raw forms
 * were extraction-stripped).
 */
public interface KMSKekVersionDao extends GenericDao<KMSKekVersionVO, Long> {

    /** Returns the single Active version for the key, or null. */
    KMSKekVersionVO getActiveVersion(Long kmsKeyId);

    /**
     * Returns Active and Previous versions (usable for decryption).
     */
    List<KMSKekVersionVO> getVersionsForDecryption(Long kmsKeyId);

    /** Lists every version of the given KMS key. */
    List<KMSKekVersionVO> listByKmsKeyId(Long kmsKeyId);

    /** Finds one version by key id and version number, or null. */
    KMSKekVersionVO findByKmsKeyIdAndVersion(Long kmsKeyId, Integer versionNumber);

    /** Finds the version carrying the given KEK label, or null. */
    KMSKekVersionVO findByKekLabel(String kekLabel);

    /** Lists all versions in the given lifecycle status. */
    List<KMSKekVersionVO> findByStatus(KMSKekVersionVO.Status status);

    /** Lists versions bound to the given HSM profile. */
    List<KMSKekVersionVO> listByHsmProfileId(Long hsmProfileId);
}
package org.apache.cloudstack.kms.dao;

import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import org.apache.cloudstack.kms.KMSKekVersionVO;
import org.springframework.stereotype.Component;

import java.util.ArrayList;
import java.util.List;

/**
 * Default {@link KMSKekVersionDao} implementation.
 * Generic type parameters restored on GenericDaoBase, SearchBuilder,
 * SearchCriteria and List (the raw forms were extraction-stripped).
 */
@Component
public class KMSKekVersionDaoImpl extends GenericDaoBase<KMSKekVersionVO, Long> implements KMSKekVersionDao {

    // One shared builder; each query sets only the conditions it needs.
    // "status" uses Op.IN so callers may pass one or several statuses.
    private final SearchBuilder<KMSKekVersionVO> allFieldSearch;

    public KMSKekVersionDaoImpl() {
        allFieldSearch = createSearchBuilder();
        allFieldSearch.and("kmsKeyId", allFieldSearch.entity().getKmsKeyId(), SearchCriteria.Op.EQ);
        allFieldSearch.and("status", allFieldSearch.entity().getStatus(), SearchCriteria.Op.IN);
        allFieldSearch.and("versionNumber", allFieldSearch.entity().getVersionNumber(), SearchCriteria.Op.EQ);
        allFieldSearch.and("kekLabel", allFieldSearch.entity().getKekLabel(), SearchCriteria.Op.EQ);
        allFieldSearch.and("hsmProfileId", allFieldSearch.entity().getHsmProfileId(), SearchCriteria.Op.EQ);
        allFieldSearch.done();
    }

    @Override
    public KMSKekVersionVO getActiveVersion(Long kmsKeyId) {
        SearchCriteria<KMSKekVersionVO> sc = allFieldSearch.create();
        sc.setParameters("kmsKeyId", kmsKeyId);
        sc.setParameters("status", KMSKekVersionVO.Status.Active);
        return findOneBy(sc);
    }

    @Override
    public List<KMSKekVersionVO> getVersionsForDecryption(Long kmsKeyId) {
        SearchCriteria<KMSKekVersionVO> sc = allFieldSearch.create();
        sc.setParameters("kmsKeyId", kmsKeyId);
        // Active and Previous versions may both hold wrapped keys.
        sc.setParameters("status", KMSKekVersionVO.Status.Active, KMSKekVersionVO.Status.Previous);
        return listBy(sc);
    }

    @Override
    public List<KMSKekVersionVO> listByKmsKeyId(Long kmsKeyId) {
        SearchCriteria<KMSKekVersionVO> sc = allFieldSearch.create();
        sc.setParameters("kmsKeyId", kmsKeyId);
        return listBy(sc);
    }

    @Override
    public KMSKekVersionVO findByKmsKeyIdAndVersion(Long kmsKeyId, Integer versionNumber) {
        SearchCriteria<KMSKekVersionVO> sc = allFieldSearch.create();
        sc.setParameters("kmsKeyId", kmsKeyId);
        sc.setParameters("versionNumber", versionNumber);
        return findOneBy(sc);
    }

    @Override
    public KMSKekVersionVO findByKekLabel(String kekLabel) {
        SearchCriteria<KMSKekVersionVO> sc = allFieldSearch.create();
        sc.setParameters("kekLabel", kekLabel);
        return findOneBy(sc);
    }

    @Override
    public List<KMSKekVersionVO> findByStatus(KMSKekVersionVO.Status status) {
        SearchCriteria<KMSKekVersionVO> sc = allFieldSearch.create();
        sc.setParameters("status", status);
        return listBy(sc);
    }

    @Override
    public List<KMSKekVersionVO> listByHsmProfileId(Long hsmProfileId) {
        if (hsmProfileId == null) {
            // A null profile id would otherwise match nothing meaningful;
            // return an empty (mutable, as before) list.
            return new ArrayList<>();
        }
        SearchCriteria<KMSKekVersionVO> sc = allFieldSearch.create();
        sc.setParameters("hsmProfileId", hsmProfileId);
        return listBy(sc);
    }
}
package org.apache.cloudstack.kms.dao;

import com.cloud.utils.db.GenericDao;
import org.apache.cloudstack.framework.kms.KeyPurpose;
import org.apache.cloudstack.kms.KMSKeyVO;

import java.util.List;

/**
 * Data-access interface for {@link KMSKeyVO} rows.
 * Generic type parameters restored on GenericDao and List (the raw forms
 * were extraction-stripped).
 */
public interface KMSKeyDao extends GenericDao<KMSKeyVO, Long> {

    /**
     * Lists keys owned by the account, optionally filtered by purpose and
     * enabled state (null filters are ignored).
     */
    List<KMSKeyVO> listByAccount(Long accountId, KeyPurpose purpose, Boolean enabled);

    /**
     * Lists keys in the zone, optionally filtered by purpose and enabled
     * state (null filters are ignored).
     */
    List<KMSKeyVO> listByZone(Long zoneId, KeyPurpose purpose, Boolean enabled);

    /** Counts keys bound to the given HSM profile. */
    long countByHsmProfileId(Long hsmProfileId);
}
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.cloudstack.kms.dao; + +import com.cloud.utils.db.GenericDaoBase; +import com.cloud.utils.db.SearchBuilder; +import com.cloud.utils.db.SearchCriteria; +import org.apache.cloudstack.framework.kms.KeyPurpose; +import org.apache.cloudstack.kms.KMSKeyVO; +import org.springframework.stereotype.Component; + +import java.util.List; + +@Component +public class KMSKeyDaoImpl extends GenericDaoBase implements KMSKeyDao { + + private final SearchBuilder allFieldSearch; + + public KMSKeyDaoImpl() { + allFieldSearch = createSearchBuilder(); + allFieldSearch.and("kekLabel", allFieldSearch.entity().getKekLabel(), SearchCriteria.Op.EQ); + allFieldSearch.and("domainId", allFieldSearch.entity().getDomainId(), SearchCriteria.Op.EQ); + allFieldSearch.and("accountId", allFieldSearch.entity().getAccountId(), SearchCriteria.Op.EQ); + allFieldSearch.and("purpose", allFieldSearch.entity().getPurpose(), SearchCriteria.Op.EQ); + allFieldSearch.and("enabled", allFieldSearch.entity().isEnabled(), SearchCriteria.Op.EQ); + allFieldSearch.and("zoneId", allFieldSearch.entity().getZoneId(), SearchCriteria.Op.EQ); + allFieldSearch.and("hsmProfileId", allFieldSearch.entity().getHsmProfileId(), SearchCriteria.Op.EQ); + allFieldSearch.done(); + } + + @Override + public List listByAccount(Long accountId, KeyPurpose purpose, Boolean enabled) { + SearchCriteria sc = allFieldSearch.create(); + sc.setParameters("accountId", accountId); + sc.setParametersIfNotNull("purpose", purpose); + sc.setParametersIfNotNull("enabled", enabled); + return listBy(sc); + } + + @Override 
+ public List listByZone(Long zoneId, KeyPurpose purpose, Boolean enabled) { + SearchCriteria sc = allFieldSearch.create(); + sc.setParameters("zoneId", zoneId); + sc.setParametersIfNotNull("purpose", purpose); + sc.setParametersIfNotNull("enabled", enabled); + return listBy(sc); + } + + @Override + public long countByHsmProfileId(Long hsmProfileId) { + if (hsmProfileId == null) { + return 0; + } + SearchCriteria sc = allFieldSearch.create(); + sc.setParameters("hsmProfileId", hsmProfileId); + Integer count = getCount(sc); + return count != null ? count : 0; + } +} diff --git a/engine/schema/src/main/java/org/apache/cloudstack/kms/dao/KMSWrappedKeyDao.java b/engine/schema/src/main/java/org/apache/cloudstack/kms/dao/KMSWrappedKeyDao.java new file mode 100644 index 000000000000..5e9e418a1667 --- /dev/null +++ b/engine/schema/src/main/java/org/apache/cloudstack/kms/dao/KMSWrappedKeyDao.java @@ -0,0 +1,35 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.cloudstack.kms.dao; + +import com.cloud.utils.db.GenericDao; +import org.apache.cloudstack.kms.KMSWrappedKeyVO; + +import java.util.List; + +public interface KMSWrappedKeyDao extends GenericDao { + + long countByKmsKeyId(Long kmsKeyId); + + /** + * Limited variant for batch processing during key rotation + */ + List listByKekVersionId(Long kekVersionId, int limit); + + long countByKekVersionId(Long kekVersionId); +} diff --git a/engine/schema/src/main/java/org/apache/cloudstack/kms/dao/KMSWrappedKeyDaoImpl.java b/engine/schema/src/main/java/org/apache/cloudstack/kms/dao/KMSWrappedKeyDaoImpl.java new file mode 100644 index 000000000000..ae1155760071 --- /dev/null +++ b/engine/schema/src/main/java/org/apache/cloudstack/kms/dao/KMSWrappedKeyDaoImpl.java @@ -0,0 +1,71 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.cloudstack.kms.dao; + +import com.cloud.utils.db.Filter; +import com.cloud.utils.db.GenericDaoBase; +import com.cloud.utils.db.SearchBuilder; +import com.cloud.utils.db.SearchCriteria; +import org.apache.cloudstack.kms.KMSWrappedKeyVO; +import org.springframework.stereotype.Component; + +import java.util.List; + +@Component +public class KMSWrappedKeyDaoImpl extends GenericDaoBase implements KMSWrappedKeyDao { + + private final SearchBuilder allFieldSearch; + + public KMSWrappedKeyDaoImpl() { + super(); + + allFieldSearch = createSearchBuilder(); + allFieldSearch.and("kmsKeyId", allFieldSearch.entity().getKmsKeyId(), SearchCriteria.Op.EQ); + allFieldSearch.and("kekVersionId", allFieldSearch.entity().getKekVersionId(), SearchCriteria.Op.EQ); + allFieldSearch.and("zoneId", allFieldSearch.entity().getZoneId(), SearchCriteria.Op.EQ); + allFieldSearch.and("kmsKeyId", allFieldSearch.entity().getKmsKeyId(), SearchCriteria.Op.EQ); + allFieldSearch.done(); + } + + @Override + public long countByKmsKeyId(Long kmsKeyId) { + SearchCriteria sc = allFieldSearch.create(); + sc.setParameters("kmsKeyId", kmsKeyId); + Integer count = getCount(sc); + return count != null ? count.longValue() : 0L; + } + + @Override + public List listByKekVersionId(Long kekVersionId, int limit) { + SearchCriteria sc = allFieldSearch.create(); + sc.setParameters("kekVersionId", kekVersionId); + Filter filter = new Filter(limit); + return listBy(sc, filter); + } + + @Override + public long countByKekVersionId(Long kekVersionId) { + if (kekVersionId == null) { + return 0; + } + SearchCriteria sc = allFieldSearch.create(); + sc.setParameters("kekVersionId", kekVersionId); + Integer count = getCount(sc); + return count != null ? 
count.longValue() : 0L; + } +} diff --git a/engine/schema/src/main/resources/META-INF/cloudstack/core/spring-engine-schema-core-daos-context.xml b/engine/schema/src/main/resources/META-INF/cloudstack/core/spring-engine-schema-core-daos-context.xml index 0656d5e3c440..ed17147fb4c5 100644 --- a/engine/schema/src/main/resources/META-INF/cloudstack/core/spring-engine-schema-core-daos-context.xml +++ b/engine/schema/src/main/resources/META-INF/cloudstack/core/spring-engine-schema-core-daos-context.xml @@ -310,4 +310,9 @@ + + + + + diff --git a/engine/schema/src/main/resources/META-INF/db/schema-42210to42300.sql b/engine/schema/src/main/resources/META-INF/db/schema-42210to42300.sql index d330ecd0c0d5..d62cba276b3c 100644 --- a/engine/schema/src/main/resources/META-INF/db/schema-42210to42300.sql +++ b/engine/schema/src/main/resources/META-INF/db/schema-42210to42300.sql @@ -49,3 +49,173 @@ CREATE TABLE IF NOT EXISTS `cloud`.`webhook_filter` ( INDEX `i_webhook_filter__webhook_id`(`webhook_id`), CONSTRAINT `fk_webhook_filter__webhook_id` FOREIGN KEY(`webhook_id`) REFERENCES `webhook`(`id`) ON DELETE CASCADE ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; + +-- KMS HSM Profiles (Generic table for PKCS#11, KMIP, etc.) 
+-- Scoped to account (user-provided) or global/zone (admin-provided)
+CREATE TABLE IF NOT EXISTS `cloud`.`kms_hsm_profiles` (
+    `id` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,
+    `uuid` VARCHAR(40) NOT NULL,
+    `name` VARCHAR(255) NOT NULL,
+    `protocol` VARCHAR(32) NOT NULL COMMENT 'PKCS11, KMIP, AWS_KMS, etc.',
+
+    -- Scoping
+    `account_id` BIGINT UNSIGNED COMMENT 'null = admin-provided (available to all accounts)',
+    `domain_id` BIGINT UNSIGNED COMMENT 'null = zone/global scope',
+    `zone_id` BIGINT UNSIGNED COMMENT 'null = global scope',
+
+    -- Metadata
+    `vendor_name` VARCHAR(64) COMMENT 'HSM vendor (Thales, AWS, SoftHSM, etc.)',
+    `enabled` BOOLEAN NOT NULL DEFAULT TRUE,
+    `system` BOOLEAN NOT NULL DEFAULT FALSE COMMENT 'System profile (globally available, root admin only)',
+    `created` DATETIME NOT NULL,
+    `removed` DATETIME,
+
+    PRIMARY KEY (`id`),
+    UNIQUE KEY `uk_uuid` (`uuid`),
+    -- NOTE(review): `removed` is nullable and MySQL unique indexes treat NULL
+    -- values as distinct, so this key does NOT prevent two live rows
+    -- (removed IS NULL) sharing the same (account_id, name). Confirm that is
+    -- intended or enforce name uniqueness in the service layer.
+    UNIQUE KEY `uk_account_name` (`account_id`, `name`, `removed`),
+    INDEX `idx_protocol_enabled` (`protocol`, `enabled`, `removed`),
+    INDEX `idx_scoping` (`account_id`, `domain_id`, `zone_id`, `removed`),
+    CONSTRAINT `fk_kms_hsm_profiles__account_id` FOREIGN KEY (`account_id`) REFERENCES `account`(`id`) ON DELETE CASCADE,
+    CONSTRAINT `fk_kms_hsm_profiles__domain_id` FOREIGN KEY (`domain_id`) REFERENCES `domain`(`id`) ON DELETE CASCADE,
+    CONSTRAINT `fk_kms_hsm_profiles__zone_id` FOREIGN KEY (`zone_id`) REFERENCES `data_center`(`id`) ON DELETE CASCADE
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='HSM profiles for KMS providers';
+
+-- Add default database HSM profile (disabled by default)
+-- NOTE(review): hard-codes account_id=1 / domain_id=1 — presumably the system
+-- account and ROOT domain; verify these IDs hold on fresh installs and upgrades.
+INSERT INTO `cloud`.`kms_hsm_profiles` (`uuid`, `name`, `protocol`, `account_id`, `domain_id`, `enabled`, `system`, `created`)
+VALUES (UUID(), 'default', 'database', 1, 1, 0, 1, NOW());
+
+-- KMS HSM Profile Details (Protocol-specific configuration)
+CREATE TABLE IF NOT EXISTS `cloud`.`kms_hsm_profile_details` (
+    `id` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,
+    `profile_id` BIGINT UNSIGNED NOT
NULL COMMENT 'HSM profile ID', + `name` VARCHAR(255) NOT NULL COMMENT 'Config key (e.g. library_path, endpoint, pin, cert_content)', + `value` TEXT NOT NULL COMMENT 'Config value (encrypted if sensitive)', + PRIMARY KEY (`id`), + UNIQUE KEY `uk_profile_name` (`profile_id`, `name`), + CONSTRAINT `fk_kms_hsm_profile_details__profile_id` FOREIGN KEY (`profile_id`) REFERENCES `kms_hsm_profiles`(`id`) ON DELETE CASCADE +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='Details for HSM profiles (key-value configuration)'; + +-- KMS Keys (Key Encryption Key Metadata) +-- Account-scoped KEKs for envelope encryption +CREATE TABLE IF NOT EXISTS `cloud`.`kms_keys` ( + `id` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT COMMENT 'Unique ID', + `uuid` VARCHAR(40) NOT NULL COMMENT 'UUID - user-facing identifier', + `name` VARCHAR(255) NOT NULL COMMENT 'User-friendly name', + `description` VARCHAR(1024) COMMENT 'User description', + `kek_label` VARCHAR(255) NOT NULL COMMENT 'Provider-specific KEK label/ID', + `purpose` VARCHAR(32) NOT NULL COMMENT 'Key purpose (VOLUME_ENCRYPTION, TLS_CERT, CONFIG_SECRET)', + `account_id` BIGINT UNSIGNED NOT NULL COMMENT 'Owning account', + `domain_id` BIGINT UNSIGNED NOT NULL COMMENT 'Owning domain', + `zone_id` BIGINT UNSIGNED NOT NULL COMMENT 'Zone where key is valid', + `algorithm` VARCHAR(64) NOT NULL DEFAULT 'AES/GCM/NoPadding' COMMENT 'Encryption algorithm', + `key_bits` INT NOT NULL DEFAULT 256 COMMENT 'Key size in bits', + `enabled` TINYINT(1) NOT NULL DEFAULT 1 COMMENT 'Whether the key is enabled for new cryptographic operations', + `hsm_profile_id` BIGINT UNSIGNED NOT NULL COMMENT 'Current HSM profile ID for this key', + `created` DATETIME NOT NULL COMMENT 'Creation timestamp', + `removed` DATETIME COMMENT 'Removal timestamp for soft delete', + PRIMARY KEY (`id`), + UNIQUE KEY `uk_uuid` (`uuid`), + INDEX `idx_account_purpose` (`account_id`, `purpose`, `enabled`), + INDEX `idx_domain_purpose` (`domain_id`, `purpose`, `enabled`), + INDEX 
`idx_zone_enabled` (`zone_id`, `enabled`), + CONSTRAINT `fk_kms_keys__account_id` FOREIGN KEY (`account_id`) REFERENCES `account`(`id`) ON DELETE CASCADE, + CONSTRAINT `fk_kms_keys__domain_id` FOREIGN KEY (`domain_id`) REFERENCES `domain`(`id`) ON DELETE CASCADE, + CONSTRAINT `fk_kms_keys__zone_id` FOREIGN KEY (`zone_id`) REFERENCES `data_center`(`id`) ON DELETE CASCADE, + CONSTRAINT `fk_kms_keys__hsm_profile_id` FOREIGN KEY (`hsm_profile_id`) REFERENCES `kms_hsm_profiles`(`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='KMS Key (KEK) metadata - account-scoped keys for envelope encryption'; + +-- KMS KEK Versions (multiple KEKs per KMS key for gradual rotation) +-- Supports multiple KEK versions per logical KMS key during rotation +CREATE TABLE IF NOT EXISTS `cloud`.`kms_kek_versions` ( + `id` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT COMMENT 'Unique ID', + `uuid` VARCHAR(40) NOT NULL COMMENT 'UUID', + `kms_key_id` BIGINT UNSIGNED NOT NULL COMMENT 'Reference to kms_keys table', + `version_number` INT NOT NULL COMMENT 'Version number (1, 2, 3, ...)', + `kek_label` VARCHAR(255) NOT NULL COMMENT 'Provider-specific KEK label/ID for this version', + `status` VARCHAR(32) NOT NULL DEFAULT 'Active' COMMENT 'Active, Previous, Archived', + `hsm_profile_id` BIGINT UNSIGNED COMMENT 'HSM profile where this KEK version is stored', + `hsm_key_label` VARCHAR(255) COMMENT 'Optional HSM-specific key label/alias', + `created` DATETIME NOT NULL COMMENT 'Creation timestamp', + `removed` DATETIME COMMENT 'Removal timestamp for soft delete', + PRIMARY KEY (`id`), + UNIQUE KEY `uk_uuid` (`uuid`), + UNIQUE KEY `uk_kms_key_version` (`kms_key_id`, `version_number`, `removed`), + INDEX `idx_kms_key_status` (`kms_key_id`, `status`, `removed`), + INDEX `idx_kek_label` (`kek_label`), + CONSTRAINT `fk_kms_kek_versions__kms_key_id` FOREIGN KEY (`kms_key_id`) REFERENCES `kms_keys`(`id`) ON DELETE CASCADE, + CONSTRAINT `fk_kms_kek_versions__hsm_profile_id` FOREIGN KEY (`hsm_profile_id`) 
REFERENCES `kms_hsm_profiles`(`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='KEK versions for a KMS key - supports gradual rotation'; + +-- KMS Wrapped Keys (Data Encryption Keys) +-- Generic table for wrapped DEKs - references kms_keys for metadata and kek_versions for specific KEK version +CREATE TABLE IF NOT EXISTS `cloud`.`kms_wrapped_key` ( + `id` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT COMMENT 'Unique ID', + `uuid` VARCHAR(40) NOT NULL COMMENT 'UUID', + `kms_key_id` BIGINT UNSIGNED COMMENT 'Reference to kms_keys table', + `kek_version_id` BIGINT UNSIGNED COMMENT 'Reference to kms_kek_versions table', + `zone_id` BIGINT UNSIGNED NOT NULL COMMENT 'Zone ID for zone-scoped keys', + `wrapped_blob` VARBINARY(4096) NOT NULL COMMENT 'Encrypted DEK material', + `created` DATETIME NOT NULL COMMENT 'Creation timestamp', + `removed` DATETIME COMMENT 'Removal timestamp for soft delete', + PRIMARY KEY (`id`), + UNIQUE KEY `uk_uuid` (`uuid`), + INDEX `idx_kms_key_id` (`kms_key_id`, `removed`), + INDEX `idx_kek_version_id` (`kek_version_id`, `removed`), + INDEX `idx_zone_id` (`zone_id`, `removed`), + CONSTRAINT `fk_kms_wrapped_key__kms_key_id` FOREIGN KEY (`kms_key_id`) REFERENCES `kms_keys`(`id`) ON DELETE CASCADE, + CONSTRAINT `fk_kms_wrapped_key__kek_version_id` FOREIGN KEY (`kek_version_id`) REFERENCES `kms_kek_versions`(`id`) ON DELETE CASCADE, + CONSTRAINT `fk_kms_wrapped_key__zone_id` FOREIGN KEY (`zone_id`) REFERENCES `data_center`(`id`) ON DELETE CASCADE +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='KMS wrapped encryption keys (DEKs) - references kms_keys for KEK metadata and kek_versions for specific version'; + +-- Add KMS key reference to volumes table (which KMS key was used) +CALL `cloud`.`IDEMPOTENT_ADD_COLUMN`('cloud.volumes', 'kms_key_id', 'BIGINT UNSIGNED COMMENT ''KMS key ID used for volume encryption'''); +CALL `cloud`.`IDEMPOTENT_ADD_FOREIGN_KEY`('cloud.volumes', 'fk_volumes__kms_key_id', '(kms_key_id)', '`kms_keys`(`id`)'); + +-- Add KMS 
wrapped key reference to volumes table +CALL `cloud`.`IDEMPOTENT_ADD_COLUMN`('cloud.volumes', 'kms_wrapped_key_id', 'BIGINT UNSIGNED COMMENT ''KMS wrapped key ID for volume encryption'''); +CALL `cloud`.`IDEMPOTENT_ADD_FOREIGN_KEY`('cloud.volumes', 'fk_volumes__kms_wrapped_key_id', '(kms_wrapped_key_id)', '`kms_wrapped_key`(`id`)'); + +-- KMS Database Provider KEK Objects (PKCS#11-like object storage) +-- Stores KEKs for the database KMS provider in a PKCS#11-compatible format +CREATE TABLE IF NOT EXISTS `cloud`.`kms_database_kek_objects` ( + `id` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT COMMENT 'Object handle (PKCS#11 CKA_HANDLE)', + `uuid` VARCHAR(40) NOT NULL COMMENT 'UUID', + -- PKCS#11 Object Class (CKA_CLASS) + `object_class` VARCHAR(32) NOT NULL DEFAULT 'CKO_SECRET_KEY' COMMENT 'PKCS#11 object class (CKO_SECRET_KEY, CKO_PRIVATE_KEY, etc.)', + -- PKCS#11 Label (CKA_LABEL) - human-readable identifier + `label` VARCHAR(255) NOT NULL COMMENT 'PKCS#11 label (CKA_LABEL) - human-readable identifier', + -- PKCS#11 ID (CKA_ID) - application-defined identifier + `object_id` VARBINARY(64) COMMENT 'PKCS#11 object ID (CKA_ID) - application-defined identifier', + -- Key Type (CKA_KEY_TYPE) + `key_type` VARCHAR(32) NOT NULL DEFAULT 'CKK_AES' COMMENT 'PKCS#11 key type (CKK_AES, CKK_RSA, etc.)', + -- Key Material (CKA_VALUE) - encrypted KEK material + `key_material` VARBINARY(512) NOT NULL COMMENT 'PKCS#11 key value (CKA_VALUE) - encrypted KEK material', + -- Key Attributes (PKCS#11 boolean attributes) + `is_sensitive` BOOLEAN NOT NULL DEFAULT TRUE COMMENT 'PKCS#11 CKA_SENSITIVE - key material is sensitive', + `is_extractable` BOOLEAN NOT NULL DEFAULT FALSE COMMENT 'PKCS#11 CKA_EXTRACTABLE - key can be extracted', + `is_token` BOOLEAN NOT NULL DEFAULT TRUE COMMENT 'PKCS#11 CKA_TOKEN - object is on token (persistent)', + `is_private` BOOLEAN NOT NULL DEFAULT TRUE COMMENT 'PKCS#11 CKA_PRIVATE - object is private', + `is_modifiable` BOOLEAN NOT NULL DEFAULT FALSE COMMENT 
'PKCS#11 CKA_MODIFIABLE - object can be modified', + `is_copyable` BOOLEAN NOT NULL DEFAULT FALSE COMMENT 'PKCS#11 CKA_COPYABLE - object can be copied', + `is_destroyable` BOOLEAN NOT NULL DEFAULT TRUE COMMENT 'PKCS#11 CKA_DESTROYABLE - object can be destroyed', + `always_sensitive` BOOLEAN NOT NULL DEFAULT TRUE COMMENT 'PKCS#11 CKA_ALWAYS_SENSITIVE - key was always sensitive', + `never_extractable` BOOLEAN NOT NULL DEFAULT TRUE COMMENT 'PKCS#11 CKA_NEVER_EXTRACTABLE - key was never extractable', + -- Key Metadata + `purpose` VARCHAR(32) NOT NULL COMMENT 'Key purpose (VOLUME_ENCRYPTION, TLS_CERT, CONFIG_SECRET)', + `key_bits` INT NOT NULL COMMENT 'Key size in bits (128, 192, 256)', + `algorithm` VARCHAR(64) NOT NULL DEFAULT 'AES/GCM/NoPadding' COMMENT 'Encryption algorithm', + -- Validity Dates (PKCS#11 CKA_START_DATE, CKA_END_DATE) + `start_date` DATETIME COMMENT 'PKCS#11 CKA_START_DATE - key validity start', + `end_date` DATETIME COMMENT 'PKCS#11 CKA_END_DATE - key validity end', + -- Lifecycle + `created` DATETIME NOT NULL COMMENT 'Creation timestamp', + `last_used` DATETIME COMMENT 'Last usage timestamp', + `removed` DATETIME COMMENT 'Removal timestamp for soft delete', + PRIMARY KEY (`id`), + UNIQUE KEY `uk_uuid` (`uuid`), + UNIQUE KEY `uk_label_removed` (`label`, `removed`), + INDEX `idx_purpose_removed` (`purpose`, `removed`), + INDEX `idx_key_type` (`key_type`, `removed`), + INDEX `idx_object_class` (`object_class`, `removed`), + INDEX `idx_created` (`created`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='KMS Database Provider KEK Objects - PKCS#11-like object storage for KEKs'; diff --git a/engine/schema/src/main/resources/META-INF/db/views/cloud.volume_view.sql b/engine/schema/src/main/resources/META-INF/db/views/cloud.volume_view.sql index ffeb93e8fa7a..8ba7e5c6df77 100644 --- a/engine/schema/src/main/resources/META-INF/db/views/cloud.volume_view.sql +++ b/engine/schema/src/main/resources/META-INF/db/views/cloud.volume_view.sql @@ -40,6 +40,10 @@ 
SELECT `volumes`.`chain_info` AS `chain_info`, `volumes`.`external_uuid` AS `external_uuid`, `volumes`.`encrypt_format` AS `encrypt_format`, + `volumes`.`kms_key_id` AS `kms_key_id`, + `kms_keys`.`uuid` AS `kms_key_uuid`, + `kms_keys`.`name` AS `kms_key_name`, + `volumes`.`kms_wrapped_key_id` AS `kms_wrapped_key_id`, `volumes`.`delete_protection` AS `delete_protection`, `account`.`id` AS `account_id`, `account`.`uuid` AS `account_uuid`, @@ -116,7 +120,7 @@ SELECT `resource_tag_domain`.`uuid` AS `tag_domain_uuid`, `resource_tag_domain`.`name` AS `tag_domain_name` FROM - ((((((((((((((((((`volumes` + (((((((((((((((((((`volumes` JOIN `account`ON ((`volumes`.`account_id` = `account`.`id`))) JOIN `domain`ON @@ -129,8 +133,10 @@ LEFT JOIN `vm_instance`ON ((`volumes`.`instance_id` = `vm_instance`.`id`))) LEFT JOIN `user_vm`ON ((`user_vm`.`id` = `vm_instance`.`id`))) -LEFT JOIN `volume_store_ref`ON +LEFT JOIN `volume_store_ref` ON ((`volumes`.`id` = `volume_store_ref`.`volume_id`))) +LEFT JOIN `kms_keys` ON + ((`volumes`.`kms_key_id` = `kms_keys`.`id`))) LEFT JOIN `service_offering`ON ((`vm_instance`.`service_offering_id` = `service_offering`.`id`))) LEFT JOIN `disk_offering`ON diff --git a/engine/storage/volume/src/main/java/org/apache/cloudstack/storage/volume/VolumeObject.java b/engine/storage/volume/src/main/java/org/apache/cloudstack/storage/volume/VolumeObject.java index 43218b3f6a02..13b9553494d2 100644 --- a/engine/storage/volume/src/main/java/org/apache/cloudstack/storage/volume/VolumeObject.java +++ b/engine/storage/volume/src/main/java/org/apache/cloudstack/storage/volume/VolumeObject.java @@ -46,6 +46,8 @@ import org.apache.cloudstack.engine.subsystem.api.storage.DataStore; import org.apache.cloudstack.engine.subsystem.api.storage.ObjectInDataStoreStateMachine; import org.apache.cloudstack.engine.subsystem.api.storage.VolumeInfo; +import org.apache.cloudstack.kms.KMSManager; +import org.apache.cloudstack.kms.dao.KMSWrappedKeyDao; import 
org.apache.cloudstack.storage.command.CopyCmdAnswer; import org.apache.cloudstack.storage.command.CreateObjectAnswer; import org.apache.cloudstack.storage.datastore.ObjectInDataStoreManager; @@ -98,6 +100,10 @@ public class VolumeObject implements VolumeInfo { @Inject VolumeDataStoreDao volumeStoreDao; @Inject + KMSManager kmsManager; + @Inject + KMSWrappedKeyDao kmsWrappedKeyDao; + @Inject ObjectInDataStoreManager objectInStoreMgr; @Inject ResourceLimitService resourceLimitMgr; @@ -900,6 +906,26 @@ public void setPassphraseId(Long id) { volumeVO.setPassphraseId(id); } + @Override + public Long getKmsKeyId() { + return volumeVO.getKmsKeyId(); + } + + @Override + public void setKmsKeyId(Long id) { + volumeVO.setKmsKeyId(id); + } + + @Override + public Long getKmsWrappedKeyId() { + return volumeVO.getKmsWrappedKeyId(); + } + + @Override + public void setKmsWrappedKeyId(Long id) { + volumeVO.setKmsWrappedKeyId(id); + } + /** * Removes passphrase reference from underlying volume. Also removes the associated passphrase entry if it is the last user. */ @@ -929,9 +955,29 @@ public void doInTransactionWithoutResult(TransactionStatus status) { /** * Looks up passphrase from underlying volume. - * @return passphrase as bytes + * Supports both legacy passphrase-based encryption and KMS-based encryption. 
+ * @return passphrase/DEK as base64-encoded bytes (UTF-8 bytes of base64 string) */ public byte[] getPassphrase() { + // First check for KMS-encrypted volume + if (volumeVO.getKmsWrappedKeyId() != null) { + try { + // Unwrap the DEK from KMS (returns raw binary bytes) + byte[] dekBytes = kmsManager.unwrapKey(volumeVO.getKmsWrappedKeyId()); + // Base64-encode the DEK for consistency with legacy passphrases + // and for use with qemu-img which expects base64 format + String base64Dek = java.util.Base64.getEncoder().encodeToString(dekBytes); + // Zeroize the raw DEK bytes + java.util.Arrays.fill(dekBytes, (byte) 0); + // Return UTF-8 bytes of the base64 string + return base64Dek.getBytes(java.nio.charset.StandardCharsets.UTF_8); + } catch (org.apache.cloudstack.framework.kms.KMSException e) { + logger.error("Failed to unwrap KMS key for volume {}: {}", volumeVO.getId(), e.getMessage()); + return new byte[0]; + } + } + + // Fallback to legacy passphrase-based encryption PassphraseVO passphrase = passphraseDao.findById(volumeVO.getPassphraseId()); if (passphrase != null) { return passphrase.getPassphrase(); diff --git a/framework/kms/pom.xml b/framework/kms/pom.xml new file mode 100644 index 000000000000..719072ac493a --- /dev/null +++ b/framework/kms/pom.xml @@ -0,0 +1,46 @@ + + + 4.0.0 + cloud-framework-kms + Apache CloudStack Framework - Key Management Service + Core KMS framework with provider-agnostic interfaces + + + org.apache.cloudstack + cloudstack-framework + 4.23.0.0-SNAPSHOT + ../pom.xml + + + + + org.apache.cloudstack + cloud-utils + ${project.version} + + + org.apache.cloudstack + cloud-framework-config + ${project.version} + + + diff --git a/framework/kms/src/main/java/org/apache/cloudstack/framework/kms/KMSException.java b/framework/kms/src/main/java/org/apache/cloudstack/framework/kms/KMSException.java new file mode 100644 index 000000000000..8f15ad24ac6e --- /dev/null +++ 
b/framework/kms/src/main/java/org/apache/cloudstack/framework/kms/KMSException.java @@ -0,0 +1,181 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.cloudstack.framework.kms; + +import com.cloud.utils.exception.CloudRuntimeException; + +/** + * Exception class for KMS-related errors with structured error types + * to enable proper retry logic and error handling. 
+ */ +public class KMSException extends CloudRuntimeException { + + /** + * Error types for KMS operations to enable intelligent retry logic + */ + public enum ErrorType { + CONNECTION_FAILED(true), + /** + * Authentication failed (e.g., incorrect PIN) + */ + AUTHENTICATION_FAILED(false), + /** + * Provider not initialized or unavailable + */ + PROVIDER_NOT_INITIALIZED(false), + + /** + * KEK not found in backend + */ + KEK_NOT_FOUND(false), + + /** + * KEK with given label already exists + */ + KEY_ALREADY_EXISTS(false), + + /** + * Invalid parameters provided + */ + INVALID_PARAMETER(false), + + /** + * Wrap/unwrap operation failed + */ + WRAP_UNWRAP_FAILED(true), + + /** + * KEK operation (create/delete) failed + */ + KEK_OPERATION_FAILED(true), + + /** + * Health check failed + */ + HEALTH_CHECK_FAILED(true), + + /** + * Transient network or communication error + */ + TRANSIENT_ERROR(true), + + /** + * Unknown error + */ + UNKNOWN(false); + + private final boolean retryable; + + ErrorType(boolean retryable) { + this.retryable = retryable; + } + + public boolean isRetryable() { + return retryable; + } + } + + private final ErrorType errorType; + + public KMSException(String message) { + super(message); + this.errorType = ErrorType.UNKNOWN; + } + + public KMSException(String message, Throwable cause) { + super(message, cause); + this.errorType = ErrorType.UNKNOWN; + } + + public KMSException(ErrorType errorType, String message) { + super(message); + this.errorType = errorType; + } + + public KMSException(ErrorType errorType, String message, Throwable cause) { + super(message, cause); + this.errorType = errorType; + } + + public static KMSException providerNotInitialized(String details) { + return new KMSException(ErrorType.PROVIDER_NOT_INITIALIZED, + "KMS provider not initialized: " + details); + } + + public static KMSException kekNotFound(String kekId) { + return new KMSException(ErrorType.KEK_NOT_FOUND, + "KEK not found: " + kekId); + } + + public static 
KMSException keyAlreadyExists(String details) { + return new KMSException(ErrorType.KEY_ALREADY_EXISTS, + "Key already exists: " + details); + } + + public static KMSException invalidParameter(String details) { + return new KMSException(ErrorType.INVALID_PARAMETER, + "Invalid parameter: " + details); + } + + public static KMSException wrapUnwrapFailed(String details, Throwable cause) { + return new KMSException(ErrorType.WRAP_UNWRAP_FAILED, + "Wrap/unwrap operation failed: " + details, cause); + } + + public static KMSException wrapUnwrapFailed(String details) { + return new KMSException(ErrorType.WRAP_UNWRAP_FAILED, + "Wrap/unwrap operation failed: " + details); + } + + public static KMSException kekOperationFailed(String details, Throwable cause) { + return new KMSException(ErrorType.KEK_OPERATION_FAILED, + "KEK operation failed: " + details, cause); + } + + public static KMSException kekOperationFailed(String details) { + return new KMSException(ErrorType.KEK_OPERATION_FAILED, + "KEK operation failed: " + details); + } + + public static KMSException healthCheckFailed(String details, Throwable cause) { + return new KMSException(ErrorType.HEALTH_CHECK_FAILED, + "Health check failed: " + details, cause); + } + + public static KMSException transientError(String details, Throwable cause) { + return new KMSException(ErrorType.TRANSIENT_ERROR, + "Transient error: " + details, cause); + } + + public ErrorType getErrorType() { + return errorType; + } + + @Override + public String toString() { + return "KMSException{" + + "errorType=" + errorType + + ", retryable=" + isRetryable() + + ", message='" + getMessage() + '\'' + + '}'; + } + + public boolean isRetryable() { + return errorType.isRetryable(); + } +} diff --git a/framework/kms/src/main/java/org/apache/cloudstack/framework/kms/KMSProvider.java b/framework/kms/src/main/java/org/apache/cloudstack/framework/kms/KMSProvider.java new file mode 100644 index 000000000000..388d464caa75 --- /dev/null +++ 
b/framework/kms/src/main/java/org/apache/cloudstack/framework/kms/KMSProvider.java @@ -0,0 +1,255 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.cloudstack.framework.kms; + +import com.cloud.utils.component.Adapter; +import org.apache.cloudstack.framework.config.Configurable; + +/** + * Abstract provider contract for Key Management Service operations. + *

+ * Implementations provide the cryptographic backend (HSM via PKCS#11, database, cloud KMS, etc.) + * for secure key wrapping/unwrapping using envelope encryption. + *

+ * Design principles: + * - KEKs (Key Encryption Keys) never leave the secure backend + * - DEKs (Data Encryption Keys) are wrapped by KEKs for storage + * - Plaintext DEKs exist only transiently in memory during wrap/unwrap + * - All operations are purpose-scoped to prevent key reuse + *
<p>
+ * Thread-safety: Implementations must be thread-safe for concurrent operations. + */ +public interface KMSProvider extends Configurable, Adapter { + + /** + * Returns {@code true} if the given HSM profile configuration key name refers + * to a + * sensitive value (PIN, password, secret, or private key) that must be + * encrypted at + * rest and masked in API responses. + * + *
<p>
+ * This is a shared naming-convention helper used by both KMS providers (when + * loading/storing profile details) and the KMS manager (when building API + * responses). + * + * @param key configuration key name (case-insensitive); null returns false + * @return true if the key is considered sensitive + */ + static boolean isSensitiveKey(String key) { + if (key == null) { + return false; + } + return key.equalsIgnoreCase("pin") || + key.equalsIgnoreCase("password") || + key.toLowerCase().contains("secret") || + key.equalsIgnoreCase("private_key"); + } + + /** + * Get the unique name of this provider + * + * @return provider name (e.g., "database", "pkcs11") + */ + String getProviderName(); + + /** + * Create a new Key Encryption Key (KEK) in the secure backend. + * Delegates to {@link #createKek(KeyPurpose, String, int, Long)} with null profile ID. + * + * @param purpose the purpose/scope for this KEK + * @param label human-readable label for the KEK (must be unique within purpose) + * @param keyBits key size in bits (typically 128, 192, or 256) + * @return the KEK identifier (label or handle) for later reference + * @throws KMSException if KEK creation fails + */ + default String createKek(KeyPurpose purpose, String label, int keyBits) throws KMSException { + return createKek(purpose, label, keyBits, null); + } + + /** + * Create a new Key Encryption Key (KEK) in the secure backend with explicit HSM profile. 
+ * + * @param purpose the purpose/scope for this KEK + * @param label human-readable label for the KEK (must be unique within purpose) + * @param keyBits key size in bits (typically 128, 192, or 256) + * @param hsmProfileId optional HSM profile ID to create the KEK in (null for auto-resolution/default) + * @return the KEK identifier (label or handle) for later reference + * @throws KMSException if KEK creation fails + */ + String createKek(KeyPurpose purpose, String label, int keyBits, Long hsmProfileId) throws KMSException; + + /** + * Delete a KEK from the secure backend. + * WARNING: This will make all DEKs wrapped by this KEK unrecoverable. + * + * @param kekId the KEK identifier to delete + * @throws KMSException if deletion fails or KEK not found + */ + void deleteKek(String kekId) throws KMSException; + + /** + * Validates the configuration details for this provider before saving an HSM + * profile. + * Implementations should override this to perform provider-specific validation. + * + * @param details the configuration details to validate + * @throws KMSException if validation fails + */ + default void validateProfileConfig(java.util.Map details) throws KMSException { + // default no-op + } + + /** + * Check if a KEK exists and is accessible + * + * @param kekId the KEK identifier to check + * @return true if KEK is available + * @throws KMSException if check fails + */ + boolean isKekAvailable(String kekId) throws KMSException; + + /** + * Wrap (encrypt) a plaintext Data Encryption Key with a KEK. + * Delegates to {@link #wrapKey(byte[], KeyPurpose, String, Long)} with null profile ID. 
+ * + * @param plainDek the plaintext DEK to wrap (caller must zeroize after call) + * @param purpose the intended purpose of this DEK + * @param kekLabel the label of the KEK to use for wrapping + * @return WrappedKey containing the encrypted DEK and metadata + * @throws KMSException if wrapping fails or KEK not found + */ + default WrappedKey wrapKey(byte[] plainDek, KeyPurpose purpose, String kekLabel) throws KMSException { + return wrapKey(plainDek, purpose, kekLabel, null); + } + + /** + * Wrap (encrypt) a plaintext Data Encryption Key with a KEK using explicit HSM profile. + * + * @param plainDek the plaintext DEK to wrap (caller must zeroize after call) + * @param purpose the intended purpose of this DEK + * @param kekLabel the label of the KEK to use for wrapping + * @param hsmProfileId optional HSM profile ID to use (null for auto-resolution/default) + * @return WrappedKey containing the encrypted DEK and metadata + * @throws KMSException if wrapping fails or KEK not found + */ + WrappedKey wrapKey(byte[] plainDek, KeyPurpose purpose, String kekLabel, Long hsmProfileId) throws KMSException; + + /** + * Unwrap (decrypt) a wrapped DEK to obtain the plaintext key. + * Delegates to {@link #unwrapKey(WrappedKey, Long)} with null profile ID. + *
<p>
+ * SECURITY: Caller MUST zeroize the returned byte array after use + * + * @param wrappedKey the wrapped key to decrypt + * @return plaintext DEK (caller must zeroize!) + * @throws KMSException if unwrapping fails or KEK not found + */ + default byte[] unwrapKey(WrappedKey wrappedKey) throws KMSException { + return unwrapKey(wrappedKey, null); + } + + /** + * Unwrap (decrypt) a wrapped DEK to obtain the plaintext key using explicit HSM profile. + *
<p>
+ * SECURITY: Caller MUST zeroize the returned byte array after use + * + * @param wrappedKey the wrapped key to decrypt + * @param hsmProfileId optional HSM profile ID to use (null for auto-resolution/default) + * @return plaintext DEK (caller must zeroize!) + * @throws KMSException if unwrapping fails or KEK not found + */ + byte[] unwrapKey(WrappedKey wrappedKey, Long hsmProfileId) throws KMSException; + + /** + * Generate a new random DEK and immediately wrap it with a KEK. + * Delegates to {@link #generateAndWrapDek(KeyPurpose, String, int, Long)} with null profile ID. + * (convenience method combining generation + wrapping) + * + * @param purpose the intended purpose of the new DEK + * @param kekLabel the label of the KEK to use for wrapping + * @param keyBits DEK size in bits (typically 128, 192, or 256) + * @return WrappedKey containing the newly generated and wrapped DEK + * @throws KMSException if generation or wrapping fails + */ + default WrappedKey generateAndWrapDek(KeyPurpose purpose, String kekLabel, int keyBits) throws KMSException { + return generateAndWrapDek(purpose, kekLabel, keyBits, null); + } + + /** + * Generate a new random DEK and immediately wrap it with a KEK using explicit HSM profile. + * (convenience method combining generation + wrapping) + * + * @param purpose the intended purpose of the new DEK + * @param kekLabel the label of the KEK to use for wrapping + * @param keyBits DEK size in bits (typically 128, 192, or 256) + * @param hsmProfileId optional HSM profile ID to use (null for auto-resolution/default) + * @return WrappedKey containing the newly generated and wrapped DEK + * @throws KMSException if generation or wrapping fails + */ + WrappedKey generateAndWrapDek(KeyPurpose purpose, String kekLabel, int keyBits, + Long hsmProfileId) throws KMSException; + + /** + * Rewrap a DEK with a different KEK (used during key rotation). + * Delegates to {@link #rewrapKey(WrappedKey, String, Long)} with null profile ID. 
+ * This unwraps with the old KEK and wraps with the new KEK without exposing the plaintext DEK. + * + * @param oldWrappedKey the currently wrapped key + * @param newKekLabel the label of the new KEK to wrap with + * @return new WrappedKey encrypted with the new KEK + * @throws KMSException if rewrapping fails + */ + default WrappedKey rewrapKey(WrappedKey oldWrappedKey, String newKekLabel) throws KMSException { + return rewrapKey(oldWrappedKey, newKekLabel, null); + } + + /** + * Rewrap a DEK with a different KEK (used during key rotation) using explicit target HSM profile. + * This unwraps with the old KEK and wraps with the new KEK without exposing the plaintext DEK. + * + * @param oldWrappedKey the currently wrapped key + * @param newKekLabel the label of the new KEK to wrap with + * @param targetHsmProfileId optional target HSM profile ID to wrap with (null for auto-resolution/default) + * @return new WrappedKey encrypted with the new KEK + * @throws KMSException if rewrapping fails + */ + WrappedKey rewrapKey(WrappedKey oldWrappedKey, String newKekLabel, Long targetHsmProfileId) throws KMSException; + + /** + * Perform health check on the provider backend + * + * @return true if provider is healthy and operational + * @throws KMSException if health check fails with critical error + */ + boolean healthCheck() throws KMSException; + + /** + * Invalidates any cached state (config, sessions) associated with the given HSM profile. + * Must be called after an HSM profile is updated or deleted so that the next operation + * re-reads the profile details from the database instead of using stale cached values. + * + *
<p>
Providers that do not cache per-profile state (e.g. the database provider) can + * leave this as a no-op. + * + * @param profileId the HSM profile ID whose cache should be evicted + */ + default void invalidateProfileCache(Long profileId) { + // no-op for providers that don't cache per-profile state + } +} diff --git a/framework/kms/src/main/java/org/apache/cloudstack/framework/kms/KeyPurpose.java b/framework/kms/src/main/java/org/apache/cloudstack/framework/kms/KeyPurpose.java new file mode 100644 index 000000000000..cea182eb75e5 --- /dev/null +++ b/framework/kms/src/main/java/org/apache/cloudstack/framework/kms/KeyPurpose.java @@ -0,0 +1,76 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.cloudstack.framework.kms; + +/** + * Defines the purpose/usage scope for cryptographic keys in the KMS system. + * This enables proper key segregation and prevents key reuse across different contexts. 
+ */ +public enum KeyPurpose { + /** + * Keys used for encrypting VM disk volumes (LUKS, encrypted storage) + */ + VOLUME_ENCRYPTION("volume", "Volume disk encryption keys"), + + /** + * Keys used for protecting TLS certificate private keys + */ + TLS_CERT("tls", "TLS certificate private keys"); + + private final String name; + private final String description; + + KeyPurpose(String name, String description) { + this.name = name; + this.description = description; + } + + /** + * Convert string name to KeyPurpose enum + * + * @param name the string representation of the purpose + * @return matching KeyPurpose + * @throws IllegalArgumentException if no matching purpose found + */ + public static KeyPurpose fromString(String name) { + for (KeyPurpose purpose : KeyPurpose.values()) { + if (purpose.getName().equalsIgnoreCase(name)) { + return purpose; + } + } + throw new IllegalArgumentException("Unknown KeyPurpose: " + name); + } + + public String getName() { + return name; + } + + public String getDescription() { + return description; + } + + /** + * Generate a KEK label with purpose prefix + * + * @param customLabel optional custom label suffix + * @return formatted KEK label (e.g., "volume-kek-v1") + */ + public String generateKekLabel(String customLabel) { + return name + "-kek-" + (customLabel != null ? customLabel : "v1"); + } +} diff --git a/framework/kms/src/main/java/org/apache/cloudstack/framework/kms/WrappedKey.java b/framework/kms/src/main/java/org/apache/cloudstack/framework/kms/WrappedKey.java new file mode 100644 index 000000000000..e70c5e32c46a --- /dev/null +++ b/framework/kms/src/main/java/org/apache/cloudstack/framework/kms/WrappedKey.java @@ -0,0 +1,131 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. 
The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.cloudstack.framework.kms; + +import java.util.Arrays; +import java.util.Date; +import java.util.Objects; + +/** + * Immutable Data Transfer Object representing an encrypted (wrapped) Data Encryption Key. + * The wrapped key material contains the DEK encrypted by a Key Encryption Key (KEK) + * stored in a secure backend (HSM, database, etc.). + *
<p>
+ * This follows the envelope encryption pattern: + * - DEK: encrypts actual data (e.g., disk volume) + * - KEK: encrypts the DEK (never leaves secure storage) + * - Wrapped Key: DEK encrypted by KEK, safe to store in database + */ +public class WrappedKey { + private final String uuid; + private final String kekId; + private final KeyPurpose purpose; + private final String algorithm; + private final byte[] wrappedKeyMaterial; + private final String providerName; + private final Date created; + private final Long zoneId; + + /** + * Create a new WrappedKey instance + * + * @param kekId ID/label of the KEK used to wrap this key + * @param purpose the intended use of this key + * @param algorithm encryption algorithm (e.g., "AES/GCM/NoPadding") + * @param wrappedKeyMaterial the encrypted DEK blob + * @param providerName name of the KMS provider that created this key + * @param created timestamp when key was wrapped + * @param zoneId optional zone ID for zone-scoped keys + */ + public WrappedKey(String kekId, KeyPurpose purpose, String algorithm, + byte[] wrappedKeyMaterial, String providerName, + Date created, Long zoneId) { + this(null, kekId, purpose, algorithm, wrappedKeyMaterial, providerName, created, zoneId); + } + + /** + * Constructor for database-loaded keys with ID + */ + public WrappedKey(String uuid, String kekId, KeyPurpose purpose, String algorithm, + byte[] wrappedKeyMaterial, String providerName, + Date created, Long zoneId) { + this.uuid = uuid; + this.kekId = Objects.requireNonNull(kekId, "kekId cannot be null"); + this.purpose = Objects.requireNonNull(purpose, "purpose cannot be null"); + this.algorithm = Objects.requireNonNull(algorithm, "algorithm cannot be null"); + this.providerName = providerName; + + if (wrappedKeyMaterial == null || wrappedKeyMaterial.length == 0) { + throw new IllegalArgumentException("wrappedKeyMaterial cannot be null or empty"); + } + this.wrappedKeyMaterial = Arrays.copyOf(wrappedKeyMaterial, wrappedKeyMaterial.length); 
+ + this.created = created != null ? new Date(created.getTime()) : new Date(); + this.zoneId = zoneId; + } + + public String getUuid() { + return uuid; + } + + public String getKekId() { + return kekId; + } + + public KeyPurpose getPurpose() { + return purpose; + } + + public String getAlgorithm() { + return algorithm; + } + + /** + * Get wrapped key material. Returns a defensive copy to prevent modification. + * Caller is responsible for zeroizing the returned array after use. + */ + public byte[] getWrappedKeyMaterial() { + return Arrays.copyOf(wrappedKeyMaterial, wrappedKeyMaterial.length); + } + + public String getProviderName() { + return providerName; + } + + public Date getCreated() { + return created != null ? new Date(created.getTime()) : null; + } + + public Long getZoneId() { + return zoneId; + } + + @Override + public String toString() { + return "WrappedKey{" + + "uuid='" + uuid + '\'' + + ", kekId='" + kekId + '\'' + + ", purpose=" + purpose + + ", algorithm='" + algorithm + '\'' + + ", providerName='" + providerName + '\'' + + ", materialLength=" + (wrappedKeyMaterial != null ? 
wrappedKeyMaterial.length : 0) + + ", created=" + created + + ", zoneId=" + zoneId + + '}'; + } +} diff --git a/framework/pom.xml b/framework/pom.xml index 337e5b0268b2..95d0bd0694c6 100644 --- a/framework/pom.xml +++ b/framework/pom.xml @@ -54,6 +54,7 @@ extensions ipc jobs + kms managed-context quota rest diff --git a/plugins/integrations/kubernetes-service/src/main/java/com/cloud/kubernetes/cluster/actionworkers/KubernetesClusterResourceModifierActionWorker.java b/plugins/integrations/kubernetes-service/src/main/java/com/cloud/kubernetes/cluster/actionworkers/KubernetesClusterResourceModifierActionWorker.java index cf69234d19e0..685a3efb0fcb 100644 --- a/plugins/integrations/kubernetes-service/src/main/java/com/cloud/kubernetes/cluster/actionworkers/KubernetesClusterResourceModifierActionWorker.java +++ b/plugins/integrations/kubernetes-service/src/main/java/com/cloud/kubernetes/cluster/actionworkers/KubernetesClusterResourceModifierActionWorker.java @@ -434,13 +434,13 @@ protected UserVm createKubernetesNode(String joinIp, Long domainId, Long account hostName, hostName, null, null, null, null, Hypervisor.HypervisorType.None, BaseCmd.HTTPMethod.POST,base64UserData, null, null, keypairs, null, addrs, null, null, Objects.nonNull(affinityGroupId) ? Collections.singletonList(affinityGroupId) : null, customParameterMap, null, null, null, - null, true, null, UserVmManager.CKS_NODE, null, null); + null, true, null, null, UserVmManager.CKS_NODE, null, null); } else { nodeVm = userVmService.createAdvancedVirtualMachine(zone, serviceOffering, workerNodeTemplate, networkIds, owner, hostName, hostName, null, null, null, null, Hypervisor.HypervisorType.None, BaseCmd.HTTPMethod.POST, base64UserData, null, null, keypairs, null, addrs, null, null, Objects.nonNull(affinityGroupId) ? 
- Collections.singletonList(affinityGroupId) : null, customParameterMap, null, null, null, null, true, UserVmManager.CKS_NODE, null, null, null); + Collections.singletonList(affinityGroupId) : null, customParameterMap, null, null, null, null, true, UserVmManager.CKS_NODE, null, null, null, null); } if (logger.isInfoEnabled()) { logger.info("Created node VM : {}, {} in the Kubernetes cluster : {}", hostName, nodeVm, kubernetesCluster.getName()); diff --git a/plugins/integrations/kubernetes-service/src/main/java/com/cloud/kubernetes/cluster/actionworkers/KubernetesClusterStartWorker.java b/plugins/integrations/kubernetes-service/src/main/java/com/cloud/kubernetes/cluster/actionworkers/KubernetesClusterStartWorker.java index aa9317e619b0..0f15bdc52624 100644 --- a/plugins/integrations/kubernetes-service/src/main/java/com/cloud/kubernetes/cluster/actionworkers/KubernetesClusterStartWorker.java +++ b/plugins/integrations/kubernetes-service/src/main/java/com/cloud/kubernetes/cluster/actionworkers/KubernetesClusterStartWorker.java @@ -281,13 +281,13 @@ private Pair createKubernetesControlNode(final Network network, S hostName, hostName, null, null, null, null, Hypervisor.HypervisorType.None, BaseCmd.HTTPMethod.POST,base64UserData, userDataId, userDataDetails, keypairs, requestedIps, addrs, null, null, Objects.nonNull(affinityGroupId) ? Collections.singletonList(affinityGroupId) : null, customParameterMap, null, null, null, - null, true, null, UserVmManager.CKS_NODE, null, null); + null, true, null, null, UserVmManager.CKS_NODE, null, null); } else { controlVm = userVmService.createAdvancedVirtualMachine(zone, serviceOffering, controlNodeTemplate, networkIds, owner, hostName, hostName, null, null, null, null, Hypervisor.HypervisorType.None, BaseCmd.HTTPMethod.POST, base64UserData, userDataId, userDataDetails, keypairs, requestedIps, addrs, null, null, Objects.nonNull(affinityGroupId) ? 
- Collections.singletonList(affinityGroupId) : null, customParameterMap, null, null, null, null, true, UserVmManager.CKS_NODE, null, null, null); + Collections.singletonList(affinityGroupId) : null, customParameterMap, null, null, null, null, true, UserVmManager.CKS_NODE, null, null, null, null); } if (logger.isInfoEnabled()) { logger.info("Created control VM: {}, {} in the Kubernetes cluster: {}", controlVm, hostName, kubernetesCluster); @@ -449,13 +449,13 @@ private UserVm createKubernetesAdditionalControlNode(final String joinIp, final hostName, hostName, null, null, null, null, Hypervisor.HypervisorType.None, BaseCmd.HTTPMethod.POST,base64UserData, null, null, keypairs, null, addrs, null, null, Objects.nonNull(affinityGroupId) ? Collections.singletonList(affinityGroupId) : null, customParameterMap, null, null, null, - null, true, null, UserVmManager.CKS_NODE, null, null); + null, true, null, null, UserVmManager.CKS_NODE, null, null); } else { additionalControlVm = userVmService.createAdvancedVirtualMachine(zone, serviceOffering, controlNodeTemplate, networkIds, owner, hostName, hostName, null, null, null, null, Hypervisor.HypervisorType.None, BaseCmd.HTTPMethod.POST, base64UserData, null, null, keypairs, null, addrs, null, null, Objects.nonNull(affinityGroupId) ? 
- Collections.singletonList(affinityGroupId) : null, customParameterMap, null, null, null, null, true, UserVmManager.CKS_NODE, null, null, null); + Collections.singletonList(affinityGroupId) : null, customParameterMap, null, null, null, null, true, UserVmManager.CKS_NODE, null, null, null, null); } if (logger.isInfoEnabled()) { @@ -493,13 +493,13 @@ private UserVm createEtcdNode(List requestedIps, List + + + 4.0.0 + cloud-plugin-kms-database + Apache CloudStack Plugin - KMS Database Provider + Database-backed KMS provider for encrypted key storage + + + org.apache.cloudstack + cloudstack-kms-plugins + 4.23.0.0-SNAPSHOT + ../pom.xml + + + + + org.apache.cloudstack + cloud-framework-kms + ${project.version} + + + org.apache.cloudstack + cloud-framework-config + ${project.version} + + + org.apache.cloudstack + cloud-utils + ${project.version} + + + com.google.crypto.tink + tink + ${cs.tink.version} + + + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + true + + + + + + + diff --git a/plugins/kms/database/src/main/java/org/apache/cloudstack/kms/provider/DatabaseKMSProvider.java b/plugins/kms/database/src/main/java/org/apache/cloudstack/kms/provider/DatabaseKMSProvider.java new file mode 100644 index 000000000000..835e85656b33 --- /dev/null +++ b/plugins/kms/database/src/main/java/org/apache/cloudstack/kms/provider/DatabaseKMSProvider.java @@ -0,0 +1,337 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.cloudstack.kms.provider; + +import com.cloud.utils.component.AdapterBase; +import com.cloud.utils.crypt.DBEncryptionUtil; +import com.google.crypto.tink.subtle.AesGcmJce; +import org.apache.cloudstack.framework.config.ConfigKey; +import org.apache.cloudstack.framework.kms.KMSException; +import org.apache.cloudstack.framework.kms.KMSProvider; +import org.apache.cloudstack.framework.kms.KeyPurpose; +import org.apache.cloudstack.framework.kms.WrappedKey; +import org.apache.cloudstack.kms.provider.database.KMSDatabaseKekObjectVO; +import org.apache.cloudstack.kms.provider.database.dao.KMSDatabaseKekObjectDao; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import javax.inject.Inject; +import java.nio.charset.StandardCharsets; +import java.security.SecureRandom; +import java.util.Arrays; +import java.util.Base64; +import java.util.Date; +import java.util.UUID; + +/** + * Database-backed KMS provider that stores master KEKs in a PKCS#11-like object table. + * Uses AES-256-GCM for all cryptographic operations. + *
<p>
+ * This provider is suitable for deployments that don't have access to HSM hardware. + * The master KEKs are stored encrypted in the kms_database_kek_objects table using + * CloudStack's existing DBEncryptionUtil, with PKCS#11-compatible attributes. + */ +public class DatabaseKMSProvider extends AdapterBase implements KMSProvider { + private static final Logger logger = LogManager.getLogger(DatabaseKMSProvider.class); + private static final String PROVIDER_NAME = "database"; + private static final int GCM_IV_LENGTH = 12; // 96 bits recommended for GCM + private static final int GCM_TAG_LENGTH = 16; // 128 bits + private static final String ALGORITHM = "AES/GCM/NoPadding"; + private static final String CKO_SECRET_KEY = "CKO_SECRET_KEY"; + private static final String CKK_AES = "CKK_AES"; + + private final SecureRandom secureRandom = new SecureRandom(); + @Inject + private KMSDatabaseKekObjectDao kekObjectDao; + + @Override + public String getProviderName() { + return PROVIDER_NAME; + } + + @Override + public String createKek(KeyPurpose purpose, String label, int keyBits, Long hsmProfileId) throws KMSException { + // Database provider ignores hsmProfileId + return createKek(purpose, label, keyBits); + } + + @Override + public String createKek(KeyPurpose purpose, String label, int keyBits) throws KMSException { + if (keyBits != 128 && keyBits != 192 && keyBits != 256) { + throw KMSException.invalidParameter("Key size must be 128, 192, or 256 bits"); + } + + if (StringUtils.isEmpty(label)) { + label = generateKekLabel(purpose); + } + + if (kekObjectDao.existsByLabel(label)) { + throw KMSException.keyAlreadyExists("KEK with label " + label + " already exists"); + } + + byte[] kekBytes = new byte[keyBits / 8]; + try { + secureRandom.nextBytes(kekBytes); + + // Base64 encode then encrypt the KEK material using DBEncryptionUtil + String kekBase64 = Base64.getEncoder().encodeToString(kekBytes); + String encryptedKek = DBEncryptionUtil.encrypt(kekBase64); + byte[] 
encryptedKekBytes = encryptedKek.getBytes(StandardCharsets.UTF_8); + + KMSDatabaseKekObjectVO kekObject = new KMSDatabaseKekObjectVO(label, purpose, keyBits, encryptedKekBytes); + kekObject.setObjectClass(CKO_SECRET_KEY); + kekObject.setKeyType(CKK_AES); + kekObject.setObjectId(label.getBytes(StandardCharsets.UTF_8)); + kekObject.setAlgorithm(ALGORITHM); + kekObject.setIsSensitive(true); + kekObject.setIsExtractable(false); + kekObject.setIsToken(true); + kekObject.setIsPrivate(true); + kekObject.setIsModifiable(false); + kekObject.setIsCopyable(false); + kekObject.setIsDestroyable(true); + kekObject.setAlwaysSensitive(true); + kekObject.setNeverExtractable(true); + + kekObjectDao.persist(kekObject); + + logger.info("Created KEK with label {} for purpose {} (PKCS#11 object ID: {})", label, purpose, + kekObject.getId()); + return label; + + } catch (Exception e) { + throw KMSException.kekOperationFailed("Failed to create KEK: " + e.getMessage(), e); + } finally { + Arrays.fill(kekBytes, (byte) 0); + } + } + + @Override + public void deleteKek(String kekId) throws KMSException { + KMSDatabaseKekObjectVO kekObject = kekObjectDao.findByLabel(kekId); + if (kekObject == null) { + throw KMSException.kekNotFound("KEK with label " + kekId + " not found"); + } + + try { + kekObjectDao.remove(kekObject.getId()); + + if (kekObject.getKeyMaterial() != null) { + Arrays.fill(kekObject.getKeyMaterial(), (byte) 0); + } + + logger.warn("Deleted KEK with label {}. 
All DEKs wrapped with this KEK are now unrecoverable!", kekId); + } catch (Exception e) { + throw KMSException.kekOperationFailed("Failed to delete KEK: " + e.getMessage(), e); + } + } + + @Override + public boolean isKekAvailable(String kekId) throws KMSException { + try { + KMSDatabaseKekObjectVO kekObject = kekObjectDao.findByLabel(kekId); + return kekObject != null && kekObject.getRemoved() == null && kekObject.getKeyMaterial() != null; + } catch (Exception e) { + logger.warn("Error checking KEK availability: {}", e.getMessage()); + return false; + } + } + + @Override + public WrappedKey wrapKey(byte[] plainKey, KeyPurpose purpose, String kekLabel, + Long hsmProfileId) throws KMSException { + // Database provider ignores hsmProfileId + return wrapKey(plainKey, purpose, kekLabel); + } + + @Override + public WrappedKey wrapKey(byte[] plainKey, KeyPurpose purpose, String kekLabel) throws KMSException { + if (plainKey == null || plainKey.length == 0) { + throw KMSException.invalidParameter("Plain key cannot be null or empty"); + } + + byte[] kekBytes = loadKek(kekLabel); + + try { + // Tink's AesGcmJce automatically generates a random IV and prepends it to the ciphertext + AesGcmJce aesgcm = new AesGcmJce(kekBytes); + byte[] wrappedBlob = aesgcm.encrypt(plainKey, new byte[0]); + + WrappedKey wrapped = new WrappedKey(kekLabel, purpose, ALGORITHM, wrappedBlob, PROVIDER_NAME, new Date(), + null); + + logger.debug("Wrapped {} key with KEK {}", purpose, kekLabel); + return wrapped; + } catch (Exception e) { + throw KMSException.wrapUnwrapFailed("Failed to wrap key: " + e.getMessage(), e); + } finally { + // Zeroize KEK + Arrays.fill(kekBytes, (byte) 0); + } + } + + @Override + public byte[] unwrapKey(WrappedKey wrappedKey, Long hsmProfileId) throws KMSException { + // Database provider ignores hsmProfileId + return unwrapKey(wrappedKey); + } + + @Override + public byte[] unwrapKey(WrappedKey wrappedKey) throws KMSException { + if (wrappedKey == null) { + throw 
KMSException.invalidParameter("Wrapped key cannot be null"); + } + + byte[] kekBytes = loadKek(wrappedKey.getKekId()); + + try { + AesGcmJce aesgcm = new AesGcmJce(kekBytes); + // Tink's decrypt expects [IV][ciphertext+tag] format (same as encrypt returns) + byte[] blob = wrappedKey.getWrappedKeyMaterial(); + if (blob.length < GCM_IV_LENGTH + GCM_TAG_LENGTH) { + throw new KMSException(KMSException.ErrorType.WRAP_UNWRAP_FAILED, + "Invalid wrapped key format: too short"); + } + + byte[] plainKey = aesgcm.decrypt(blob, new byte[0]); + + logger.debug("Unwrapped {} key with KEK {}", wrappedKey.getPurpose(), wrappedKey.getKekId()); + return plainKey; + + } catch (KMSException e) { + throw e; + } catch (Exception e) { + throw KMSException.wrapUnwrapFailed("Failed to unwrap key: " + e.getMessage(), e); + } finally { + // Zeroize KEK + Arrays.fill(kekBytes, (byte) 0); + } + } + + @Override + public WrappedKey generateAndWrapDek(KeyPurpose purpose, String kekLabel, int keyBits, + Long hsmProfileId) throws KMSException { + // Database provider ignores hsmProfileId + return generateAndWrapDek(purpose, kekLabel, keyBits); + } + + @Override + public WrappedKey generateAndWrapDek(KeyPurpose purpose, String kekLabel, int keyBits) throws KMSException { + if (keyBits != 128 && keyBits != 192 && keyBits != 256) { + throw KMSException.invalidParameter("DEK size must be 128, 192, or 256 bits"); + } + + byte[] dekBytes = new byte[keyBits / 8]; + secureRandom.nextBytes(dekBytes); + + try { + return wrapKey(dekBytes, purpose, kekLabel); + } finally { + // Zeroize DEK (wrapped version is in WrappedKey) + Arrays.fill(dekBytes, (byte) 0); + } + } + + @Override + public WrappedKey rewrapKey(WrappedKey oldWrappedKey, String newKekLabel, + Long targetHsmProfileId) throws KMSException { + // Database provider ignores targetHsmProfileId + return rewrapKey(oldWrappedKey, newKekLabel); + } + + @Override + public WrappedKey rewrapKey(WrappedKey oldWrappedKey, String newKekLabel) throws KMSException 
{ + byte[] plainKey = unwrapKey(oldWrappedKey); + try { + return wrapKey(plainKey, oldWrappedKey.getPurpose(), newKekLabel); + } finally { + // Zeroize plaintext DEK + Arrays.fill(plainKey, (byte) 0); + } + } + + @Override + public boolean healthCheck() throws KMSException { + try { + if (kekObjectDao == null) { + logger.error("KMSDatabaseKekObjectDao is not initialized"); + return false; + } + return true; + + } catch (Exception e) { + throw KMSException.healthCheckFailed("Health check failed: " + e.getMessage(), e); + } + } + + private byte[] loadKek(String kekLabel) throws KMSException { + KMSDatabaseKekObjectVO kekObject = kekObjectDao.findByLabel(kekLabel); + + if (kekObject == null || kekObject.getRemoved() != null) { + throw KMSException.kekNotFound("KEK with label " + kekLabel + " not found"); + } + + try { + byte[] encryptedKekBytes = kekObject.getKeyMaterial(); + if (encryptedKekBytes == null || encryptedKekBytes.length == 0) { + throw KMSException.kekNotFound("KEK value is empty for label " + kekLabel); + } + + String encryptedKek = new String(encryptedKekBytes, StandardCharsets.UTF_8); + String kekBase64 = DBEncryptionUtil.decrypt(encryptedKek); + byte[] kekBytes = Base64.getDecoder().decode(kekBase64); + + updateLastUsed(kekLabel); + + return kekBytes; + + } catch (IllegalArgumentException e) { + throw KMSException.kekOperationFailed("Invalid KEK encoding for label " + kekLabel, e); + } catch (Exception e) { + throw KMSException.kekOperationFailed("Failed to decrypt KEK for label " + kekLabel + ": " + e.getMessage(), + e); + } + } + + private void updateLastUsed(String kekLabel) { + try { + KMSDatabaseKekObjectVO kekObject = kekObjectDao.findByLabel(kekLabel); + if (kekObject != null && kekObject.getRemoved() == null) { + kekObject.setLastUsed(new Date()); + kekObjectDao.update(kekObject.getId(), kekObject); + } + } catch (Exception e) { + logger.debug("Failed to update last used timestamp for KEK {}: {}", kekLabel, e.getMessage()); + } + } + + private 
String generateKekLabel(KeyPurpose purpose) { + return purpose.getName() + "-kek-" + UUID.randomUUID().toString().substring(0, 8); + } + + @Override + public String getConfigComponentName() { + return DatabaseKMSProvider.class.getSimpleName(); + } + + @Override + public ConfigKey[] getConfigKeys() { + return new ConfigKey[0]; + } +} diff --git a/plugins/kms/database/src/main/java/org/apache/cloudstack/kms/provider/database/KMSDatabaseKekObjectVO.java b/plugins/kms/database/src/main/java/org/apache/cloudstack/kms/provider/database/KMSDatabaseKekObjectVO.java new file mode 100644 index 000000000000..c1c91c9cef13 --- /dev/null +++ b/plugins/kms/database/src/main/java/org/apache/cloudstack/kms/provider/database/KMSDatabaseKekObjectVO.java @@ -0,0 +1,357 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.cloudstack.kms.provider.database; + +import com.cloud.utils.db.GenericDao; +import org.apache.cloudstack.framework.kms.KeyPurpose; +import org.apache.cloudstack.utils.reflectiontostringbuilderutils.ReflectionToStringBuilderUtils; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.EnumType; +import javax.persistence.Enumerated; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.Table; +import javax.persistence.Temporal; +import javax.persistence.TemporalType; +import java.util.Date; +import java.util.UUID; + +/** + * Database entity for KEK objects stored by the database KMS provider. + * Models PKCS#11 object attributes for cryptographic key storage. + *

+ * This table stores KEKs (Key Encryption Keys) in a PKCS#11-compatible format, + * allowing the database provider to mock PKCS#11 interface behavior. + */ +@Entity +@Table(name = "kms_database_kek_objects") +public class KMSDatabaseKekObjectVO { + + @Id + @GeneratedValue(strategy = GenerationType.IDENTITY) + @Column(name = "id") + private Long id; + + @Column(name = "uuid", nullable = false) + private String uuid; + + // PKCS#11 Object Class (CKA_CLASS) + @Column(name = "object_class", nullable = false, length = 32) + private String objectClass = "CKO_SECRET_KEY"; + + // PKCS#11 Label (CKA_LABEL) - human-readable identifier + @Column(name = "label", nullable = false, length = 255) + private String label; + + // PKCS#11 ID (CKA_ID) - application-defined identifier + @Column(name = "object_id", length = 64) + private byte[] objectId; + + // PKCS#11 Key Type (CKA_KEY_TYPE) + @Column(name = "key_type", nullable = false, length = 32) + private String keyType = "CKK_AES"; + + // PKCS#11 Key Value (CKA_VALUE) - encrypted KEK material + @Column(name = "key_material", nullable = false, length = 512) + private byte[] keyMaterial; + + // PKCS#11 Boolean Attributes + @Column(name = "is_sensitive", nullable = false) + private Boolean isSensitive = true; + + @Column(name = "is_extractable", nullable = false) + private Boolean isExtractable = false; + + @Column(name = "is_token", nullable = false) + private Boolean isToken = true; + + @Column(name = "is_private", nullable = false) + private Boolean isPrivate = true; + + @Column(name = "is_modifiable", nullable = false) + private Boolean isModifiable = false; + + @Column(name = "is_copyable", nullable = false) + private Boolean isCopyable = false; + + @Column(name = "is_destroyable", nullable = false) + private Boolean isDestroyable = true; + + @Column(name = "always_sensitive", nullable = false) + private Boolean alwaysSensitive = true; + + @Column(name = "never_extractable", nullable = false) + private Boolean neverExtractable 
= true; + + // Key Metadata + @Column(name = "purpose", nullable = false, length = 32) + @Enumerated(EnumType.STRING) + private KeyPurpose purpose; + + @Column(name = "key_bits", nullable = false) + private Integer keyBits; + + @Column(name = "algorithm", nullable = false, length = 64) + private String algorithm = "AES/GCM/NoPadding"; + + // PKCS#11 Validity Dates + @Column(name = "start_date") + @Temporal(TemporalType.TIMESTAMP) + private Date startDate; + + @Column(name = "end_date") + @Temporal(TemporalType.TIMESTAMP) + private Date endDate; + + // Lifecycle + @Column(name = GenericDao.CREATED_COLUMN, nullable = false) + @Temporal(TemporalType.TIMESTAMP) + private Date created; + + @Column(name = "last_used") + @Temporal(TemporalType.TIMESTAMP) + private Date lastUsed; + + @Column(name = GenericDao.REMOVED_COLUMN) + @Temporal(TemporalType.TIMESTAMP) + private Date removed; + + /** + * Constructor for creating a new KEK object + * + * @param label PKCS#11 label (CKA_LABEL) + * @param purpose key purpose + * @param keyBits key size in bits + * @param keyMaterial encrypted key material (CKA_VALUE) + */ + public KMSDatabaseKekObjectVO(String label, KeyPurpose purpose, Integer keyBits, byte[] keyMaterial) { + this(); + this.label = label; + this.purpose = purpose; + this.keyBits = keyBits; + this.keyMaterial = keyMaterial; + this.objectId = label.getBytes(); // Use label as object ID by default + this.startDate = new Date(); + } + + public KMSDatabaseKekObjectVO() { + this.uuid = UUID.randomUUID().toString(); + this.created = new Date(); + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getUuid() { + return uuid; + } + + public void setUuid(String uuid) { + this.uuid = uuid; + } + + public String getObjectClass() { + return objectClass; + } + + public void setObjectClass(String objectClass) { + this.objectClass = objectClass; + } + + public String getLabel() { + return label; + } + + public void 
setLabel(String label) { + this.label = label; + } + + public byte[] getObjectId() { + return objectId; + } + + public void setObjectId(byte[] objectId) { + this.objectId = objectId; + } + + public String getKeyType() { + return keyType; + } + + public void setKeyType(String keyType) { + this.keyType = keyType; + } + + public byte[] getKeyMaterial() { + return keyMaterial; + } + + public void setKeyMaterial(byte[] keyMaterial) { + this.keyMaterial = keyMaterial; + } + + public Boolean getIsSensitive() { + return isSensitive; + } + + public void setIsSensitive(Boolean isSensitive) { + this.isSensitive = isSensitive; + } + + public Boolean getIsExtractable() { + return isExtractable; + } + + public void setIsExtractable(Boolean isExtractable) { + this.isExtractable = isExtractable; + } + + public Boolean getIsToken() { + return isToken; + } + + public void setIsToken(Boolean isToken) { + this.isToken = isToken; + } + + public Boolean getIsPrivate() { + return isPrivate; + } + + public void setIsPrivate(Boolean isPrivate) { + this.isPrivate = isPrivate; + } + + public Boolean getIsModifiable() { + return isModifiable; + } + + public void setIsModifiable(Boolean isModifiable) { + this.isModifiable = isModifiable; + } + + public Boolean getIsCopyable() { + return isCopyable; + } + + public void setIsCopyable(Boolean isCopyable) { + this.isCopyable = isCopyable; + } + + public Boolean getIsDestroyable() { + return isDestroyable; + } + + public void setIsDestroyable(Boolean isDestroyable) { + this.isDestroyable = isDestroyable; + } + + public Boolean getAlwaysSensitive() { + return alwaysSensitive; + } + + public void setAlwaysSensitive(Boolean alwaysSensitive) { + this.alwaysSensitive = alwaysSensitive; + } + + public Boolean getNeverExtractable() { + return neverExtractable; + } + + public void setNeverExtractable(Boolean neverExtractable) { + this.neverExtractable = neverExtractable; + } + + public KeyPurpose getPurpose() { + return purpose; + } + + public void 
setPurpose(KeyPurpose purpose) { + this.purpose = purpose; + } + + public Integer getKeyBits() { + return keyBits; + } + + public void setKeyBits(Integer keyBits) { + this.keyBits = keyBits; + } + + public String getAlgorithm() { + return algorithm; + } + + public void setAlgorithm(String algorithm) { + this.algorithm = algorithm; + } + + public Date getStartDate() { + return startDate; + } + + public void setStartDate(Date startDate) { + this.startDate = startDate; + } + + public Date getEndDate() { + return endDate; + } + + public void setEndDate(Date endDate) { + this.endDate = endDate; + } + + public Date getCreated() { + return created; + } + + public void setCreated(Date created) { + this.created = created; + } + + public Date getLastUsed() { + return lastUsed; + } + + public void setLastUsed(Date lastUsed) { + this.lastUsed = lastUsed; + } + + public Date getRemoved() { + return removed; + } + + public void setRemoved(Date removed) { + this.removed = removed; + } + + @Override + public String toString() { + return String.format("KMSDatabaseKekObject %s", ReflectionToStringBuilderUtils.reflectOnlySelectedFields( + this, "id", "uuid", "label", "purpose", "keyBits", "objectClass", "keyType", "algorithm")); + } +} diff --git a/plugins/kms/database/src/main/java/org/apache/cloudstack/kms/provider/database/dao/KMSDatabaseKekObjectDao.java b/plugins/kms/database/src/main/java/org/apache/cloudstack/kms/provider/database/dao/KMSDatabaseKekObjectDao.java new file mode 100644 index 000000000000..582c1179ec43 --- /dev/null +++ b/plugins/kms/database/src/main/java/org/apache/cloudstack/kms/provider/database/dao/KMSDatabaseKekObjectDao.java @@ -0,0 +1,61 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. 
The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.cloudstack.kms.provider.database.dao; + +import com.cloud.utils.db.GenericDao; +import org.apache.cloudstack.framework.kms.KeyPurpose; +import org.apache.cloudstack.kms.provider.database.KMSDatabaseKekObjectVO; + +import java.util.List; + +/** + * DAO for KMSDatabaseKekObject entities + * Provides PKCS#11-like object storage operations for KEKs + */ +public interface KMSDatabaseKekObjectDao extends GenericDao { + + /** + * Find a KEK object by label (PKCS#11 CKA_LABEL) + */ + KMSDatabaseKekObjectVO findByLabel(String label); + + /** + * Find a KEK object by object ID (PKCS#11 CKA_ID) + */ + KMSDatabaseKekObjectVO findByObjectId(byte[] objectId); + + /** + * List all KEK objects by purpose + */ + List listByPurpose(KeyPurpose purpose); + + /** + * List all KEK objects by key type (PKCS#11 CKA_KEY_TYPE) + */ + List listByKeyType(String keyType); + + /** + * List all KEK objects by object class (PKCS#11 CKA_CLASS) + */ + List listByObjectClass(String objectClass); + + /** + * Check if a KEK object exists with the given label + */ + boolean existsByLabel(String label); +} diff --git a/plugins/kms/database/src/main/java/org/apache/cloudstack/kms/provider/database/dao/KMSDatabaseKekObjectDaoImpl.java b/plugins/kms/database/src/main/java/org/apache/cloudstack/kms/provider/database/dao/KMSDatabaseKekObjectDaoImpl.java new file mode 100644 index 000000000000..ae65f3248b30 
--- /dev/null +++ b/plugins/kms/database/src/main/java/org/apache/cloudstack/kms/provider/database/dao/KMSDatabaseKekObjectDaoImpl.java @@ -0,0 +1,84 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.cloudstack.kms.provider.database.dao; + +import com.cloud.utils.db.GenericDaoBase; +import com.cloud.utils.db.SearchBuilder; +import com.cloud.utils.db.SearchCriteria; +import org.apache.cloudstack.framework.kms.KeyPurpose; +import org.apache.cloudstack.kms.provider.database.KMSDatabaseKekObjectVO; +import org.springframework.stereotype.Component; + +import java.util.List; + +@Component +public class KMSDatabaseKekObjectDaoImpl extends GenericDaoBase implements KMSDatabaseKekObjectDao { + + private final SearchBuilder allFieldSearch; + + public KMSDatabaseKekObjectDaoImpl() { + allFieldSearch = createSearchBuilder(); + allFieldSearch.and("uuid", allFieldSearch.entity().getUuid(), SearchCriteria.Op.EQ); + allFieldSearch.and("label", allFieldSearch.entity().getLabel(), SearchCriteria.Op.EQ); + allFieldSearch.and("objectId", allFieldSearch.entity().getObjectId(), SearchCriteria.Op.EQ); + allFieldSearch.and("purpose", allFieldSearch.entity().getPurpose(), SearchCriteria.Op.EQ); + 
allFieldSearch.and("keyType", allFieldSearch.entity().getKeyType(), SearchCriteria.Op.EQ); + allFieldSearch.and("objectClass", allFieldSearch.entity().getObjectClass(), SearchCriteria.Op.EQ); + allFieldSearch.done(); + } + + @Override + public KMSDatabaseKekObjectVO findByLabel(String label) { + SearchCriteria sc = allFieldSearch.create(); + sc.setParameters("label", label); + return findOneBy(sc); + } + + @Override + public KMSDatabaseKekObjectVO findByObjectId(byte[] objectId) { + SearchCriteria sc = allFieldSearch.create(); + sc.setParameters("objectId", objectId); + return findOneBy(sc); + } + + @Override + public List listByPurpose(KeyPurpose purpose) { + SearchCriteria sc = allFieldSearch.create(); + sc.setParameters("purpose", purpose); + return listBy(sc); + } + + @Override + public List listByKeyType(String keyType) { + SearchCriteria sc = allFieldSearch.create(); + sc.setParameters("keyType", keyType); + return listBy(sc); + } + + @Override + public List listByObjectClass(String objectClass) { + SearchCriteria sc = allFieldSearch.create(); + sc.setParameters("objectClass", objectClass); + return listBy(sc); + } + + @Override + public boolean existsByLabel(String label) { + return findByLabel(label) != null; + } +} diff --git a/plugins/kms/database/src/main/resources/META-INF/cloudstack/database-kms/module.properties b/plugins/kms/database/src/main/resources/META-INF/cloudstack/database-kms/module.properties new file mode 100644 index 000000000000..8d43cd9e08b8 --- /dev/null +++ b/plugins/kms/database/src/main/resources/META-INF/cloudstack/database-kms/module.properties @@ -0,0 +1,18 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +name=database-kms +parent=kms diff --git a/plugins/kms/database/src/main/resources/META-INF/cloudstack/database-kms/spring-database-kms-context.xml b/plugins/kms/database/src/main/resources/META-INF/cloudstack/database-kms/spring-database-kms-context.xml new file mode 100644 index 000000000000..186e8adfa714 --- /dev/null +++ b/plugins/kms/database/src/main/resources/META-INF/cloudstack/database-kms/spring-database-kms-context.xml @@ -0,0 +1,31 @@ + + + + + + + diff --git a/plugins/kms/pkcs11/pom.xml b/plugins/kms/pkcs11/pom.xml new file mode 100644 index 000000000000..1aaa88415769 --- /dev/null +++ b/plugins/kms/pkcs11/pom.xml @@ -0,0 +1,73 @@ + + + + 4.0.0 + cloud-plugin-kms-pkcs11 + Apache CloudStack Plugin - KMS PKCS#11 Provider + PKCS#11-backed KMS provider for HSM integration + + + org.apache.cloudstack + cloudstack-kms-plugins + 4.23.0.0-SNAPSHOT + ../pom.xml + + + + + org.apache.cloudstack + cloud-framework-kms + ${project.version} + + + org.apache.cloudstack + cloud-framework-config + ${project.version} + + + org.apache.cloudstack + cloud-utils + ${project.version} + + + org.apache.cloudstack + cloud-engine-schema + ${project.version} + + + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + true + + + + + + + diff --git a/plugins/kms/pkcs11/src/main/java/org/apache/cloudstack/kms/provider/pkcs11/PKCS11HSMProvider.java 
b/plugins/kms/pkcs11/src/main/java/org/apache/cloudstack/kms/provider/pkcs11/PKCS11HSMProvider.java new file mode 100644 index 000000000000..16203dd7f1df --- /dev/null +++ b/plugins/kms/pkcs11/src/main/java/org/apache/cloudstack/kms/provider/pkcs11/PKCS11HSMProvider.java @@ -0,0 +1,1063 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.cloudstack.kms.provider.pkcs11; + +import com.cloud.utils.component.AdapterBase; +import com.cloud.utils.crypt.DBEncryptionUtil; +import org.apache.cloudstack.framework.config.ConfigKey; +import org.apache.cloudstack.framework.kms.KMSException; +import org.apache.cloudstack.framework.kms.KMSProvider; +import org.apache.cloudstack.framework.kms.KeyPurpose; +import org.apache.cloudstack.framework.kms.WrappedKey; +import org.apache.cloudstack.kms.HSMProfileDetailsVO; +import org.apache.cloudstack.kms.KMSKekVersionVO; +import org.apache.cloudstack.kms.dao.HSMProfileDao; +import org.apache.cloudstack.kms.dao.HSMProfileDetailsDao; +import org.apache.cloudstack.kms.dao.KMSKekVersionDao; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import javax.annotation.PostConstruct; +import javax.crypto.BadPaddingException; +import javax.crypto.Cipher; +import javax.crypto.IllegalBlockSizeException; +import javax.crypto.KeyGenerator; +import javax.crypto.NoSuchPaddingException; +import javax.crypto.SecretKey; +import javax.crypto.spec.IvParameterSpec; +import javax.inject.Inject; +import java.io.Closeable; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.security.InvalidAlgorithmParameterException; +import java.security.InvalidKeyException; +import java.security.Key; +import java.security.KeyStore; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.security.Provider; +import java.security.SecureRandom; +import java.security.Security; +import java.security.UnrecoverableKeyException; +import java.security.cert.CertificateException; +import java.util.Arrays; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import 
java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.Semaphore; +import java.util.concurrent.TimeUnit; + +public class PKCS11HSMProvider extends AdapterBase implements KMSProvider { + private static final Logger logger = LogManager.getLogger(PKCS11HSMProvider.class); + private static final String PROVIDER_NAME = "pkcs11"; + // Security note (#7): AES-CBC provides confidentiality but not authenticity (no + // HMAC). + // While AES-GCM is preferred, SunPKCS11 support for GCM is often buggy or + // missing + // depending on the underlying driver. We rely on the HSM/storage for tamper + // resistance. + // AES-CBC with PKCS5Padding: FIPS-compliant (NIST SP 800-38A) with universal PKCS#11 support + private static final String CIPHER_ALGORITHM = "AES/CBC/PKCS5Padding"; + + private static final long SESSION_ACQUIRE_TIMEOUT_MS = 5000L; + + private static final int[] VALID_KEY_SIZES = {128, 192, 256}; + private final Map sessionPools = new ConcurrentHashMap<>(); + @Inject + private HSMProfileDao hsmProfileDao; + @Inject + private HSMProfileDetailsDao hsmProfileDetailsDao; + @Inject + private KMSKekVersionDao kmsKekVersionDao; + + @PostConstruct + public void init() { + logger.info("Initializing PKCS11HSMProvider"); + } + + @Override + public String getProviderName() { + return PROVIDER_NAME; + } + + @Override + public String createKek(KeyPurpose purpose, String label, int keyBits, Long hsmProfileId) throws KMSException { + if (hsmProfileId == null) { + throw KMSException.invalidParameter("HSM Profile ID is required for PKCS#11 provider"); + } + final String kekLabel = StringUtils.isEmpty(label) ? 
generateKekLabel(purpose) : label; + return executeWithSession(hsmProfileId, session -> session.generateKey(kekLabel, keyBits, purpose)); + } + + @Override + public void deleteKek(String kekId) throws KMSException { + Long hsmProfileId = resolveProfileId(kekId); + executeWithSession(hsmProfileId, session -> { + session.deleteKey(kekId); + return null; + }); + } + + @Override + public boolean isKekAvailable(String kekId) throws KMSException { + try { + Long hsmProfileId = resolveProfileId(kekId); + return executeWithSession(hsmProfileId, session -> session.checkKeyExists(kekId)); + } catch (Exception e) { + return false; + } + } + + @Override + public WrappedKey wrapKey(byte[] plainDek, KeyPurpose purpose, String kekLabel, + Long hsmProfileId) throws KMSException { + if (hsmProfileId == null) { + hsmProfileId = resolveProfileId(kekLabel); + } + + byte[] wrappedBlob = executeWithSession(hsmProfileId, session -> session.wrapKey(plainDek, kekLabel)); + return new WrappedKey(kekLabel, purpose, CIPHER_ALGORITHM, wrappedBlob, PROVIDER_NAME, new Date(), null); + } + + @Override + public byte[] unwrapKey(WrappedKey wrappedKey, Long hsmProfileId) throws KMSException { + if (hsmProfileId == null) { + hsmProfileId = resolveProfileId(wrappedKey.getKekId()); + } + + return executeWithSession(hsmProfileId, + session -> session.unwrapKey(wrappedKey.getWrappedKeyMaterial(), wrappedKey.getKekId())); + } + + @Override + public WrappedKey generateAndWrapDek(KeyPurpose purpose, String kekLabel, int keyBits, + Long hsmProfileId) throws KMSException { + byte[] dekBytes = new byte[keyBits / 8]; + new SecureRandom().nextBytes(dekBytes); + + try { + return wrapKey(dekBytes, purpose, kekLabel, hsmProfileId); + } finally { + Arrays.fill(dekBytes, (byte) 0); + } + } + + @Override + public WrappedKey rewrapKey(WrappedKey oldWrappedKey, String newKekLabel, + Long targetHsmProfileId) throws KMSException { + byte[] plainKey = unwrapKey(oldWrappedKey, null); + try { + Long profileId = 
targetHsmProfileId != null ? targetHsmProfileId : resolveProfileId(newKekLabel); + return wrapKey(plainKey, oldWrappedKey.getPurpose(), newKekLabel, profileId); + } finally { + Arrays.fill(plainKey, (byte) 0); + } + } + + /** + * Performs health check on all configured HSM profiles. + * + *

For each configured HSM profile:
 + * <ol>
 + *   <li>Attempts to acquire a test session</li>
 + *   <li>Verifies HSM is responsive (lightweight KeyStore operation)</li>
 + *   <li>Releases the session</li>
 + * </ol>
 + *
 + * <p>

If any HSM profile fails the health check, this method throws an exception. + * If no profiles are configured, returns true (nothing to check). + * + * @return true if all configured HSM profiles are healthy + * @throws KMSException with {@code HEALTH_CHECK_FAILED} if any HSM profile is unhealthy + */ + @Override + public boolean healthCheck() throws KMSException { + if (sessionPools.isEmpty()) { + logger.debug("No HSM profiles configured for health check"); + return true; + } + + boolean allHealthy = true; + for (Long profileId : sessionPools.keySet()) { + if (!checkProfileHealth(profileId)) { + allHealthy = false; + } + } + + if (!allHealthy) { + throw KMSException.healthCheckFailed("One or more HSM profiles failed health check", null); + } + + return true; + } + + private boolean checkProfileHealth(Long profileId) { + try { + Boolean result = executeWithSession(profileId, session -> { + try { + session.keyStore.size(); // Verify the HSM token is currently reachable + } catch (KeyStoreException e) { + return false; + } + return true; + }); + logger.debug("Health check {} for HSM profile {}", result ? 
"passed" : "failed", profileId); + return result; + } catch (Exception e) { + logger.warn("Health check failed for HSM profile {}: {}", profileId, e.getMessage(), e); + return false; + } + } + + @Override + public void invalidateProfileCache(Long profileId) { + HSMSessionPool pool = sessionPools.remove(profileId); + if (pool != null) { + pool.invalidate(); + } + logger.info("Invalidated HSM session pool for profile {}", profileId); + } + + Long resolveProfileId(String kekLabel) throws KMSException { + KMSKekVersionVO version = kmsKekVersionDao.findByKekLabel(kekLabel); + if (version != null && version.getHsmProfileId() != null) { + return version.getHsmProfileId(); + } + throw new KMSException(KMSException.ErrorType.KEK_NOT_FOUND, + "Could not resolve HSM profile for KEK: " + kekLabel); + } + + /** + * Executes an operation with a session from the pool, handling acquisition and release. + * + * @param hsmProfileId HSM profile ID + * @param operation Operation to execute with the session + * @return Result of the operation + * @throws KMSException if session acquisition fails or operation throws an exception + */ + private T executeWithSession(Long hsmProfileId, SessionOperation operation) throws KMSException { + HSMSessionPool pool = getSessionPool(hsmProfileId); + PKCS11Session session = null; + try { + session = pool.acquireSession(SESSION_ACQUIRE_TIMEOUT_MS); + return operation.execute(session); + } finally { + pool.releaseSession(session); + } + } + + HSMSessionPool getSessionPool(Long profileId) { + return sessionPools.computeIfAbsent(profileId, id -> { + Map config = loadProfileConfig(id); + int maxSessions = Integer.parseInt(config.getOrDefault("max_sessions", "10")); + return new HSMSessionPool(id, maxSessions, this); + }); + } + + Map loadProfileConfig(Long profileId) { + List details = hsmProfileDetailsDao.listByProfileId(profileId); + Map config = new HashMap<>(); + for (HSMProfileDetailsVO detail : details) { + String value = detail.getValue(); + if 
(isSensitiveKey(detail.getName())) { + value = DBEncryptionUtil.decrypt(value); + } + config.put(detail.getName(), value); + } + validateProfileConfig(config); + return config; + } + + /** + * Validates HSM profile configuration for PKCS#11 provider. + * + *

+ * Validates:
 + * <ul>
 + *   <li>{@code library}: Required, should point to PKCS#11 library</li>
 + *   <li>{@code slot}, {@code slot_list_index}, or {@code token_label}: At least one required</li>
 + *   <li>{@code pin}: Required for HSM authentication</li>
 + *   <li>{@code max_sessions}: Optional, must be positive integer if provided</li>
 + * </ul>
+ * + * @param config Configuration map from HSM profile details + * @throws KMSException with {@code INVALID_PARAMETER} if validation fails + */ + @Override + public void validateProfileConfig(Map config) throws KMSException { + String libraryPath = config.get("library"); + if (StringUtils.isBlank(libraryPath)) { + throw KMSException.invalidParameter("library is required for PKCS#11 HSM profile"); + } + + String slot = config.get("slot"); + String slotListIndex = config.get("slot_list_index"); + String tokenLabel = config.get("token_label"); + if (StringUtils.isAllBlank(slot, slotListIndex, tokenLabel)) { + throw KMSException.invalidParameter( + "One of 'slot', 'slot_list_index', or 'token_label' is required for PKCS#11 HSM profile"); + } + + if (StringUtils.isNotBlank(slot)) { + try { + Integer.parseInt(slot); + } catch (NumberFormatException e) { + throw KMSException.invalidParameter("slot must be a valid integer: " + slot); + } + } + + if (StringUtils.isNotBlank(slotListIndex)) { + try { + int idx = Integer.parseInt(slotListIndex); + if (idx < 0) { + throw KMSException.invalidParameter("slot_list_index must be a non-negative integer"); + } + } catch (NumberFormatException e) { + throw KMSException.invalidParameter("slot_list_index must be a valid integer: " + slotListIndex); + } + } + + File libraryFile = new File(libraryPath); + if (!libraryFile.exists() && !libraryFile.isAbsolute()) { + // The HSM library might be in the system library path + logger.debug("Library path {} does not exist as absolute path, will rely on system library path", + libraryPath); + } + + String max_sessions = config.get("max_sessions"); + if (StringUtils.isNotBlank(max_sessions)) { + try { + int idx = Integer.parseInt(max_sessions); + if (idx <= 0) { + throw KMSException.invalidParameter("max_sessions must be greater than 0"); + } + } catch (NumberFormatException e) { + throw KMSException.invalidParameter("max_sessions must be a valid integer: " + max_sessions); + } + } + } + + 
boolean isSensitiveKey(String key) { + return KMSProvider.isSensitiveKey(key); + } + + String generateKekLabel(KeyPurpose purpose) { + return purpose.getName() + "-kek-" + UUID.randomUUID().toString().substring(0, 8); + } + + @Override + public String getConfigComponentName() { + return PKCS11HSMProvider.class.getSimpleName(); + } + + @Override + public ConfigKey[] getConfigKeys() { + return new ConfigKey[0]; + } + + @FunctionalInterface + private interface SessionOperation { + T execute(PKCS11Session session) throws KMSException; + } + + private static class HSMSessionPool { + private final BlockingQueue availableSessions; + private final Long profileId; + private final PKCS11HSMProvider provider; + private final int maxSessions; + // Counts total sessions (idle + active). Acquired on creation, released on close. + private final Semaphore sessionPermits; + private volatile boolean invalidated = false; + + HSMSessionPool(Long profileId, int maxSessions, PKCS11HSMProvider provider) { + this.profileId = profileId; + this.provider = provider; + this.maxSessions = maxSessions; + this.sessionPermits = new Semaphore(maxSessions); + this.availableSessions = new ArrayBlockingQueue<>(maxSessions); + } + + PKCS11Session acquireSession(long timeoutMs) throws KMSException { + // Try to get an existing idle session first (no semaphore change: it already owns a permit). + PKCS11Session session = availableSessions.poll(); + if (session != null) { + if (session.isValid()) { + return session; + } + // Stale idle session: discard it and free its permit so a new one can be created. + session.close(); + sessionPermits.release(); + } + + // Acquire a permit to create a new session, blocking up to timeoutMs if at capacity. + try { + if (!sessionPermits.tryAcquire(timeoutMs, TimeUnit.MILLISECONDS)) { + // One last try: a session may have been returned while we were waiting. 
+ session = availableSessions.poll(); + if (session != null && session.isValid()) { + return session; + } + if (session != null) { + session.close(); + sessionPermits.release(); + } + throw new KMSException(KMSException.ErrorType.CONNECTION_FAILED, + "Timed out waiting for an available HSM session for profile " + profileId + + " (max=" + maxSessions + ", timeout=" + timeoutMs + "ms)"); + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new KMSException(KMSException.ErrorType.CONNECTION_FAILED, + "Interrupted while waiting to acquire HSM session for profile " + profileId, e); + } + + try { + return createNewSession(); + } catch (KMSException e) { + sessionPermits.release(); + throw e; + } + } + + private PKCS11Session createNewSession() throws KMSException { + // Config (including decrypted PIN) is loaded fresh each time and not stored. + return new PKCS11Session(provider.loadProfileConfig(profileId)); + } + + void releaseSession(PKCS11Session session) { + if (session == null) return; + if (!invalidated && session.isValid() && availableSessions.offer(session)) { + return; // session returned to the idle pool; permit stays consumed + } + // Pool is invalidated, session is stale, or the idle queue is full: close immediately. + session.close(); + sessionPermits.release(); + } + + /** + * Marks the pool as invalidated and closes all idle sessions. + * Any session currently checked out will be closed (and its permit released) when + * it is returned via {@link #releaseSession} — the invalidated flag prevents re-pooling. + */ + void invalidate() { + invalidated = true; + PKCS11Session session; + while ((session = availableSessions.poll()) != null) { + session.close(); + sessionPermits.release(); + } + } + } + + /** + * Inner class representing an active PKCS#11 session with an HSM. + * This class manages the connection to the HSM, key operations, and session lifecycle. + * + *

Key operations supported: + *

    + *
  • Key generation: Generate AES keys directly in the HSM
  • + *
  • Key wrapping: Encrypt DEKs using KEKs stored in the HSM (AES-CBC/PKCS5Padding)
  • + *
  • Key unwrapping: Decrypt DEKs using KEKs stored in the HSM (AES-CBC/PKCS5Padding)
  • + *
  • Key deletion: Remove keys from the HSM
  • + *
  • Key existence check: Verify if a key exists in the HSM
  • + *
+ * + *

Configuration requirements: + *

    + *
  • {@code library}: Path to PKCS#11 library (required)
  • + *
  • {@code slot} or {@code token_label}: HSM slot/token selection (at least one required)
  • + *
  • {@code pin}: PIN for HSM authentication (required, sensitive)
  • + *
+ * + *

Error handling: PKCS#11 specific error codes are mapped to appropriate + * {@link KMSException.ErrorType} values for proper retry logic and error reporting. + */ + private static class PKCS11Session { + private static final int IV_LENGTH = 16; // 128 bits for CBC mode + + private KeyStore keyStore; + private Provider provider; + private String providerName; + private Path tempConfigFile; + + /** + * Creates a new PKCS#11 session and connects to the HSM. + * The config map (including any sensitive values such as the PIN) is used only + * during connection setup and is not retained as a field. + * + * @param config HSM profile configuration containing library, slot/token_label, and pin + * @throws KMSException if connection fails or configuration is invalid + */ + PKCS11Session(Map config) throws KMSException { + connect(config); + } + + /** + * Establishes connection to the PKCS#11 HSM. + * + *

This method: + *

    + *
  1. Validates required configuration (library, slot/token_label, pin)
  2. + *
  3. Creates a SunPKCS11 provider with the HSM library
  4. + *
  5. Loads the PKCS#11 KeyStore
  6. + *
  7. Authenticates using the provided PIN
  8. + *
+ * + *

Slot/token selection: + *

    + *
  • If {@code token_label} is provided, it is used (more reliable)
  • + *
  • Otherwise, {@code slot} (numeric ID) is used
  • + *
+ * + * @throws KMSException with appropriate ErrorType: + *
    + *
  • {@code AUTHENTICATION_FAILED} if PIN is incorrect
  • + *
  • {@code INVALID_PARAMETER} if configuration is missing or invalid
  • + *
  • {@code CONNECTION_FAILED} if HSM is unreachable or device error occurs
  • + *
+ */ + private void connect(Map config) throws KMSException { + try { + // Unique suffix ensures each session gets its own provider name in java.security.Security, + // allowing Security.removeProvider() in close() to target exactly this session's provider. + String nameSuffix = UUID.randomUUID().toString().substring(0, 8); + + String configString = buildSunPKCS11Config(config, nameSuffix); + + // Java 9+ API: write config to temp file, then configure the provider + tempConfigFile = Files.createTempFile("pkcs11-config-", ".cfg"); + try (FileWriter writer = new FileWriter(tempConfigFile.toFile(), StandardCharsets.UTF_8)) { + writer.write(configString); + } + + Provider baseProvider = Security.getProvider("SunPKCS11"); + if (baseProvider == null) { + throw new KMSException(KMSException.ErrorType.CONNECTION_FAILED, + "SunPKCS11 provider not available in this JVM"); + } + + provider = baseProvider.configure(tempConfigFile.toAbsolutePath().toString()); + + // Use the actual provider name so Security.removeProvider() in close() works correctly. + providerName = provider.getName(); + + // Security.addProvider returns -1 if a provider with this name is already registered. + // With the UUID-based suffix this should be impossible in practice; guard defensively. + if (Security.addProvider(provider) < 0) { + throw new KMSException(KMSException.ErrorType.CONNECTION_FAILED, + "Failed to register PKCS#11 provider '" + providerName + "': name already in use"); + } + + keyStore = KeyStore.getInstance("PKCS11", provider); + + String pin = config.get("pin"); + if (StringUtils.isEmpty(pin)) { + throw KMSException.invalidParameter("pin is required"); + } + char[] pinChars = pin.toCharArray(); + keyStore.load(null, pinChars); + Arrays.fill(pinChars, '\0'); + + // The temp file is only needed during configure()/load(); delete it immediately + // rather than holding it until the session is eventually closed. 
+ Files.deleteIfExists(tempConfigFile); + tempConfigFile = null; + + logger.debug("Successfully connected to PKCS#11 HSM at {}", config.get("library")); + } catch (KeyStoreException | CertificateException | NoSuchAlgorithmException e) { + handlePKCS11Exception(e, "Failed to initialize PKCS#11 connection"); + } catch (IOException e) { + String errorMsg = e.getMessage(); + if (errorMsg != null && errorMsg.contains("CKR_PIN_INCORRECT")) { + throw new KMSException(KMSException.ErrorType.AUTHENTICATION_FAILED, + "Incorrect PIN for HSM authentication", e); + } else if (errorMsg != null && errorMsg.contains("CKR_SLOT_ID_INVALID")) { + throw KMSException.invalidParameter("Invalid slot ID: " + config.get("slot")); + } else { + handlePKCS11Exception(e, "I/O error during PKCS#11 connection"); + } + } catch (Exception e) { + handlePKCS11Exception(e, "Unexpected error during PKCS#11 connection"); + } + } + + /** + * Builds SunPKCS11 provider configuration string. + * + * @param config HSM profile configuration + * @return Configuration string for SunPKCS11 provider + * @throws KMSException if required configuration is missing + */ + private String buildSunPKCS11Config(Map config, String nameSuffix) throws KMSException { + String libraryPath = config.get("library"); + if (StringUtils.isBlank(libraryPath)) { + throw KMSException.invalidParameter("library is required"); + } + + StringBuilder configBuilder = new StringBuilder(); + // Include the unique suffix so that each session is registered under a distinct + // provider name (SunPKCS11-CloudStackHSM-{suffix}), preventing name collisions + // across concurrent sessions and allowing clean removal via Security.removeProvider(). 
+ configBuilder.append("name=CloudStackHSM-").append(nameSuffix).append("\n"); + configBuilder.append("library=").append(libraryPath).append("\n"); + + String tokenLabel = config.get("token_label"); + String slotListIndex = config.get("slot_list_index"); + String slot = config.get("slot"); + + if (StringUtils.isNotBlank(tokenLabel)) { + configBuilder.append("tokenLabel=").append(tokenLabel).append("\n"); + } else if (StringUtils.isNotBlank(slotListIndex)) { + configBuilder.append("slotListIndex=").append(slotListIndex).append("\n"); + } else if (StringUtils.isNotBlank(slot)) { + configBuilder.append("slot=").append(slot).append("\n"); + } else { + throw KMSException.invalidParameter("One of 'slot', 'slot_list_index', or 'token_label' is required"); + } + + return configBuilder.toString(); + } + + /** + * Maps PKCS#11 specific exceptions to appropriate KMSException.ErrorType. + * + *

PKCS#11 error codes are parsed from exception messages and mapped as follows: + *

    + *
  • {@code CKR_PIN_INCORRECT} → {@code AUTHENTICATION_FAILED}
  • + *
  • {@code CKR_SLOT_ID_INVALID} → {@code INVALID_PARAMETER}
  • + *
  • {@code CKR_KEY_NOT_FOUND} → {@code KEK_NOT_FOUND}
  • + *
  • {@code CKR_DEVICE_ERROR} → {@code CONNECTION_FAILED}
  • + *
  • {@code CKR_SESSION_HANDLE_INVALID} → {@code CONNECTION_FAILED}
  • + *
  • {@code CKR_KEY_ALREADY_EXISTS} → {@code KEY_ALREADY_EXISTS}
  • + *
  • {@code KeyStoreException} → {@code WRAP_UNWRAP_FAILED}
  • + *
  • Other errors → {@code KEK_OPERATION_FAILED}
  • + *
+ * + * @param e The exception to map + * @param context Context description for the error message + * @throws KMSException with appropriate ErrorType and detailed message + */ + private void handlePKCS11Exception(Exception e, String context) throws KMSException { + String errorMsg = e.getMessage(); + if (errorMsg == null) { + errorMsg = e.getClass().getSimpleName(); + } + logger.warn("PKCS#11 error: {} - {}", errorMsg, context, e); + + if (errorMsg.contains("CKR_PIN_INCORRECT") || errorMsg.contains("PIN_INCORRECT")) { + throw new KMSException(KMSException.ErrorType.AUTHENTICATION_FAILED, + context + ": Incorrect PIN", e); + } else if (errorMsg.contains("CKR_SLOT_ID_INVALID") || errorMsg.contains("SLOT_ID_INVALID")) { + throw KMSException.invalidParameter(context + ": Invalid slot ID"); + } else if (errorMsg.contains("CKR_KEY_NOT_FOUND") || errorMsg.contains("KEY_NOT_FOUND")) { + throw KMSException.kekNotFound(context + ": Key not found"); + } else if (errorMsg.contains("CKR_DEVICE_ERROR") || errorMsg.contains("DEVICE_ERROR")) { + throw new KMSException(KMSException.ErrorType.CONNECTION_FAILED, + context + ": HSM device error", e); + } else if (errorMsg.contains("CKR_SESSION_HANDLE_INVALID") || errorMsg.contains("SESSION_HANDLE_INVALID")) { + throw new KMSException(KMSException.ErrorType.CONNECTION_FAILED, + context + ": Invalid session handle", e); + } else if (errorMsg.contains("CKR_KEY_ALREADY_EXISTS") || errorMsg.contains("KEY_ALREADY_EXISTS")) { + throw KMSException.keyAlreadyExists(context); + } else if (e instanceof KeyStoreException) { + throw new KMSException(KMSException.ErrorType.WRAP_UNWRAP_FAILED, + context + ": " + errorMsg, e); + } else { + throw new KMSException(KMSException.ErrorType.KEK_OPERATION_FAILED, + context + ": " + errorMsg, e); + } + } + + /** + * Validates that the PKCS#11 session is still active and connected to the HSM. + * + *

Checks performed: + *

    + *
  • KeyStore object is not null
  • + *
  • Provider is still registered in Security
  • + *
  • HSM is responsive (lightweight operation: get KeyStore size)
  • + *
+ * + * @return true if session is valid and HSM is accessible, false otherwise + */ + boolean isValid() { + try { + if (keyStore == null) { + return false; + } + + if (provider == null || Security.getProvider(provider.getName()) == null) { + return false; + } + + keyStore.size(); + return true; + } catch (Exception e) { + logger.debug("Session validation failed: {}", e.getMessage()); + return false; + } + } + + /** + * Closes the PKCS#11 session and cleans up resources. + * + *

+ * Note: Errors during cleanup are logged but do not throw exceptions + * to ensure cleanup continues even if some steps fail. + */ + void close() { + try { + if (keyStore instanceof Closeable) { + ((Closeable) keyStore).close(); + } + + if (provider != null && providerName != null) { + try { + Security.removeProvider(providerName); + } catch (Exception e) { + logger.debug("Failed to remove provider {}: {}", providerName, e.getMessage()); + } + } + + if (tempConfigFile != null) { + try { + Files.deleteIfExists(tempConfigFile); + } catch (IOException e) { + logger.debug("Failed to delete temporary config file {}: {}", tempConfigFile, e.getMessage()); + } + } + } catch (Exception e) { + logger.warn("Error during session close: {}", e.getMessage()); + } finally { + keyStore = null; + provider = null; + providerName = null; + tempConfigFile = null; + } + } + + /** + * Generates an AES key directly in the HSM with the specified label. + * + *

+ * This method generates the key natively inside the HSM using a + * {@link KeyGenerator} configured with the PKCS#11 provider, so the key + * material never leaves the HSM boundary. The returned PKCS#11-native key + * reference ({@code P11Key}) is then stored in the KeyStore under the + * requested label. + * + *

+ * Using {@code KeyGenerator} with the HSM provider is required for + * HSMs such as NetHSM that do not support importing raw secret-key bytes + * via {@code KeyStore.setKeyEntry()}. By generating the key on the HSM first, + * the value passed to {@code setKeyEntry()} is already a PKCS#11 token object, + * so no raw-bytes import is attempted. + * + *

+ * Once stored, the key: + *

    + *
  • Resides permanently in the HSM token storage
  • + *
  • Is marked as non-extractable (CKA_EXTRACTABLE=false) by the HSM
  • + *
  • Can only be used for cryptographic operations via the HSM
  • + *
+ * + * @param label Unique label for the key in the HSM + * @param keyBits Key size in bits (128, 192, or 256) + * @param purpose Key purpose (for logging/auditing) + * @return The label of the generated key + * @throws KMSException if generation fails or key already exists + */ + String generateKey(String label, int keyBits, KeyPurpose purpose) throws KMSException { + validateKeySize(keyBits); + + try { + // Check if key with this label already exists + if (keyStore.containsAlias(label)) { + throw KMSException.keyAlreadyExists("Key with label '" + label + "' already exists in HSM"); + } + + // Generate the AES key natively inside the HSM using the PKCS#11 provider. + // This avoids importing raw key bytes into the HSM, which is not supported + // by all HSMs (e.g. NetHSM rejects SecretKeySpec via storeSkey()). + // The resulting key is a PKCS#11-native P11Key that lives inside the token. + KeyGenerator keyGen = KeyGenerator.getInstance("AES", provider); + keyGen.init(keyBits); + SecretKey hsmKey = keyGen.generateKey(); + + // Associate the HSM-generated key with the requested label by storing + // it in the PKCS#11 KeyStore. Because hsmKey is already a P11Key + // (not a software SecretKeySpec), P11KeyStore.storeSkey() stores it + // as a persistent token object (CKA_TOKEN=true) with CKA_LABEL=label + // without attempting any raw-bytes conversion. 
+ keyStore.setKeyEntry(label, hsmKey, null, null); + + logger.info("Generated AES-{} key '{}' in HSM (purpose: {})", + keyBits, label, purpose); + return label; + + } catch (KeyStoreException e) { + handlePKCS11Exception(e, "Failed to store key in HSM KeyStore"); + } catch (NoSuchAlgorithmException e) { + handlePKCS11Exception(e, "AES KeyGenerator not available via PKCS#11 provider"); + } catch (Exception e) { + String errorMsg = e.getMessage(); + if (errorMsg != null && (errorMsg.contains("CKR_OBJECT_HANDLE_INVALID") + || errorMsg.contains("already exists"))) { + throw KMSException.keyAlreadyExists("Key with label '" + label + "' already exists in HSM"); + } else { + handlePKCS11Exception(e, "Failed to generate key in HSM"); + } + } + return null; + } + + /** + * Validates that the key size is one of the supported AES key sizes. + * + * @param keyBits Key size in bits + * @throws KMSException if key size is invalid + */ + private void validateKeySize(int keyBits) throws KMSException { + if (Arrays.stream(VALID_KEY_SIZES).noneMatch(size -> size == keyBits)) { + throw KMSException.invalidParameter("Key size must be 128, 192, or 256 bits"); + } + } + + /** + * Wraps (encrypts) a plaintext DEK using a KEK stored in the HSM. + * + *

Uses AES-CBC with PKCS5Padding (FIPS 197 + NIST SP 800-38A): + *

    + *
  • Generates a random 128-bit IV
  • + *
  • Encrypts the DEK using AES-CBC with the KEK from HSM
  • + *
  • Returns format: [IV (16 bytes)][ciphertext]
  • + *
+ * + *

Security: The plaintext DEK should be zeroized by the caller after wrapping. + * + * @param plainDek Plaintext DEK to wrap (will be encrypted) + * @param kekLabel Label of the KEK stored in the HSM + * @return Wrapped key blob: [IV][ciphertext] + * @throws KMSException with appropriate ErrorType: + *

    + *
  • {@code INVALID_PARAMETER} if plainDek is null or empty
  • + *
  • {@code KEK_NOT_FOUND} if KEK with label doesn't exist or is not accessible
  • + *
  • {@code WRAP_UNWRAP_FAILED} if wrapping operation fails
  • + *
+ */ + byte[] wrapKey(byte[] plainDek, String kekLabel) throws KMSException { + if (plainDek == null || plainDek.length == 0) { + throw KMSException.invalidParameter("Plain DEK cannot be null or empty"); + } + + SecretKey kek = getKekFromKeyStore(kekLabel); + try { + byte[] iv = new byte[IV_LENGTH]; + new SecureRandom().nextBytes(iv); + + Cipher cipher = Cipher.getInstance(CIPHER_ALGORITHM, provider); + cipher.init(Cipher.ENCRYPT_MODE, kek, new IvParameterSpec(iv)); + byte[] ciphertext = cipher.doFinal(plainDek); + + byte[] result = new byte[IV_LENGTH + ciphertext.length]; + System.arraycopy(iv, 0, result, 0, IV_LENGTH); + System.arraycopy(ciphertext, 0, result, IV_LENGTH, ciphertext.length); + + logger.debug("Wrapped key with KEK '{}' using AES-CBC", kekLabel); + return result; + } catch (IllegalBlockSizeException | BadPaddingException | InvalidKeyException e) { + handlePKCS11Exception(e, "Invalid key or data for wrapping"); + } catch (NoSuchAlgorithmException | NoSuchPaddingException e) { + handlePKCS11Exception(e, "AES-CBC not supported by HSM"); + } catch (InvalidAlgorithmParameterException e) { + handlePKCS11Exception(e, "Invalid IV for CBC mode"); + } catch (Exception e) { + handlePKCS11Exception(e, "Failed to wrap key with HSM"); + } finally { + kek = null; + } + return null; + } + + /** + * Retrieves a KEK (Key Encryption Key) from the HSM KeyStore. 
+ * + * @param kekLabel Label of the KEK to retrieve + * @return SecretKey representing the KEK + * @throws KMSException if KEK is not found or not accessible + */ + private SecretKey getKekFromKeyStore(String kekLabel) throws KMSException { + try { + Key key = keyStore.getKey(kekLabel, null); + if (key == null) { + throw KMSException.kekNotFound("KEK with label '" + kekLabel + "' not found in HSM"); + } + if (!(key instanceof SecretKey)) { + throw KMSException.kekNotFound("Key with label '" + kekLabel + "' is not a secret key"); + } + return (SecretKey) key; + } catch (UnrecoverableKeyException e) { + throw KMSException.kekNotFound("KEK with label '" + kekLabel + "' is not accessible"); + } catch (NoSuchAlgorithmException e) { + handlePKCS11Exception(e, "Algorithm not supported"); + } catch (KeyStoreException e) { + handlePKCS11Exception(e, "Failed to retrieve KEK from HSM"); + } + return null; + } + + /** + * Unwraps (decrypts) a wrapped DEK using a KEK stored in the HSM. + * + *

+ * Uses AES-CBC with PKCS5Padding. Expected format: [IV (16 bytes)][ciphertext]. + * + *

+ * Security: The returned plaintext DEK must be zeroized by the caller after + * use. + * + * @param wrappedBlob Wrapped DEK blob (IV + ciphertext) + * @param kekLabel Label of the KEK stored in the HSM + * @return Plaintext DEK + * @throws KMSException with appropriate ErrorType: + *

    + *
  • {@code INVALID_PARAMETER} if wrappedBlob is null, + * empty, or too short
  • + *
  • {@code KEK_NOT_FOUND} if KEK with label doesn't + * exist or is not accessible
  • + *
  • {@code WRAP_UNWRAP_FAILED} if unwrapping fails
  • + *
+ */ + byte[] unwrapKey(byte[] wrappedBlob, String kekLabel) throws KMSException { + if (wrappedBlob == null || wrappedBlob.length == 0) { + throw KMSException.invalidParameter("Wrapped blob cannot be null or empty"); + } + + // Minimum size: IV (16 bytes) + at least one AES block (16 bytes) + if (wrappedBlob.length < IV_LENGTH + 16) { + throw KMSException.invalidParameter("Wrapped blob too short: expected at least " + + (IV_LENGTH + 16) + " bytes"); + } + + SecretKey kek = getKekFromKeyStore(kekLabel); + try { + byte[] iv = new byte[IV_LENGTH]; + System.arraycopy(wrappedBlob, 0, iv, 0, IV_LENGTH); + byte[] ciphertext = new byte[wrappedBlob.length - IV_LENGTH]; + System.arraycopy(wrappedBlob, IV_LENGTH, ciphertext, 0, ciphertext.length); + + Cipher cipher = Cipher.getInstance(CIPHER_ALGORITHM, provider); + cipher.init(Cipher.DECRYPT_MODE, kek, new IvParameterSpec(iv)); + byte[] plainDek = cipher.doFinal(ciphertext); + + logger.debug("Unwrapped key with KEK '{}' using AES-CBC", kekLabel); + return plainDek; + } catch (BadPaddingException e) { + throw KMSException.wrapUnwrapFailed( + "Decryption failed: wrapped key may be corrupted or KEK is incorrect", e); + } catch (IllegalBlockSizeException | InvalidKeyException e) { + handlePKCS11Exception(e, "Invalid key or data for unwrapping"); + } catch (NoSuchAlgorithmException | NoSuchPaddingException e) { + handlePKCS11Exception(e, "AES-CBC not supported by HSM"); + } catch (InvalidAlgorithmParameterException e) { + handlePKCS11Exception(e, "Invalid IV for CBC mode"); + } catch (Exception e) { + handlePKCS11Exception(e, "Failed to unwrap key with HSM"); + } finally { + kek = null; + } + return null; + } + + /** + * Deletes a key from the HSM. + * + *

Warning: Deleting a KEK makes all DEKs wrapped with that KEK + * permanently unrecoverable. This operation should be used with extreme caution. + * + * @param label Label of the key to delete + * @throws KMSException with appropriate ErrorType: + *

    + *
  • {@code KEK_NOT_FOUND} if key with label doesn't exist
  • + *
  • {@code KEK_OPERATION_FAILED} if deletion fails (e.g., key is in use)
  • + *
+ */ + void deleteKey(String label) throws KMSException { + try { + if (!keyStore.containsAlias(label)) { + throw KMSException.kekNotFound("Key with label '" + label + "' not found in HSM"); + } + + keyStore.deleteEntry(label); + + logger.debug("Deleted key '{}' from HSM", label); + } catch (KeyStoreException e) { + String errorMsg = e.getMessage(); + if (errorMsg != null && errorMsg.contains("not found")) { + throw KMSException.kekNotFound("Key with label '" + label + "' not found in HSM"); + } else if (errorMsg != null && errorMsg.contains("in use")) { + throw KMSException.kekOperationFailed( + "Key with label '" + label + "' is in use and cannot be deleted"); + } else { + handlePKCS11Exception(e, "Failed to delete key from HSM"); + } + } catch (Exception e) { + handlePKCS11Exception(e, "Failed to delete key from HSM"); + } + } + + /** + * Checks if a key with the given label exists and is accessible in the HSM. + * + * @param label Label of the key to check + * @return true if key exists and is accessible, false otherwise + * @throws KMSException only for unexpected errors (KeyStoreException, etc.) 
+ * Returns false for expected cases (key not found, unrecoverable key) + */ + boolean checkKeyExists(String label) throws KMSException { + try { + Key key = keyStore.getKey(label, null); + return key != null; + } catch (KeyStoreException e) { + logger.debug("KeyStore error while checking key existence: {}", e.getMessage()); + return false; + } catch (UnrecoverableKeyException e) { + // Key exists but is not accessible (might be a different key type) + logger.debug("Key '{}' exists but is not accessible: {}", label, e.getMessage()); + return false; + } catch (NoSuchAlgorithmException e) { + logger.debug("Algorithm error while checking key existence: {}", e.getMessage()); + return false; + } catch (Exception e) { + logger.debug("Unexpected error while checking key existence: {}", e.getMessage()); + return false; + } + } + } +} diff --git a/plugins/kms/pkcs11/src/main/resources/META-INF/cloudstack/pkcs11-kms/module.properties b/plugins/kms/pkcs11/src/main/resources/META-INF/cloudstack/pkcs11-kms/module.properties new file mode 100644 index 000000000000..aa7a51607577 --- /dev/null +++ b/plugins/kms/pkcs11/src/main/resources/META-INF/cloudstack/pkcs11-kms/module.properties @@ -0,0 +1,21 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +# + +name=pkcs11-kms +parent=kms diff --git a/plugins/kms/pkcs11/src/main/resources/META-INF/cloudstack/pkcs11-kms/spring-pkcs11-kms-context.xml b/plugins/kms/pkcs11/src/main/resources/META-INF/cloudstack/pkcs11-kms/spring-pkcs11-kms-context.xml new file mode 100644 index 000000000000..cdd29d2cf244 --- /dev/null +++ b/plugins/kms/pkcs11/src/main/resources/META-INF/cloudstack/pkcs11-kms/spring-pkcs11-kms-context.xml @@ -0,0 +1,32 @@ + + + + + + + + + diff --git a/plugins/kms/pkcs11/src/test/java/org/apache/cloudstack/kms/provider/pkcs11/PKCS11HSMProviderTest.java b/plugins/kms/pkcs11/src/test/java/org/apache/cloudstack/kms/provider/pkcs11/PKCS11HSMProviderTest.java new file mode 100644 index 000000000000..59e06a4c9b78 --- /dev/null +++ b/plugins/kms/pkcs11/src/test/java/org/apache/cloudstack/kms/provider/pkcs11/PKCS11HSMProviderTest.java @@ -0,0 +1,279 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.cloudstack.kms.provider.pkcs11; + +import org.apache.cloudstack.framework.kms.KMSException; +import org.apache.cloudstack.framework.kms.KeyPurpose; +import org.apache.cloudstack.kms.HSMProfileDetailsVO; +import org.apache.cloudstack.kms.KMSKekVersionVO; +import org.apache.cloudstack.kms.dao.HSMProfileDetailsDao; +import org.apache.cloudstack.kms.dao.KMSKekVersionDao; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Spy; +import org.mockito.junit.MockitoJUnitRunner; + +import java.util.Arrays; +import java.util.Map; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** + * Unit tests for PKCS11HSMProvider + * Tests provider-specific logic: config loading, profile resolution, sensitive key detection + */ +@RunWith(MockitoJUnitRunner.class) +public class PKCS11HSMProviderTest { + + @Spy + @InjectMocks + private PKCS11HSMProvider provider; + + @Mock + private HSMProfileDetailsDao hsmProfileDetailsDao; + + @Mock + private KMSKekVersionDao kmsKekVersionDao; + + private Long testProfileId = 1L; + private String testKekLabel = "test-kek-label"; + + @Before + public void setUp() { + // Minimal setup + } + + /** + * Test: resolveProfileId successfully finds profile from KEK label + */ + @Test + public void testResolveProfileId_FindsFromKekLabel() throws KMSException { + // Setup: KEK version with profile ID + KMSKekVersionVO kekVersion = mock(KMSKekVersionVO.class); + when(kekVersion.getHsmProfileId()).thenReturn(testProfileId); + when(kmsKekVersionDao.findByKekLabel(testKekLabel)).thenReturn(kekVersion); + + // Test + Long result = 
provider.resolveProfileId(testKekLabel); + + // Verify + assertNotNull("Should return profile ID", result); + assertEquals("Should return correct profile ID", testProfileId, result); + verify(kmsKekVersionDao).findByKekLabel(testKekLabel); + } + + /** + * Test: resolveProfileId throws exception when KEK version not found + */ + @Test(expected = KMSException.class) + public void testResolveProfileId_ThrowsExceptionWhenVersionNotFound() throws KMSException { + // Setup: No KEK version found + when(kmsKekVersionDao.findByKekLabel(testKekLabel)).thenReturn(null); + + // Test - should throw exception + provider.resolveProfileId(testKekLabel); + } + + /** + * Test: resolveProfileId throws exception when profile ID is null + */ + @Test(expected = KMSException.class) + public void testResolveProfileId_ThrowsExceptionWhenProfileIdNull() throws KMSException { + // Setup: KEK version exists but has null profile ID + KMSKekVersionVO kekVersion = mock(KMSKekVersionVO.class); + when(kekVersion.getHsmProfileId()).thenReturn(null); + when(kmsKekVersionDao.findByKekLabel(testKekLabel)).thenReturn(kekVersion); + + // Test - should throw exception + provider.resolveProfileId(testKekLabel); + } + + /** + * Test: loadProfileConfig loads and decrypts sensitive values + */ + @Test + public void testLoadProfileConfig_DecryptsSensitiveValues() { + // Setup: Profile details with encrypted pin + HSMProfileDetailsVO detail1 = mock(HSMProfileDetailsVO.class); + when(detail1.getName()).thenReturn("library"); + when(detail1.getValue()).thenReturn("/path/to/lib.so"); + + HSMProfileDetailsVO detail2 = mock(HSMProfileDetailsVO.class); + when(detail2.getName()).thenReturn("pin"); + when(detail2.getValue()).thenReturn("ENC(encrypted_pin)"); + + HSMProfileDetailsVO detail3 = mock(HSMProfileDetailsVO.class); + when(detail3.getName()).thenReturn("slot"); + when(detail3.getValue()).thenReturn("0"); + + when(hsmProfileDetailsDao.listByProfileId(testProfileId)).thenReturn( + Arrays.asList(detail1, detail2, 
detail3)); + + // Test + Map config = provider.loadProfileConfig(testProfileId); + + // Verify + assertNotNull("Config should not be null", config); + assertEquals(3, config.size()); + assertEquals("/path/to/lib.so", config.get("library")); + // Note: In real code, DBEncryptionUtil.decrypt would be called + // Here we just verify the structure is correct + assertTrue("Config should contain pin", config.containsKey("pin")); + assertEquals("0", config.get("slot")); + + verify(hsmProfileDetailsDao).listByProfileId(testProfileId); + } + + /** + * Test: loadProfileConfig handles empty details + */ + @Test(expected = KMSException.class) + public void testLoadProfileConfig_HandlesEmptyDetails() { + // Setup + when(hsmProfileDetailsDao.listByProfileId(testProfileId)).thenReturn(Arrays.asList()); + + // Test + Map config = provider.loadProfileConfig(testProfileId); + } + + /** + * Test: isSensitiveKey correctly identifies sensitive keys + */ + @Test + public void testIsSensitiveKey_IdentifiesSensitiveKeys() { + // Test + assertTrue(provider.isSensitiveKey("pin")); + assertTrue(provider.isSensitiveKey("password")); + assertTrue(provider.isSensitiveKey("api_secret")); + assertTrue(provider.isSensitiveKey("private_key")); + assertTrue(provider.isSensitiveKey("PIN")); // Case-insensitive + } + + /** + * Test: isSensitiveKey correctly identifies non-sensitive keys + */ + @Test + public void testIsSensitiveKey_IdentifiesNonSensitiveKeys() { + // Test + assertFalse(provider.isSensitiveKey("library")); + assertFalse(provider.isSensitiveKey("slot_id")); + assertFalse(provider.isSensitiveKey("endpoint")); + assertFalse(provider.isSensitiveKey("max_sessions")); + } + + /** + * Test: generateKekLabel creates valid label + */ + @Test + public void testGenerateKekLabel_CreatesValidLabel() { + // Test + String label = provider.generateKekLabel(KeyPurpose.VOLUME_ENCRYPTION); + + // Verify + assertNotNull("Label should not be null", label); + assertTrue("Label should start with purpose", 
label.startsWith(KeyPurpose.VOLUME_ENCRYPTION.getName())); + assertTrue("Label should contain UUID", + label.length() > (KeyPurpose.VOLUME_ENCRYPTION.getName() + "-kek-").length()); + } + + /** + * Test: getProviderName returns correct name + */ + @Test + public void testGetProviderName() { + assertEquals("pkcs11", provider.getProviderName()); + } + + /** + * Test: createKek requires hsmProfileId + */ + @Test(expected = KMSException.class) + public void testCreateKek_RequiresProfileId() throws KMSException { + provider.createKek( + KeyPurpose.VOLUME_ENCRYPTION, + "test-label", + 256, + null // null profile ID should throw exception + ); + } + + /** + * Test: getSessionPool creates pool for new profile + */ + @Test + public void testGetSessionPool_CreatesPoolForNewProfile() { + // Setup + HSMProfileDetailsVO libraryDetail = mock(HSMProfileDetailsVO.class); + when(libraryDetail.getName()).thenReturn("library"); + when(libraryDetail.getValue()).thenReturn("/path/to/lib.so"); + HSMProfileDetailsVO slotDetail = mock(HSMProfileDetailsVO.class); + when(slotDetail.getName()).thenReturn("slot"); + when(slotDetail.getValue()).thenReturn("1"); + HSMProfileDetailsVO pinDetail = mock(HSMProfileDetailsVO.class); + when(pinDetail.getName()).thenReturn("pin"); + when(pinDetail.getValue()).thenReturn("1234"); + when(hsmProfileDetailsDao.listByProfileId(testProfileId)).thenReturn( + Arrays.asList(libraryDetail, slotDetail, pinDetail)); + + // Test + Object pool = provider.getSessionPool(testProfileId); + + // Verify + assertNotNull("Pool should be created", pool); + verify(hsmProfileDetailsDao).listByProfileId(testProfileId); + } + + /** + * Test: getSessionPool reuses pool for same profile + */ + @Test + public void testGetSessionPool_ReusesPoolForSameProfile() { + // Setup + HSMProfileDetailsVO libraryDetail = mock(HSMProfileDetailsVO.class); + when(libraryDetail.getName()).thenReturn("library"); + when(libraryDetail.getValue()).thenReturn("/path/to/lib.so"); + HSMProfileDetailsVO 
slotDetail = mock(HSMProfileDetailsVO.class); + when(slotDetail.getName()).thenReturn("slot"); + when(slotDetail.getValue()).thenReturn("1"); + HSMProfileDetailsVO pinDetail = mock(HSMProfileDetailsVO.class); + when(pinDetail.getName()).thenReturn("pin"); + when(pinDetail.getValue()).thenReturn("1234"); + when(hsmProfileDetailsDao.listByProfileId(testProfileId)).thenReturn( + Arrays.asList(libraryDetail, slotDetail, pinDetail)); + + // Test + Object pool1 = provider.getSessionPool(testProfileId); + Object pool2 = provider.getSessionPool(testProfileId); + + // Verify + assertNotNull("Pool should be created", pool1); + assertEquals("Should reuse same pool", pool1, pool2); + // Config should only be loaded once + verify(hsmProfileDetailsDao, times(1)).listByProfileId(testProfileId); + } +} diff --git a/plugins/kms/pom.xml b/plugins/kms/pom.xml new file mode 100644 index 000000000000..8436242447d9 --- /dev/null +++ b/plugins/kms/pom.xml @@ -0,0 +1,40 @@ + + + + 4.0.0 + cloudstack-kms-plugins + pom + Apache CloudStack Plugin - KMS + Key Management Service providers + + + org.apache.cloudstack + cloudstack-plugins + 4.23.0.0-SNAPSHOT + ../pom.xml + + + + database + pkcs11 + + diff --git a/plugins/pom.xml b/plugins/pom.xml index e7d13871285e..4b4aae9479c9 100755 --- a/plugins/pom.xml +++ b/plugins/pom.xml @@ -97,6 +97,8 @@ integrations/prometheus integrations/kubernetes-service + kms + metrics network-elements/bigswitch diff --git a/plugins/storage/sharedfs/storagevm/src/main/java/org/apache/cloudstack/storage/sharedfs/lifecycle/StorageVmSharedFSLifeCycle.java b/plugins/storage/sharedfs/storagevm/src/main/java/org/apache/cloudstack/storage/sharedfs/lifecycle/StorageVmSharedFSLifeCycle.java index ac8d6a58f0cb..f47a35ced44a 100644 --- a/plugins/storage/sharedfs/storagevm/src/main/java/org/apache/cloudstack/storage/sharedfs/lifecycle/StorageVmSharedFSLifeCycle.java +++ 
b/plugins/storage/sharedfs/storagevm/src/main/java/org/apache/cloudstack/storage/sharedfs/lifecycle/StorageVmSharedFSLifeCycle.java @@ -199,7 +199,7 @@ private UserVm deploySharedFSVM(Long zoneId, Account owner, List networkId diskOfferingId, size, null, null, Hypervisor.HypervisorType.None, BaseCmd.HTTPMethod.POST, base64UserData, null, null, keypairs, null, addrs, null, null, null, customParameterMap, null, null, null, null, - true, UserVmManager.SHAREDFSVM, null, null, null); + true, UserVmManager.SHAREDFSVM, null, null, null, null); vmContext.setEventResourceId(vm.getId()); userVmService.startVirtualMachine(vm, null); } catch (InsufficientCapacityException ex) { diff --git a/plugins/storage/sharedfs/storagevm/src/test/java/org/apache/cloudstack/storage/sharedfs/lifecycle/StorageVmSharedFSLifeCycleTest.java b/plugins/storage/sharedfs/storagevm/src/test/java/org/apache/cloudstack/storage/sharedfs/lifecycle/StorageVmSharedFSLifeCycleTest.java index c64e8c05c995..82d055b9a359 100644 --- a/plugins/storage/sharedfs/storagevm/src/test/java/org/apache/cloudstack/storage/sharedfs/lifecycle/StorageVmSharedFSLifeCycleTest.java +++ b/plugins/storage/sharedfs/storagevm/src/test/java/org/apache/cloudstack/storage/sharedfs/lifecycle/StorageVmSharedFSLifeCycleTest.java @@ -257,7 +257,7 @@ public void testDeploySharedFS() throws ResourceUnavailableException, Insufficie anyString(), anyLong(), anyLong(), any(), isNull(), any(Hypervisor.HypervisorType.class), any(BaseCmd.HTTPMethod.class), anyString(), isNull(), isNull(), anyList(), isNull(), any(Network.IpAddresses.class), isNull(), isNull(), isNull(), anyMap(), isNull(), isNull(), isNull(), isNull(), - anyBoolean(), anyString(), isNull(), isNull(), isNull())).thenReturn(vm); + anyBoolean(), anyString(), isNull(), isNull(), isNull(), isNull())).thenReturn(vm); VolumeVO rootVol = mock(VolumeVO.class); when(rootVol.getVolumeType()).thenReturn(Volume.Type.ROOT); diff --git 
a/plugins/storage/volume/default/src/main/java/org/apache/cloudstack/storage/datastore/driver/CloudStackPrimaryDataStoreDriverImpl.java b/plugins/storage/volume/default/src/main/java/org/apache/cloudstack/storage/datastore/driver/CloudStackPrimaryDataStoreDriverImpl.java index 5faa377ce3d3..a5e87870eab4 100644 --- a/plugins/storage/volume/default/src/main/java/org/apache/cloudstack/storage/datastore/driver/CloudStackPrimaryDataStoreDriverImpl.java +++ b/plugins/storage/volume/default/src/main/java/org/apache/cloudstack/storage/datastore/driver/CloudStackPrimaryDataStoreDriverImpl.java @@ -575,11 +575,25 @@ public void provideVmTags(long vmId, long volumeId, String tagValue) { */ private boolean anyVolumeRequiresEncryption(DataObject ... objects) { for (DataObject o : objects) { - // this fails code smell for returning true twice, but it is more readable than combining all tests into one statement - if (o instanceof VolumeInfo && ((VolumeInfo) o).getPassphraseId() != null) { - return true; - } else if (o instanceof SnapshotInfo && ((SnapshotInfo) o).getBaseVolume().getPassphraseId() != null) { - return true; + // Check for legacy passphrase-based encryption + if (o instanceof VolumeInfo) { + VolumeInfo vol = (VolumeInfo) o; + if (vol.getPassphraseId() != null) { + return true; + } + // Check for KMS-based encryption + if (vol.getKmsWrappedKeyId() != null || vol.getKmsKeyId() != null) { + return true; + } + } else if (o instanceof SnapshotInfo) { + VolumeInfo baseVol = ((SnapshotInfo) o).getBaseVolume(); + if (baseVol.getPassphraseId() != null) { + return true; + } + // Check for KMS-based encryption + if (baseVol.getKmsWrappedKeyId() != null || baseVol.getKmsKeyId() != null) { + return true; + } } } return false; diff --git a/server/pom.xml b/server/pom.xml index 2b35a0f42ac8..a44c3af0e73a 100644 --- a/server/pom.xml +++ b/server/pom.xml @@ -69,6 +69,11 @@ cloud-framework-ca ${project.version}
+ + org.apache.cloudstack + cloud-framework-kms + ${project.version} + org.apache.cloudstack cloud-framework-jobs diff --git a/server/src/main/java/com/cloud/api/ApiResponseHelper.java b/server/src/main/java/com/cloud/api/ApiResponseHelper.java index 655f5acb46e3..d62f80727242 100644 --- a/server/src/main/java/com/cloud/api/ApiResponseHelper.java +++ b/server/src/main/java/com/cloud/api/ApiResponseHelper.java @@ -208,6 +208,7 @@ import org.apache.cloudstack.framework.jobs.AsyncJob; import org.apache.cloudstack.framework.jobs.AsyncJobManager; import org.apache.cloudstack.gui.theme.GuiThemeJoin; +import org.apache.cloudstack.kms.dao.HSMProfileDao; import org.apache.cloudstack.management.ManagementServerHost; import org.apache.cloudstack.network.BgpPeerVO; import org.apache.cloudstack.network.RoutedIpv4Manager; @@ -519,6 +520,8 @@ public class ApiResponseHelper implements ResponseGenerator { private ASNumberRangeDao asNumberRangeDao; @Inject private ASNumberDao asNumberDao; + @Inject + private HSMProfileDao hsmProfileDao; @Inject ObjectStoreDao _objectStoreDao; diff --git a/server/src/main/java/com/cloud/api/query/QueryManagerImpl.java b/server/src/main/java/com/cloud/api/query/QueryManagerImpl.java index 0cec3a38075d..642b8b49ab52 100644 --- a/server/src/main/java/com/cloud/api/query/QueryManagerImpl.java +++ b/server/src/main/java/com/cloud/api/query/QueryManagerImpl.java @@ -2636,6 +2636,7 @@ private Pair, Integer> searchForVolumeIdsAndCount(ListVolumesCmd cmd) Long clusterId = cmd.getClusterId(); Long serviceOfferingId = cmd.getServiceOfferingId(); Long diskOfferingId = cmd.getDiskOfferingId(); + Long kmsKeyId = cmd.getKmsKeyId(); Boolean display = cmd.getDisplay(); String state = cmd.getState(); boolean shouldListSystemVms = shouldListSystemVms(cmd, caller.getId()); @@ -2672,6 +2673,7 @@ private Pair, Integer> searchForVolumeIdsAndCount(ListVolumesCmd cmd) volumeSearchBuilder.and("uuid", volumeSearchBuilder.entity().getUuid(), SearchCriteria.Op.NNULL); 
volumeSearchBuilder.and("instanceId", volumeSearchBuilder.entity().getInstanceId(), SearchCriteria.Op.EQ); volumeSearchBuilder.and("dataCenterId", volumeSearchBuilder.entity().getDataCenterId(), SearchCriteria.Op.EQ); + volumeSearchBuilder.and("kmsKeyId", volumeSearchBuilder.entity().getKmsKeyId(), SearchCriteria.Op.EQ); if (cmd.isEncrypted() != null) { if (cmd.isEncrypted()) { volumeSearchBuilder.and("encryptFormat", volumeSearchBuilder.entity().getEncryptFormat(), SearchCriteria.Op.NNULL); @@ -2796,6 +2798,9 @@ private Pair, Integer> searchForVolumeIdsAndCount(ListVolumesCmd cmd) if (vmInstanceId != null) { sc.setParameters("instanceId", vmInstanceId); } + if (kmsKeyId != null) { + sc.setParameters("kmsKeyId", kmsKeyId); + } if (zoneId != null) { sc.setParameters("dataCenterId", zoneId); } diff --git a/server/src/main/java/com/cloud/api/query/dao/VolumeJoinDaoImpl.java b/server/src/main/java/com/cloud/api/query/dao/VolumeJoinDaoImpl.java index 4f5d984c969a..6f8647422cd5 100644 --- a/server/src/main/java/com/cloud/api/query/dao/VolumeJoinDaoImpl.java +++ b/server/src/main/java/com/cloud/api/query/dao/VolumeJoinDaoImpl.java @@ -29,6 +29,11 @@ import org.apache.cloudstack.api.response.VolumeResponse; import org.apache.cloudstack.context.CallContext; import org.apache.cloudstack.framework.config.dao.ConfigurationDao; +import org.apache.cloudstack.kms.KMSKekVersionVO; +import org.apache.cloudstack.kms.KMSWrappedKeyVO; +import org.apache.cloudstack.kms.dao.KMSKekVersionDao; +import org.apache.cloudstack.kms.dao.KMSKeyDao; +import org.apache.cloudstack.kms.dao.KMSWrappedKeyDao; import org.apache.cloudstack.storage.datastore.db.PrimaryDataStoreDao; import org.apache.cloudstack.storage.datastore.db.StoragePoolVO; import org.springframework.stereotype.Component; @@ -58,6 +63,12 @@ public class VolumeJoinDaoImpl extends GenericDaoBaseWithTagInformation volSearch; @@ -284,6 +295,18 @@ public VolumeResponse newVolumeResponse(ResponseView view, VolumeJoinVO volume) 
volResponse.setObjectName("volume"); volResponse.setExternalUuid(volume.getExternalUuid()); volResponse.setEncryptionFormat(volume.getEncryptionFormat()); + volResponse.setKmsKeyId(volume.getKmsKeyUuid()); + volResponse.setKmsKey(volume.getKmsKeyName()); + + if (volume.getKmsWrappedKeyId() != null) { + KMSWrappedKeyVO wrappedKey = kmsWrappedKeyDao.findById(volume.getKmsWrappedKeyId()); + if (wrappedKey != null) { + KMSKekVersionVO kekVersion = kmsKekVersionDao.findById(wrappedKey.getKekVersionId()); + if (kekVersion != null) { + volResponse.setKmsKeyVersion(kekVersion.getVersionNumber()); + } + } + } return volResponse; } diff --git a/server/src/main/java/com/cloud/api/query/vo/VolumeJoinVO.java b/server/src/main/java/com/cloud/api/query/vo/VolumeJoinVO.java index 2ae720fa8524..a29e926dc492 100644 --- a/server/src/main/java/com/cloud/api/query/vo/VolumeJoinVO.java +++ b/server/src/main/java/com/cloud/api/query/vo/VolumeJoinVO.java @@ -280,6 +280,18 @@ public class VolumeJoinVO extends BaseViewWithTagInformationVO implements Contro @Column(name = "encrypt_format") private String encryptionFormat = null; + @Column(name = "kms_key_id") + private Long kmsKeyId; + + @Column(name = "kms_key_uuid") + private String kmsKeyUuid; + + @Column(name = "kms_key_name") + private String kmsKeyName; + + @Column(name = "kms_wrapped_key_id") + private Long kmsWrappedKeyId; + @Column(name = "delete_protection") protected Boolean deleteProtection; @@ -622,6 +634,22 @@ public String getEncryptionFormat() { return encryptionFormat; } + public Long getKmsKeyId() { + return kmsKeyId; + } + + public String getKmsKeyName() { + return kmsKeyName; + } + + public String getKmsKeyUuid() { + return kmsKeyUuid; + } + + public Long getKmsWrappedKeyId() { + return kmsWrappedKeyId; + } + public Boolean getDeleteProtection() { return deleteProtection; } diff --git a/server/src/main/java/com/cloud/network/as/AutoScaleManagerImpl.java b/server/src/main/java/com/cloud/network/as/AutoScaleManagerImpl.java 
index 4e07611ff716..9eade583da7b 100644 --- a/server/src/main/java/com/cloud/network/as/AutoScaleManagerImpl.java +++ b/server/src/main/java/com/cloud/network/as/AutoScaleManagerImpl.java @@ -1837,7 +1837,7 @@ protected UserVm createNewVM(AutoScaleVmGroupVO asGroup) { vmDisplayName, diskOfferingId, dataDiskSize, null, null, hypervisorType, HTTPMethod.GET, userData, userDataId, userDataDetails, sshKeyPairs, null, null, true, null, affinityGroupIdList, customParameters, null, null, null, - null, true, overrideDiskOfferingId, null, null); + null, true, overrideDiskOfferingId, null, null, null); } else { if (networkModel.checkSecurityGroupSupportForNetwork(owner, zone, networkIds, Collections.emptyList())) { @@ -1845,13 +1845,13 @@ protected UserVm createNewVM(AutoScaleVmGroupVO asGroup) { owner, vmHostName, vmDisplayName, diskOfferingId, dataDiskSize, null, null, hypervisorType, HTTPMethod.GET, userData, userDataId, userDataDetails, sshKeyPairs, null, null, true, null, affinityGroupIdList, customParameters, null, null, null, - null, true, overrideDiskOfferingId, null, null, null); + null, true, overrideDiskOfferingId, null, null, null, null); } else { vm = userVmService.createAdvancedVirtualMachine(zone, serviceOffering, template, networkIds, owner, vmHostName, vmDisplayName, diskOfferingId, dataDiskSize, null, null, hypervisorType, HTTPMethod.GET, userData, userDataId, userDataDetails, sshKeyPairs, null, addrs, true, null, affinityGroupIdList, customParameters, null, null, null, - null, true, null, overrideDiskOfferingId, null, null); + null, true, null, overrideDiskOfferingId, null, null, null); } } diff --git a/server/src/main/java/com/cloud/storage/VolumeApiServiceImpl.java b/server/src/main/java/com/cloud/storage/VolumeApiServiceImpl.java index 17961dbd955f..1778c24a1930 100644 --- a/server/src/main/java/com/cloud/storage/VolumeApiServiceImpl.java +++ b/server/src/main/java/com/cloud/storage/VolumeApiServiceImpl.java @@ -98,6 +98,7 @@ import 
org.apache.cloudstack.resourcedetail.SnapshotPolicyDetailVO; import org.apache.cloudstack.resourcedetail.dao.DiskOfferingDetailsDao; import org.apache.cloudstack.resourcedetail.dao.SnapshotPolicyDetailsDao; +import org.apache.cloudstack.kms.KMSManager; import org.apache.cloudstack.snapshot.SnapshotHelper; import org.apache.cloudstack.storage.command.AttachAnswer; import org.apache.cloudstack.storage.command.AttachCommand; @@ -370,6 +371,8 @@ public class VolumeApiServiceImpl extends ManagerBase implements VolumeApiServic @Inject private VMSnapshotDetailsDao vmSnapshotDetailsDao; + @Inject + private KMSManager kmsManager; public static final String KVM_FILE_BASED_STORAGE_SNAPSHOT = "kvmFileBasedStorageSnapshot"; @@ -962,8 +965,12 @@ public VolumeVO allocVolume(CreateVolumeCmd cmd) throws ResourceAllocationExcept String userSpecifiedName = getVolumeNameFromCommand(cmd); + if (cmd.getKmsKeyId() != null) { + kmsManager.checkKmsKeyForVolumeEncryption(caller, cmd.getKmsKeyId(), zoneId); + } + return commitVolume(cmd.getSnapshotId(), caller, owner, displayVolume, zoneId, diskOfferingId, provisioningType, size, minIops, maxIops, parentVolume, userSpecifiedName, - _uuidMgr.generateUuid(Volume.class, cmd.getCustomId()), details); + _uuidMgr.generateUuid(Volume.class, cmd.getCustomId()), details, cmd.getKmsKeyId()); } @Override @@ -977,7 +984,7 @@ public void validateCustomDiskOfferingSizeRange(Long sizeInGB) { } private VolumeVO commitVolume(final Long snapshotId, final Account caller, final Account owner, final Boolean displayVolume, final Long zoneId, final Long diskOfferingId, - final Storage.ProvisioningType provisioningType, final Long size, final Long minIops, final Long maxIops, final VolumeVO parentVolume, final String userSpecifiedName, final String uuid, final Map details) { + final Storage.ProvisioningType provisioningType, final Long size, final Long minIops, final Long maxIops, final VolumeVO parentVolume, final String userSpecifiedName, final String uuid, final 
Map details, final Long kmsKeyId) { return Transaction.execute(new TransactionCallback() { @Override public VolumeVO doInTransaction(TransactionStatus status) { @@ -1023,6 +1030,12 @@ public VolumeVO doInTransaction(TransactionStatus status) { } } + // Store KMS key ID if provided (for volume encryption) + if (volume != null && kmsKeyId != null) { + volume.setKmsKeyId(kmsKeyId); + _volsDao.update(volume.getId(), volume); + } + CallContext.current().setEventDetails("Volume ID: " + volume.getUuid()); CallContext.current().putContextParameter(Volume.class, volume.getId()); // Increment resource count during allocation; if actual creation fails, @@ -2679,7 +2692,7 @@ public Volume attachVolumeToVM(Long vmId, Long volumeId, Long deviceId, Boolean } DiskOfferingVO diskOffering = _diskOfferingDao.findById(volumeToAttach.getDiskOfferingId()); - if (diskOffering.getEncrypt() && rootDiskHyperType != HypervisorType.KVM) { + if (diskOffering.getEncrypt() && !(rootDiskHyperType == HypervisorType.KVM)) { throw new InvalidParameterValueException("Volume's disk offering has encryption enabled, but volume encryption is not supported for hypervisor type " + rootDiskHyperType); } diff --git a/server/src/main/java/com/cloud/user/AccountManagerImpl.java b/server/src/main/java/com/cloud/user/AccountManagerImpl.java index 09ef9fe8bec9..6a4cb8016a85 100644 --- a/server/src/main/java/com/cloud/user/AccountManagerImpl.java +++ b/server/src/main/java/com/cloud/user/AccountManagerImpl.java @@ -77,6 +77,7 @@ import org.apache.cloudstack.framework.config.dao.ConfigurationDao; import org.apache.cloudstack.framework.messagebus.MessageBus; import org.apache.cloudstack.framework.messagebus.PublishScope; +import org.apache.cloudstack.kms.KMSManager; import org.apache.cloudstack.managed.context.ManagedContextRunnable; import org.apache.cloudstack.network.RoutedIpv4Manager; import org.apache.cloudstack.network.dao.NetworkPermissionDao; @@ -315,6 +316,8 @@ public class AccountManagerImpl extends 
ManagerBase implements AccountManager, M private NetworkPermissionDao networkPermissionDao; @Inject private SslCertDao sslCertDao; + @Inject + private KMSManager kmsManager; private List _querySelectors; @@ -1204,6 +1207,17 @@ public int compare(NetworkVO network1, NetworkVO network2) { // Delete Webhooks deleteWebhooksForAccount(accountId); + // Delete KMS keys + try { + if (!kmsManager.deleteKMSKeysByAccountId(accountId)) { + logger.warn("Failed to delete all KMS keys for account {}", account); + accountCleanupNeeded = true; + } + } catch (Exception e) { + logger.error("Error deleting KMS keys for account {}: {}", account, e.getMessage(), e); + accountCleanupNeeded = true; + } + return true; } catch (Exception ex) { logger.warn("Failed to cleanup account " + account + " due to ", ex); diff --git a/server/src/main/java/com/cloud/vm/UserVmManagerImpl.java b/server/src/main/java/com/cloud/vm/UserVmManagerImpl.java index 36f1f7a2f126..5caa5d07b325 100644 --- a/server/src/main/java/com/cloud/vm/UserVmManagerImpl.java +++ b/server/src/main/java/com/cloud/vm/UserVmManagerImpl.java @@ -110,6 +110,7 @@ import org.apache.cloudstack.backup.dao.BackupDao; import org.apache.cloudstack.backup.dao.BackupScheduleDao; import org.apache.cloudstack.context.CallContext; +import org.apache.cloudstack.kms.KMSManager; import org.apache.cloudstack.engine.cloud.entity.api.VirtualMachineEntity; import org.apache.cloudstack.engine.cloud.entity.api.db.dao.VMNetworkMapDao; import org.apache.cloudstack.engine.orchestration.service.NetworkOrchestrationService; @@ -473,6 +474,8 @@ public class UserVmManagerImpl extends ManagerBase implements UserVmManager, Vir @Inject private AccountManager _accountMgr; @Inject + private KMSManager kmsManager; + @Inject private AccountService _accountService; @Inject private ClusterDao _clusterDao; @@ -3772,7 +3775,7 @@ public UserVm createBasicSecurityGroupVirtualMachine(DataCenter zone, ServiceOff Account owner, String hostName, String displayName, Long 
diskOfferingId, Long diskSize, List dataDiskInfoList, String group, HypervisorType hypervisor, HTTPMethod httpmethod, String userData, Long userDataId, String userDataDetails, List sshKeyPairs, Map requestedIps, IpAddresses defaultIps, Boolean displayVm, String keyboard, List affinityGroupIdList, Map customParametes, String customId, Map> dhcpOptionMap, - Map dataDiskTemplateToDiskOfferingMap, Map userVmOVFProperties, boolean dynamicScalingEnabled, Long overrideDiskOfferingId, Volume volume, Snapshot snapshot) throws InsufficientCapacityException, ConcurrentOperationException, ResourceUnavailableException, + Map dataDiskTemplateToDiskOfferingMap, Map userVmOVFProperties, boolean dynamicScalingEnabled, Long overrideDiskOfferingId, Long rootDiskKmsKeyId, Volume volume, Snapshot snapshot) throws InsufficientCapacityException, ConcurrentOperationException, ResourceUnavailableException, StorageUnavailableException, ResourceAllocationException { Account caller = CallContext.current().getCallingAccount(); @@ -3821,7 +3824,7 @@ public UserVm createBasicSecurityGroupVirtualMachine(DataCenter zone, ServiceOff return createVirtualMachine(zone, serviceOffering, template, hostName, displayName, owner, diskOfferingId, diskSize, dataDiskInfoList, networkList, securityGroupIdList, group, httpmethod, userData, userDataId, userDataDetails, sshKeyPairs, hypervisor, caller, requestedIps, defaultIps, displayVm, keyboard, affinityGroupIdList, customParametes, customId, dhcpOptionMap, - dataDiskTemplateToDiskOfferingMap, userVmOVFProperties, dynamicScalingEnabled, null, overrideDiskOfferingId, volume, snapshot); + dataDiskTemplateToDiskOfferingMap, userVmOVFProperties, dynamicScalingEnabled, null, overrideDiskOfferingId, rootDiskKmsKeyId, volume, snapshot); } @@ -3831,7 +3834,7 @@ public UserVm createAdvancedSecurityGroupVirtualMachine(DataCenter zone, Service List securityGroupIdList, Account owner, String hostName, String displayName, Long diskOfferingId, Long diskSize, List 
dataDiskInfoList, String group, HypervisorType hypervisor, HTTPMethod httpmethod, String userData, Long userDataId, String userDataDetails, List sshKeyPairs, Map requestedIps, IpAddresses defaultIps, Boolean displayVm, String keyboard, List affinityGroupIdList, Map customParameters, String customId, Map> dhcpOptionMap, - Map dataDiskTemplateToDiskOfferingMap, Map userVmOVFProperties, boolean dynamicScalingEnabled, Long overrideDiskOfferingId, String vmType, Volume volume, Snapshot snapshot) throws InsufficientCapacityException, ConcurrentOperationException, ResourceUnavailableException, StorageUnavailableException, ResourceAllocationException { + Map dataDiskTemplateToDiskOfferingMap, Map userVmOVFProperties, boolean dynamicScalingEnabled, Long overrideDiskOfferingId, Long rootDiskKmsKeyId, String vmType, Volume volume, Snapshot snapshot) throws InsufficientCapacityException, ConcurrentOperationException, ResourceUnavailableException, StorageUnavailableException, ResourceAllocationException { Account caller = CallContext.current().getCallingAccount(); List networkList = new ArrayList<>(); @@ -3934,7 +3937,7 @@ public UserVm createAdvancedSecurityGroupVirtualMachine(DataCenter zone, Service return createVirtualMachine(zone, serviceOffering, template, hostName, displayName, owner, diskOfferingId, diskSize, dataDiskInfoList, networkList, securityGroupIdList, group, httpmethod, userData, userDataId, userDataDetails, sshKeyPairs, hypervisor, caller, requestedIps, defaultIps, displayVm, keyboard, affinityGroupIdList, customParameters, customId, dhcpOptionMap, dataDiskTemplateToDiskOfferingMap, - userVmOVFProperties, dynamicScalingEnabled, vmType, overrideDiskOfferingId, volume, snapshot); + userVmOVFProperties, dynamicScalingEnabled, vmType, overrideDiskOfferingId, rootDiskKmsKeyId, volume, snapshot); } @Override @@ -3943,7 +3946,7 @@ public UserVm createAdvancedVirtualMachine(DataCenter zone, ServiceOffering serv String hostName, String displayName, Long diskOfferingId, 
Long diskSize, List dataDiskInfoList, String group, HypervisorType hypervisor, HTTPMethod httpmethod, String userData, Long userDataId, String userDataDetails, List sshKeyPairs, Map requestedIps, IpAddresses defaultIps, Boolean displayvm, String keyboard, List affinityGroupIdList, Map customParametrs, String customId, Map> dhcpOptionsMap, Map dataDiskTemplateToDiskOfferingMap, - Map userVmOVFPropertiesMap, boolean dynamicScalingEnabled, String vmType, Long overrideDiskOfferingId, Volume volume, Snapshot snapshot) throws InsufficientCapacityException, ConcurrentOperationException, ResourceUnavailableException, + Map userVmOVFPropertiesMap, boolean dynamicScalingEnabled, String vmType, Long overrideDiskOfferingId, Long rootDiskKmsKeyId, Volume volume, Snapshot snapshot) throws InsufficientCapacityException, ConcurrentOperationException, ResourceUnavailableException, StorageUnavailableException, ResourceAllocationException { Account caller = CallContext.current().getCallingAccount(); @@ -3996,7 +3999,7 @@ public UserVm createAdvancedVirtualMachine(DataCenter zone, ServiceOffering serv verifyExtraDhcpOptionsNetwork(dhcpOptionsMap, networkList); return createVirtualMachine(zone, serviceOffering, template, hostName, displayName, owner, diskOfferingId, diskSize, dataDiskInfoList, networkList, null, group, httpmethod, userData, userDataId, userDataDetails, sshKeyPairs, hypervisor, caller, requestedIps, defaultIps, displayvm, keyboard, affinityGroupIdList, customParametrs, customId, dhcpOptionsMap, - dataDiskTemplateToDiskOfferingMap, userVmOVFPropertiesMap, dynamicScalingEnabled, vmType, overrideDiskOfferingId, volume, snapshot); + dataDiskTemplateToDiskOfferingMap, userVmOVFPropertiesMap, dynamicScalingEnabled, vmType, overrideDiskOfferingId, rootDiskKmsKeyId, volume, snapshot); } @Override @@ -4128,7 +4131,7 @@ private UserVm createVirtualMachine(DataCenter zone, ServiceOffering serviceOffe Long userDataId, String userDataDetails, List sshKeyPairs, HypervisorType 
hypervisor, Account caller, Map requestedIps, IpAddresses defaultIps, Boolean isDisplayVm, String keyboard, List affinityGroupIdList, Map customParameters, String customId, Map> dhcpOptionMap, Map datadiskTemplateToDiskOfferringMap, - Map userVmOVFPropertiesMap, boolean dynamicScalingEnabled, String vmType, Long overrideDiskOfferingId, Volume volume, Snapshot snapshot) throws InsufficientCapacityException, ResourceUnavailableException, + Map userVmOVFPropertiesMap, boolean dynamicScalingEnabled, String vmType, Long overrideDiskOfferingId, Long rootDiskKmsKeyId, Volume volume, Snapshot snapshot) throws InsufficientCapacityException, ResourceUnavailableException, ConcurrentOperationException, StorageUnavailableException, ResourceAllocationException { _accountMgr.checkAccess(caller, null, true, owner); @@ -4215,7 +4218,14 @@ private UserVm createVirtualMachine(DataCenter zone, ServiceOffering serviceOffe throw new InvalidParameterValueException("Root volume encryption is not supported for hypervisor type " + hypervisorType); } - UserVm vm = getCheckedUserVmResource(zone, hostName, displayName, owner, diskOfferingId, diskSize, dataDiskInfoList, networkList, securityGroupIdList, group, httpmethod, userData, userDataId, userDataDetails, sshKeyPairs, caller, requestedIps, defaultIps, isDisplayVm, keyboard, affinityGroupIdList, customParameters, customId, dhcpOptionMap, datadiskTemplateToDiskOfferringMap, userVmOVFPropertiesMap, dynamicScalingEnabled, vmType, template, hypervisorType, accountId, offering, isIso, rootDiskOfferingId, volumesSize, volume, snapshot); + kmsManager.checkKmsKeyForVolumeEncryption(caller, rootDiskKmsKeyId, zone.getId()); + if (dataDiskInfoList != null) { + for (VmDiskInfo diskInfo : dataDiskInfoList) { + kmsManager.checkKmsKeyForVolumeEncryption(caller, diskInfo.getKmsKeyId(), zone.getId()); + } + } + + UserVm vm = getCheckedUserVmResource(zone, hostName, displayName, owner, diskOfferingId, diskSize, dataDiskInfoList, networkList, 
securityGroupIdList, group, httpmethod, userData, userDataId, userDataDetails, sshKeyPairs, caller, requestedIps, defaultIps, isDisplayVm, keyboard, affinityGroupIdList, customParameters, customId, dhcpOptionMap, datadiskTemplateToDiskOfferringMap, userVmOVFPropertiesMap, dynamicScalingEnabled, vmType, template, hypervisorType, accountId, offering, isIso, rootDiskOfferingId, rootDiskKmsKeyId, volumesSize, volume, snapshot); _securityGroupMgr.addInstanceToGroups(vm, securityGroupIdList); @@ -4235,7 +4245,7 @@ private UserVm getCheckedUserVmResource(DataCenter zone, String hostName, String Map> dhcpOptionMap, Map datadiskTemplateToDiskOfferringMap, Map userVmOVFPropertiesMap, boolean dynamicScalingEnabled, String vmType, VMTemplateVO template, HypervisorType hypervisorType, long accountId, ServiceOfferingVO offering, boolean isIso, - Long rootDiskOfferingId, long volumesSize, Volume volume, Snapshot snapshot) throws ResourceAllocationException { + Long rootDiskOfferingId, Long rootDiskKmsKeyId, long volumesSize, Volume volume, Snapshot snapshot) throws ResourceAllocationException { if (!VirtualMachineManager.ResourceCountRunningVMsonly.value()) { List resourceLimitHostTags = resourceLimitService.getResourceLimitHostTags(offering, template); try (CheckedReservation vmReservation = new CheckedReservation(owner, ResourceType.user_vm, resourceLimitHostTags, 1l, reservationDao, resourceLimitService); @@ -4244,7 +4254,7 @@ private UserVm getCheckedUserVmResource(DataCenter zone, String hostName, String CheckedReservation gpuReservation = offering.getGpuCount() != null && offering.getGpuCount() > 0 ? 
new CheckedReservation(owner, ResourceType.gpu, resourceLimitHostTags, Long.valueOf(offering.getGpuCount()), reservationDao, resourceLimitService) : null; ) { - return getUncheckedUserVmResource(zone, hostName, displayName, owner, diskOfferingId, diskSize, dataDiskInfoList, networkList, securityGroupIdList, group, httpmethod, userData, userDataId, userDataDetails, sshKeyPairs, caller, requestedIps, defaultIps, isDisplayVm, keyboard, affinityGroupIdList, customParameters, customId, dhcpOptionMap, datadiskTemplateToDiskOfferringMap, userVmOVFPropertiesMap, dynamicScalingEnabled, vmType, template, hypervisorType, accountId, offering, isIso, rootDiskOfferingId, volumesSize, volume, snapshot); + return getUncheckedUserVmResource(zone, hostName, displayName, owner, diskOfferingId, diskSize, dataDiskInfoList, networkList, securityGroupIdList, group, httpmethod, userData, userDataId, userDataDetails, sshKeyPairs, caller, requestedIps, defaultIps, isDisplayVm, keyboard, affinityGroupIdList, customParameters, customId, dhcpOptionMap, datadiskTemplateToDiskOfferringMap, userVmOVFPropertiesMap, dynamicScalingEnabled, vmType, template, hypervisorType, accountId, offering, isIso, rootDiskOfferingId, rootDiskKmsKeyId, volumesSize, volume, snapshot); } catch (ResourceAllocationException | CloudRuntimeException e) { throw e; } catch (Exception e) { @@ -4253,7 +4263,7 @@ private UserVm getCheckedUserVmResource(DataCenter zone, String hostName, String } } else { - return getUncheckedUserVmResource(zone, hostName, displayName, owner, diskOfferingId, diskSize, dataDiskInfoList, networkList, securityGroupIdList, group, httpmethod, userData, userDataId, userDataDetails, sshKeyPairs, caller, requestedIps, defaultIps, isDisplayVm, keyboard, affinityGroupIdList, customParameters, customId, dhcpOptionMap, datadiskTemplateToDiskOfferringMap, userVmOVFPropertiesMap, dynamicScalingEnabled, vmType, template, hypervisorType, accountId, offering, isIso, rootDiskOfferingId, volumesSize, volume, 
snapshot); + return getUncheckedUserVmResource(zone, hostName, displayName, owner, diskOfferingId, diskSize, dataDiskInfoList, networkList, securityGroupIdList, group, httpmethod, userData, userDataId, userDataDetails, sshKeyPairs, caller, requestedIps, defaultIps, isDisplayVm, keyboard, affinityGroupIdList, customParameters, customId, dhcpOptionMap, datadiskTemplateToDiskOfferringMap, userVmOVFPropertiesMap, dynamicScalingEnabled, vmType, template, hypervisorType, accountId, offering, isIso, rootDiskOfferingId, rootDiskKmsKeyId, volumesSize, volume, snapshot); } } @@ -4304,7 +4314,7 @@ private UserVm getUncheckedUserVmResource(DataCenter zone, String hostName, Stri Map> dhcpOptionMap, Map datadiskTemplateToDiskOfferringMap, Map userVmOVFPropertiesMap, boolean dynamicScalingEnabled, String vmType, VMTemplateVO template, HypervisorType hypervisorType, long accountId, ServiceOfferingVO offering, boolean isIso, - Long rootDiskOfferingId, long volumesSize, Volume volume, Snapshot snapshot) throws ResourceAllocationException { + Long rootDiskOfferingId, Long rootDiskKmsKeyId, long volumesSize, Volume volume, Snapshot snapshot) throws ResourceAllocationException { List checkedReservations = new ArrayList<>(); try { @@ -4590,7 +4600,7 @@ private UserVm getUncheckedUserVmResource(DataCenter zone, String hostName, Stri UserVmVO vm = commitUserVm(zone, template, hostName, displayName, owner, diskOfferingId, diskSize, userData, userDataId, userDataDetails, caller, isDisplayVm, keyboard, accountId, userId, offering, isIso, sshPublicKeys, networkNicMap, id, instanceName, uuidName, hypervisorType, customParameters, dhcpOptionMap, - datadiskTemplateToDiskOfferringMap, userVmOVFPropertiesMap, dynamicScalingEnabled, vmType, rootDiskOfferingId, keypairnames, dataDiskInfoList, volume, snapshot); + datadiskTemplateToDiskOfferringMap, userVmOVFPropertiesMap, dynamicScalingEnabled, vmType, rootDiskOfferingId, rootDiskKmsKeyId, keypairnames, dataDiskInfoList, volume, snapshot); 
assignInstanceToGroup(group, id); return vm; @@ -4792,7 +4802,7 @@ private UserVmVO commitUserVm(final boolean isImport, final DataCenter zone, fin final long accountId, final long userId, final ServiceOffering offering, final boolean isIso, final String sshPublicKeys, final LinkedHashMap> networkNicMap, final long id, final String instanceName, final String uuidName, final HypervisorType hypervisorType, final Map customParameters, final Map> extraDhcpOptionMap, final Map dataDiskTemplateToDiskOfferingMap, - final Map userVmOVFPropertiesMap, final VirtualMachine.PowerState powerState, final boolean dynamicScalingEnabled, String vmType, final Long rootDiskOfferingId, String sshkeypairs, + final Map userVmOVFPropertiesMap, final VirtualMachine.PowerState powerState, final boolean dynamicScalingEnabled, String vmType, final Long rootDiskOfferingId, final Long rootDiskKmsKeyId, String sshkeypairs, List dataDiskInfoList, Volume volume, Snapshot snapshot) throws InsufficientCapacityException { UserVmVO vm = new UserVmVO(id, instanceName, displayName, template.getId(), hypervisorType, template.getGuestOSId(), offering.isOfferHA(), offering.getLimitCpuUse(), owner.getDomainId(), owner.getId(), userId, offering.getId(), userData, userDataId, userDataDetails, hostName); @@ -4911,7 +4921,7 @@ private UserVmVO commitUserVm(final boolean isImport, final DataCenter zone, fin orchestrateVirtualMachineCreate(vm, guestOSCategory, computeTags, rootDiskTags, plan, rootDiskSize, template, hostName, displayName, owner, diskOfferingId, diskSize, offering, isIso,networkNicMap, hypervisorType, extraDhcpOptionMap, dataDiskTemplateToDiskOfferingMap, - rootDiskOfferingId, dataDiskInfoList, volume, snapshot); + rootDiskOfferingId, rootDiskKmsKeyId, dataDiskInfoList, volume, snapshot); } CallContext.current().setEventDetails("Vm Id: " + vm.getUuid()); @@ -4944,16 +4954,16 @@ private void orchestrateVirtualMachineCreate(UserVmVO vm, GuestOSCategoryVO gues ServiceOffering offering, boolean 
isIso, LinkedHashMap> networkNicMap, HypervisorType hypervisorType, Map> extraDhcpOptionMap, Map dataDiskTemplateToDiskOfferingMap, - Long rootDiskOfferingId, List dataDiskInfoList, Volume volume, Snapshot snapshot) throws InsufficientCapacityException{ + Long rootDiskOfferingId, Long rootDiskKmsKeyId, List dataDiskInfoList, Volume volume, Snapshot snapshot) throws InsufficientCapacityException{ try { if (isIso) { _orchSrvc.createVirtualMachineFromScratch(vm.getUuid(), Long.toString(owner.getAccountId()), vm.getIsoId().toString(), hostName, displayName, hypervisorType.name(), guestOSCategory.getName(), offering.getCpu(), offering.getSpeed(), offering.getRamSize(), diskSize, computeTags, rootDiskTags, - networkNicMap, plan, extraDhcpOptionMap, rootDiskOfferingId, dataDiskInfoList, volume, snapshot); + networkNicMap, plan, extraDhcpOptionMap, rootDiskOfferingId, rootDiskKmsKeyId, dataDiskInfoList, volume, snapshot); } else { _orchSrvc.createVirtualMachine(vm.getUuid(), Long.toString(owner.getAccountId()), Long.toString(template.getId()), hostName, displayName, hypervisorType.name(), offering.getCpu(), offering.getSpeed(), offering.getRamSize(), diskSize, computeTags, rootDiskTags, networkNicMap, plan, rootDiskSize, extraDhcpOptionMap, - dataDiskTemplateToDiskOfferingMap, diskOfferingId, rootDiskOfferingId, dataDiskInfoList, volume, snapshot); + dataDiskTemplateToDiskOfferingMap, diskOfferingId, rootDiskOfferingId, rootDiskKmsKeyId, dataDiskInfoList, volume, snapshot); } if (logger.isDebugEnabled()) { @@ -5075,14 +5085,14 @@ private UserVmVO commitUserVm(final DataCenter zone, final VirtualMachineTemplat final long accountId, final long userId, final ServiceOfferingVO offering, final boolean isIso, final String sshPublicKeys, final LinkedHashMap> networkNicMap, final long id, final String instanceName, final String uuidName, final HypervisorType hypervisorType, final Map customParameters, final Map> extraDhcpOptionMap, final Map dataDiskTemplateToDiskOfferingMap, - 
Map userVmOVFPropertiesMap, final boolean dynamicScalingEnabled, String vmType, final Long rootDiskOfferingId, String sshkeypairs, + Map userVmOVFPropertiesMap, final boolean dynamicScalingEnabled, String vmType, final Long rootDiskOfferingId, final Long rootDiskKmsKeyId, String sshkeypairs, List dataDiskInfoList, Volume volume, Snapshot snapshot) throws InsufficientCapacityException { return commitUserVm(false, zone, null, null, template, hostName, displayName, owner, diskOfferingId, diskSize, userData, userDataId, userDataDetails, isDisplayVm, keyboard, accountId, userId, offering, isIso, sshPublicKeys, networkNicMap, id, instanceName, uuidName, hypervisorType, customParameters, extraDhcpOptionMap, dataDiskTemplateToDiskOfferingMap, - userVmOVFPropertiesMap, null, dynamicScalingEnabled, vmType, rootDiskOfferingId, sshkeypairs, dataDiskInfoList, volume, snapshot); + userVmOVFPropertiesMap, null, dynamicScalingEnabled, vmType, rootDiskOfferingId, rootDiskKmsKeyId, sshkeypairs, dataDiskInfoList, volume, snapshot); } public void validateRootDiskResize(final HypervisorType hypervisorType, Long rootDiskSize, VMTemplateVO templateVO, UserVmVO vm, final Map customParameters) throws InvalidParameterValueException @@ -6471,7 +6481,7 @@ private UserVm createVirtualMachine(BaseDeployVMCmd cmd, DataCenter zone, Accoun vm = createBasicSecurityGroupVirtualMachine(zone, serviceOffering, template, getSecurityGroupIdList(cmd, zone, template, owner), owner, name, displayName, diskOfferingId, size , dataDiskInfoList, group , hypervisor, cmd.getHttpMethod(), userData, userDataId, userDataDetails, sshKeyPairNames, ipToNetworkMap, addrs, displayVm , keyboard , cmd.getAffinityGroupIdList(), cmd.getDetails(), cmd.getCustomId(), cmd.getDhcpOptionsMap(), - dataDiskTemplateToDiskOfferingMap, userVmOVFProperties, dynamicScalingEnabled, overrideDiskOfferingId, volume, snapshot); + dataDiskTemplateToDiskOfferingMap, userVmOVFProperties, dynamicScalingEnabled, overrideDiskOfferingId, 
cmd.getRootDiskKmsKeyId(), volume, snapshot); } } else { if (_networkModel.checkSecurityGroupSupportForNetwork(owner, zone, networkIds, @@ -6479,7 +6489,7 @@ private UserVm createVirtualMachine(BaseDeployVMCmd cmd, DataCenter zone, Accoun vm = createAdvancedSecurityGroupVirtualMachine(zone, serviceOffering, template, networkIds, getSecurityGroupIdList(cmd, zone, template, owner), owner, name, displayName, diskOfferingId, size, dataDiskInfoList, group, hypervisor, cmd.getHttpMethod(), userData, userDataId, userDataDetails, sshKeyPairNames, ipToNetworkMap, addrs, displayVm, keyboard, cmd.getAffinityGroupIdList(), cmd.getDetails(), cmd.getCustomId(), cmd.getDhcpOptionsMap(), - dataDiskTemplateToDiskOfferingMap, userVmOVFProperties, dynamicScalingEnabled, overrideDiskOfferingId, null, volume, snapshot); + dataDiskTemplateToDiskOfferingMap, userVmOVFProperties, dynamicScalingEnabled, overrideDiskOfferingId, cmd.getRootDiskKmsKeyId(), null, volume, snapshot); } else { if (cmd.getSecurityGroupIdList() != null && !cmd.getSecurityGroupIdList().isEmpty()) { @@ -6487,7 +6497,7 @@ private UserVm createVirtualMachine(BaseDeployVMCmd cmd, DataCenter zone, Accoun } vm = createAdvancedVirtualMachine(zone, serviceOffering, template, networkIds, owner, name, displayName, diskOfferingId, size, dataDiskInfoList, group, hypervisor, cmd.getHttpMethod(), userData, userDataId, userDataDetails, sshKeyPairNames, ipToNetworkMap, addrs, displayVm, keyboard, cmd.getAffinityGroupIdList(), cmd.getDetails(), - cmd.getCustomId(), cmd.getDhcpOptionsMap(), dataDiskTemplateToDiskOfferingMap, userVmOVFProperties, dynamicScalingEnabled, null, overrideDiskOfferingId, volume, snapshot); + cmd.getCustomId(), cmd.getDhcpOptionsMap(), dataDiskTemplateToDiskOfferingMap, userVmOVFProperties, dynamicScalingEnabled, null, overrideDiskOfferingId, cmd.getRootDiskKmsKeyId(), volume, snapshot); if (cmd instanceof DeployVnfApplianceCmd) { vnfTemplateManager.createIsolatedNetworkRulesForVnfAppliance(zone, template, 
owner, vm, (DeployVnfApplianceCmd) cmd); } @@ -9512,7 +9522,7 @@ public UserVm importVM(final DataCenter zone, final Host host, final VirtualMach null, null, userData, null, null, isDisplayVm, keyboard, accountId, userId, serviceOffering, template.getFormat().equals(ImageFormat.ISO), sshPublicKeys, networkNicMap, id, instanceName, uuidName, hypervisorType, customParameters, - null, null, null, powerState, dynamicScalingEnabled, null, serviceOffering.getDiskOfferingId(), null, null, null, null); + null, null, null, powerState, dynamicScalingEnabled, null, serviceOffering.getDiskOfferingId(), null, null, null, null, null); }); } diff --git a/server/src/main/java/org/apache/cloudstack/kms/KMSManagerImpl.java b/server/src/main/java/org/apache/cloudstack/kms/KMSManagerImpl.java new file mode 100644 index 000000000000..451638fc2f66 --- /dev/null +++ b/server/src/main/java/org/apache/cloudstack/kms/KMSManagerImpl.java @@ -0,0 +1,1626 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.cloudstack.kms; + +import com.cloud.api.ApiDBUtils; +import com.cloud.api.ApiResponseHelper; +import com.cloud.dc.DataCenter; +import com.cloud.dc.DataCenterVO; +import com.cloud.dc.dao.DataCenterDao; +import com.cloud.event.ActionEvent; +import com.cloud.event.ActionEventUtils; +import com.cloud.event.EventTypes; +import com.cloud.event.EventVO; +import com.cloud.exception.InvalidParameterValueException; +import com.cloud.exception.PermissionDeniedException; +import com.cloud.projects.Project.ListProjectResourcesCriteria; +import com.cloud.storage.Volume; +import com.cloud.storage.VolumeVO; +import com.cloud.storage.dao.VolumeDao; +import com.cloud.user.Account; +import com.cloud.user.AccountManager; +import com.cloud.utils.Pair; +import com.cloud.utils.Ternary; +import com.cloud.utils.component.ManagerBase; +import com.cloud.utils.component.PluggableService; +import com.cloud.utils.crypt.DBEncryptionUtil; +import com.cloud.utils.db.Filter; +import com.cloud.utils.db.GlobalLock; +import com.cloud.utils.db.SearchBuilder; +import com.cloud.utils.db.SearchCriteria; +import com.cloud.utils.db.Transaction; +import com.cloud.utils.db.TransactionCallbackWithException; +import com.cloud.utils.db.TransactionStatus; +import com.cloud.utils.exception.CloudRuntimeException; +import org.apache.cloudstack.api.ApiCommandResourceType; +import org.apache.cloudstack.api.command.admin.kms.MigrateVolumesToKMSCmd; +import org.apache.cloudstack.api.command.user.kms.CreateKMSKeyCmd; +import org.apache.cloudstack.api.command.user.kms.DeleteKMSKeyCmd; +import org.apache.cloudstack.api.command.user.kms.ListKMSKeysCmd; +import org.apache.cloudstack.api.command.user.kms.RotateKMSKeyCmd; +import org.apache.cloudstack.api.command.user.kms.UpdateKMSKeyCmd; +import org.apache.cloudstack.api.command.user.kms.hsm.AddHSMProfileCmd; +import org.apache.cloudstack.api.command.user.kms.hsm.DeleteHSMProfileCmd; +import 
org.apache.cloudstack.api.command.user.kms.hsm.ListHSMProfilesCmd; +import org.apache.cloudstack.api.command.user.kms.hsm.UpdateHSMProfileCmd; +import org.apache.cloudstack.api.response.HSMProfileResponse; +import org.apache.cloudstack.api.response.KMSKeyResponse; +import org.apache.cloudstack.api.response.ListResponse; +import org.apache.cloudstack.api.response.SuccessResponse; +import org.apache.cloudstack.context.CallContext; +import org.apache.cloudstack.framework.config.ConfigKey; +import org.apache.cloudstack.framework.kms.KMSException; +import org.apache.cloudstack.framework.kms.KMSProvider; +import org.apache.cloudstack.framework.kms.KeyPurpose; +import org.apache.cloudstack.framework.kms.WrappedKey; +import org.apache.cloudstack.kms.dao.HSMProfileDao; +import org.apache.cloudstack.kms.dao.HSMProfileDetailsDao; +import org.apache.cloudstack.kms.dao.KMSKekVersionDao; +import org.apache.cloudstack.kms.dao.KMSKeyDao; +import org.apache.cloudstack.kms.dao.KMSWrappedKeyDao; +import org.apache.cloudstack.managed.context.ManagedContextRunnable; +import org.apache.cloudstack.secret.PassphraseVO; +import org.apache.cloudstack.secret.dao.PassphraseDao; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import javax.inject.Inject; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.SynchronousQueue; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +public class KMSManagerImpl extends 
ManagerBase implements KMSManager, PluggableService { + private static final Logger logger = LogManager.getLogger(KMSManagerImpl.class); + private static final Map kmsProviderMap = new HashMap<>(); + private final ExecutorService kmsOperationExecutor = new ThreadPoolExecutor( + 2, 100, 60L, TimeUnit.SECONDS, new SynchronousQueue<>(), r -> { + Thread t = new Thread(r, "kms-operation"); + t.setDaemon(true); + return t; + }); + @Inject + private KMSWrappedKeyDao kmsWrappedKeyDao; + @Inject + private KMSKeyDao kmsKeyDao; + @Inject + private KMSKekVersionDao kmsKekVersionDao; + @Inject + private HSMProfileDao hsmProfileDao; + @Inject + private HSMProfileDetailsDao hsmProfileDetailsDao; + @Inject + private AccountManager accountManager; + @Inject + private DataCenterDao dataCenterDao; + @Inject + private VolumeDao volumeDao; + @Inject + private PassphraseDao passphraseDao; + private List kmsProviders; + private ScheduledExecutorService rewrapExecutor; + + @Override + public List listKMSProviders() { + return kmsProviders; + } + + @Override + public KMSProvider getKMSProvider(String name) { + if (StringUtils.isEmpty(name)) { + name = "database"; + } + + String providerName = name.toLowerCase(); + if (!kmsProviderMap.containsKey(providerName)) { + throw new CloudRuntimeException(String.format("KMS provider '%s' not found", providerName)); + } + + KMSProvider provider = kmsProviderMap.get(providerName); + if (provider == null) { + throw new CloudRuntimeException(String.format("KMS provider '%s' returned is null", providerName)); + } + + return provider; + } + + @Override + @ActionEvent(eventType = EventTypes.EVENT_KMS_KEY_UNWRAP, eventDescription = "unwrapping volume key") + public byte[] unwrapVolumeKey(WrappedKey wrappedKey, Long zoneId) throws KMSException { + String providerName = wrappedKey.getProviderName(); + KMSProvider provider = getKMSProvider(providerName); + + try { + logger.debug("Unwrapping {} key", wrappedKey.getPurpose()); + return retryOperation(() -> 
provider.unwrapKey(wrappedKey)); + } catch (Exception e) { + logger.error("Failed to unwrap key: {}", e.getMessage()); + throw handleKmsException(e); + } + } + + @Override + public boolean hasPermission(Long callerAccountId, KMSKey key) { + if (callerAccountId == null) { + return false; + } + if (key == null) { + return false; + } + if (!key.isEnabled()) { + throw new InvalidParameterValueException("KMS key is not enabled: " + key); + } + Account caller = accountManager.getAccount(callerAccountId); + if (caller == null) { + return false; + } + Account owner = accountManager.getAccount(key.getAccountId()); + try { + accountManager.checkAccess(caller, null, true, owner); + return true; + } catch (PermissionDeniedException e) { + return false; + } + } + + @Override + public void checkKmsKeyForVolumeEncryption(Account caller, Long kmsKeyId, Long zoneId) { + if (kmsKeyId == null) { + return; + } + checkKmsKeyAccess(caller, kmsKeyId); + KMSKeyVO key = kmsKeyDao.findById(kmsKeyId); + if (key.getZoneId() != null && zoneId != null && !key.getZoneId().equals(zoneId)) { + throw new InvalidParameterValueException( + "KMS key belongs to zone " + key.getZoneId() + + " but the target resource is in zone " + zoneId); + } + if (!key.isEnabled()) { + throw new InvalidParameterValueException( + "KMS key is not enabled and cannot be used for volume encryption: " + key.getUuid()); + } + if (key.getPurpose() != KeyPurpose.VOLUME_ENCRYPTION) { + throw new InvalidParameterValueException( + "KMS key purpose must be volume encryption; key has purpose: " + key.getPurpose().getName()); + } + } + + /** + * Validate that the caller has permission to use a KMS key. + * No-op if kmsKeyId is null. 
+ * + * @param caller the caller's account + * @param kmsKeyId the KMS key database ID + * @throws InvalidParameterValueException if key not found + * @throws PermissionDeniedException if caller lacks access + */ + public void checkKmsKeyAccess(Account caller, Long kmsKeyId) { + if (kmsKeyId == null) { + return; + } + KMSKeyVO key = kmsKeyDao.findById(kmsKeyId); + checkKmsKeyAccess(caller, key); + } + + public void checkKmsKeyAccess(Account caller, KMSKeyVO key) { + if (key == null) { + throw new InvalidParameterValueException("KMS key not found"); + } + accountManager.checkAccess(caller, null, true, key); + } + + @Override + public byte[] unwrapKey(Long wrappedKeyId) throws KMSException { + KMSWrappedKeyVO wrappedVO = kmsWrappedKeyDao.findById(wrappedKeyId); + if (wrappedVO == null) { + throw KMSException.kekNotFound("Wrapped key not found: " + wrappedKeyId); + } + + KMSKeyVO kmsKey = kmsKeyDao.findById(wrappedVO.getKmsKeyId()); + if (kmsKey == null) { + throw KMSException.kekNotFound("KMS key not found for wrapped key: " + wrappedKeyId); + } + + if (wrappedVO.getKekVersionId() != null) { + KMSKekVersionVO version = kmsKekVersionDao.findById(wrappedVO.getKekVersionId()); + if (version != null && version.getStatus() != KMSKekVersionVO.Status.Archived) { + try { + byte[] dek = getUnwrappedKey(wrappedVO, kmsKey, version); + logger.debug("Successfully unwrapped key {} with KEK version {}", wrappedKeyId, + version.getVersionNumber()); + return dek; + } catch (Exception e) { + logger.warn("Failed to unwrap with version {}: {}", version.getVersionNumber(), e.getMessage()); + } + } + } + + // Fallback: try all available versions for decryption + List versions = kmsKekVersionDao.getVersionsForDecryption(kmsKey.getId()); + for (KMSKekVersionVO version : versions) { + try { + byte[] dek = getUnwrappedKey(wrappedVO, kmsKey, version); + logger.info("Successfully unwrapped key {} with KEK version {} (fallback)", wrappedKeyId, + version.getVersionNumber()); + return dek; + } 
catch (Exception e) { + logger.debug("Failed to unwrap with version {}: {}", version.getVersionNumber(), e.getMessage()); + } + } + + throw KMSException.wrapUnwrapFailed("Failed to unwrap key with any available KEK version"); + } + + private byte[] getUnwrappedKey(KMSWrappedKeyVO wrappedVO, KMSKeyVO kmsKey, + KMSKekVersionVO version) throws Exception { + HSMProfileVO hsmProfile = hsmProfileDao.findById(version.getHsmProfileId()); + KMSProvider provider = getKMSProvider(hsmProfile.getProtocol()); + + WrappedKey wrapped = new WrappedKey(wrappedVO.getUuid(), version.getKekLabel(), kmsKey.getPurpose(), + kmsKey.getAlgorithm(), wrappedVO.getWrappedBlob(), + hsmProfile.getProtocol(), wrappedVO.getCreated(), kmsKey.getZoneId()); + return retryOperation(() -> provider.unwrapKey(wrapped, version.getHsmProfileId())); + } + + @Override + @ActionEvent(eventType = EventTypes.EVENT_KMS_KEY_WRAP, + eventDescription = "generating volume key with specified KEK") + public WrappedKey generateVolumeKeyWithKek(KMSKey kmsKey, Long callerAccountId) throws KMSException { + if (kmsKey == null) { + throw KMSException.kekNotFound("KMS key not found"); + } + + if (!kmsKey.isEnabled()) { + throw KMSException.invalidParameter("KMS key is not enabled: " + kmsKey); + } + + if (kmsKey.getPurpose() != KeyPurpose.VOLUME_ENCRYPTION) { + throw KMSException.invalidParameter("KMS key purpose is not VOLUME_ENCRYPTION: " + kmsKey); + } + + KMSKekVersionVO activeVersion = getActiveKekVersion(kmsKey.getId()); + + HSMProfileVO hsmProfile = hsmProfileDao.findById(activeVersion.getHsmProfileId()); + if (hsmProfile == null) { + throw KMSException.invalidParameter("HSM profile not found: " + activeVersion.getHsmProfileId()); + } + if (!hsmProfile.isEnabled()) { + throw KMSException.invalidParameter("HSM profile is not enabled: " + hsmProfile.getName()); + } + KMSProvider provider = getKMSProvider(hsmProfile.getProtocol()); + + int dekSize = KMSDekSizeBits.value(); + WrappedKey wrappedKey; + try { + wrappedKey = 
retryOperation(() -> provider.generateAndWrapDek(KeyPurpose.VOLUME_ENCRYPTION, + activeVersion.getKekLabel(), dekSize, + activeVersion.getHsmProfileId())); + KMSWrappedKeyVO wrappedKeyVO = new KMSWrappedKeyVO(kmsKey.getId(), activeVersion.getId(), + kmsKey.getZoneId(), wrappedKey.getWrappedKeyMaterial()); + wrappedKeyVO = kmsWrappedKeyDao.persist(wrappedKeyVO); + + // Volume creation code looks up by UUID and sets volume.kmsWrappedKeyId + wrappedKey = new WrappedKey( + wrappedKeyVO.getUuid(), + wrappedKey.getKekId(), + wrappedKey.getPurpose(), + wrappedKey.getAlgorithm(), + wrappedKey.getWrappedKeyMaterial(), + wrappedKey.getProviderName(), + wrappedKey.getCreated(), + wrappedKey.getZoneId()); + } catch (Exception e) { + throw handleKmsException(e); + } + + logger.debug("Generated volume key using KMS key {} with KEK version {}, wrapped key UUID: {}", + kmsKey, activeVersion.getVersionNumber(), wrappedKey.getUuid()); + return wrappedKey; + } + + private KMSKekVersionVO getActiveKekVersion(Long kmsKeyId) throws KMSException { + KMSKekVersionVO activeVersion = kmsKekVersionDao.getActiveVersion(kmsKeyId); + if (activeVersion == null) { + throw KMSException.kekNotFound("No active KEK version found for KMS key ID: " + kmsKeyId); + } + return activeVersion; + } + + @Override + public KMSKeyResponse createKMSKey(CreateKMSKeyCmd cmd) throws KMSException { + Account caller = CallContext.current().getCallingAccount(); + Account targetAccount = accountManager.finalizeOwner(caller, cmd.getAccountName(), cmd.getDomainId(), + cmd.getProjectId()); + + KeyPurpose keyPurpose = parseKeyPurpose(cmd.getPurpose()); + + int bits = cmd.getKeyBits(); + if (bits != 128 && bits != 192 && bits != 256) { + throw new InvalidParameterValueException("Key bits must be 128, 192, or 256"); + } + + HSMProfileVO profile = getHSMProfile(cmd.getHsmProfileId()); + checkHSMProfileAccess(caller, profile, false); + if (!profile.isEnabled()) { + throw new InvalidParameterValueException("HSM profile is not 
enabled: " + profile.getName()); + } + + KMSKey kmsKey = createUserKMSKey( + targetAccount.getId(), + targetAccount.getDomainId(), + cmd.getZoneId(), + cmd.getName(), + cmd.getDescription(), + keyPurpose, + bits, + cmd.getHsmProfileId()); + + return createKMSKeyResponse(kmsKey); + } + + KMSKeyResponse createKMSKeyResponse(KMSKey kmsKey) { + KMSKeyResponse response = new KMSKeyResponse(); + response.setId(kmsKey.getUuid()); + response.setName(kmsKey.getName()); + response.setDescription(kmsKey.getDescription()); + response.setPurpose(kmsKey.getPurpose().getName()); + response.setAlgorithm(kmsKey.getAlgorithm()); + response.setKeyBits(kmsKey.getKeyBits()); + response.setEnabled(kmsKey.isEnabled()); + response.setCreated(kmsKey.getCreated()); + + KMSKekVersionVO activeVersion = kmsKekVersionDao.getActiveVersion(kmsKey.getId()); + if (activeVersion != null) { + response.setVersion(activeVersion.getVersionNumber()); + } + + HSMProfileVO hsmProfile = hsmProfileDao.findById(kmsKey.getHsmProfileId()); + if (hsmProfile != null) { + response.setHsmProfileId(hsmProfile.getUuid()); + response.setHsmProfileName(hsmProfile.getName()); + } + + ApiResponseHelper.populateOwner(response, kmsKey); + + DataCenter zone = ApiDBUtils.findZoneById(kmsKey.getZoneId()); + if (zone != null) { + response.setZoneId(zone.getUuid()); + response.setZoneName(zone.getName()); + } + + Account caller = CallContext.current().getCallingAccount(); + if (caller != null && (caller.getType() == Account.Type.ADMIN + || caller.getType() == Account.Type.RESOURCE_DOMAIN_ADMIN)) { + response.setKekLabel(kmsKey.getKekLabel()); + } + + response.setObjectName("kmskey"); + return response; + } + + @ActionEvent(eventType = EventTypes.EVENT_KMS_KEY_CREATE, eventDescription = "creating user KMS key") + KMSKey createUserKMSKey(Long accountId, Long domainId, Long zoneId, + String name, String description, KeyPurpose purpose, + Integer keyBits, long hsmProfileId) throws KMSException { + HSMProfileVO profile = 
hsmProfileDao.findById(hsmProfileId); + if (profile == null) { + throw KMSException.invalidParameter("HSM Profile not found"); + } + + KMSProvider provider = getKMSProvider(profile.getProtocol()); + String kekLabel = purpose.getName() + "-kek-" + UUID.randomUUID().toString().substring(0, 8); + + String providerKekLabel; + Long finalProfileId = hsmProfileId; + try { + providerKekLabel = retryOperation(() -> provider.createKek(purpose, kekLabel, keyBits, finalProfileId)); + } catch (Exception e) { + throw handleKmsException(e); + } + + KMSKeyVO kmsKey = new KMSKeyVO(name, description, providerKekLabel, purpose, + accountId, domainId, zoneId, "AES/GCM/NoPadding", keyBits); + kmsKey.setHsmProfileId(finalProfileId); + kmsKey = kmsKeyDao.persist(kmsKey); + + KMSKekVersionVO initialVersion = new KMSKekVersionVO(kmsKey.getId(), 1, providerKekLabel, + KMSKekVersionVO.Status.Active); + initialVersion.setHsmProfileId(finalProfileId); + initialVersion = kmsKekVersionDao.persist(initialVersion); + + logger.info("Created KMS key ({}) with initial KEK version {} for account {} in zone {} (profile: {})", + kmsKey, initialVersion.getVersionNumber(), accountId, zoneId, finalProfileId); + ActionEventUtils.onCompletedActionEvent(CallContext.current().getCallingUserId(), kmsKey.getAccountId(), + EventVO.LEVEL_INFO, EventTypes.EVENT_KMS_KEY_CREATE, + String.format("Created KMS key: %s", kmsKey.getUuid()), + kmsKey.getId(), ApiCommandResourceType.KmsKey.toString(), CallContext.current().getStartEventId()); + return kmsKey; + } + + @Override + public ListResponse listKMSKeys(ListKMSKeysCmd cmd) { + Account caller = CallContext.current().getCallingAccount(); + + List permittedAccounts = new ArrayList<>(); + Ternary domainIdRecursiveListProject = new Ternary<>( + cmd.getDomainId(), cmd.isRecursive(), null); + accountManager.buildACLSearchParameters(caller, cmd.getId(), cmd.getAccountName(), + cmd.getProjectId(), permittedAccounts, domainIdRecursiveListProject, + cmd.listAll(), false); + 
Long domainId = domainIdRecursiveListProject.first(); + Boolean isRecursive = domainIdRecursiveListProject.second(); + ListProjectResourcesCriteria listProjectResourcesCriteria = domainIdRecursiveListProject.third(); + + SearchBuilder sb = getSearchBuilderForKMSKeys(domainId, isRecursive, permittedAccounts, + listProjectResourcesCriteria); + SearchCriteria sc = getSearchCriteriaForKMSKeys(sb, cmd, domainId, isRecursive, permittedAccounts, + listProjectResourcesCriteria); + + Filter searchFilter = new Filter(KMSKeyVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal()); + Pair, Integer> result = kmsKeyDao.searchAndCount(sc, searchFilter); + List keys = result.first(); + Integer count = result.second(); + + List responses = new ArrayList<>(); + for (KMSKey key : keys) { + responses.add(createKMSKeyResponse(key)); + } + + ListResponse listResponse = new ListResponse<>(); + listResponse.setResponses(responses, count); + return listResponse; + } + + SearchBuilder getSearchBuilderForKMSKeys(Long domainId, Boolean isRecursive, List permittedAccounts, + ListProjectResourcesCriteria listProjectResourcesCriteria) { + SearchBuilder sb = kmsKeyDao.createSearchBuilder(); + accountManager.buildACLSearchBuilder(sb, domainId, isRecursive, permittedAccounts, + listProjectResourcesCriteria); + + sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ); + sb.and("zoneId", sb.entity().getZoneId(), SearchCriteria.Op.EQ); + sb.and("purpose", sb.entity().getPurpose(), SearchCriteria.Op.EQ); + sb.and("enabled", sb.entity().isEnabled(), SearchCriteria.Op.EQ); + sb.and("hsmProfileId", sb.entity().getHsmProfileId(), SearchCriteria.Op.EQ); + sb.done(); + return sb; + } + + SearchCriteria getSearchCriteriaForKMSKeys(SearchBuilder searchBuilder, ListKMSKeysCmd cmd, + Long domainId, Boolean isRecursive, List permittedAccounts, + ListProjectResourcesCriteria listProjectResourcesCriteria) { + SearchCriteria sc = searchBuilder.create(); + accountManager.buildACLSearchCriteria(sc, domainId, 
isRecursive, permittedAccounts, + listProjectResourcesCriteria); + KeyPurpose keyPurpose = parseKeyPurpose(cmd.getPurpose()); + if (cmd.getId() != null) { + sc.setParameters("id", cmd.getId()); + } + if (cmd.getZoneId() != null) { + sc.setParameters("zoneId", cmd.getZoneId()); + } + if (keyPurpose != null) { + sc.setParameters("purpose", keyPurpose); + } + if (cmd.getEnabled() != null) { + sc.setParameters("enabled", cmd.getEnabled()); + } + if (cmd.getHsmProfileId() != null) { + sc.setParameters("hsmProfileId", cmd.getHsmProfileId()); + } + return sc; + } + + @Override + public KMSKeyResponse updateKMSKey(UpdateKMSKeyCmd cmd) throws KMSException { + Account caller = CallContext.current().getCallingAccount(); + KMSKeyVO key = findKMSKeyAndCheckAccess(cmd.getId(), caller); + KMSKey updatedKey = updateUserKMSKey(key, cmd.getName(), cmd.getDescription(), cmd.getEnabled()); + return createKMSKeyResponse(updatedKey); + } + + private KMSKey updateUserKMSKey(KMSKeyVO key, String name, String description, Boolean enabled) { + boolean updated = false; + if (name != null && !name.equals(key.getName())) { + key.setName(name); + updated = true; + } + if (description != null && !description.equals(key.getDescription())) { + key.setDescription(description); + updated = true; + } + if (enabled != null && enabled != key.isEnabled()) { + key.setEnabled(enabled); + updated = true; + } + + if (updated) { + kmsKeyDao.update(key.getId(), key); + logger.info("Updated KMS key {}", key); + } + + return key; + } + + @Override + public SuccessResponse deleteKMSKey(DeleteKMSKeyCmd cmd) throws KMSException { + Account caller = CallContext.current().getCallingAccount(); + + KMSKeyVO key = findKMSKeyAndCheckAccess(cmd.getId(), caller); + + deleteUserKMSKey(key, caller); + return new SuccessResponse(); + } + + private void deleteUserKMSKey(KMSKeyVO key, Account caller) throws KMSException { + long wrappedKeyCount = kmsWrappedKeyDao.countByKmsKeyId(key.getId()); + if (wrappedKeyCount > 0) { + 
throw new InvalidParameterValueException("Cannot delete KMS key: " + key + ". " + wrappedKeyCount + + " wrapped key(s) still reference this key"); + } + + kmsKeyDao.remove(key.getId()); + if (volumeDao.existsWithKmsKey(key.getId())) { + throw new InvalidParameterValueException("Cannot delete KMS key: " + key + ". " + + "There are Volumes which still reference this key"); + } + logger.info("Deleted KMS key {}", key); + } + + @Override + @ActionEvent(eventType = EventTypes.EVENT_KMS_KEY_ROTATE, eventDescription = "rotating KMS key", async = true) + public String rotateKMSKey(RotateKMSKeyCmd cmd) throws KMSException { + Account caller = CallContext.current().getCallingAccount(); + Integer keyBits = cmd.getKeyBits(); + Long hsmProfileId = cmd.getHsmProfileId(); + + KMSKeyVO kmsKey = findKMSKeyAndCheckAccess(cmd.getId(), caller); + + if (!kmsKey.isEnabled()) { + throw new InvalidParameterValueException("KMS key is not enabled: " + kmsKey); + } + + HSMProfileVO profile = null; + if (hsmProfileId != null) { + profile = hsmProfileDao.findById(hsmProfileId); + if (profile == null) { + throw new InvalidParameterValueException("Target HSM Profile not found: " + hsmProfileId); + } + checkHSMProfileAccess(caller, profile, false); + if (!profile.isEnabled()) { + throw new InvalidParameterValueException("HSM profile is not enabled: " + profile.getName()); + } + } + + int newKeyBits = keyBits != null ? keyBits : kmsKey.getKeyBits(); + KMSKekVersionVO currentActive = getActiveKekVersion(kmsKey.getId()); + + rotateKek( + kmsKey, + currentActive.getKekLabel(), + null, // auto-generate new label + newKeyBits, + profile); + + KMSKekVersionVO newVersion = getActiveKekVersion(kmsKey.getId()); + + logger.info("KMS key rotation initiated: {} -> new KEK version {} (UUID: {}). 
" + + "Background job will gradually rewrap {} wrapped key(s)", + kmsKey, newVersion.getVersionNumber(), newVersion.getUuid(), + kmsWrappedKeyDao.countByKmsKeyId(kmsKey.getId())); + + ActionEventUtils.onCompletedActionEvent(CallContext.current().getCallingUserId(), kmsKey.getAccountId(), + EventVO.LEVEL_INFO, EventTypes.EVENT_KMS_KEY_ROTATE, + String.format("KMS key rotation completed for KMS key from version %d to version %d", + currentActive.getVersionNumber(), newVersion.getVersionNumber()), + kmsKey.getId(), ApiCommandResourceType.KmsKey.toString(), CallContext.current().getStartEventId()); + + return newVersion.getUuid(); + } + + String rotateKek(KMSKeyVO kmsKey, String oldKekLabel, String newKekLabel, int keyBits, + HSMProfileVO newHSMProfile) throws KMSException { + if (StringUtils.isEmpty(oldKekLabel)) { + throw KMSException.invalidParameter("oldKekLabel must be specified"); + } + + if (newHSMProfile == null) { + newHSMProfile = hsmProfileDao.findById(kmsKey.getHsmProfileId()); + } + + KMSProvider provider = getKMSProvider(newHSMProfile.getProtocol()); + + try { + logger.info("Starting KEK rotation from {} to {} for kms key {}", oldKekLabel, newKekLabel, kmsKey); + + if (StringUtils.isEmpty(newKekLabel)) { + newKekLabel = kmsKey.getPurpose().getName() + "-kek-" + UUID.randomUUID().toString().substring(0, 8); + } + + String finalNewKekLabel = newKekLabel; + Long newProfileId = newHSMProfile.getId(); + final HSMProfileVO finalHSMProfile = newHSMProfile; + String newKekId = retryOperation( + () -> provider.createKek(kmsKey.getPurpose(), finalNewKekLabel, keyBits, newProfileId)); + + try { + KMSKekVersionVO newVersion = Transaction + .execute(new TransactionCallbackWithException() { + @Override + public KMSKekVersionVO doInTransaction(TransactionStatus status) throws KMSException { + KMSKekVersionVO version = createKekVersion(kmsKey.getId(), newKekId, newProfileId); + + if (!newProfileId.equals(kmsKey.getHsmProfileId())) { + 
kmsKey.setHsmProfileId(newProfileId); + kmsKeyDao.update(kmsKey.getId(), kmsKey); + logger.info("Updated KMS key {} to use HSM profile {}", kmsKey, finalHSMProfile); + } + return version; + } + }); + + logger.info("KEK rotation: KMS key {} now has {} versions (active: v{}, previous: v{})", + kmsKey, newVersion.getVersionNumber(), newVersion.getVersionNumber(), + newVersion.getVersionNumber() - 1); + + return newKekId; + } catch (KMSException e) { + logger.error( + "Database update failed during KEK rotation for kmsKey {}. Attempting to delete orphaned KEK " + + "{} from provider {}", + kmsKey, newKekId, provider.getProviderName()); + try { + provider.deleteKek(newKekId); + } catch (KMSException ex) { + logger.error("Failed to delete orphaned KEK {} from provider {} after DB failure: {}", + newKekId, provider.getProviderName(), ex.getMessage()); + } + throw e; + } + + } catch (Exception e) { + logger.error("KEK rotation failed for kmsKey {}: {}", kmsKey, e.getMessage()); + throw handleKmsException(e); + } + } + + private KMSKekVersionVO createKekVersion(Long kmsKeyId, String kekLabel, Long hsmProfileId) throws KMSException { + List existingVersions = kmsKekVersionDao.listByKmsKeyId(kmsKeyId); + int nextVersion = existingVersions.stream() + .mapToInt(KMSKekVersionVO::getVersionNumber) + .max() + .orElse(0) + 1; + + KMSKekVersionVO currentActive = kmsKekVersionDao.getActiveVersion(kmsKeyId); + if (currentActive != null) { + currentActive.setStatus(KMSKekVersionVO.Status.Previous); + kmsKekVersionDao.update(currentActive.getId(), currentActive); + } + + KMSKekVersionVO newVersion = new KMSKekVersionVO(kmsKeyId, nextVersion, kekLabel, + KMSKekVersionVO.Status.Active); + newVersion.setHsmProfileId(hsmProfileId); + newVersion = kmsKekVersionDao.persist(newVersion); + + logger.info("Created KEK version {} for KMS key {} (label: {}, profile: {})", nextVersion, kmsKeyId, kekLabel, + hsmProfileId); + return newVersion; + } + + @Override + @ActionEvent(eventType = 
EventTypes.EVENT_VOLUME_MIGRATE_TO_KMS, + eventDescription = "migrating volumes to KMS", + async = true) + public int migrateVolumesToKMS(MigrateVolumesToKMSCmd cmd) throws KMSException { + Account caller = CallContext.current().getCallingAccount(); + Long zoneId = cmd.getZoneId(); + String accountName = cmd.getAccountName(); + Long domainId = cmd.getDomainId(); + Long kmsKeyId = cmd.getKmsKeyId(); + List volumeIds = cmd.getVolumeIds(); + + if (zoneId == null && CollectionUtils.isEmpty(volumeIds)) { + throw new InvalidParameterValueException("Need to specify either ZoneId or Volume IDs"); + } + + if (zoneId != null && CollectionUtils.isNotEmpty(volumeIds)) { + throw new InvalidParameterValueException("Specify either ZoneId or Volume IDs"); + } + + if (kmsKeyId == null) { + throw new InvalidParameterValueException("kmsKeyId must be specified"); + } + + KMSKeyVO kmsKey = kmsKeyDao.findById(kmsKeyId); + if (kmsKey == null) { + throw new InvalidParameterValueException("KMS key not found: " + kmsKeyId); + } + checkKmsKeyAccess(caller, kmsKey); + + if (!kmsKey.isEnabled()) { + throw new InvalidParameterValueException("KMS key is not enabled: " + kmsKey.getUuid()); + } + + if (kmsKey.getPurpose() != KeyPurpose.VOLUME_ENCRYPTION) { + throw new InvalidParameterValueException("KMS key purpose must be VOLUME_ENCRYPTION"); + } + + KMSProvider provider; + if (kmsKey.getHsmProfileId() != null) { + HSMProfileVO profile = getHSMProfile(kmsKey.getHsmProfileId()); + if (!profile.isEnabled()) { + throw new InvalidParameterValueException("HSM profile is not enabled: " + profile.getName()); + } + provider = getKMSProvider(profile.getProtocol()); + } else { + provider = getKMSProvider("database"); + } + + KMSKekVersionVO activeVersion = getActiveKekVersion(kmsKey.getId()); + + Long accountId = null; + if (accountName != null) { + accountId = accountManager.finalizeAccountId(accountName, domainId, null, true); + } + + int pageSize = 100; + + int successCount = 0; + int failureCount = 0; 
+ int totalCount; + logger.info("Starting migration of volumes to KMS (zone: {}, account: {}, domain: {})", + zoneId, accountId, domainId); + + List volumes; + if (CollectionUtils.isNotEmpty(volumeIds)) { + volumes = volumeDao.listByIds(volumeIds); + accountManager.checkAccess(caller, null, true, volumes.toArray(new Volume[0])); + totalCount = volumes.size(); + } else { + Pair, Integer> volumeListPair = volumeDao.listVolumesForKMSMigration(zoneId, accountId, + domainId, + pageSize); + volumes = volumeListPair.first(); + totalCount = volumeListPair.second(); + } + while (true) { + + if (CollectionUtils.isEmpty(volumes) || totalCount == 0) { + break; + } + + for (VolumeVO volume : volumes) { + try { + if (migrateVolumeToKmsKey(provider, volume, kmsKey, activeVersion)) { + successCount++; + logger.debug("Migrated volume's encryption {} to KMS (batch {})", volume, kmsKey); + } + } catch (Exception e) { + failureCount++; + logger.warn("Failed to migrate volume {}: {}", volume.getId(), e.getMessage()); + } + } + logger.debug("Processed {} volumes. 
success: {}, failure: {}, total: {}", volumes.size(), + successCount, failureCount, totalCount); + + if (CollectionUtils.isNotEmpty(volumeIds)) { + break; + } + + Pair, Integer> volumeListPair = volumeDao.listVolumesForKMSMigration(zoneId, accountId, + domainId, pageSize); + volumes = volumeListPair.first(); + if (totalCount == volumeListPair.second()) { + logger.debug( + "{} volumes pending for migration because passphrase was not found or migration failed", + totalCount); + break; + } + totalCount = volumeListPair.second(); + } + logger.info("Migration operation completed: {} total volumes processed, {} success, {} failures", + successCount + failureCount, successCount, failureCount); + + return successCount; + } + + private boolean migrateVolumeToKmsKey(KMSProvider provider, VolumeVO volume, KMSKey kmsKey, + KMSKekVersionVO activeVersion) { + PassphraseVO passphrase = passphraseDao.findById(volume.getPassphraseId()); + if (passphrase == null) { + logger.warn( + "Skipping migration of volume from to the KMS key {} because passphrase id: {} not found for " + + "volume {}", + kmsKey, volume.getPassphraseId(), volume); + return false; + } + + // PassphraseVO.getPassphrase() returns Base64-encoded bytes matching KVM/QEMU + // format + byte[] passphraseBytes = passphrase.getPassphrase(); + try { + WrappedKey wrappedKey = provider.wrapKey( + passphraseBytes, + KeyPurpose.VOLUME_ENCRYPTION, + activeVersion.getKekLabel(), + activeVersion.getHsmProfileId()); + + KMSWrappedKeyVO wrappedKeyVO = new KMSWrappedKeyVO( + kmsKey.getId(), + activeVersion.getId(), + volume.getDataCenterId(), + wrappedKey.getWrappedKeyMaterial()); + wrappedKeyVO = kmsWrappedKeyDao.persist(wrappedKeyVO); + + volume.setKmsWrappedKeyId(wrappedKeyVO.getId()); + volume.setKmsKeyId(kmsKey.getId()); + volume.setPassphraseId(null); + volumeDao.update(volume.getId(), volume); + return true; + } finally { + if (passphraseBytes != null) { + Arrays.fill(passphraseBytes, (byte) 0); + } + } + } + + @Override + 
public boolean deleteKMSKeysByAccountId(Long accountId) { + if (accountId == null) { + logger.warn("Cannot delete KMS keys: account ID is null"); + return false; + } + + try { + List accountKeys = kmsKeyDao.listByAccount(accountId, null, null); + + if (accountKeys == null || accountKeys.isEmpty()) { + logger.debug("No KMS keys found for account {}", accountId); + return true; + } + + logger.info("Deleting {} KMS key(s) for account {}", accountKeys.size(), accountId); + + boolean allDeleted = true; + for (KMSKeyVO key : accountKeys) { + try { + List kekVersions = kmsKekVersionDao.listByKmsKeyId(key.getId()); + if (kekVersions != null && !kekVersions.isEmpty()) { + logger.debug("Deleting {} KEK version(s) from provider for KMS key {}", + kekVersions.size(), key.getUuid()); + for (KMSKekVersionVO kekVersion : kekVersions) { + HSMProfileVO hsmProfile = hsmProfileDao.findById(kekVersion.getHsmProfileId()); + try { + KMSProvider provider = getKMSProvider(hsmProfile.getProtocol()); + provider.deleteKek(kekVersion.getKekLabel()); + logger.debug("Deleted KEK {} (v{}) from provider", + kekVersion.getKekLabel(), kekVersion.getVersionNumber()); + } catch (Exception e) { + logger.warn("Failed to delete KEK {} from provider: {}", + kekVersion.getKekLabel(), e.getMessage()); + } + } + } + + // CASCADE deletes KEK versions and wrapped keys + boolean deleted = kmsKeyDao.remove(key.getId()); + if (deleted) { + logger.debug("Deleted KMS key {} as part of account {} cleanup", key.getUuid(), accountId); + } else { + logger.warn("Failed to delete KMS key {} as part of account {} cleanup", + key.getUuid(), accountId); + allDeleted = false; + } + } catch (Exception e) { + logger.error("Error deleting KMS key {} for account {}: {}", + key.getUuid(), accountId, e.getMessage(), e); + allDeleted = false; + } + } + + if (allDeleted) { + logger.info("Successfully deleted all KMS keys for account {}", accountId); + } else { + logger.warn("Some KMS keys for account {} could not be deleted", 
accountId); + } + + return allDeleted; + } catch (Exception e) { + logger.error("Error during KMS key cleanup for account {}: {}", accountId, e.getMessage(), e); + return false; + } + } + + @Override + @ActionEvent(eventType = EventTypes.EVENT_HSM_PROFILE_CREATE, eventDescription = "Adding HSM profile") + public HSMProfile addHSMProfile(AddHSMProfileCmd cmd) throws KMSException { + Account caller = CallContext.current().getCallingAccount(); + + String protocol = cmd.getProtocol(); + if (StringUtils.isEmpty(protocol)) { + throw new InvalidParameterValueException("Protocol cannot be empty"); + } + + KMSProvider provider; + try { + provider = getKMSProvider(protocol); + } catch (CloudRuntimeException e) { + throw new InvalidParameterValueException("No provider found for protocol: " + protocol); + } + + Map details = cmd.getDetails() != null ? cmd.getDetails() : new HashMap<>(); + provider.validateProfileConfig(details); + + boolean isSystem = cmd.isSystem(); + if (isSystem && !accountManager.isRootAdmin(caller.getId())) { + throw new PermissionDeniedException("Only root admins can create system HSM profiles"); + } + + Account targetAccount = accountManager.finalizeOwner(caller, cmd.getAccountName(), cmd.getDomainId(), + cmd.getProjectId()); + + Long accountId = targetAccount.getId(); + Long domainId = targetAccount.getDomainId(); + + HSMProfileVO profile = new HSMProfileVO( + cmd.getName(), + protocol, + accountId, + domainId, + cmd.getZoneId(), + cmd.getVendorName()); + profile.setSystem(isSystem); + profile = hsmProfileDao.persist(profile); + + if (cmd.getDetails() != null) { + for (Map.Entry entry : cmd.getDetails().entrySet()) { + String key = entry.getKey(); + String value = entry.getValue(); + + if (isSensitiveKey(key)) { + value = DBEncryptionUtil.encrypt(value); + } + + hsmProfileDetailsDao.persist(profile.getId(), key, value); + } + } + + ActionEventUtils.onCompletedActionEvent(CallContext.current().getCallingUserId(), profile.getAccountId(), + 
EventVO.LEVEL_INFO, EventTypes.EVENT_HSM_PROFILE_CREATE, + String.format("created HSM profile with id: %s, name: %s", profile.getUuid(), profile.getName()), + profile.getId(), ApiCommandResourceType.HsmProfile.toString(), CallContext.current().getStartEventId()); + + return profile; + } + + @Override + public ListResponse listHSMProfiles(ListHSMProfilesCmd cmd) { + Account caller = CallContext.current().getCallingAccount(); + if (caller == null) { + return new ListResponse<>(); + } + + List permittedAccounts = new ArrayList<>(); + Ternary domainIdRecursiveListProject = new Ternary<>( + cmd.getDomainId(), cmd.isRecursive(), null); + accountManager.buildACLSearchParameters(caller, cmd.getId(), cmd.getAccountName(), + cmd.getProjectId(), permittedAccounts, domainIdRecursiveListProject, + cmd.listAll(), false); + Long domainId = domainIdRecursiveListProject.first(); + Boolean isRecursive = domainIdRecursiveListProject.second(); + ListProjectResourcesCriteria listProjectResourcesCriteria = domainIdRecursiveListProject.third(); + + SearchBuilder sb = getSearchBuilderForHSMProfiles(domainId, isRecursive, permittedAccounts, + listProjectResourcesCriteria); + SearchCriteria sc = getSearchCriteriaForHSMProfiles(sb, cmd, caller, domainId, isRecursive, + permittedAccounts, listProjectResourcesCriteria); + + Filter searchFilter = new Filter(HSMProfileVO.class, "id", true, cmd.getStartIndex(), cmd.getPageSizeVal()); + Pair, Integer> result = hsmProfileDao.searchAndCount(sc, searchFilter); + List profiles = result.first(); + Integer totalCount = result.second(); + + List responses = new ArrayList<>(); + + boolean isRootAdmin = accountManager.isRootAdmin(caller.getId()); + for (HSMProfileVO profile : profiles) { + // When isSystem=true, non-admin users explicitly requested system profiles, so + // don't mark as limited + // When listall=true, also don't mark as limited since user requested all + // profiles + // If the profile is owned by the user, they should see full details 
even if it + // is a system profile + boolean limited = profile.isSystem() && !isRootAdmin && !(cmd.getIsSystem() || cmd.listAll()) + && profile.getAccountId() != caller.getId(); + responses.add(createHSMProfileResponse(profile, limited)); + } + + ListResponse listResponse = new ListResponse<>(); + listResponse.setResponses(responses, totalCount); + return listResponse; + } + + SearchBuilder getSearchBuilderForHSMProfiles(Long domainId, Boolean isRecursive, + List permittedAccounts, ListProjectResourcesCriteria listProjectResourcesCriteria) { + SearchBuilder sb = hsmProfileDao.createSearchBuilder(); + accountManager.buildACLSearchBuilder(sb, domainId, isRecursive, permittedAccounts, + listProjectResourcesCriteria); + + sb.and("id", sb.entity().getId(), SearchCriteria.Op.EQ); + sb.and("zoneId", sb.entity().getZoneId(), SearchCriteria.Op.EQ); + sb.and("protocol", sb.entity().getProtocol(), SearchCriteria.Op.EQ); + sb.and("enabled", sb.entity().isEnabled(), SearchCriteria.Op.EQ); + sb.and("system", sb.entity().isSystem(), SearchCriteria.Op.EQ); + sb.done(); + return sb; + } + + SearchCriteria getSearchCriteriaForHSMProfiles(SearchBuilder searchBuilder, + ListHSMProfilesCmd cmd, Account caller, Long domainId, Boolean isRecursive, List permittedAccounts, + ListProjectResourcesCriteria listProjectResourcesCriteria) { + SearchCriteria sc = searchBuilder.create(); + + sc.setParametersIfNotNull("id", cmd.getId()); + sc.setParametersIfNotNull("zoneId", cmd.getZoneId()); + sc.setParametersIfNotNull("protocol", cmd.getProtocol()); + sc.setParametersIfNotNull("enabled", cmd.getEnabled()); + sc.setParametersIfNotNull("system", cmd.getIsSystem()); + + // Access control for non-root-admins: + // system profiles (null account_id/domain_id) are globally visible to all + // users, + // so they must always be reachable via "system=true OR ". + // ANDing ACL criteria directly onto sc would exclude them because their + // account_id is NULL. 
+ // + // The `system` field filter already set above (line sc.setParametersIfNotNull) + // correctly + // narrows the final result when the caller passes isSystem=true/false: + // isSystem=true → sc already has system=true → effective: WHERE system=true + // isSystem=false → sc already has system=false → effective: WHERE system=false + // AND ACL + // isSystem=null → no extra filter → effective: WHERE (system=true OR ACL) + // + // Root admins bypass ACL entirely and see everything filtered only by explicit + // params. + boolean isRootAdmin = accountManager.isRootAdmin(caller.getId()); + + if (!isRootAdmin) { + SearchCriteria systemOrAclSC = hsmProfileDao.createSearchCriteria(); + if (cmd.listAll()) { + systemOrAclSC.addOr("system", SearchCriteria.Op.EQ, true); + } + + SearchCriteria aclSC = searchBuilder.create(); + accountManager.buildACLSearchCriteria(aclSC, domainId, isRecursive, permittedAccounts, + listProjectResourcesCriteria); + + if (StringUtils.isNotBlank(aclSC.getWhereClause()) && StringUtils.isNotBlank( + systemOrAclSC.getWhereClause())) { + systemOrAclSC.addOr("id", SearchCriteria.Op.SC, aclSC); + } else if (StringUtils.isNotBlank(aclSC.getWhereClause()) && StringUtils.isBlank( + systemOrAclSC.getWhereClause())) { + systemOrAclSC = aclSC; + } + + if (StringUtils.isNotBlank(systemOrAclSC.getWhereClause())) { + sc.addAnd("id", SearchCriteria.Op.SC, systemOrAclSC); + } + } + return sc; + } + + @Override + @ActionEvent(eventType = EventTypes.EVENT_HSM_PROFILE_DELETE, eventDescription = "Deleting HSM profile") + public boolean deleteHSMProfile(DeleteHSMProfileCmd cmd) throws KMSException { + HSMProfileVO profile = getHSMProfile(cmd.getId()); + Account caller = CallContext.current().getCallingAccount(); + checkHSMProfileAccess(caller, profile, true); + + long keyCount = kmsKeyDao.countByHsmProfileId(profile.getId()); + if (keyCount > 0) { + throw new InvalidParameterValueException( + String.format("Cannot delete HSM profile '%s': it is referenced by %d KMS 
key(s). " + + "Please delete or reassign those keys first.", profile.getName(), keyCount)); + } + + // Check if any KEK versions reference this HSM profile + List kekVersions = kmsKekVersionDao.listByHsmProfileId(profile.getId()); + if (!kekVersions.isEmpty()) { + // Check if any wrapped keys are using these KEK versions + long wrappedKeyCount = 0; + for (KMSKekVersionVO kekVersion : kekVersions) { + wrappedKeyCount += kmsWrappedKeyDao.countByKekVersionId(kekVersion.getId()); + } + if (wrappedKeyCount > 0) { + throw new InvalidParameterValueException( + String.format("Cannot delete HSM profile '%s': it is referenced by %d wrapped key(s) " + + "through KEK versions. Please wait for key rotation to complete or delete those" + + " volumes first.", + profile.getName(), wrappedKeyCount)); + } + } + + getKMSProvider(profile.getProtocol()).invalidateProfileCache(profile.getId()); + hsmProfileDetailsDao.deleteDetails(profile.getId()); + if (hsmProfileDao.remove(profile.getId())) { + ActionEventUtils.onCompletedActionEvent(CallContext.current().getCallingUserId(), profile.getAccountId(), + EventVO.LEVEL_INFO, EventTypes.EVENT_HSM_PROFILE_DELETE, + String.format("Deleted HSM profile with id: %s, name: %s", profile.getUuid(), profile.getName()), + profile.getId(), ApiCommandResourceType.HsmProfile.toString(), + CallContext.current().getStartEventId()); + return true; + } + return false; + } + + @Override + @ActionEvent(eventType = EventTypes.EVENT_HSM_PROFILE_UPDATE, eventDescription = "Updating HSM profile") + public HSMProfile updateHSMProfile(UpdateHSMProfileCmd cmd) throws KMSException { + HSMProfileVO profile = getHSMProfile(cmd.getId()); + Account caller = CallContext.current().getCallingAccount(); + checkHSMProfileAccess(caller, profile, true); + + if (cmd.getName() != null) { + profile.setName(cmd.getName()); + } + if (cmd.getEnabled() != null) { + profile.setEnabled(cmd.getEnabled()); + } + + hsmProfileDao.update(profile.getId(), profile); + + 
ActionEventUtils.onCompletedActionEvent(CallContext.current().getCallingUserId(), profile.getAccountId(), + EventVO.LEVEL_INFO, EventTypes.EVENT_HSM_PROFILE_UPDATE, + String.format("Updated HSM profile with id: %s, name: %s", profile.getUuid(), profile.getName()), + profile.getId(), ApiCommandResourceType.HsmProfile.toString(), CallContext.current().getStartEventId()); + return profile; + } + + @Override + public HSMProfileResponse createHSMProfileResponse(HSMProfile profile) { + return createHSMProfileResponse(profile, false); + } + + private HSMProfileResponse createHSMProfileResponse(HSMProfile profile, boolean limited) { + HSMProfileResponse response = new HSMProfileResponse(); + response.setId(profile.getUuid()); + response.setName(profile.getName()); + response.setVendorName(profile.getVendorName()); + response.setSystem(profile.isSystem()); + + if (profile.getZoneId() != null) { + DataCenterVO zone = dataCenterDao.findById(profile.getZoneId()); + if (zone != null) { + response.setZoneId(zone.getUuid()); + response.setZoneName(zone.getName()); + } + } + + if (limited) { + return response; + } + + response.setProtocol(profile.getProtocol()); + response.setEnabled(profile.isEnabled()); + response.setCreated(profile.getCreated()); + + ApiResponseHelper.populateOwner(response, profile); + + List details = hsmProfileDetailsDao.listByProfileId(profile.getId()); + Map detailsMap = new HashMap<>(); + for (HSMProfileDetailsVO detail : details) { + detailsMap.put(detail.getName(), detail.getValue()); + } + response.setDetails(detailsMap); + response.setObjectName("hsmprofile"); + return response; + } + + boolean isSensitiveKey(String key) { + return KMSProvider.isSensitiveKey(key); + } + + /** + * Find a KMS key by ID and verify the caller has write access to it. 
+ */ + private KMSKeyVO findKMSKeyAndCheckAccess(Long keyId, Account caller) { + KMSKeyVO key = kmsKeyDao.findById(keyId); + if (key == null) { + throw new InvalidParameterValueException("KMS key not found: " + keyId); + } + accountManager.checkAccess(caller, null, true, key); + return key; + } + + /** + * Find an HSM profile by ID, throwing InvalidParameterValueException if not + * found. + */ + private HSMProfileVO getHSMProfile(Long profileId) { + HSMProfileVO profile = hsmProfileDao.findById(profileId); + if (profile == null) { + throw new InvalidParameterValueException("HSM Profile not found: " + profileId); + } + return profile; + } + + /** + * Validate caller's access to an HSM profile. + * For system profiles: read/use access is open to all; modify access requires + * root admin. + * For owned profiles: delegates to ACL checkAccess. + */ + private void checkHSMProfileAccess(Account caller, HSMProfileVO profile, boolean requireModifyAccess) { + if (profile.isSystem()) { + if (requireModifyAccess && !accountManager.isRootAdmin(caller.getId())) { + throw new PermissionDeniedException("Only root admins can modify system HSM profiles"); + } + } else { + accountManager.checkAccess(caller, null, requireModifyAccess, profile); + } + } + + /** + * Parse and validate a key purpose string. Returns null if the input is null. + */ + private KeyPurpose parseKeyPurpose(String purpose) { + if (purpose == null) { + return null; + } + try { + return KeyPurpose.fromString(purpose); + } catch (IllegalArgumentException e) { + throw new InvalidParameterValueException( + "Invalid purpose: " + purpose + ". 
Valid values: volume, tls"); + } + } + + T retryOperation(KmsOperation operation) throws Exception { + int maxRetries = getRetryCount(); + int retryDelay = getRetryDelayMs(); + int timeoutSec = getOperationTimeoutSec(); + + Exception lastException = null; + + for (int attempt = 0; attempt <= maxRetries; attempt++) { + Future future = kmsOperationExecutor.submit(operation::execute); + try { + return future.get(timeoutSec, TimeUnit.SECONDS); + } catch (TimeoutException e) { + future.cancel(true); + // Note: if the underlying provider makes a native (JNI/JNA) call, the daemon + // thread may remain blocked until the native call returns even after cancel — + // this is a known JVM limitation. The caller is unblocked regardless. + lastException = KMSException.transientError( + "KMS operation timed out after " + timeoutSec + "s", e); + logger.warn("KMS operation timed out (attempt {}/{}), timeout={}s", + attempt + 1, maxRetries + 1, timeoutSec); + } catch (ExecutionException e) { + future.cancel(true); + Throwable cause = e.getCause(); + lastException = (cause instanceof Exception) ? 
(Exception) cause : e; + + if (lastException instanceof KMSException && !((KMSException) lastException).isRetryable()) { + throw lastException; + } + + logger.warn("KMS operation failed (attempt {}/{}): {}", + attempt + 1, maxRetries + 1, lastException.getMessage()); + } catch (InterruptedException e) { + future.cancel(true); + Thread.currentThread().interrupt(); + throw new CloudRuntimeException("Interrupted while waiting for KMS operation", e); + } + + if (attempt < maxRetries) { + try { + Thread.sleep((long) retryDelay * (attempt + 1)); + } catch (InterruptedException ie) { + Thread.currentThread().interrupt(); + throw new CloudRuntimeException("Interrupted during KMS retry delay", ie); + } + } else { + logger.error("KMS operation failed after {} attempt(s)", maxRetries + 1); + } + } + + if (lastException != null) { + throw lastException; + } + + throw new CloudRuntimeException("KMS operation failed with no exception details"); + } + + protected int getOperationTimeoutSec() { + return KMSOperationTimeoutSec.value(); + } + + protected int getRetryCount() { + return KMSRetryCount.value(); + } + + protected int getRetryDelayMs() { + return KMSRetryDelayMs.value(); + } + + private KMSException handleKmsException(Exception e) { + if (e instanceof KMSException) { + return (KMSException) e; + } + return KMSException.transientError("KMS operation failed: " + e.getMessage(), e); + } + + public void setKmsProviders(List kmsProviders) { + this.kmsProviders = kmsProviders; + initializeKmsProviderMap(); + } + + private void initializeKmsProviderMap() { + if (kmsProviders == null) { + return; + } + kmsProviderMap.clear(); + for (KMSProvider provider : kmsProviders) { + if (provider != null) { + kmsProviderMap.put(provider.getProviderName().toLowerCase(), provider); + logger.info("Registered KMS provider: {}", provider.getProviderName()); + } + } + } + + @Override + public boolean start() { + super.start(); + initializeKmsProviderMap(); + + for (KMSProvider provider : 
kmsProviderMap.values()) { + if (provider != null) { + try { + boolean healthy = provider.healthCheck(); + if (healthy) { + logger.info("KMS provider {} health check passed", provider.getProviderName()); + } else { + logger.warn("KMS provider {} health check failed", provider.getProviderName()); + } + } catch (Exception e) { + logger.warn("KMS provider {} health check error: {}", provider.getProviderName(), e.getMessage()); + } + } + } + + scheduleRewrapWorker(); + + return true; + } + + private void scheduleRewrapWorker() { + long intervalMs = KMSRewrapIntervalMs.value(); + if (intervalMs <= 0) { + return; + } + + rewrapExecutor = Executors.newScheduledThreadPool(1, r -> { + Thread t = new Thread(r, "KMSRewrapWorker"); + t.setDaemon(true); + return t; + }); + + rewrapExecutor.scheduleAtFixedRate(new ManagedContextRunnable() { + @Override + protected void runInContext() { + try { + processRewrapBatch(); + } catch (final Exception e) { + logger.error("Error while running KMS rewrap worker", e); + } + } + }, 10000L, intervalMs, TimeUnit.MILLISECONDS); + + logger.info("KMS rewrap worker scheduled with interval: {} ms", intervalMs); + } + + /** + * Finds KEK versions marked as Previous and gradually rewraps wrapped keys + * using the active version. 
+ */ + private void processRewrapBatch() { + GlobalLock lock = GlobalLock.getInternLock("kms.rewrap.worker"); + try { + if (lock.lock(5)) { + try { + List previousVersions = kmsKekVersionDao + .findByStatus(KMSKekVersionVO.Status.Previous); + + if (previousVersions.isEmpty()) { + logger.trace("No KEK versions pending rewrap"); + return; + } + + logger.debug("Found {} KEK version(s) with status Previous - processing rewrap batches", + previousVersions.size()); + + int batchSize = KMSRewrapBatchSize.value(); + + for (KMSKekVersionVO oldVersion : previousVersions) { + try { + processVersionRewrap(oldVersion, batchSize); + } catch (Exception e) { + logger.error("Error processing rewrap for KEK version {}: {}", oldVersion, e.getMessage(), + e); + } + } + } finally { + lock.unlock(); + } + } else { + logger.trace("KMS rewrap worker: could not acquire cluster lock, skipping batch"); + } + } catch (Exception e) { + logger.error("Error in rewrap worker: {}", e.getMessage(), e); + } finally { + lock.releaseRef(); + } + } + + private void processVersionRewrap(KMSKekVersionVO oldVersion, int batchSize) throws KMSException { + KMSKeyVO kmsKey = kmsKeyDao.findById(oldVersion.getKmsKeyId()); + if (kmsKey == null) { + logger.warn("KMS key not found for KEK version {}, skipping", oldVersion); + return; + } + + KMSKekVersionVO activeVersion = kmsKekVersionDao.getActiveVersion(oldVersion.getKmsKeyId()); + if (activeVersion == null) { + logger.warn("No active KEK version found for KMS key {}, skipping", kmsKey); + return; + } + + List keysToRewrap = kmsWrappedKeyDao.listByKekVersionId(oldVersion.getId(), batchSize); + + if (keysToRewrap.isEmpty()) { + logger.info("All wrapped keys rewrapped for KEK version {} (v{}) - archiving and deleting from provider", + oldVersion.getUuid(), oldVersion.getVersionNumber()); + + oldVersion.setStatus(KMSKekVersionVO.Status.Archived); + kmsKekVersionDao.update(oldVersion.getId(), oldVersion); + + // Delete the old KEK from the HSM since no wrapped 
keys reference it anymore + try { + HSMProfileVO oldProfile = hsmProfileDao.findById(oldVersion.getHsmProfileId()); + if (oldProfile != null) { + KMSProvider provider = getKMSProvider(oldProfile.getProtocol()); + provider.deleteKek(oldVersion.getKekLabel()); + logger.info("Deleted archived KEK {} (v{}) from provider {}", + oldVersion.getKekLabel(), oldVersion.getVersionNumber(), provider.getProviderName()); + } + } catch (Exception e) { + logger.warn("Failed to delete archived KEK {} (v{}) from provider: {}", + oldVersion.getKekLabel(), oldVersion.getVersionNumber(), e.getMessage()); + } + + return; + } + + HSMProfileVO hsmProfile = hsmProfileDao.findById(activeVersion.getHsmProfileId()); + KMSProvider provider = getKMSProvider(hsmProfile.getProtocol()); + + int successCount = 0; + int failureCount = 0; + + for (KMSWrappedKeyVO wrappedKeyVO : keysToRewrap) { + try { + rewrapSingleKey(wrappedKeyVO, kmsKey, activeVersion, provider); + successCount++; + } catch (Exception e) { + failureCount++; + logger.warn("Failed to rewrap key {} for KMS key {}: {}", + wrappedKeyVO.getId(), kmsKey, e.getMessage()); + // Continue with next key - will retry in next run + } + } + + logger.info("Rewrapped batch for KMS key {} (KEK v{} -> v{}): {} success, {} failures", + kmsKey, oldVersion.getVersionNumber(), activeVersion.getVersionNumber(), + successCount, failureCount); + } + + void rewrapSingleKey(KMSWrappedKeyVO wrappedKeyVO, KMSKeyVO kmsKey, + KMSKekVersionVO newVersion, KMSProvider provider) { + byte[] dek = null; + try { + dek = unwrapKey(wrappedKeyVO.getId()); + + WrappedKey newWrapped = provider.wrapKey( + dek, + kmsKey.getPurpose(), + newVersion.getKekLabel(), + newVersion.getHsmProfileId()); + + wrappedKeyVO.setKekVersionId(newVersion.getId()); + wrappedKeyVO.setWrappedBlob(newWrapped.getWrappedKeyMaterial()); + kmsWrappedKeyDao.update(wrappedKeyVO.getId(), wrappedKeyVO); + } finally { + if (dek != null) { + Arrays.fill(dek, (byte) 0); + } + } + } + + @Override + public 
boolean stop() { + if (rewrapExecutor != null) { + rewrapExecutor.shutdownNow(); + rewrapExecutor = null; + } + kmsOperationExecutor.shutdownNow(); + return super.stop(); + } + + @Override + public String getConfigComponentName() { + return KMSManager.class.getSimpleName(); + } + + @Override + public ConfigKey[] getConfigKeys() { + return new ConfigKey[]{ + KMSDekSizeBits, + KMSRetryCount, + KMSRetryDelayMs, + KMSOperationTimeoutSec, + KMSRewrapBatchSize, + KMSRewrapIntervalMs + }; + } + + @Override + public List> getCommands() { + List> cmdList = new ArrayList<>(); + cmdList.add(ListKMSKeysCmd.class); + cmdList.add(CreateKMSKeyCmd.class); + cmdList.add(UpdateKMSKeyCmd.class); + cmdList.add(DeleteKMSKeyCmd.class); + cmdList.add(RotateKMSKeyCmd.class); + cmdList.add(MigrateVolumesToKMSCmd.class); + cmdList.add(AddHSMProfileCmd.class); + cmdList.add(ListHSMProfilesCmd.class); + cmdList.add(UpdateHSMProfileCmd.class); + cmdList.add(DeleteHSMProfileCmd.class); + + return cmdList; + } + + @FunctionalInterface + interface KmsOperation { + T execute() throws Exception; + } +} diff --git a/server/src/main/java/org/apache/cloudstack/vm/UnmanagedVMsManagerImpl.java b/server/src/main/java/org/apache/cloudstack/vm/UnmanagedVMsManagerImpl.java index 14c67417015c..7fb48d43922a 100644 --- a/server/src/main/java/org/apache/cloudstack/vm/UnmanagedVMsManagerImpl.java +++ b/server/src/main/java/org/apache/cloudstack/vm/UnmanagedVMsManagerImpl.java @@ -2688,7 +2688,7 @@ private UserVm importExternalKvmVirtualMachine(final UnmanagedInstanceTO unmanag } DiskOfferingVO diskOffering = diskOfferingDao.findById(serviceOffering.getDiskOfferingId()); String rootVolumeName = String.format("ROOT-%s", userVm.getId()); - DiskProfile diskProfile = volumeManager.allocateRawVolume(Volume.Type.ROOT, rootVolumeName, diskOffering, null, null, null, userVm, template, owner, null); + DiskProfile diskProfile = volumeManager.allocateRawVolume(Volume.Type.ROOT, rootVolumeName, diskOffering, null, null, 
null, userVm, template, owner, null, null); DiskProfile[] dataDiskProfiles = new DiskProfile[dataDisks.size()]; int diskSeq = 0; @@ -2697,7 +2697,7 @@ private UserVm importExternalKvmVirtualMachine(final UnmanagedInstanceTO unmanag throw new InvalidParameterValueException(String.format("Disk ID: %s size is invalid", disk.getDiskId())); } DiskOffering offering = diskOfferingDao.findById(dataDiskOfferingMap.get(disk.getDiskId())); - DiskProfile dataDiskProfile = volumeManager.allocateRawVolume(Volume.Type.DATADISK, String.format("DATA-%d-%s", userVm.getId(), disk.getDiskId()), offering, null, null, null, userVm, template, owner, null); + DiskProfile dataDiskProfile = volumeManager.allocateRawVolume(Volume.Type.DATADISK, String.format("DATA-%d-%s", userVm.getId(), disk.getDiskId()), offering, null, null, null, userVm, template, owner, null, null); dataDiskProfiles[diskSeq++] = dataDiskProfile; } @@ -2826,7 +2826,7 @@ private UserVm importKvmVirtualMachineFromDisk(final ImportSource importSource, } DiskOfferingVO diskOffering = diskOfferingDao.findById(serviceOffering.getDiskOfferingId()); String rootVolumeName = String.format("ROOT-%s", userVm.getId()); - DiskProfile diskProfile = volumeManager.allocateRawVolume(Volume.Type.ROOT, rootVolumeName, diskOffering, null, null, null, userVm, template, owner, null); + DiskProfile diskProfile = volumeManager.allocateRawVolume(Volume.Type.ROOT, rootVolumeName, diskOffering, null, null, null, userVm, template, owner, null, null); final VirtualMachineProfile profile = new VirtualMachineProfileImpl(userVm, template, serviceOffering, owner, null); ServiceOfferingVO dummyOffering = serviceOfferingDao.findById(userVm.getId(), serviceOffering.getId()); diff --git a/server/src/main/resources/META-INF/cloudstack/core/spring-server-core-managers-context.xml b/server/src/main/resources/META-INF/cloudstack/core/spring-server-core-managers-context.xml index b90c40dc95e7..3fdde048e72d 100644 --- 
a/server/src/main/resources/META-INF/cloudstack/core/spring-server-core-managers-context.xml +++ b/server/src/main/resources/META-INF/cloudstack/core/spring-server-core-managers-context.xml @@ -326,6 +326,11 @@ + + + + + diff --git a/server/src/test/java/com/cloud/network/as/AutoScaleManagerImplTest.java b/server/src/test/java/com/cloud/network/as/AutoScaleManagerImplTest.java index c186083b8ce1..6363c85d0bc5 100644 --- a/server/src/test/java/com/cloud/network/as/AutoScaleManagerImplTest.java +++ b/server/src/test/java/com/cloud/network/as/AutoScaleManagerImplTest.java @@ -1281,7 +1281,7 @@ public void testCreateNewVM1() throws ResourceUnavailableException, Insufficient when(zoneMock.getNetworkType()).thenReturn(DataCenter.NetworkType.Basic); when(userVmService.createBasicSecurityGroupVirtualMachine(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), eq(userData), eq(userDataId), eq(userDataDetails.toString()), any(), any(), any(), eq(true), any(), any(), any(), - any(), any(), any(), any(), eq(true), any(), any(), any())).thenReturn(userVmMock); + any(), any(), any(), any(), eq(true), any(), any(), any(), any())).thenReturn(userVmMock); UserVm result = autoScaleManagerImplSpy.createNewVM(asVmGroupMock); @@ -1292,7 +1292,7 @@ public void testCreateNewVM1() throws ResourceUnavailableException, Insufficient Mockito.verify(userVmService).createBasicSecurityGroupVirtualMachine(any(), any(), any(), any(), any(), matches(vmHostNamePattern), matches(vmHostNamePattern), any(), any(), any(), any(), any(), any(), eq(userData), eq(userDataId), eq(userDataDetails.toString()), any(), any(), any(), eq(true), any(), any(), any(), - any(), any(), any(), any(), eq(true), any(), any(), any()); + any(), any(), any(), any(), eq(true), any(), any(), any(), any()); Mockito.verify(asVmGroupMock).setNextVmSeq(nextVmSeq + 1); } @@ -1328,7 +1328,7 @@ public void testCreateNewVM2() throws ResourceUnavailableException, Insufficient 
when(zoneMock.getNetworkType()).thenReturn(DataCenter.NetworkType.Advanced); when(userVmService.createAdvancedSecurityGroupVirtualMachine(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), eq(userData), eq(userDataId), eq(userDataDetails.toString()), any(), any(), any(), any(), any(), any(), - any(), any(), any(), any(), any(), eq(true), any(), any(), any(), any())).thenReturn(userVmMock); + any(), any(), any(), any(), any(), eq(true), any(), any(), any(), any(), any())).thenReturn(userVmMock); when(networkModel.checkSecurityGroupSupportForNetwork(account, zoneMock, List.of(networkId), Collections.emptyList())).thenReturn(true); @@ -1341,7 +1341,7 @@ public void testCreateNewVM2() throws ResourceUnavailableException, Insufficient Mockito.verify(userVmService).createAdvancedSecurityGroupVirtualMachine(any(), any(), any(), any(), any(), any(), matches(vmHostNamePattern), matches(vmHostNamePattern), any(), any(), any(), any(), any(), any(), eq(userData), eq(userDataId), eq(userDataDetails.toString()), any(), any(), any(), any(), any(), any(), - any(), any(), any(), any(), any(), eq(true), any(), any(), any(), any()); + any(), any(), any(), any(), any(), eq(true), any(), any(), any(), any(), any()); Mockito.verify(asVmGroupMock).setNextVmSeq(nextVmSeq + 2); } @@ -1377,7 +1377,7 @@ public void testCreateNewVM3() throws ResourceUnavailableException, Insufficient when(zoneMock.getNetworkType()).thenReturn(DataCenter.NetworkType.Advanced); when(userVmService.createAdvancedVirtualMachine(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), eq(userData), eq(userDataId), eq(userDataDetails.toString()), any(), any(), any(), eq(true), any(), any(), any(), - any(), any(), any(), any(), eq(true), any(), any(), any(), any())).thenReturn(userVmMock); + any(), any(), any(), any(), eq(true), any(), any(), any(), any(), any())).thenReturn(userVmMock); 
when(networkModel.checkSecurityGroupSupportForNetwork(account, zoneMock, List.of(networkId), Collections.emptyList())).thenReturn(false); @@ -1390,7 +1390,7 @@ public void testCreateNewVM3() throws ResourceUnavailableException, Insufficient Mockito.verify(userVmService).createAdvancedVirtualMachine(any(), any(), any(), any(), any(), matches(vmHostNamePattern), matches(vmHostNamePattern), any(), any(), any(), any(), any(), any(), eq(userData), eq(userDataId), eq(userDataDetails.toString()), any(), any(), any(), eq(true), any(), any(), any(), - any(), any(), any(), any(), eq(true), any(), any(), any(), any()); + any(), any(), any(), any(), eq(true), any(), any(), any(), any(), any()); Mockito.verify(asVmGroupMock).setNextVmSeq(nextVmSeq + 3); } diff --git a/server/src/test/java/com/cloud/user/AccountManagentImplTestBase.java b/server/src/test/java/com/cloud/user/AccountManagentImplTestBase.java index 8c790b78da0b..93ed3c87829a 100644 --- a/server/src/test/java/com/cloud/user/AccountManagentImplTestBase.java +++ b/server/src/test/java/com/cloud/user/AccountManagentImplTestBase.java @@ -66,6 +66,7 @@ import org.apache.cloudstack.engine.service.api.OrchestrationService; import org.apache.cloudstack.framework.config.dao.ConfigurationDao; import org.apache.cloudstack.framework.messagebus.MessageBus; +import org.apache.cloudstack.kms.KMSManager; import org.apache.cloudstack.network.RoutedIpv4Manager; import org.apache.cloudstack.network.dao.NetworkPermissionDao; import org.apache.cloudstack.region.gslb.GlobalLoadBalancerRuleDao; @@ -212,6 +213,8 @@ public class AccountManagentImplTestBase { AccountService _accountService; @Mock RoutedIpv4Manager routedIpv4Manager; + @Mock + KMSManager kmsManager; @Before public void setup() { diff --git a/server/src/test/java/com/cloud/user/AccountManagerImplTest.java b/server/src/test/java/com/cloud/user/AccountManagerImplTest.java index e5d09ba9141a..40aedefc1683 100644 --- 
a/server/src/test/java/com/cloud/user/AccountManagerImplTest.java +++ b/server/src/test/java/com/cloud/user/AccountManagerImplTest.java @@ -200,6 +200,7 @@ public void deleteUserAccount() { Mockito.when(_sshKeyPairDao.remove(Mockito.anyLong())).thenReturn(true); Mockito.when(userDataDao.removeByAccountId(Mockito.anyLong())).thenReturn(222); Mockito.when(sslCertDao.removeByAccountId(Mockito.anyLong())).thenReturn(333); + Mockito.when(kmsManager.deleteKMSKeysByAccountId(Mockito.anyLong())).thenReturn(true); Mockito.doNothing().when(accountManagerImpl).deleteWebhooksForAccount(Mockito.anyLong()); Mockito.doNothing().when(accountManagerImpl).verifyCallerPrivilegeForUserOrAccountOperations((Account) any()); diff --git a/server/src/test/java/com/cloud/vm/UserVmManagerImplTest.java b/server/src/test/java/com/cloud/vm/UserVmManagerImplTest.java index 4edafb3a05a8..264a04e73597 100644 --- a/server/src/test/java/com/cloud/vm/UserVmManagerImplTest.java +++ b/server/src/test/java/com/cloud/vm/UserVmManagerImplTest.java @@ -1196,14 +1196,14 @@ public void createVirtualMachine() throws ResourceUnavailableException, Insuffic when(_dcMock.getNetworkType()).thenReturn(DataCenter.NetworkType.Basic); Mockito.doReturn(userVmVoMock).when(userVmManagerImpl).createBasicSecurityGroupVirtualMachine(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), nullable(Boolean.class), any(), any(), any(), - any(), any(), any(), any(), eq(true), any(), any(), any()); + any(), any(), any(), any(), eq(true), any(), any(), any(), any()); UserVm result = userVmManagerImpl.createVirtualMachine(deployVMCmd); assertEquals(userVmVoMock, result); Mockito.verify(vnfTemplateManager).validateVnfApplianceNics(templateMock, null, Collections.emptyMap()); Mockito.verify(userVmManagerImpl).createBasicSecurityGroupVirtualMachine(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), 
any(), any(), any(), nullable(Boolean.class), any(), any(), any(), - any(), any(), any(), any(), eq(true), any(), any(), any()); + any(), any(), any(), any(), eq(true), any(), any(), any(), any()); } private List mockVolumesForIsAnyVmVolumeUsingLocalStorageTest(int localVolumes, int nonLocalVolumes) { @@ -1456,7 +1456,7 @@ public void createVirtualMachineWithCloudRuntimeException() throws ResourceUnava doThrow(cre).when(userVmManagerImpl).createBasicSecurityGroupVirtualMachine(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), nullable(Boolean.class), any(), any(), any(), - any(), any(), any(), any(), eq(true), any(), any(), any()); + any(), any(), any(), any(), eq(true), any(), any(), any(), any()); CloudRuntimeException creThrown = assertThrows(CloudRuntimeException.class, () -> userVmManagerImpl.createVirtualMachine(deployVMCmd)); ArrayList proxyIdList = creThrown.getIdProxyList(); @@ -3369,7 +3369,7 @@ public void testAllocateVMFromBackupUsingCmdValues() throws InsufficientCapacity Mockito.doReturn(userVmVoMock).when(userVmManagerImpl).createAdvancedVirtualMachine(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), nullable(Boolean.class), any(), any(), any(), - any(), any(), any(), any(), eq(true), any(), any(), any(), any()); + any(), any(), any(), any(), eq(true), any(), any(), any(), any(), any()); UserVm result = userVmManagerImpl.allocateVMFromBackup(cmd); @@ -3377,7 +3377,7 @@ public void testAllocateVMFromBackupUsingCmdValues() throws InsufficientCapacity Mockito.verify(backupDao).findById(backupId); Mockito.verify(userVmManagerImpl).createAdvancedVirtualMachine(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), nullable(Boolean.class), any(), any(), any(), - any(), any(), any(), any(), eq(true), any(), any(), 
any(), any()); + any(), any(), any(), any(), eq(true), any(), any(), any(), any(), any()); } @Test @@ -3428,14 +3428,14 @@ public void testAllocateVMFromBackupUsingBackupValues() throws InsufficientCapac Mockito.doReturn(userVmVoMock).when(userVmManagerImpl).createAdvancedVirtualMachine(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), eq(false), any(), any(), any(), - any(), any(), any(), any(), eq(false), any(), any(), any(), any()); + any(), any(), any(), any(), eq(false), any(), any(), any(), any(), any()); UserVm result = userVmManagerImpl.allocateVMFromBackup(cmd); assertNotNull(result); Mockito.verify(userVmManagerImpl).createAdvancedVirtualMachine(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), eq(false), any(), any(), any(), - any(), any(), any(), any(), eq(false), any(), any(), any(), any()); + any(), any(), any(), any(), eq(false), any(), any(), any(), any(), any()); } @Test @@ -3545,7 +3545,7 @@ public void testAllocateVMFromBackupUsingCmdValuesWithISO() throws InsufficientC Mockito.doReturn(userVmVoMock).when(userVmManagerImpl).createAdvancedVirtualMachine(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), nullable(Boolean.class), any(), any(), any(), - any(), any(), any(), any(), eq(true), any(), any(), any(), any()); + any(), any(), any(), any(), eq(true), any(), any(), any(), any(), any()); UserVm result = userVmManagerImpl.allocateVMFromBackup(cmd); @@ -3553,7 +3553,7 @@ public void testAllocateVMFromBackupUsingCmdValuesWithISO() throws InsufficientC Mockito.verify(backupDao).findById(backupId); Mockito.verify(userVmManagerImpl).createAdvancedVirtualMachine(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), 
nullable(Boolean.class), any(), any(), any(), - any(), any(), any(), any(), eq(true), any(), any(), any(), any()); + any(), any(), any(), any(), eq(true), any(), any(), any(), any(), any()); } @Test @@ -3607,14 +3607,14 @@ public void testAllocateVMFromBackupUsingBackupValuesWithISO() throws Insufficie Mockito.doReturn(userVmVoMock).when(userVmManagerImpl).createAdvancedVirtualMachine(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), eq(false), any(), any(), any(), - any(), any(), any(), any(), eq(false), any(), any(), any(), any()); + any(), any(), any(), any(), eq(false), any(), any(), any(), any(), any()); UserVm result = userVmManagerImpl.allocateVMFromBackup(cmd); assertNotNull(result); Mockito.verify(userVmManagerImpl).createAdvancedVirtualMachine(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), eq(false), any(), any(), any(), - any(), any(), any(), any(), eq(false), any(), any(), any(), any()); + any(), any(), any(), any(), eq(false), any(), any(), any(), any(), any()); } @Test @@ -3909,7 +3909,7 @@ public void createVirtualMachineWithExistingVolume() throws ResourceUnavailableE when(_dcMock.getNetworkType()).thenReturn(DataCenter.NetworkType.Basic); Mockito.doReturn(userVmVoMock).when(userVmManagerImpl).createBasicSecurityGroupVirtualMachine(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), nullable(Boolean.class), any(), any(), any(), - any(), any(), any(), any(), eq(true), any(), any(), any()); + any(), any(), any(), any(), eq(true), any(), any(), any(), any()); userVmManagerImpl.createVirtualMachine(deployVMCmd); @@ -3947,7 +3947,7 @@ public void createVirtualMachineWithExistingSnapshot() throws ResourceUnavailabl when(_dcMock.getNetworkType()).thenReturn(DataCenter.NetworkType.Basic); 
Mockito.doReturn(userVmVoMock).when(userVmManagerImpl).createBasicSecurityGroupVirtualMachine(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), nullable(Boolean.class), any(), any(), any(), - any(), any(), any(), any(), eq(true), any(), any(), any()); + any(), any(), any(), any(), eq(true), any(), any(), any(), any()); userVmManagerImpl.createVirtualMachine(deployVMCmd); @@ -3998,7 +3998,7 @@ public void testAllocateVMFromBackupWithVmSettingsRestoration() throws Insuffici when(createdVm.getId()).thenReturn(2L); Mockito.doReturn(createdVm).when(userVmManagerImpl).createAdvancedVirtualMachine(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), nullable(Boolean.class), any(), any(), any(), - any(), any(), any(), any(), eq(true), any(), any(), any(), any()); + any(), any(), any(), any(), eq(true), any(), any(), any(), any(), any()); Map existingDetails = new HashMap<>(); existingDetails.put("existingKey", "existingValue"); @@ -4066,7 +4066,7 @@ public void testAllocateVMFromBackupWithOverrideDiskOfferingComputeOnly() throws when(createdVm.getId()).thenReturn(2L); Mockito.doReturn(createdVm).when(userVmManagerImpl).createAdvancedVirtualMachine(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), nullable(Boolean.class), any(), any(), any(), - any(), any(), any(), any(), eq(true), any(), any(), any(), any()); + any(), any(), any(), any(), eq(true), any(), any(), any(), any(), any()); UserVm result = userVmManagerImpl.allocateVMFromBackup(cmd); diff --git a/server/src/test/java/org/apache/cloudstack/kms/KMSManagerImplHSMTest.java b/server/src/test/java/org/apache/cloudstack/kms/KMSManagerImplHSMTest.java new file mode 100644 index 000000000000..94218d0e1e02 --- /dev/null +++ 
b/server/src/test/java/org/apache/cloudstack/kms/KMSManagerImplHSMTest.java @@ -0,0 +1,157 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.cloudstack.kms; + +import com.cloud.api.ApiResponseHelper; +import com.cloud.dc.dao.DataCenterDao; +import com.cloud.domain.dao.DomainDao; +import com.cloud.user.AccountManager; +import org.apache.cloudstack.api.response.HSMProfileResponse; +import org.apache.cloudstack.kms.dao.HSMProfileDao; +import org.apache.cloudstack.kms.dao.HSMProfileDetailsDao; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockedStatic; +import org.mockito.Mockito; +import org.mockito.Spy; +import org.mockito.junit.MockitoJUnitRunner; + +import java.util.Arrays; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** + * Unit tests for HSM-related business logic in KMSManagerImpl + * Tests sensitive key detection, profile resolution hierarchy, and provider matching + */ 
+@RunWith(MockitoJUnitRunner.class) +public class KMSManagerImplHSMTest { + + private final Long testAccountId = 100L; + @Spy + @InjectMocks + private KMSManagerImpl kmsManager; + @Mock + private HSMProfileDao hsmProfileDao; + @Mock + private HSMProfileDetailsDao hsmProfileDetailsDao; + @Mock + private AccountManager accountManager; + @Mock + private DataCenterDao dataCenterDao; + @Mock + private DomainDao domainDao; + + /** + * Test: isSensitiveKey correctly identifies "pin" as sensitive + */ + @Test + public void testIsSensitiveKey_DetectsPin() { + boolean result = kmsManager.isSensitiveKey("pin"); + assertTrue("'pin' should be detected as sensitive", result); + } + + /** + * Test: isSensitiveKey correctly identifies "password" as sensitive + */ + @Test + public void testIsSensitiveKey_DetectsPassword() { + boolean result = kmsManager.isSensitiveKey("password"); + assertTrue("'password' should be detected as sensitive", result); + } + + /** + * Test: isSensitiveKey correctly identifies keys containing "secret" as sensitive + */ + @Test + public void testIsSensitiveKey_DetectsSecret() { + boolean result = kmsManager.isSensitiveKey("api_secret"); + assertTrue("'api_secret' should be detected as sensitive", result); + } + + /** + * Test: isSensitiveKey correctly identifies "private_key" as sensitive + */ + @Test + public void testIsSensitiveKey_DetectsPrivateKey() { + boolean result = kmsManager.isSensitiveKey("private_key"); + assertTrue("'private_key' should be detected as sensitive", result); + } + + /** + * Test: isSensitiveKey correctly identifies non-sensitive keys + */ + @Test + public void testIsSensitiveKey_DoesNotDetectNonSensitive() { + boolean result = kmsManager.isSensitiveKey("library_path"); + assertFalse("'library_path' should not be detected as sensitive", result); + } + + /** + * Test: isSensitiveKey is case-insensitive + */ + @Test + public void testIsSensitiveKey_CaseInsensitive() { + boolean resultUpper = kmsManager.isSensitiveKey("PIN"); + 
boolean resultMixed = kmsManager.isSensitiveKey("Password"); + + assertTrue("'PIN' (uppercase) should be detected as sensitive", resultUpper); + assertTrue("'Password' (mixed case) should be detected as sensitive", resultMixed); + } + + /** + * Test: createHSMProfileResponse populates details correctly + */ + @Test + public void testCreateHSMProfileResponse_PopulatesDetails() { + Long profileId = 10L; + + HSMProfileVO profile = mock(HSMProfileVO.class); + when(profile.getId()).thenReturn(profileId); + when(profile.getUuid()).thenReturn("profile-uuid"); + when(profile.getName()).thenReturn("test-profile"); + when(profile.getProtocol()).thenReturn("PKCS11"); + when(profile.getVendorName()).thenReturn("TestVendor"); + when(profile.isEnabled()).thenReturn(true); + when(profile.getCreated()).thenReturn(new java.util.Date()); + + HSMProfileDetailsVO detail1 = mock(HSMProfileDetailsVO.class); + when(detail1.getName()).thenReturn("library_path"); + when(detail1.getValue()).thenReturn("/path/to/lib.so"); + + HSMProfileDetailsVO detail2 = mock(HSMProfileDetailsVO.class); + when(detail2.getName()).thenReturn("pin"); + when(detail2.getValue()).thenReturn("ENC(encrypted_value)"); + + when(hsmProfileDetailsDao.listByProfileId(profileId)).thenReturn(Arrays.asList(detail1, detail2)); + + try (MockedStatic mockedApiResponseHelper = Mockito.mockStatic(ApiResponseHelper.class)) { + HSMProfileResponse response = kmsManager.createHSMProfileResponse(profile); + + assertNotNull("Response should not be null", response); + verify(hsmProfileDetailsDao).listByProfileId(profileId); + } + } +} diff --git a/server/src/test/java/org/apache/cloudstack/kms/KMSManagerImplKeyCreationTest.java b/server/src/test/java/org/apache/cloudstack/kms/KMSManagerImplKeyCreationTest.java new file mode 100644 index 000000000000..147181ce3d2b --- /dev/null +++ b/server/src/test/java/org/apache/cloudstack/kms/KMSManagerImplKeyCreationTest.java @@ -0,0 +1,175 @@ +// Licensed to the Apache Software Foundation (ASF) 
under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.cloudstack.kms; + +import com.cloud.event.ActionEventUtils; +import org.apache.cloudstack.framework.kms.KMSException; +import org.apache.cloudstack.framework.kms.KMSProvider; +import org.apache.cloudstack.framework.kms.KeyPurpose; +import org.apache.cloudstack.kms.dao.HSMProfileDao; +import org.apache.cloudstack.kms.dao.KMSKekVersionDao; +import org.apache.cloudstack.kms.dao.KMSKeyDao; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.ArgumentCaptor; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockedStatic; +import org.mockito.Mockito; +import org.mockito.Spy; +import org.mockito.junit.MockitoJUnitRunner; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** + * Unit tests for KMS key creation logic in KMSManagerImpl + * 
Tests key creation with explicit and auto-resolved HSM profiles + */ +@RunWith(MockitoJUnitRunner.class) +public class KMSManagerImplKeyCreationTest { + + private final Long testAccountId = 100L; + private final Long testDomainId = 1L; + private final Long testZoneId = 1L; + private final String testProviderName = "pkcs11"; + @Spy + @InjectMocks + private KMSManagerImpl kmsManager; + @Mock + private KMSKeyDao kmsKeyDao; + @Mock + private KMSKekVersionDao kmsKekVersionDao; + @Mock + private HSMProfileDao hsmProfileDao; + @Mock + private KMSProvider kmsProvider; + + /** + * Test: createUserKMSKey uses explicit HSM profile when provided + */ + @Test + public void testCreateUserKMSKey_WithExplicitProfile() throws Exception { + // Setup: Explicit profile name provided + String hsmProfileName = "user-hsm-profile"; + Long hsmProfileId = 10L; + + HSMProfileVO profile = mock(HSMProfileVO.class); + when(profile.getProtocol()).thenReturn(testProviderName); + when(hsmProfileDao.findById(hsmProfileId)).thenReturn(profile); + + // Mock provider KEK creation + when(kmsProvider.createKek(any(KeyPurpose.class), anyString(), anyInt(), eq(hsmProfileId))) + .thenReturn("test-kek-label"); + + // Mock DAO persist operations + KMSKeyVO mockKey = mock(KMSKeyVO.class); + when(mockKey.getId()).thenReturn(1L); + when(kmsKeyDao.persist(any(KMSKeyVO.class))).thenReturn(mockKey); + + KMSKekVersionVO mockVersion = mock(KMSKekVersionVO.class); + when(kmsKekVersionDao.persist(any(KMSKekVersionVO.class))).thenReturn(mockVersion); + + doReturn(kmsProvider).when(kmsManager).getKMSProvider(testProviderName); + + try (MockedStatic actionEventUtils = Mockito.mockStatic(ActionEventUtils.class)) { + actionEventUtils.when(() -> ActionEventUtils.onCompletedActionEvent( + Mockito.anyLong(), Mockito.anyLong(), Mockito.anyString(), + Mockito.anyString(), Mockito.anyString(), Mockito.anyLong(), + Mockito.anyString(), Mockito.anyInt())).thenReturn(2L); + KMSKey result = kmsManager.createUserKMSKey(testAccountId, 
testDomainId, + testZoneId, "test-key", "Test key", KeyPurpose.VOLUME_ENCRYPTION, 256, hsmProfileId); + + // Verify explicit profile was used + assertNotNull(result); + verify(hsmProfileDao).findById(hsmProfileId); + verify(kmsProvider).createKek(any(KeyPurpose.class), anyString(), eq(256), eq(hsmProfileId)); + + // Verify KMSKeyVO was created with correct profile ID + ArgumentCaptor keyCaptor = ArgumentCaptor.forClass(KMSKeyVO.class); + verify(kmsKeyDao).persist(keyCaptor.capture()); + KMSKeyVO createdKey = keyCaptor.getValue(); + assertEquals(hsmProfileId, createdKey.getHsmProfileId()); + } + } + + /** + * Test: createUserKMSKey throws exception when explicit profile not found + */ + @Test(expected = KMSException.class) + public void testCreateUserKMSKey_ThrowsExceptionWhenProfileNotFound() throws KMSException { + // Setup: Profile name provided but doesn't exist + String invalidProfileName = "non-existent-profile"; + long hsmProfileId = 1L; + when(hsmProfileDao.findById(hsmProfileId)).thenReturn(null); + + kmsManager.createUserKMSKey(testAccountId, testDomainId, testZoneId, + "test-key", "Test key", KeyPurpose.VOLUME_ENCRYPTION, 256, hsmProfileId); + } + + /** + * Test: createUserKMSKey creates KEK version with correct profile ID + */ + @Test + public void testCreateUserKMSKey_CreatesKekVersionWithProfileId() throws Exception { + // Setup + Long hsmProfileId = 40L; + + HSMProfileVO profile = mock(HSMProfileVO.class); + when(profile.getProtocol()).thenReturn(testProviderName); + when(hsmProfileDao.findById(hsmProfileId)).thenReturn(profile); + + when(kmsProvider.createKek(any(KeyPurpose.class), anyString(), anyInt(), eq(hsmProfileId))) + .thenReturn("test-kek-label"); + + KMSKeyVO mockKey = mock(KMSKeyVO.class); + when(mockKey.getId()).thenReturn(1L); + when(kmsKeyDao.persist(any(KMSKeyVO.class))).thenReturn(mockKey); + + KMSKekVersionVO mockVersion = mock(KMSKekVersionVO.class); + 
when(kmsKekVersionDao.persist(any(KMSKekVersionVO.class))).thenReturn(mockVersion); + + doReturn(kmsProvider).when(kmsManager).getKMSProvider(testProviderName); + + try (MockedStatic actionEventUtils = Mockito.mockStatic(ActionEventUtils.class)) { + actionEventUtils.when(() -> ActionEventUtils.onCompletedActionEvent( + Mockito.anyLong(), Mockito.anyLong(), Mockito.anyString(), + Mockito.anyString(), Mockito.anyString(), Mockito.anyLong(), + Mockito.anyString(), Mockito.anyInt())).thenReturn(2L); + + kmsManager.createUserKMSKey(testAccountId, testDomainId, testZoneId, + "test-key", "Test key", KeyPurpose.VOLUME_ENCRYPTION, 256, hsmProfileId); + + // Verify KEK version was created with correct profile ID + ArgumentCaptor versionCaptor = ArgumentCaptor.forClass(KMSKekVersionVO.class); + verify(kmsKekVersionDao).persist(versionCaptor.capture()); + KMSKekVersionVO createdVersion = versionCaptor.getValue(); + assertEquals(hsmProfileId, createdVersion.getHsmProfileId()); + assertEquals(Integer.valueOf(1), createdVersion.getVersionNumber()); + assertEquals("test-kek-label", createdVersion.getKekLabel()); + } + } +} diff --git a/server/src/test/java/org/apache/cloudstack/kms/KMSManagerImplKeyRotationTest.java b/server/src/test/java/org/apache/cloudstack/kms/KMSManagerImplKeyRotationTest.java new file mode 100644 index 000000000000..2631fd758177 --- /dev/null +++ b/server/src/test/java/org/apache/cloudstack/kms/KMSManagerImplKeyRotationTest.java @@ -0,0 +1,357 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.cloudstack.kms; + +import org.apache.cloudstack.framework.kms.KMSException; +import org.apache.cloudstack.framework.kms.KMSProvider; +import org.apache.cloudstack.framework.kms.KeyPurpose; +import org.apache.cloudstack.framework.kms.WrappedKey; +import org.apache.cloudstack.kms.dao.HSMProfileDao; +import org.apache.cloudstack.kms.dao.KMSKekVersionDao; +import org.apache.cloudstack.kms.dao.KMSKeyDao; +import org.apache.cloudstack.kms.dao.KMSWrappedKeyDao; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.ArgumentCaptor; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Spy; +import org.mockito.junit.MockitoJUnitRunner; + +import java.util.List; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** + * Unit tests for KMS key rotation logic in KMSManagerImpl + * Tests key rotation within same HSM and cross-HSM migration + */ +@RunWith(MockitoJUnitRunner.class) +public class KMSManagerImplKeyRotationTest { + + @Spy + @InjectMocks + private KMSManagerImpl kmsManager; + + @Mock + private 
KMSKeyDao kmsKeyDao; + + @Mock + private KMSKekVersionDao kmsKekVersionDao; + + @Mock + private KMSWrappedKeyDao kmsWrappedKeyDao; + + @Mock + private HSMProfileDao hsmProfileDao; + + @Mock + private KMSProvider kmsProvider; + + private final Long testZoneId = 1L; + private final String testProviderName = "pkcs11"; + + @Before + public void setUp() { + } + + /** + * Test: rotateKek creates new KEK version in same HSM + */ + @Test + public void testRotateKek_SameHSM() throws Exception { + // Setup: Rotating within same HSM + Long oldProfileId = 10L; + Long kmsKeyId = 1L; + String oldKekLabel = "old-kek-label"; + String newKekLabel = "new-kek-label"; + + KMSKeyVO kmsKey = mock(KMSKeyVO.class); + when(kmsKey.getId()).thenReturn(kmsKeyId); + when(kmsKey.getHsmProfileId()).thenReturn(oldProfileId); + when(kmsKey.getPurpose()).thenReturn(KeyPurpose.VOLUME_ENCRYPTION); + + HSMProfileVO hsmProfile = mock(HSMProfileVO.class); + when(hsmProfile.getId()).thenReturn(oldProfileId); + when(hsmProfile.getProtocol()).thenReturn(testProviderName); + when(hsmProfileDao.findById(oldProfileId)).thenReturn(hsmProfile); + + // Old version should be marked as Previous + KMSKekVersionVO oldVersion = mock(KMSKekVersionVO.class); + when(oldVersion.getVersionNumber()).thenReturn(1); + when(oldVersion.getId()).thenReturn(10L); + when(kmsKekVersionDao.getActiveVersion(kmsKeyId)).thenReturn(oldVersion); + when(kmsKekVersionDao.listByKmsKeyId(kmsKeyId)).thenReturn(List.of(oldVersion)); + + // Provider creates new KEK + when(kmsProvider.createKek(any(KeyPurpose.class), eq(newKekLabel), anyInt(), eq(oldProfileId))) + .thenReturn("new-kek-id"); + + KMSKekVersionVO newVersion = mock(KMSKekVersionVO.class); + when(newVersion.getVersionNumber()).thenReturn(2); + when(kmsKekVersionDao.persist(any(KMSKekVersionVO.class))).thenReturn(newVersion); + + doReturn(kmsProvider).when(kmsManager).getKMSProvider(testProviderName); + + String result = kmsManager.rotateKek(kmsKey, oldKekLabel, newKekLabel, 256, 
null); + + // Verify new KEK was created in same HSM + assertNotNull(result); + verify(kmsProvider).createKek(any(KeyPurpose.class), eq(newKekLabel), eq(256), eq(oldProfileId)); + + // Verify old version marked as Previous + verify(oldVersion).setStatus(KMSKekVersionVO.Status.Previous); + verify(kmsKekVersionDao).update(eq(10L), eq(oldVersion)); + + // Verify new version created + ArgumentCaptor versionCaptor = ArgumentCaptor.forClass(KMSKekVersionVO.class); + verify(kmsKekVersionDao).persist(versionCaptor.capture()); + KMSKekVersionVO createdVersion = versionCaptor.getValue(); + assertEquals(Integer.valueOf(2), createdVersion.getVersionNumber()); + assertEquals(oldProfileId, createdVersion.getHsmProfileId()); + } + + /** + * Test: rotateKek migrates key to different HSM + */ + @Test + public void testRotateKek_CrossHSMMigration() throws Exception { + // Setup: Rotating to different HSM + Long oldProfileId = 10L; + Long newProfileId = 20L; + Long kmsKeyId = 1L; + String oldKekLabel = "old-kek-label"; + String newKekLabel = "new-kek-label"; + + KMSKeyVO kmsKey = mock(KMSKeyVO.class); + when(kmsKey.getId()).thenReturn(kmsKeyId); + when(kmsKey.getHsmProfileId()).thenReturn(oldProfileId); + when(kmsKey.getPurpose()).thenReturn(KeyPurpose.VOLUME_ENCRYPTION); + + HSMProfileVO hsmProfile = mock(HSMProfileVO.class); + when(hsmProfile.getId()).thenReturn(newProfileId); + when(hsmProfile.getProtocol()).thenReturn(testProviderName); + + KMSKekVersionVO oldVersion = mock(KMSKekVersionVO.class); + when(oldVersion.getVersionNumber()).thenReturn(1); + when(oldVersion.getId()).thenReturn(10L); + when(kmsKekVersionDao.getActiveVersion(kmsKeyId)).thenReturn(oldVersion); + when(kmsKekVersionDao.listByKmsKeyId(kmsKeyId)).thenReturn(List.of(oldVersion)); + + // Provider creates new KEK in different HSM + when(kmsProvider.createKek(any(KeyPurpose.class), eq(newKekLabel), anyInt(), eq(newProfileId))) + .thenReturn("new-kek-id"); + + KMSKekVersionVO newVersion = 
mock(KMSKekVersionVO.class); + when(newVersion.getVersionNumber()).thenReturn(2); + when(kmsKekVersionDao.persist(any(KMSKekVersionVO.class))).thenReturn(newVersion); + + doReturn(kmsProvider).when(kmsManager).getKMSProvider(testProviderName); + + String result = kmsManager.rotateKek(kmsKey, oldKekLabel, newKekLabel, 256, hsmProfile); + + // Verify new KEK was created in new HSM + assertNotNull(result); + verify(kmsProvider).createKek(any(KeyPurpose.class), eq(newKekLabel), eq(256), eq(newProfileId)); + + // Verify new version created with new profile ID + ArgumentCaptor versionCaptor = ArgumentCaptor.forClass(KMSKekVersionVO.class); + verify(kmsKekVersionDao).persist(versionCaptor.capture()); + KMSKekVersionVO createdVersion = versionCaptor.getValue(); + assertEquals(newProfileId, createdVersion.getHsmProfileId()); + + // Verify KMS key updated with new profile ID + verify(kmsKey).setHsmProfileId(newProfileId); + verify(kmsKeyDao).update(kmsKeyId, kmsKey); + } + + /** + * Test: rewrapSingleKey unwraps with old KEK and wraps with new KEK + */ + @Test + public void testRewrapSingleKey_UnwrapAndRewrap() throws Exception { + // Setup + Long wrappedKeyId = 100L; + Long oldVersionId = 1L; + Long newVersionId = 2L; + Long oldProfileId = 10L; + Long newProfileId = 20L; + + KMSWrappedKeyVO wrappedKeyVO = mock(KMSWrappedKeyVO.class); + when(wrappedKeyVO.getId()).thenReturn(wrappedKeyId); + + KMSKeyVO kmsKey = mock(KMSKeyVO.class); + when(kmsKey.getPurpose()).thenReturn(KeyPurpose.VOLUME_ENCRYPTION); + + KMSKekVersionVO oldVersion = mock(KMSKekVersionVO.class); + + KMSKekVersionVO newVersion = mock(KMSKekVersionVO.class); + when(newVersion.getId()).thenReturn(newVersionId); + when(newVersion.getKekLabel()).thenReturn("new-kek-label"); + when(newVersion.getHsmProfileId()).thenReturn(newProfileId); + + // Mock unwrap and wrap operations + byte[] plainDek = "plain-dek-bytes".getBytes(); + doReturn(plainDek).when(kmsManager).unwrapKey(wrappedKeyId); + + WrappedKey newWrappedKey 
= mock(WrappedKey.class); + when(newWrappedKey.getWrappedKeyMaterial()).thenReturn("new-wrapped-blob".getBytes()); + when(kmsProvider.wrapKey(plainDek, KeyPurpose.VOLUME_ENCRYPTION, "new-kek-label", newProfileId)) + .thenReturn(newWrappedKey); + + kmsManager.rewrapSingleKey(wrappedKeyVO, kmsKey, newVersion, kmsProvider); + + // Verify unwrap was called + verify(kmsManager).unwrapKey(wrappedKeyId); + + // Verify wrap was called with new profile + verify(kmsProvider).wrapKey(plainDek, KeyPurpose.VOLUME_ENCRYPTION, "new-kek-label", newProfileId); + + // Verify wrapped key was updated + verify(wrappedKeyVO).setKekVersionId(newVersionId); + verify(wrappedKeyVO).setWrappedBlob("new-wrapped-blob".getBytes()); + verify(kmsWrappedKeyDao).update(wrappedKeyId, wrappedKeyVO); + } + + /** + * Test: rotateKek generates new label when not provided + */ + @Test + public void testRotateKek_GeneratesLabel() throws Exception { + // Setup + Long oldProfileId = 10L; + Long kmsKeyId = 1L; + String oldKekLabel = "old-kek-label"; + + KMSKeyVO kmsKey = mock(KMSKeyVO.class); + when(kmsKey.getId()).thenReturn(kmsKeyId); + when(kmsKey.getHsmProfileId()).thenReturn(oldProfileId); + when(kmsKey.getPurpose()).thenReturn(KeyPurpose.VOLUME_ENCRYPTION); + + HSMProfileVO hsmProfile = mock(HSMProfileVO.class); + when(hsmProfile.getId()).thenReturn(oldProfileId); + when(hsmProfile.getProtocol()).thenReturn(testProviderName); + when(hsmProfileDao.findById(oldProfileId)).thenReturn(hsmProfile); + + KMSKekVersionVO oldVersion = mock(KMSKekVersionVO.class); + when(oldVersion.getVersionNumber()).thenReturn(1); + when(oldVersion.getId()).thenReturn(10L); + when(kmsKekVersionDao.getActiveVersion(kmsKeyId)).thenReturn(oldVersion); + when(kmsKekVersionDao.listByKmsKeyId(kmsKeyId)).thenReturn(List.of(oldVersion)); + + // Provider creates new KEK - will accept any label + when(kmsProvider.createKek(any(KeyPurpose.class), anyString(), anyInt(), eq(oldProfileId))) + .thenReturn("new-kek-id"); + + KMSKekVersionVO 
newVersion = mock(KMSKekVersionVO.class); + when(newVersion.getVersionNumber()).thenReturn(2); + when(kmsKekVersionDao.persist(any(KMSKekVersionVO.class))).thenReturn(newVersion); + + doReturn(kmsProvider).when(kmsManager).getKMSProvider(testProviderName); + + kmsManager.rotateKek(kmsKey, oldKekLabel, null, 256, null); + + // Verify a label was generated and passed to createKek + ArgumentCaptor labelCaptor = ArgumentCaptor.forClass(String.class); + verify(kmsProvider).createKek(any(KeyPurpose.class), labelCaptor.capture(), eq(256), eq(oldProfileId)); + String generatedLabel = labelCaptor.getValue(); + assertNotNull("Label should be generated", generatedLabel); + } + + /** + * Test: rotateKek throws exception when old KEK not found (provider rejects the rotation) + */ + @Test(expected = KMSException.class) + public void testRotateKek_ThrowsExceptionWhenOldKekNotFound() throws KMSException { + Long oldProfileId = 10L; + Long kmsKeyId = 1L; + + KMSKeyVO kmsKey = mock(KMSKeyVO.class); + when(kmsKey.getHsmProfileId()).thenReturn(oldProfileId); + when(kmsKey.getPurpose()).thenReturn(KeyPurpose.VOLUME_ENCRYPTION); + + HSMProfileVO hsmProfile = mock(HSMProfileVO.class); + when(hsmProfile.getId()).thenReturn(oldProfileId); + when(hsmProfile.getProtocol()).thenReturn(testProviderName); + when(hsmProfileDao.findById(oldProfileId)).thenReturn(hsmProfile); + + // Provider throws because the old KEK label doesn't exist in the HSM + when(kmsProvider.createKek(any(KeyPurpose.class), eq("new-label"), eq(256), eq(oldProfileId))) + .thenThrow(KMSException.kekNotFound("Old KEK not found: non-existent-label")); + + doReturn(kmsProvider).when(kmsManager).getKMSProvider(testProviderName); + + kmsManager.rotateKek(kmsKey, "non-existent-label", "new-label", 256, null); + } + + /** + * Test: rotateKek uses current profile when target profile is null + */ + @Test + public void testRotateKek_UsesCurrentProfileWhenTargetNull() throws Exception { + // Setup + Long currentProfileId = 10L; + Long 
kmsKeyId = 1L; + String oldKekLabel = "old-kek-label"; + + KMSKeyVO kmsKey = mock(KMSKeyVO.class); + when(kmsKey.getId()).thenReturn(kmsKeyId); + when(kmsKey.getHsmProfileId()).thenReturn(currentProfileId); + when(kmsKey.getPurpose()).thenReturn(KeyPurpose.VOLUME_ENCRYPTION); + + HSMProfileVO hsmProfile = mock(HSMProfileVO.class); + when(hsmProfile.getId()).thenReturn(currentProfileId); + when(hsmProfile.getProtocol()).thenReturn(testProviderName); + when(hsmProfileDao.findById(currentProfileId)).thenReturn(hsmProfile); + + KMSKekVersionVO oldVersion = mock(KMSKekVersionVO.class); + when(oldVersion.getVersionNumber()).thenReturn(1); + when(oldVersion.getId()).thenReturn(10L); + when(kmsKekVersionDao.getActiveVersion(kmsKeyId)).thenReturn(oldVersion); + when(kmsKekVersionDao.listByKmsKeyId(kmsKeyId)).thenReturn(List.of(oldVersion)); + + when(kmsProvider.createKek(any(KeyPurpose.class), anyString(), anyInt(), eq(currentProfileId))) + .thenReturn("new-kek-id"); + + KMSKekVersionVO newVersion = mock(KMSKekVersionVO.class); + when(newVersion.getVersionNumber()).thenReturn(2); + when(kmsKekVersionDao.persist(any(KMSKekVersionVO.class))).thenReturn(newVersion); + + doReturn(kmsProvider).when(kmsManager).getKMSProvider(testProviderName); + + kmsManager.rotateKek(kmsKey, oldKekLabel, "new-label", 256, null); + + // Verify current profile was used (not a different one) + verify(kmsProvider).createKek(any(KeyPurpose.class), anyString(), eq(256), eq(currentProfileId)); + + // Verify KMS key was not updated (same profile) + verify(kmsKey, never()).setHsmProfileId(currentProfileId); + verify(kmsKeyDao, never()).update(kmsKeyId, kmsKey); + } +} diff --git a/server/src/test/java/org/apache/cloudstack/kms/KMSManagerImplRetryTest.java b/server/src/test/java/org/apache/cloudstack/kms/KMSManagerImplRetryTest.java new file mode 100644 index 000000000000..11e694a64419 --- /dev/null +++ b/server/src/test/java/org/apache/cloudstack/kms/KMSManagerImplRetryTest.java @@ -0,0 +1,159 @@ +// 
Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

package org.apache.cloudstack.kms;

import org.apache.cloudstack.framework.kms.KMSException;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;

import java.util.concurrent.atomic.AtomicInteger;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.doReturn;

/**
 * Unit tests for KMSManagerImpl's retryOperation() logic, covering
 * timeout enforcement, retry-on-transient-failure, and non-retryable fast-fail.
 *
 * Config values (retry count, delay, timeout) are spied on so tests remain
 * fast without needing a full management-server config context.
 */
@RunWith(MockitoJUnitRunner.class)
public class KMSManagerImplRetryTest {

    // Spy so the config getters can be stubbed while the real retry logic runs
    @Spy
    @InjectMocks
    private KMSManagerImpl kmsManager;

    /** Configure the spy to use a 1-second timeout, the given retry count, and no delay. */
    private void useShortConfig(int retries) {
        doReturn(1).when(kmsManager).getOperationTimeoutSec();
        doReturn(retries).when(kmsManager).getRetryCount();
        doReturn(0).when(kmsManager).getRetryDelayMs();
    }

    /**
     * Normal path: operation completes immediately, result returned.
     */
    @Test
    public void testRetryOperation_succeedsImmediately() throws Exception {
        useShortConfig(0);

        String result = kmsManager.retryOperation(() -> "ok");

        assertEquals("ok", result);
    }

    /**
     * Timeout path: operation never finishes within the configured timeout.
     * retryOperation() must unblock and throw a retryable KMSException.
     */
    @Test
    public void testRetryOperation_timesOutAndThrowsKMSException() {
        useShortConfig(0);

        try {
            kmsManager.retryOperation(() -> {
                // Sleep well past the 1-second timeout configured above
                Thread.sleep(5_000);
                return "should never reach here";
            });
            fail("Expected KMSException due to timeout");
        } catch (KMSException e) {
            assertTrue("Exception should be retryable (transient timeout)", e.isRetryable());
            assertTrue("Message should mention timeout", e.getMessage().contains("timed out"));
        } catch (Exception e) {
            fail("Expected KMSException, got: " + e.getClass().getName() + ": " + e.getMessage());
        }
    }

    /**
     * Retry path: operation fails with a retryable KMSException on the first
     * attempt and succeeds on the second. retryOperation() should return the
     * successful result.
     */
    @Test
    public void testRetryOperation_retriesOnTransientFailureAndSucceeds() throws Exception {
        useShortConfig(2);
        // Counts calls across retries; also proves the lambda runs more than once
        AtomicInteger attempts = new AtomicInteger(0);

        String result = kmsManager.retryOperation(() -> {
            if (attempts.getAndIncrement() == 0) {
                throw KMSException.transientError("temporary HSM error", null);
            }
            return "recovered";
        });

        assertEquals("recovered", result);
        assertEquals("Should have taken exactly 2 attempts", 2, attempts.get());
    }

    /**
     * Non-retryable path: a KMSException with isRetryable() == false must be
     * re-thrown immediately without consuming any retry budget.
     */
    @Test
    public void testRetryOperation_nonRetryableExceptionFastFails() {
        useShortConfig(3);
        AtomicInteger attempts = new AtomicInteger(0);

        try {
            kmsManager.retryOperation(() -> {
                attempts.incrementAndGet();
                throw KMSException.invalidParameter("bad key label");
            });
            fail("Expected non-retryable KMSException");
        } catch (KMSException e) {
            assertFalse("Exception should NOT be retryable", e.isRetryable());
        } catch (Exception e) {
            fail("Expected KMSException, got: " + e.getClass().getName());
        }

        // Exactly one invocation: the retry budget of 3 must not be consumed
        assertEquals("Non-retryable exception must not trigger retries", 1, attempts.get());
    }

    /**
     * Retry exhaustion on timeout: all attempts time out; retryOperation()
     * must eventually throw after exhausting the retry budget.
+ */ + @Test + public void testRetryOperation_exhaustsRetriesOnRepeatedTimeout() { + useShortConfig(2); // 3 total attempts (initial + 2 retries), each timing out after 1s + AtomicInteger attempts = new AtomicInteger(0); + + try { + kmsManager.retryOperation(() -> { + attempts.incrementAndGet(); + Thread.sleep(5_000); + return "never"; + }); + fail("Expected KMSException after retry exhaustion"); + } catch (KMSException e) { + assertTrue("Final exception should be retryable (timeout)", e.isRetryable()); + } catch (Exception e) { + fail("Expected KMSException, got: " + e.getClass().getName()); + } + + assertEquals("Should have attempted exactly 3 times (1 initial + 2 retries)", 3, attempts.get()); + } +} diff --git a/server/src/test/java/org/apache/cloudstack/vm/UnmanagedVMsManagerImplTest.java b/server/src/test/java/org/apache/cloudstack/vm/UnmanagedVMsManagerImplTest.java index a24ba7f068b2..d3b0f236706d 100644 --- a/server/src/test/java/org/apache/cloudstack/vm/UnmanagedVMsManagerImplTest.java +++ b/server/src/test/java/org/apache/cloudstack/vm/UnmanagedVMsManagerImplTest.java @@ -597,7 +597,7 @@ public void testImportFromExternalTest() throws InsufficientServerCapacityExcept DeployDestination mockDest = Mockito.mock(DeployDestination.class); when(deploymentPlanningManager.planDeployment(any(), any(), any(), any())).thenReturn(mockDest); DiskProfile diskProfile = Mockito.mock(DiskProfile.class); - when(volumeManager.allocateRawVolume(any(), any(), any(), any(), any(), any(), any(), any(), any(), any())) + when(volumeManager.allocateRawVolume(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any())) .thenReturn(diskProfile); Map storage = new HashMap<>(); VolumeVO volume = Mockito.mock(VolumeVO.class); @@ -831,7 +831,7 @@ private void importFromDisk(String source) throws InsufficientServerCapacityExce DeployDestination mockDest = Mockito.mock(DeployDestination.class); when(deploymentPlanningManager.planDeployment(any(), any(), any(), 
any())).thenReturn(mockDest); DiskProfile diskProfile = Mockito.mock(DiskProfile.class); - when(volumeManager.allocateRawVolume(any(), any(), any(), any(), any(), any(), any(), any(), any(), any())) + when(volumeManager.allocateRawVolume(any(), any(), any(), any(), any(), any(), any(), any(), any(), any(), any())) .thenReturn(diskProfile); Map storage = new HashMap<>(); VolumeVO volume = Mockito.mock(VolumeVO.class); diff --git a/tools/apidoc/gen_toc.py b/tools/apidoc/gen_toc.py index e41a04ff2e1b..40164e25ce86 100644 --- a/tools/apidoc/gen_toc.py +++ b/tools/apidoc/gen_toc.py @@ -56,6 +56,8 @@ 'HypervisorGuestOsNames': 'Guest OS', 'Domain': 'Domain', 'Template': 'Template', + 'KMS': 'KMS', + 'HSM': 'KMS', 'Iso': 'ISO', 'Volume': 'Volume', 'Vlan': 'VLAN', diff --git a/ui/public/locales/en.json b/ui/public/locales/en.json index 8bcc5d0a94bf..a6e4cf90ad02 100644 --- a/ui/public/locales/en.json +++ b/ui/public/locales/en.json @@ -1419,10 +1419,27 @@ "label.keep": "Keep", "label.kernelversion": "Kernel Version", "label.key": "Key", +"label.keybits": "Key Bits", "label.keyboard": "Keyboard language", "label.keyboardtype": "Keyboard type", "label.keypair": "SSH key pair", "label.keypairs": "SSH key pair(s)", +"label.kms": "Key Management", +"label.kms.key": "KMS Key", +"label.kmskey": "KMS Key", +"label.kms.keys": "KMS Keys", +"label.create.kms.key": "Create KMS Key", +"label.update.kms.key": "Update KMS Key", +"label.rotate.kms.key": "Rotate KMS Key", +"label.delete.kms.key": "Delete KMS Key", +"label.migrate.volumes.to.kms": "Migrate Volumes to KMS", +"label.hsm.profile": "HSM Profile", +"label.hsmprofile": "HSM Profile", +"label.hsmprofileid": "HSM Profile", +"label.create.hsmprofile": "Add HSM Profile", +"label.update.hsm.profile": "Update HSM Profile", +"label.delete.hsm.profile": "Delete HSM Profile", +"label.select.kms.key.optional": "Select KMS Key (optional)", "label.kubeconfig.cluster": "Kubernetes Cluster config", "label.kubernetes": "Kubernetes", 
"label.kubernetes.access.details": "The kubernetes nodes can be accessed via ssh using:
ssh -i [ssh_key] -p [port_number] cloud@[public_ip_address]

where,
ssh_key: points to the ssh private key file corresponding to the key that was associated while creating the Kubernetes Cluster. If no ssh key was provided during Kubernetes cluster creation, use the ssh private key of the management server.
port_number: can be obtained from the Port Forwarding Tab (Public Port column)", @@ -1481,6 +1498,7 @@ "label.lbruleid": "Load balancer ID", "label.lbtype": "Load balancer type", "label.ldap": "LDAP", +"label.library": "Library", "label.ldapdomain": "LDAP Domain", "label.ldap.configuration": "LDAP Configuration", "label.ldap.group.name": "LDAP Group", @@ -1620,6 +1638,7 @@ "label.migrate.instance.specific.storages": "Migrate volume(s) of the Instance to specific primary storages", "label.migrate.systemvm.to": "Migrate System VM to", "label.migrate.volume": "Migrate Volume", +"label.migrate.volume.to.kms": "Migrate Volume Encryption to KMS", "message.memory.usage.info.hypervisor.additionals": "The data shown may not reflect the actual memory usage if the Instance does not have the additional hypervisor tools installed", "message.memory.usage.info.negative.value": "If the Instance's memory usage cannot be obtained from the hypervisor, the lines for free memory in the raw data graph and memory usage in the percentage graph will be disabled", "message.migrate.volume.tooltip": "Volume can be migrated to any suitable storage pool. Admin has to choose the appropriate disk offering to replace, that supports the new storage pool", @@ -1896,6 +1915,7 @@ "label.physicalnetworkid": "Physical Network", "label.physicalnetworkname": "Physical Network name", "label.physicalsize": "Physical size", +"label.pin": "PIN", "label.ping.path": "Ping path", "label.pkcs.private.certificate": "PKCS#8 private certificate", "label.plannermode": "Planner mode", @@ -2482,6 +2502,7 @@ "label.suspend.project": "Suspend Project", "label.switch.type": "Switch type", "label.sync.storage": "Sync Storage Pool", +"label.system": "System", "label.system.ip.pool": "System Pool", "label.system.offering": "System Offering", "label.system.offerings": "System Offerings", @@ -2953,9 +2974,11 @@ "message.action.delete.guest.os": "Please confirm that you want to delete this guest os. 
System defined entry cannot be deleted.", "message.action.delete.guest.os.category": "Please confirm that you want to delete this guest os category.", "message.action.delete.guest.os.hypervisor.mapping": "Please confirm that you want to delete this guest os hypervisor mapping. System defined entry cannot be deleted.", +"message.action.delete.hsm.profile": "Please confirm that you want to delete this HSM profile.", "message.action.delete.instance.group": "Please confirm that you want to delete the Instance group.", "message.action.delete.interface.static.route": "Please confirm that you want to remove this interface Static Route?", "message.action.delete.iso": "Please confirm that you want to delete this ISO.", +"message.action.delete.kms.key": "Please confirm that you want to delete this KMS key.", "message.action.delete.network": "Please confirm that you want to delete this Network.", "message.action.delete.network.static.route": "Please confirm that you want to remove this Network Static Route", "message.action.delete.nexusvswitch": "Please confirm that you want to delete this nexus 1000v", @@ -3686,6 +3709,8 @@ "message.migrate.volume.failed": "Migrating volume failed.", "message.migrate.volume.pool.auto.assign": "Primary storage for the volume will be automatically chosen based on the suitability and Instance destination", "message.migrate.volume.processing": "Migrating volume...", +"message.action.migrate.volume.to.kms": "Please confirm that you want to migrate this volume's passphrase encryption to KMS. This operation re-encrypts the volume key using the selected KMS key and cannot be undone.", +"message.action.migrate.volumes.to.kms": "Please confirm that you want to migrate volumes to KMS encryption. 
This operation re-encrypts volume keys using the selected KMS key and cannot be undone.", "message.migrate.with.storage": "Specify storage pool for volumes of the Instance.", "message.migrating.failed": "Migration failed.", "message.migrating.processing": "Migration in progress for", @@ -4203,5 +4228,6 @@ "Compute*Month": "Compute * Month", "GB*Month": "GB * Month", "IP*Month": "IP * Month", -"Policy*Month": "Policy * Month" +"Policy*Month": "Policy * Month", +"message.kms.key.optional": "Optional: Select a KMS key for encryption. If not selected, legacy passphrase encryption will be used." } diff --git a/ui/src/components/view/DetailsTab.vue b/ui/src/components/view/DetailsTab.vue index 4145eeb9be6d..93af6b155718 100644 --- a/ui/src/components/view/DetailsTab.vue +++ b/ui/src/components/view/DetailsTab.vue @@ -189,7 +189,7 @@
{{ dataResource[item].rbd_default_data_pool }}
- +
{{ $t('label.configuration.details') }}
diff --git a/ui/src/components/view/InfoCard.vue b/ui/src/components/view/InfoCard.vue index 996e30ead3b5..09763fcc3629 100644 --- a/ui/src/components/view/InfoCard.vue +++ b/ui/src/components/view/InfoCard.vue @@ -413,6 +413,30 @@
+
+
{{ $t('label.kms.key') }}
+
+ + + {{ resource.kmskey }} + + {{ resource.kmskey }} +
+
+
+
{{ $t('label.hsm.profile') }}
+
+ + + {{ resource.hsmprofile }} + + {{ resource.hsmprofile }} +
+
{{ $t('label.network') }}
diff --git a/ui/src/components/view/ListView.vue b/ui/src/components/view/ListView.vue index 66dd6b3db9e6..50b80d43cc62 100644 --- a/ui/src/components/view/ListView.vue +++ b/ui/src/components/view/ListView.vue @@ -650,6 +650,10 @@ + @@ -1219,7 +1223,8 @@ export default { '/zone', '/pod', '/cluster', '/host', '/storagepool', '/imagestore', '/systemvm', '/router', '/ilbvm', '/annotation', '/computeoffering', '/systemoffering', '/diskoffering', '/backupoffering', '/networkoffering', '/vpcoffering', '/tungstenfabric', '/oauthsetting', '/guestos', '/guestoshypervisormapping', '/webhook', 'webhookdeliveries', 'webhookfilters', '/quotatariff', '/sharedfs', - '/ipv4subnets', '/managementserver', '/gpucard', '/gpudevices', '/vgpuprofile', '/extension', '/snapshotpolicy', '/backupschedule'].join('|')) + '/ipv4subnets', '/managementserver', '/gpucard', '/gpudevices', '/vgpuprofile', '/extension', '/snapshotpolicy', '/backupschedule', + '/kmskey', '/hsmprofile'].join('|')) .test(this.$route.path) }, enableGroupAction () { diff --git a/ui/src/components/view/SearchFilter.vue b/ui/src/components/view/SearchFilter.vue index 1b38ae6820d9..fa4ab4ebd541 100644 --- a/ui/src/components/view/SearchFilter.vue +++ b/ui/src/components/view/SearchFilter.vue @@ -166,6 +166,18 @@ export default { responseKey1: 'listnetworksresponse', responseKey2: 'network', field: 'name' + }, + hsmprofileid: { + apiName: 'listHSMProfiles', + responseKey1: 'listhsmprofilesresponse', + responseKey2: 'hsmprofile', + field: 'name' + }, + kmskeyid: { + apiName: 'listKMSKeys', + responseKey1: 'listkmskeysresponse', + responseKey2: 'kmskey', + field: 'name' } } } @@ -217,6 +229,12 @@ export default { if (fieldName === 'groupid') { fieldName = 'group' } + if (fieldName === 'hsmprofileid') { + fieldName = 'hsm.profile' + } + if (fieldName === 'kmskeyid') { + fieldName = 'kms.key' + } if (fieldName === 'keyword') { if ('listAnnotations' in this.$store.getters.apis) { return this.$t('label.annotation') diff --git 
a/ui/src/components/view/SearchView.vue b/ui/src/components/view/SearchView.vue index bd952f049476..608093289290 100644 --- a/ui/src/components/view/SearchView.vue +++ b/ui/src/components/view/SearchView.vue @@ -275,6 +275,12 @@ export default { if (fieldName === 'groupid') { fieldName = 'group' } + if (fieldName === 'hsmprofileid') { + fieldName = 'hsm.profile' + } + if (fieldName === 'kmskeyid') { + fieldName = 'kms.key' + } if (fieldName === 'keyword') { if ('listAnnotations' in this.$store.getters.apis) { return this.$t('label.annotation') @@ -320,12 +326,18 @@ export default { if (item === 'backupofferingid' && !('listBackupOfferings' in this.$store.getters.apis)) { return true } + if (item === 'hsmprofileid' && !('listHSMProfiles' in this.$store.getters.apis)) { + return true + } + if (item === 'kmskeyid' && !('listKMSKeys' in this.$store.getters.apis)) { + return true + } if (['zoneid', 'domainid', 'imagestoreid', 'storageid', 'state', 'account', 'hypervisor', 'level', 'clusterid', 'podid', 'groupid', 'entitytype', 'accounttype', 'systemvmtype', 'scope', 'provider', 'type', 'scope', 'managementserverid', 'serviceofferingid', 'diskofferingid', 'networkid', 'usagetype', 'restartrequired', 'gpuenabled', 'displaynetwork', 'guestiptype', 'usersource', 'arch', 'oscategoryid', 'templatetype', 'gpucardid', 'vgpuprofileid', - 'extensionid', 'backupoffering', 'volumeid', 'virtualmachineid'].includes(item) + 'extensionid', 'backupoffering', 'volumeid', 'virtualmachineid', 'hsmprofileid', 'kmskeyid'].includes(item) ) { type = 'list' } else if (item === 'tags') { @@ -516,6 +528,8 @@ export default { let gpuCardIndex = -1 let vgpuProfileIndex = -1 let extensionIndex = -1 + let hsmProfileIndex = -1 + let kmsKeyIndex = -1 if (arrayField.includes('type')) { if (this.$route.path === '/alert') { @@ -661,6 +675,18 @@ export default { promises.push(await this.fetchVolumes(searchKeyword)) } + if (arrayField.includes('hsmprofileid')) { + hsmProfileIndex = 
this.fields.findIndex(item => item.name === 'hsmprofileid') + this.fields[hsmProfileIndex].loading = true + promises.push(await this.fetchHSMProfiles(searchKeyword)) + } + + if (arrayField.includes('kmskeyid')) { + kmsKeyIndex = this.fields.findIndex(item => item.name === 'kmskeyid') + this.fields[kmsKeyIndex].loading = true + promises.push(await this.fetchKMSKeys(searchKeyword)) + } + Promise.all(promises).then(response => { if (typeIndex > -1) { const types = response.filter(item => item.type === 'type') @@ -805,6 +831,20 @@ export default { this.fields[virtualmachineIndex].opts = this.sortArray(virtualMachines[0].data) } } + + if (hsmProfileIndex > -1) { + const hsmProfiles = response.filter(item => item.type === 'hsmprofileid') + if (hsmProfiles && hsmProfiles.length > 0) { + this.fields[hsmProfileIndex].opts = this.sortArray(hsmProfiles[0].data) + } + } + + if (kmsKeyIndex > -1) { + const kmsKeys = response.filter(item => item.type === 'kmskeyid') + if (kmsKeys && kmsKeys.length > 0) { + this.fields[kmsKeyIndex].opts = this.sortArray(kmsKeys[0].data) + } + } }).finally(() => { if (typeIndex > -1) { this.fields[typeIndex].loading = false @@ -872,6 +912,12 @@ export default { if (virtualmachineIndex > -1) { this.fields[virtualmachineIndex].loading = false } + if (hsmProfileIndex > -1) { + this.fields[hsmProfileIndex].loading = false + } + if (kmsKeyIndex > -1) { + this.fields[kmsKeyIndex].loading = false + } if (Array.isArray(arrayField)) { this.fillFormFieldValues() } @@ -1590,6 +1636,32 @@ export default { }) }) }, + fetchHSMProfiles (searchKeyword) { + return new Promise((resolve, reject) => { + getAPI('listHSMProfiles', { listAll: true, keyword: searchKeyword }).then(json => { + const hsmProfiles = json.listhsmprofilesresponse.hsmprofile + resolve({ + type: 'hsmprofileid', + data: hsmProfiles || [] + }) + }).catch(error => { + reject(error.response.headers['x-description']) + }) + }) + }, + fetchKMSKeys (searchKeyword) { + return new Promise((resolve, 
reject) => { + getAPI('listKMSKeys', { listAll: true, keyword: searchKeyword }).then(json => { + const kmsKeys = json.listkmskeysresponse.kmskey + resolve({ + type: 'kmskeyid', + data: kmsKeys || [] + }) + }).catch(error => { + reject(error.response.headers['x-description']) + }) + }) + }, onSearch (value) { this.paramsFilter = {} this.searchQuery = value diff --git a/ui/src/components/view/VolumesTab.vue b/ui/src/components/view/VolumesTab.vue index bdd511ef1ec4..b02932adea3a 100644 --- a/ui/src/components/view/VolumesTab.vue +++ b/ui/src/components/view/VolumesTab.vue @@ -80,7 +80,7 @@ export default { return { vm: {}, volumes: [], - defaultColumns: ['name', 'state', 'type', 'size'], + defaultColumns: ['name', 'state', 'type', 'size', 'kmskey'], allColumns: [ { key: 'name', @@ -101,6 +101,11 @@ export default { title: this.$t('label.size'), dataIndex: 'size' }, + { + key: 'kmskey', + title: this.$t('label.kms.key'), + dataIndex: 'kmskey' + }, { key: 'storage', title: this.$t('label.storage'), diff --git a/ui/src/components/widgets/DetailsInput.vue b/ui/src/components/widgets/DetailsInput.vue index a8d39fce02b6..6d698e643c1b 100644 --- a/ui/src/components/widgets/DetailsInput.vue +++ b/ui/src/components/widgets/DetailsInput.vue @@ -19,7 +19,15 @@
- + + @@ -82,6 +90,15 @@ export default { showTableHeaders: { type: Boolean, default: true + }, + optionalKeys: { + type: Array, + default: () => [] + } + }, + computed: { + optionalKeyOptions () { + return this.optionalKeys.map(k => ({ value: k })) } }, data () { diff --git a/ui/src/config/router.js b/ui/src/config/router.js index 43e8efd7b5d3..ee9520149a52 100644 --- a/ui/src/config/router.js +++ b/ui/src/config/router.js @@ -28,6 +28,7 @@ import compute from '@/config/section/compute' import storage from '@/config/section/storage' import network from '@/config/section/network' import image from '@/config/section/image' +import kms from '@/config/section/kms' import project from '@/config/section/project' import event from '@/config/section/event' import user from '@/config/section/user' @@ -216,6 +217,7 @@ export function asyncRouterMap () { generateRouterMap(compute), generateRouterMap(storage), + generateRouterMap(kms), generateRouterMap(network), generateRouterMap(image), generateRouterMap(event), diff --git a/ui/src/config/section/kms.js b/ui/src/config/section/kms.js new file mode 100644 index 000000000000..a5773b29a7ff --- /dev/null +++ b/ui/src/config/section/kms.js @@ -0,0 +1,265 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. 
See the License for the +// specific language governing permissions and limitations +// under the License. + +import { shallowRef, defineAsyncComponent } from 'vue' +import store from '@/store' + +export default { + name: 'kms', + title: 'label.kms', + icon: 'hdd-outlined', + show: (record, store) => { + return ['Admin'].includes(store.getters.userInfo.roletype) || store.getters.features.hashsmprofiles + }, + children: [ + { + name: 'kmskey', + title: 'label.kms.keys', + icon: 'file-text-outlined', + permission: ['listKMSKeys'], + resourceType: 'KMSKey', + columns: () => { + const fields = ['name', 'enabled', 'purpose', 'hsmprofile'] + if (['Admin', 'DomainAdmin'].includes(store.getters.userInfo.roletype)) { + fields.push('account') + } + if (store.getters.listAllProjects) { + fields.push('project') + } + fields.push('domain') + return fields + }, + details: ['id', 'name', 'description', 'version', 'enabled', 'account', 'domain', 'project', 'created', 'hsmprofile'], + related: [ + { + name: 'volume', + title: 'label.volumes', + param: 'kmskeyid' + } + ], + tabs: [ + { + name: 'details', + component: shallowRef(defineAsyncComponent(() => import('@/components/view/DetailsTab.vue'))) + }, + { + name: 'events', + resourceType: 'KmsKey', + component: shallowRef(defineAsyncComponent(() => import('@/components/view/EventsTab.vue'))), + show: () => { + return 'listEvents' in store.getters.apis + } + } + ], + searchFilters: () => { + var filters = ['zoneid', 'hsmprofileid'] + if (store.getters.userInfo.roletype === 'Admin') { + filters.push('account', 'domainid', 'projectid') + } + return filters + }, + actions: [ + { + api: 'createKMSKey', + icon: 'plus-outlined', + label: 'label.create.kms.key', + listView: true, + popup: true, + dataView: false, + args: (record, store, group) => { + return ['Admin'].includes(store.userInfo.roletype) + ? 
['zoneid', 'domainid', 'account', 'projectid', 'name', 'description', 'hsmprofileid', 'keybits'] + : ['zoneid', 'name', 'description', 'hsmprofileid', 'keybits'] + } + }, + { + api: 'updateKMSKey', + icon: 'edit-outlined', + docHelp: 'adminguide/storage.html#lifecycle-operations', + label: 'label.update.kms.key', + dataView: true, + popup: true, + args: ['id', 'name', 'description', 'enabled'], + mapping: { + id: { + value: (record) => record.id + } + } + }, + { + api: 'rotateKMSKey', + icon: 'sync-outlined', + docHelp: 'adminguide/storage.html#lifecycle-operations', + label: 'label.rotate.kms.key', + dataView: true, + popup: true, + args: ['id', 'keybits', 'hsmprofileid'], + mapping: { + id: { + value: (record) => record.id + } + } + }, + { + api: 'migrateVolumesToKMS', + icon: 'swap-outlined', + docHelp: 'adminguide/storage.html#lifecycle-operations', + label: 'label.migrate.volumes.to.kms', + message: 'message.action.migrate.volumes.to.kms', + dataView: true, + popup: true, + show: (record, store) => { + return ['Admin'].includes(store.userInfo.roletype) + }, + args: (record, store) => { + var fields = ['zoneid', 'kmskeyid', 'volumeids'] + if (['Admin'].includes(store.userInfo.roletype)) { + fields = fields.concat(['account', 'domainid']) + } + return fields + }, + mapping: { + kmskeyid: { + value: (record) => record.id + } + } + }, + { + api: 'deleteKMSKey', + icon: 'delete-outlined', + docHelp: 'adminguide/storage.html#lifecycle-operations', + label: 'label.delete.kms.key', + message: 'message.action.delete.kms.key', + dataView: true, + popup: true, + args: ['id'], + mapping: { + id: { + value: (record) => record.id + } + } + } + ] + }, + { + name: 'hsmprofile', + title: 'label.hsm.profile', + icon: 'safety-outlined', + permission: ['listHSMProfiles'], + show: (record, route, user) => { return ['Admin'].includes(user.roletype) }, + resourceType: 'HSMProfile', + columns: () => { + const fields = ['name', 'enabled'] + if (['Admin', 
'DomainAdmin'].includes(store.getters.userInfo.roletype)) { + fields.push('account') + } + if (store.getters.listAllProjects) { + fields.push('project') + } + fields.push('domain') + return fields + }, + details: ['id', 'name', 'description', 'enabled', 'account', 'domain', 'project', 'created', 'details'], + related: [ + { + name: 'kmskey', + title: 'label.kms.keys', + param: 'hsmprofileid' + } + ], + tabs: [ + { + name: 'details', + component: shallowRef(defineAsyncComponent(() => import('@/components/view/DetailsTab.vue'))) + }, + { + name: 'events', + resourceType: 'HsmProfile', + component: shallowRef(defineAsyncComponent(() => import('@/components/view/EventsTab.vue'))), + show: () => { + return 'listEvents' in store.getters.apis + } + } + ], + searchFilters: () => { + var filters = ['zoneid'] + if (store.getters.userInfo.roletype === 'Admin') { + filters.push('account', 'domainid', 'projectid') + } + return filters + }, + actions: [ + { + api: 'addHSMProfile', + icon: 'plus-outlined', + label: 'label.create.hsmprofile', + listView: true, + popup: true, + dataView: false, + show: (record, store) => { + return ['Admin'].includes(store.userInfo.roletype) + }, + args: (record, store, group) => { + return ['Admin'].includes(store.userInfo.roletype) + ? 
['name', 'zoneid', 'vendorname', 'domainid', 'account', 'projectid', 'details', 'system'] + : ['name', 'zoneid', 'vendorname', 'details'] + }, + mapping: { + details: { + optionalKeys: ['pin', 'library', 'slot', 'slot_list_index', 'token_label'] + } + } + }, + { + api: 'updateHSMProfile', + icon: 'edit-outlined', + docHelp: 'adminguide/storage.html#lifecycle-operations', + label: 'label.update.hsm.profile', + dataView: true, + popup: true, + show: (record, store) => { + return ['Admin'].includes(store.userInfo.roletype) + }, + args: ['id', 'name', 'enabled'], + mapping: { + id: { + value: (record) => record.id + } + } + }, + { + api: 'deleteHSMProfile', + icon: 'delete-outlined', + docHelp: 'adminguide/storage.html#lifecycle-operations', + label: 'label.delete.hsm.profile', + message: 'message.action.delete.hsm.profile', + dataView: true, + popup: true, + show: (record, store) => { + return ['Admin'].includes(store.userInfo.roletype) + }, + args: ['id'], + mapping: { + id: { + value: (record) => record.id + } + } + } + ] + } + ] +} diff --git a/ui/src/config/section/storage.js b/ui/src/config/section/storage.js index 75432314b034..964c025ab5fe 100644 --- a/ui/src/config/section/storage.js +++ b/ui/src/config/section/storage.js @@ -63,7 +63,7 @@ export default { return fields }, - details: ['name', 'id', 'type', 'storagetype', 'diskofferingdisplaytext', 'deviceid', 'sizegb', 'physicalsize', 'provisioningtype', 'utilization', 'diskkbsread', 'diskkbswrite', 'diskioread', 'diskiowrite', 'diskiopstotal', 'miniops', 'maxiops', 'path', 'deleteprotection'], + details: ['name', 'id', 'type', 'storagetype', 'diskofferingdisplaytext', 'kmskey', 'deviceid', 'sizegb', 'physicalsize', 'provisioningtype', 'utilization', 'diskkbsread', 'diskkbswrite', 'diskioread', 'diskiowrite', 'diskiopstotal', 'miniops', 'maxiops', 'path', 'deleteprotection'], related: [{ name: 'snapshot', title: 'label.snapshots', @@ -92,7 +92,7 @@ export default { } ], searchFilters: () => { - const filters = 
['name', 'zoneid', 'domainid', 'account', 'state', 'tags', 'serviceofferingid', 'diskofferingid', 'isencrypted'] + const filters = ['name', 'zoneid', 'domainid', 'account', 'state', 'tags', 'serviceofferingid', 'diskofferingid', 'kmskeyid', 'isencrypted'] if (['Admin', 'DomainAdmin'].includes(store.getters.userInfo.roletype)) { filters.push('storageid') } @@ -221,6 +221,25 @@ export default { popup: true, component: shallowRef(defineAsyncComponent(() => import('@/views/storage/MigrateVolume.vue'))) }, + { + api: 'migrateVolumesToKMS', + icon: 'lock-outlined', + docHelp: 'adminguide/storage.html#lifecycle-operations', + label: 'label.migrate.volume.to.kms', + message: 'message.action.migrate.volume.to.kms', + dataView: true, + popup: true, + show: (record, store) => { + return record.encryptformat && !record.kmskeyid && + ['Ready', 'Allocated'].includes(record.state) + }, + args: ['kmskeyid'], + mapping: { + volumeids: { + value: (record) => { return record.id } + } + } + }, { api: 'changeOfferingForVolume', icon: 'swap-outlined', diff --git a/ui/src/store/modules/user.js b/ui/src/store/modules/user.js index 6a818d587233..9cffe85b81f7 100644 --- a/ui/src/store/modules/user.js +++ b/ui/src/store/modules/user.js @@ -480,6 +480,16 @@ const user = { commit('SET_CLOUDIAN', cloudian) }).catch(ignored => { }) + + if ('listHSMProfiles' in store.getters.apis) { + getAPI('listHSMProfiles', { listall: true }).then(response => { + const hasHsmProfiles = (response.listhsmprofilesresponse.count > 0) + const features = Object.assign({}, store.getters.features) + features.hashsmprofiles = hasHsmProfiles + commit('SET_FEATURES', features) + }).catch(ignored => { + }) + } }).catch(error => { console.error(error) }) diff --git a/ui/src/views/AutogenView.vue b/ui/src/views/AutogenView.vue index f4aa842d8f2b..3de206a8b680 100644 --- a/ui/src/views/AutogenView.vue +++ b/ui/src/views/AutogenView.vue @@ -486,7 +486,8 @@ + v-model:value="form[field.name]" + 
:optionalKeys="currentAction.mapping?.[field.name]?.optionalKeys || []" /> + @update-root-disk-iops-value="updateIOPSValue" + @update-root-kms-key="updateRootKmsKey"/> @@ -394,14 +398,17 @@ @handle-search-filter="($event) => handleSearchFilter('diskOfferings', $event)" > + @update-iops-value="updateIOPSValue" + @update-data-kms-key="updateDataKmsKey"/> @@ -1050,7 +1057,8 @@ export default { keyboards: [], bootTypes: [], bootModes: [], - ioPolicyTypes: [] + ioPolicyTypes: [], + kmsKeys: [] }, rowCount: {}, loading: { @@ -1071,7 +1079,8 @@ export default { pods: false, clusters: false, hosts: false, - groups: false + groups: false, + kmsKeys: false }, owner: { projectid: store.getters.project?.id, @@ -1726,6 +1735,22 @@ export default { serviceOffering (oldValue, newValue) { if (oldValue && newValue && oldValue.id !== newValue.id) { this.dynamicscalingenabled = this.isDynamicallyScalable() + // Fetch KMS keys if encryption is enabled + if (newValue && newValue.encryptroot && this.zoneId) { + this.fetchKmsKeys() + } + } + }, + diskOffering (newValue) { + // Fetch KMS keys if encryption is enabled + if (newValue && newValue.encrypt && this.zoneId) { + this.fetchKmsKeys() + } + }, + overrideDiskOffering (newValue) { + // Fetch KMS keys if encryption is enabled + if (newValue && newValue.encrypt && this.zoneId) { + this.fetchKmsKeys() } }, template (oldValue, newValue) { @@ -1993,6 +2018,30 @@ export default { const param = this.params.networks this.fetchOptions(param, 'networks') }, + fetchKmsKeys () { + if (!this.zoneId) { + return + } + this.loading.kmsKeys = true + this.options.kmsKeys = [] + getAPI('listKMSKeys', { + zoneid: this.zoneId, + account: this.owner.account, + domainid: this.owner.domainid, + projectid: this.owner.projectid + }).then(response => { + const kmskeyMap = response.listkmskeysresponse.kmskey || [] + if (kmskeyMap.length > 0) { + this.options.kmsKeys = kmskeyMap + } else { + this.options.kmsKeys = null + } + }).catch(() => { + 
this.options.kmsKeys = null + }).finally(() => { + this.loading.kmsKeys = false + }) + }, resetData () { this.vm = { name: null, @@ -2017,6 +2066,12 @@ export default { this.formRef.value.resetFields() this.fetchData() }, + updateRootKmsKey (value) { + this.form.rootkmskeyid = value + }, + updateDataKmsKey (value) { + this.form.datakmskeyid = value + }, updateFieldValue (name, value) { if (name === 'templateid') { this.imageType = 'templateid' @@ -2380,6 +2435,10 @@ export default { deployVmData['details[0].memory'] = values.memory } } + // Add root disk KMS key if selected (optional - falls back to legacy passphrase if not provided) + if (values.rootkmskeyid) { + deployVmData.rootdiskkmskeyid = values.rootkmskeyid + } if (this.selectedTemplateConfiguration) { deployVmData['details[0].configurationId'] = this.selectedTemplateConfiguration.id } @@ -2406,12 +2465,29 @@ export default { }) } } else { - deployVmData.diskofferingid = values.diskofferingid - if (values.size) { - deployVmData.size = values.size + // When a KMS key is selected for data disk, we must use datadisksdetails format + if (values.datakmskeyid) { + deployVmData['datadisksdetails[0].diskofferingid'] = values.diskofferingid + deployVmData['datadisksdetails[0].deviceid'] = 1 // Device ID 1 for first data disk (0=root, 3=CD-ROM reserved) + if (values.size) { + deployVmData['datadisksdetails[0].size'] = values.size + } + deployVmData['datadisksdetails[0].kmskeyid'] = values.datakmskeyid + // Add IOPS if customized + if (this.isCustomizedDiskIOPS) { + deployVmData['datadisksdetails[0].miniops'] = this.diskIOpsMin + deployVmData['datadisksdetails[0].maxiops'] = this.diskIOpsMax + } + } else { + // Legacy format when no KMS key + deployVmData.diskofferingid = values.diskofferingid + if (values.size) { + deployVmData.size = values.size + } } } - if (this.isCustomizedDiskIOPS) { + // IOPS for non-KMS data disks (KMS data disks IOPS handled above in datadisksdetails) + if (this.isCustomizedDiskIOPS && 
!values.datakmskeyid) { deployVmData['details[0].minIopsDo'] = this.diskIOpsMin deployVmData['details[0].maxIopsDo'] = this.diskIOpsMax } @@ -3087,6 +3163,7 @@ export default { this.selectedBackupOffering = null this.fetchZoneOptions() this.updateZoneAllowsBackupOperations() + this.fetchKmsKeys() }, onSelectPodId (value) { this.podId = value diff --git a/ui/src/views/compute/wizard/DiskSizeSelection.vue b/ui/src/views/compute/wizard/DiskSizeSelection.vue index bd202042e536..baae69ea1a12 100644 --- a/ui/src/views/compute/wizard/DiskSizeSelection.vue +++ b/ui/src/views/compute/wizard/DiskSizeSelection.vue @@ -16,35 +16,62 @@ // under the License.