Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
27 commits
Select commit Hold shift + click to select a range
45ad210
wip: multi-scope config
shwstppr Dec 9, 2024
c5da8fa
wip: config-key scope as bitmask
shwstppr Dec 12, 2024
6e7fe6e
Minor service layer changes
abh1sar Jan 29, 2025
24d4cfb
Unit Tests for multi scope config
abh1sar Jan 29, 2025
80f2554
revert changes to marvin/setup.py
abh1sar Jan 29, 2025
3f76576
fixed 41720to41800 upgrade path
abh1sar Jan 30, 2025
1924410
Merge branch 'main' into multi-scope-config
abh1sar Jan 31, 2025
16add72
UT for upgrade path files
abh1sar Jan 31, 2025
3f1c70e
Merge branch 'multi-scope-config' of https://github.com/shapeblue/clo…
abh1sar Jan 31, 2025
17a9e60
fix pre-commit failure
abh1sar Feb 1, 2025
d648900
Merge remote-tracking branch 'upstream/main' into multi-scope
abh1sar Feb 1, 2025
82fede2
remove unused imports in CapacityManager
abh1sar Feb 1, 2025
1666aae
Add license to ConfigurationGroupsAggregatorTest.java
abh1sar Feb 1, 2025
673dc56
remove extra logging
abh1sar Feb 3, 2025
e9214ca
Move dao code from ConfigurationGroupsAggregator to ConfigurationDaoImpl
abh1sar Feb 3, 2025
2fe63ed
Fix logging message in Upgrade42010to42100
abh1sar Feb 4, 2025
2d25b7b
Merge remote-tracking branch 'upstream/main' into multi-scope
abh1sar Feb 4, 2025
4d24432
fix errors in ConfigurationGroupsAggregatorTest
abh1sar Feb 4, 2025
bab660f
fix errors in MockConfigurationDaoImpl
abh1sar Feb 4, 2025
e20bc6e
changed logger.debug to logger.warn in DatabaseAccessObject.java
abh1sar Feb 5, 2025
1b64fa6
Use pool id to fetch value
abh1sar Feb 12, 2025
950309d
Add storagePool scope to volume.resize.allowed.beyond.allocation
abh1sar Feb 12, 2025
a0aee79
Show the exact scope for which listConfigurations api was called
abh1sar Feb 12, 2025
05f051a
UT for Upgrade42010to42100 and ConfigDepotImpl
abh1sar Feb 12, 2025
ba76d64
Merge remote-tracking branch 'upstream/main' into multi-scope
abh1sar Feb 12, 2025
9485099
Merge remote-tracking branch 'upstream/main' into multi-scope
abh1sar Feb 13, 2025
9f22056
fix type in LinstorStorageAdaptor
abh1sar Feb 13, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@
// under the License.
package com.cloud.capacity;

import java.util.List;

import org.apache.cloudstack.framework.config.ConfigKey;
import org.apache.cloudstack.storage.datastore.db.StoragePoolVO;

Expand Down Expand Up @@ -67,7 +69,7 @@ public interface CapacityManager {
"0.85",
"Percentage (as a value between 0 and 1) of storage utilization above which allocators will disable using the pool for low storage available.",
true,
ConfigKey.Scope.Zone);
List.of(ConfigKey.Scope.StoragePool, ConfigKey.Scope.Zone));
static final ConfigKey<Double> StorageOverprovisioningFactor =
new ConfigKey<>(
"Storage",
Expand All @@ -85,7 +87,7 @@ public interface CapacityManager {
"0.85",
"Percentage (as a value between 0 and 1) of allocated storage utilization above which allocators will disable using the pool for low allocated storage available.",
true,
ConfigKey.Scope.Zone);
List.of(ConfigKey.Scope.StoragePool, ConfigKey.Scope.Zone));
static final ConfigKey<Boolean> StorageOperationsExcludeCluster =
new ConfigKey<>(
Boolean.class,
Expand Down Expand Up @@ -125,7 +127,7 @@ public interface CapacityManager {
"Percentage (as a value between 0 and 1) of allocated storage utilization above which allocators will disable using the pool for volume resize. " +
"This is applicable only when volume.resize.allowed.beyond.allocation is set to true.",
true,
ConfigKey.Scope.Zone);
List.of(ConfigKey.Scope.StoragePool, ConfigKey.Scope.Zone));

ConfigKey<Integer> CapacityCalculateWorkers = new ConfigKey<>(ConfigKey.CATEGORY_ADVANCED, Integer.class,
"capacity.calculate.workers", "1",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -214,7 +214,7 @@ public interface StorageManager extends StorageService {
ConfigKey<Boolean> AllowVolumeReSizeBeyondAllocation = new ConfigKey<Boolean>("Advanced", Boolean.class, "volume.resize.allowed.beyond.allocation", "false",
"Determines whether volume size can exceed the pool capacity allocation disable threshold (pool.storage.allocated.capacity.disablethreshold) " +
"when resize a volume upto resize capacity disable threshold (pool.storage.allocated.resize.capacity.disablethreshold)",
true, ConfigKey.Scope.Zone);
true, List.of(ConfigKey.Scope.StoragePool, ConfigKey.Scope.Zone));

ConfigKey<Integer> StoragePoolHostConnectWorkers = new ConfigKey<>("Storage", Integer.class,
"storage.pool.host.connect.workers", "1",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,11 +22,16 @@
import java.util.Map;
import java.util.stream.Collectors;

import javax.inject.Inject;

import org.apache.cloudstack.framework.config.ConfigKey;
import org.apache.cloudstack.framework.config.ConfigKey.Scope;
import org.apache.cloudstack.framework.config.ScopedConfigStorage;
import org.apache.commons.collections.CollectionUtils;

import com.cloud.dc.dao.ClusterDao;
import com.cloud.org.Cluster;
import com.cloud.utils.Pair;
import com.cloud.utils.crypt.DBEncryptionUtil;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
Expand All @@ -35,6 +40,9 @@

public class ClusterDetailsDaoImpl extends ResourceDetailsDaoBase<ClusterDetailsVO> implements ClusterDetailsDao, ScopedConfigStorage {

@Inject
ClusterDao clusterDao;

protected final SearchBuilder<ClusterDetailsVO> ClusterSearch;
protected final SearchBuilder<ClusterDetailsVO> DetailSearch;

Expand Down Expand Up @@ -186,4 +194,13 @@ private String getCpuMemoryOvercommitRatio(String name) {

return name;
}

@Override
public Pair<Scope, Long> getParentScope(long id) {
    // A cluster's parent scope is its zone; unknown cluster ids have no parent.
    final Cluster cluster = clusterDao.findById(id);
    return (cluster == null) ? null : new Pair<>(getScope().getParent(), cluster.getDataCenterId());
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,8 @@
import org.apache.cloudstack.storage.datastore.db.StoragePoolDetailsDao;
import org.apache.cloudstack.storage.datastore.db.StoragePoolVO;

import com.cloud.utils.Pair;

public class StoragePoolDetailsDaoImpl extends ResourceDetailsDaoBase<StoragePoolDetailVO> implements StoragePoolDetailsDao, ScopedConfigStorage {

@Inject
Expand Down Expand Up @@ -57,4 +59,17 @@ public void addDetail(long resourceId, String key, String value, boolean display
}
super.addDetail(new StoragePoolDetailVO(resourceId, key, value, display));
}

@Override
public Pair<Scope, Long> getParentScope(long id) {
    // Unknown pool id: no parent scope to report.
    final StoragePoolVO pool = _storagePoolDao.findById(id);
    if (pool == null) {
        return null;
    }
    final Long clusterId = pool.getClusterId();
    if (clusterId != null) {
        // Cluster-backed pool: the parent is the next scope up from StoragePool.
        return new Pair<>(getScope().getParent(), clusterId);
    }
    // Zone-wide pool (no cluster): fall back straight to the zone scope.
    return new Pair<>(ConfigKey.Scope.Zone, pool.getDataCenterId());
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ public ConfigurationGroupsAggregator() {

public void updateConfigurationGroups() {
LOG.debug("Updating configuration groups");
List<ConfigurationVO> configs = configDao.listAllIncludingRemoved();
List<ConfigurationVO> configs = configDao.searchPartialConfigurations();
if (CollectionUtils.isEmpty(configs)) {
return;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,36 @@ public boolean columnExists(Connection conn, String tableName, String columnName
return columnExists;
}

/**
 * Returns the SQL type (the "Type" column of MySQL's DESCRIBE output) of the
 * given column, or {@code null} when the table/column does not exist or the
 * query fails. Failures are logged and swallowed, matching the other
 * best-effort helpers in this class.
 */
public String getColumnType(Connection conn, String tableName, String columnName) {
    // Identifiers cannot be bound as parameters, hence String.format; callers
    // pass constant table/column names from upgrade code, not user input.
    final String sql = String.format("DESCRIBE %s %s", tableName, columnName);
    // Fix: the ResultSet is now closed via try-with-resources instead of being
    // left to be reaped implicitly when the statement closes.
    try (PreparedStatement pstmt = conn.prepareStatement(sql);
         ResultSet rs = pstmt.executeQuery()) {
        if (rs.next()) {
            return rs.getString("Type");
        }
    } catch (SQLException e) {
        logger.warn("Type for column {} can not be retrieved in {} ignoring exception: {}", columnName, tableName, e.getMessage());
    }
    return null;
}

/**
 * Adds a column to the given table. Errors are logged and swallowed so the
 * surrounding upgrade step can continue (consistent with the other DDL
 * helpers in this class).
 */
public void addColumn(Connection conn, String tableName, String columnName, String columnDefinition) {
    try (PreparedStatement pstmt = conn.prepareStatement(String.format("ALTER TABLE %s ADD COLUMN %s %s", tableName, columnName, columnDefinition))) {
        pstmt.executeUpdate();
        // Fix: the column is added TO the table — the old message said "from".
        logger.debug("Column {} is added successfully to the table {}", columnName, tableName);
    } catch (SQLException e) {
        logger.warn("Unable to add column {} to table {} due to exception", columnName, tableName, e);
    }
}

/**
 * Renames {@code oldColumnName} to {@code newColumnName} (with the supplied
 * definition) via ALTER TABLE ... CHANGE COLUMN. Errors are logged and
 * swallowed so the surrounding upgrade step can continue.
 */
public void changeColumn(Connection conn, String tableName, String oldColumnName, String newColumnName, String columnDefinition) {
    try (PreparedStatement pstmt = conn.prepareStatement(String.format("ALTER TABLE %s CHANGE COLUMN %s %s %s", tableName, oldColumnName, newColumnName, columnDefinition))) {
        pstmt.executeUpdate();
        // "in the table", not "from the table" — the column stays in place.
        logger.debug("Column {} is changed successfully to {} in the table {}", oldColumnName, newColumnName, tableName);
    } catch (SQLException e) {
        // Fix: copy-pasted message said "Unable to add column"; this helper changes one.
        logger.warn("Unable to change column {} to {} in the table {} due to exception", oldColumnName, newColumnName, tableName, e);
    }
}

/** Builds the conventional index name: {@code i_<table>__<col1>__<col2>...}. */
public String generateIndexName(String tableName, String... columnName) {
    final String joinedColumns = StringUtils.join(columnName, "__");
    return "i_" + tableName + "__" + joinedColumns;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -58,4 +58,20 @@ public static void dropTableColumnsIfExist(Connection conn, String tableName, Li
}
}

/**
 * Returns the SQL type of {@code columnName} in {@code tableName}, or
 * {@code null} when it cannot be determined.
 */
public static String getTableColumnType(Connection conn, String tableName, String columnName) {
    final String columnType = dao.getColumnType(conn, tableName, columnName);
    return columnType;
}

/** Adds the column only when it is not already present (idempotent upgrade helper). */
public static void addTableColumnIfNotExist(Connection conn, String tableName, String columnName, String columnDefinition) {
    if (dao.columnExists(conn, tableName, columnName)) {
        return;
    }
    dao.addColumn(conn, tableName, columnName, columnDefinition);
}

/**
 * Changes {@code oldColumnName} to {@code newColumnName} when the old column is
 * present. NOTE(review): despite the "IfNotExist" suffix, the guard checks that
 * the OLD column EXISTS; the name is kept for caller compatibility — consider
 * renaming in a follow-up.
 */
public static void changeTableColumnIfNotExist(Connection conn, String tableName, String oldColumnName, String newColumnName, String columnDefinition) {
    if (!dao.columnExists(conn, tableName, oldColumnName)) {
        return;
    }
    dao.changeColumn(conn, tableName, oldColumnName, newColumnName, columnDefinition);
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -17,10 +17,16 @@
package com.cloud.upgrade.dao;

import com.cloud.upgrade.SystemVmTemplateRegistration;
import com.cloud.utils.db.TransactionLegacy;
import com.cloud.utils.exception.CloudRuntimeException;

import java.io.InputStream;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

import org.apache.cloudstack.framework.config.ConfigKey;

public class Upgrade42010to42100 extends DbUpgradeAbstractImpl implements DbUpgrade, DbUpgradeSystemVmTemplate {
private SystemVmTemplateRegistration systemVmTemplateRegistration;
Expand Down Expand Up @@ -53,6 +59,7 @@ public InputStream[] getPrepareScripts() {

@Override
public void performDataMigration(Connection conn) {
migrateConfigurationScopeToBitmask(conn);
}

@Override
Expand Down Expand Up @@ -80,4 +87,35 @@ public void updateSystemVmTemplates(Connection conn) {
throw new CloudRuntimeException("Failed to find / register SystemVM template(s)");
}
}

// Converts configuration.scope from the legacy varchar(255) representation to
// a BIGINT bitmask. Idempotent: the type check below makes re-runs no-ops once
// the column is already BIGINT.
protected void migrateConfigurationScopeToBitmask(Connection conn) {
    String scopeDataType = DbUpgradeUtils.getTableColumnType(conn, "configuration", "scope");
    logger.info("Data type of the column scope of table configuration is {}", scopeDataType);
    // Only migrate when the old single-scope varchar column is still in place;
    // anything else means the migration already ran (or the schema is unexpected).
    if (!"varchar(255)".equals(scopeDataType)) {
        return;
    }
    // Order matters: add the bitmask column, copy/convert the values, drop the
    // old varchar column, then rename new_scope into its place.
    DbUpgradeUtils.addTableColumnIfNotExist(conn, "configuration", "new_scope", "BIGINT DEFAULT 0");
    migrateExistingConfigurationScopeValues(conn);
    DbUpgradeUtils.dropTableColumnsIfExist(conn, "configuration", List.of("scope"));
    DbUpgradeUtils.changeTableColumnIfNotExist(conn, "configuration", "new_scope", "scope", "BIGINT NOT NULL DEFAULT 0 COMMENT 'Bitmask for scope(s) of this parameter'");
}

// Copies each row's legacy varchar scope into the new_scope bitmask column,
// mapping every known ConfigKey.Scope name to its bit value. Rows whose scope
// matches no enum name fall back to 0; NULL scopes are left untouched.
protected void migrateExistingConfigurationScopeValues(Connection conn) {
    StringBuilder sql = new StringBuilder("UPDATE configuration\n" +
            "SET new_scope = " +
            " CASE ");
    // Inlining the values is safe here: both name() and getBitValue() come from
    // the Scope enum itself, never from user input.
    for (ConfigKey.Scope scope : ConfigKey.Scope.values()) {
        sql.append(" WHEN scope = '").append(scope.name()).append("' THEN ").append(scope.getBitValue()).append(" ");
    }
    sql.append(" ELSE 0 " +
            " END " +
            "WHERE scope IS NOT NULL;");
    // Runs inside the current upgrade transaction; the statement is auto-closed
    // by the transaction helper.
    TransactionLegacy txn = TransactionLegacy.currentTxn();
    try (PreparedStatement pstmt = txn.prepareAutoCloseStatement(sql.toString())) {
        pstmt.executeUpdate();
    } catch (SQLException e) {
        logger.error("Failed to migrate existing configuration scope values to bitmask", e);
        throw new CloudRuntimeException(String.format("Failed to migrate existing configuration scope values to bitmask due to: %s", e.getMessage()));
    }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@
import com.cloud.domain.dao.DomainDao;
import com.cloud.domain.dao.DomainDetailsDao;
import com.cloud.user.dao.AccountDao;
import com.cloud.utils.Pair;
import com.cloud.utils.db.QueryBuilder;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
Expand Down Expand Up @@ -156,4 +157,13 @@ public String getConfigValue(long id, String key) {
}
return value;
}

@Override
public Pair<Scope, Long> getParentScope(long id) {
    // An account-scoped setting's parent is the account's owning domain.
    final Account account = _accountDao.findById(id);
    return (account == null) ? null : new Pair<>(getScope().getParent(), account.getDomainId());
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -20,13 +20,17 @@
import java.util.List;
import java.util.Map;

import javax.inject.Inject;

import org.apache.cloudstack.api.ApiConstants;
import org.apache.cloudstack.framework.config.ConfigKey;
import org.apache.cloudstack.framework.config.ConfigKey.Scope;
import org.apache.cloudstack.framework.config.ScopedConfigStorage;
import org.apache.cloudstack.resourcedetail.ResourceDetailsDaoBase;
import org.springframework.stereotype.Component;

import com.cloud.storage.ImageStore;
import com.cloud.utils.Pair;
import com.cloud.utils.crypt.DBEncryptionUtil;
import com.cloud.utils.db.QueryBuilder;
import com.cloud.utils.db.SearchBuilder;
Expand All @@ -36,6 +40,9 @@

@Component
public class ImageStoreDetailsDaoImpl extends ResourceDetailsDaoBase<ImageStoreDetailVO> implements ImageStoreDetailsDao, ScopedConfigStorage {
@Inject
ImageStoreDao imageStoreDao;

protected final SearchBuilder<ImageStoreDetailVO> storeSearch;

public ImageStoreDetailsDaoImpl() {
Expand Down Expand Up @@ -113,10 +120,20 @@ public String getConfigValue(long id, String key) {
public String getConfigValue(long id, ConfigKey<?> key) {
ImageStoreDetailVO vo = findDetail(id, key.key());
return vo == null ? null : getActualValue(vo);
}
}

@Override
public void addDetail(long resourceId, String key, String value, boolean display) {
    // Wrap the values in the image-store-specific detail VO before delegating
    // to the generic resource-details base DAO.
    final ImageStoreDetailVO detail = new ImageStoreDetailVO(resourceId, key, value, display);
    super.addDetail(detail);
}

@Override
public Pair<Scope, Long> getParentScope(long id) {
    // An image store's parent scope is the zone it belongs to.
    final ImageStore store = imageStoreDao.findById(id);
    return (store == null) ? null : new Pair<>(getScope().getParent(), store.getDataCenterId());
}

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.upgrade;

import static org.mockito.Mockito.when;

import java.util.Collections;

import org.apache.cloudstack.framework.config.dao.ConfigurationDao;
import org.apache.cloudstack.framework.config.dao.ConfigurationGroupDao;
import org.apache.cloudstack.framework.config.dao.ConfigurationSubGroupDao;
import org.apache.cloudstack.framework.config.impl.ConfigurationSubGroupVO;
import org.apache.cloudstack.framework.config.impl.ConfigurationVO;
import org.apache.logging.log4j.Logger;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;

@RunWith(MockitoJUnitRunner.class)
public class ConfigurationGroupsAggregatorTest {

    @InjectMocks
    private ConfigurationGroupsAggregator configurationGroupsAggregator = new ConfigurationGroupsAggregator();

    @Mock
    private ConfigurationDao configDao;

    @Mock
    private ConfigurationGroupDao configGroupDao;

    @Mock
    private ConfigurationSubGroupDao configSubGroupDao;

    @Mock
    private Logger logger;

    /**
     * Verifies that updateConfigurationGroups() re-resolves a configuration's
     * group/sub-group from its name ("test.config.name" -> sub-group "name"),
     * updates the row and persists it, logging start and completion.
     */
    @Test
    public void testUpdateConfigurationGroups() {
        ConfigurationVO config = new ConfigurationVO("Advanced", "DEFAULT", "management-server",
                "test.config.name", null, "description");
        config.setGroupId(1L);
        config.setSubGroupId(1L);

        when(configDao.searchPartialConfigurations()).thenReturn(Collections.singletonList(config));

        ConfigurationSubGroupVO configSubGroup = Mockito.mock(ConfigurationSubGroupVO.class);
        when(configSubGroupDao.findByName("name")).thenReturn(configSubGroup);
        // Consistency fix: use the statically imported when(...) throughout
        // instead of mixing it with Mockito.when(...).
        when(configSubGroup.getId()).thenReturn(10L);
        when(configSubGroup.getGroupId()).thenReturn(5L);

        configurationGroupsAggregator.updateConfigurationGroups();

        Assert.assertEquals(Long.valueOf(5), config.getGroupId());
        Assert.assertEquals(Long.valueOf(10), config.getSubGroupId());
        Mockito.verify(configDao, Mockito.times(1)).persist(config);
        Mockito.verify(logger, Mockito.times(1)).debug("Updating configuration groups");
        Mockito.verify(logger, Mockito.times(1)).debug("Successfully updated configuration groups.");
    }
}
Loading