fix: prevent quarkus from persisting logging runtime defaults (#41005) (#41166)

* fix: ensuring that build-time logging wildcards are not used at runtime

closes: #40977



* fix: removing the usage of ConfigValue.getRawValue where not appropriate

closes:



* correcting auto logging tests



---------


(cherry picked from commit cf7c9a6ecd21c9a538e9c84aa154edb981ae3b08)

Signed-off-by: Steve Hawkins <shawkins@redhat.com>
Steven Hawkins 2025-07-19 10:15:20 -04:00 committed by GitHub
parent 91eec167ad
commit df1329f70a
13 changed files with 96 additions and 55 deletions
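A note on the getRawValue changes below: in SmallRye Config, which the Quarkus distribution builds on, a ConfigValue carries both the raw text of a property as written in its source and the resolved value after expression expansion. The following standalone sketch (the property names are made up for illustration; only the public SmallRye Config API is assumed) shows why getValue() is the accessor the touched call sites should use:

    import io.smallrye.config.ConfigValue;
    import io.smallrye.config.SmallRyeConfig;
    import io.smallrye.config.SmallRyeConfigBuilder;

    public class RawVsResolvedValue {
        public static void main(String[] args) {
            // hypothetical properties, purely for illustration
            SmallRyeConfig config = new SmallRyeConfigBuilder()
                    .addDefaultInterceptors() // enables expression expansion among other defaults
                    .withDefaultValue("db.schema", "${schema.name}")
                    .withDefaultValue("schema.name", "public")
                    .build();

            ConfigValue value = config.getConfigValue("db.schema");
            // getRawValue() returns the text exactly as written in the config source
            System.out.println(value.getRawValue()); // ${schema.name}
            // getValue() returns the resolved (expanded) value, which callers normally want
            System.out.println(value.getValue());    // public
        }
    }

Reporting or persisting the raw form can leak unexpanded expressions and, as the PropertyMapper change below shows, lets Quarkus record values it should not treat as runtime defaults.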

KeycloakRecorder.java

@ -41,6 +41,7 @@ import org.keycloak.common.Profile;
import org.keycloak.common.crypto.CryptoIntegration;
import org.keycloak.common.crypto.CryptoProvider;
import org.keycloak.common.crypto.FipsMode;
+import org.keycloak.config.DatabaseOptions;
import org.keycloak.config.TruststoreOptions;
import org.keycloak.marshalling.Marshalling;
import org.keycloak.provider.Provider;
@ -130,7 +131,7 @@ public class KeycloakRecorder {
}
public HibernateOrmIntegrationRuntimeInitListener createDefaultUnitListener() {
-return propertyCollector -> propertyCollector.accept(AvailableSettings.DEFAULT_SCHEMA, Configuration.getRawValue("kc.db-schema"));
+return propertyCollector -> propertyCollector.accept(AvailableSettings.DEFAULT_SCHEMA, Configuration.getConfigValue(DatabaseOptions.DB_SCHEMA).getValue());
}
public void setCryptoProvider(FipsMode fipsMode) {

QuarkusProfileConfigResolver.java

@ -11,7 +11,7 @@ public class QuarkusProfileConfigResolver extends CommaSeparatedListProfileConfi
static String getConfig(String key) {
return Configuration.getRawPersistedProperty(key)
-.orElse(Configuration.getRawValue(key));
+.orElse(Configuration.getConfigValue(key).getValue());
}
}

ShowConfig.java

@ -81,7 +81,17 @@ public final class ShowConfig extends AbstractCommand implements Runnable {
}
if (mapper != null) {
-property = mapper.forKey(property).getFrom();
+String from = mapper.forKey(property).getFrom();
+// only report from when it exists
+if (!property.equals(from)) {
+ConfigValue value = getConfigValue(from);
+if (value.getValue() != null) {
+return;
+}
+configValue = value;
+property = from;
+}
}
if (!uniqueNames.add(property)) {
@ -110,11 +120,7 @@ public final class ShowConfig extends AbstractCommand implements Runnable {
private void printProperty(String property, PropertyMapper<?> mapper, ConfigValue configValue) {
String sourceName = configValue.getConfigSourceName();
-String value = configValue.getRawValue();
-if (value == null) {
-value = configValue.getValue();
-}
+String value = configValue.getValue();
value = maskValue(value, sourceName, mapper);

Configuration.java

@ -111,10 +111,6 @@ public final class Configuration {
return PersistedConfigSource.getInstance().getProperties();
}
-public static String getRawValue(String propertyName) {
-return getConfig().getRawValue(propertyName);
-}
public static Iterable<String> getPropertyNames() {
return getPropertyNames(false);
}

PropertyMappingInterceptor.java

@ -102,7 +102,10 @@ public class PropertyMappingInterceptor implements ConfigSourceInterceptor {
}
allMappers.remove(mapper);
-if (!mapper.hasWildcard()) {
+// include additional mappings if we're on the from side of the mapping
+// as the mapping may not be bi-directional
+if (!mapper.hasWildcard() && name.equals(mapper.getFrom())) {
// this is not a wildcard value, but may map to wildcards
// the current example is something like log-level=wildcardCat1:level,wildcardCat2:level
var wildCard = PropertyMappers.getWildcardMappedFrom(mapper.getOption());
@ -116,9 +119,12 @@ public class PropertyMappingInterceptor implements ConfigSourceInterceptor {
mapper = mapper.forKey(name);
-// there is a corner case here: -1 for the reload period has no 'to' value.
-// if that becomes an issue we could use more metadata to perform a full mapping
-return toDistinctStream(name, mapper.getTo());
+if (name.equals(mapper.getFrom())) {
+// there is a corner case here: -1 for the reload period has no 'to' value.
+// if that becomes an issue we could use more metadata to perform a full mapping
+return toDistinctStream(name, mapper.getTo());
+}
+return Stream.of(name);
});
// include anything remaining that has a default value
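The interceptor now only fans a name out to its mapped targets when the name is on the 'from' side of the mapping, since the mapping is not necessarily bi-directional. As a rough illustration of the fan-out itself (a standalone sketch, not the Keycloak mapper code; the property-name scheme matches the quarkus.log.category assertions in the tests further down), a single kc.log-level value touches the root level plus one category property per entry:

    import java.util.LinkedHashSet;
    import java.util.Set;

    public class LogLevelFanOut {
        // hypothetical helper: derive the Quarkus property names a kc.log-level value touches
        static Set<String> targetNames(String logLevelValue) {
            Set<String> names = new LinkedHashSet<>();
            for (String entry : logLevelValue.split(",")) {
                int idx = entry.lastIndexOf(':');
                if (idx < 0) {
                    names.add("quarkus.log.level"); // bare level applies to the root logger
                } else {
                    names.add("quarkus.log.category.\"" + entry.substring(0, idx) + "\".level");
                }
            }
            return names;
        }

        public static void main(String[] args) {
            System.out.println(targetNames("warn,org.keycloak:debug"));
            // [quarkus.log.level, quarkus.log.category."org.keycloak".level]
        }
    }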

LoggingPropertyMappers.java

@ -10,11 +10,16 @@ import static org.keycloak.quarkus.runtime.configuration.Configuration.isTrue;
import static org.keycloak.quarkus.runtime.configuration.mappers.PropertyMapper.fromOption;
import java.io.File;
import java.util.List;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.BiFunction;
import java.util.logging.Level;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import io.quarkus.runtime.configuration.MemorySizeConverter;
@ -34,13 +39,13 @@ public final class LoggingPropertyMappers {
private static final String SYSLOG_ENABLED_MSG = "Syslog is activated";
private static final String DEFAULT_ROOT_LOG_LEVEL = toLevel(LoggingOptions.LOG_LEVEL.getDefaultValue().orElseThrow().get(0)).getName();
-private static List<CategoryLevel> rootLogLevels;
+private final static Map<String, Map<String, String>> rootLogLevels = new HashMap<String, Map<String,String>>();
private LoggingPropertyMappers() {
}
public static PropertyMapper<?>[] getMappers() {
-rootLogLevels = null; // reset the cached root log level and categories
+rootLogLevels.clear(); // reset the cached root log level and categories
PropertyMapper<?>[] defaultMappers = new PropertyMapper[]{
fromOption(LoggingOptions.LOG)
.paramLabel("<handler>")
@ -338,21 +343,11 @@ public final class LoggingPropertyMappers {
}
private static String resolveRootLogLevel(String value, ConfigSourceInterceptorContext configSourceInterceptorContext) {
-for (CategoryLevel categoryLevel : parseRootLogLevel(value)) {
-if (categoryLevel.category == null) {
-return categoryLevel.levelName;
-}
-}
-return DEFAULT_ROOT_LOG_LEVEL; // defaults are not resolved in the mapper if transformer is present, so doing it explicitly here
+return parseRootLogLevel(value).getOrDefault(null, DEFAULT_ROOT_LOG_LEVEL); // defaults are not resolved in the mapper if transformer is present, so doing it explicitly here
}
private static Set<String> getConfiguredLogCategories(String value, Set<String> categories) {
-for (CategoryLevel categoryLevel : parseRootLogLevel(value)) {
-if (categoryLevel.category != null) {
-categories.add(categoryLevel.category);
-}
-}
-return categories;
+return parseRootLogLevel(value).keySet().stream().filter(Objects::nonNull).collect(Collectors.toCollection(LinkedHashSet::new));
}
private static void validateCategoryLogLevel(String logLevel) {
@ -364,26 +359,17 @@ public final class LoggingPropertyMappers {
}
private static String resolveCategoryLogLevelFromParentLogLevelOption(String category, String parentLogLevelValue, ConfigSourceInterceptorContext context) {
-for (CategoryLevel categoryLevel : parseRootLogLevel(parentLogLevelValue)) {
-if (category.equals(categoryLevel.category)) {
-return categoryLevel.levelName;
-}
-}
-return null;
+return parseRootLogLevel(parentLogLevelValue).get(category);
}
-private static List<CategoryLevel> parseRootLogLevel(String values) {
-if (rootLogLevels == null) {
-var value = values != null ? values : Configuration.getConfigValue(LoggingOptions.LOG_LEVEL).getValue();
-if (value == null) {
-return List.of(); // if no value is present, we do not cache the result
-}
-rootLogLevels = Stream.of(value.split(","))
-.map(LoggingPropertyMappers::validateLogLevel)
-.toList();
+private static Map<String, String> parseRootLogLevel(String values) {
+if (values == null) {
+return Map.of(); // if no value is present, we do not cache the result
}
-return rootLogLevels;
+// it's possible to have more than one root if not reset during tests, or we are unexpectedly parsing the default
+return rootLogLevels.computeIfAbsent(values, key -> Stream.of(values.split(","))
+.map(LoggingPropertyMappers::validateLogLevel)
+.collect(Collectors.toMap(CategoryLevel::category, CategoryLevel::levelName, (s1, s2) -> s1, LinkedHashMap::new)));
}
private static String resolveLogOutput(String value, ConfigSourceInterceptorContext context) {
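The parsing above now yields a map keyed by category, with a null key for the root level, instead of the previously cached List<CategoryLevel>. A minimal sketch of that shape, assuming a stand-in CategoryLevel record rather than Keycloak's own type and omitting the level validation and per-value caching the real mapper performs:

    import java.util.LinkedHashMap;
    import java.util.Map;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    public class RootLogLevelParsing {
        // stand-in for Keycloak's CategoryLevel; a null category means the root logger
        record CategoryLevel(String category, String levelName) {}

        static CategoryLevel parse(String entry) {
            int idx = entry.lastIndexOf(':');
            return idx < 0 ? new CategoryLevel(null, entry)
                    : new CategoryLevel(entry.substring(0, idx), entry.substring(idx + 1));
        }

        public static void main(String[] args) {
            Map<String, String> levels = Stream.of("info,org.hibernate.SQL:debug".split(","))
                    .map(RootLogLevelParsing::parse)
                    .collect(Collectors.toMap(CategoryLevel::category, CategoryLevel::levelName,
                            (first, second) -> first, LinkedHashMap::new));
            System.out.println(levels);                            // {null=info, org.hibernate.SQL=debug}
            System.out.println(levels.getOrDefault(null, "INFO")); // root lookup, as in resolveRootLogLevel
        }
    }

Collecting into a LinkedHashMap preserves the order the user supplied, and the null-keyed entry mirrors resolveRootLogLevel's getOrDefault(null, ...) above.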

PropertyMapper.java

@ -35,6 +35,7 @@ import java.util.stream.Stream;
import org.keycloak.config.DeprecatedMetadata;
import org.keycloak.config.Option;
import org.keycloak.config.OptionCategory;
+import org.keycloak.quarkus.runtime.Environment;
import org.keycloak.quarkus.runtime.cli.PropertyException;
import org.keycloak.quarkus.runtime.cli.ShortErrorMessageHandler;
import org.keycloak.quarkus.runtime.configuration.ConfigArgsConfigSource;
@ -280,6 +281,10 @@ public class PropertyMapper<T> {
return configValue;
}
+if (!isBuildTime() && Environment.isRebuild()) {
+value = null; // prevent quarkus from recording these raw values as runtime defaults
+}
// by unsetting the ordinal this will not be seen as directly modified by the user
return configValue.from().withName(name).withValue(mappedValue).withRawValue(value).withConfigSourceOrdinal(0).build();
}
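The block above is the heart of the fix named in the commit title: during a re-augmentation (Environment.isRebuild()), runtime-only mapped values are returned without their raw value so Quarkus does not record them as runtime defaults that would resurface on the next start. A rough sketch of the idea using SmallRye Config's ConfigValue builder (the property names, level value, and the rebuild flag are placeholders, not Keycloak's actual fields):

    import io.smallrye.config.ConfigValue;

    public class MappedValueSketch {
        public static void main(String[] args) {
            // pretend this came from the user's CLI/config source
            ConfigValue original = ConfigValue.builder()
                    .withName("kc.log-level")
                    .withValue("info,org.hibernate.SQL:debug")
                    .withRawValue("info,org.hibernate.SQL:debug")
                    .withConfigSourceOrdinal(400)
                    .build();

            boolean rebuild = true; // placeholder for !isBuildTime() && Environment.isRebuild()

            ConfigValue mapped = original.from()
                    .withName("quarkus.log.level")
                    .withValue("INFO")
                    // dropping the raw value keeps it out of the recorded runtime defaults
                    .withRawValue(rebuild ? null : original.getRawValue())
                    // ordinal 0 so the mapped value is not treated as directly set by the user
                    .withConfigSourceOrdinal(0)
                    .build();

            System.out.println(mapped.getName() + " = " + mapped.getValue());
        }
    }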

DebugHostnameSettingsResource.java

@ -173,10 +173,7 @@ public class DebugHostnameSettingsResource {
}
private void addOption(String key) {
-String rawValue = Configuration.getRawValue("kc." + key);
-if (rawValue != null && !rawValue.isEmpty()) {
-this.allConfigPropertiesMap.put(key, rawValue);
-}
+Configuration.getOptionalKcValue(key).ifPresent(value -> this.allConfigPropertiesMap.put(key, value));
}
private Map<String, String> getHeaders() {

View File

@ -28,6 +28,7 @@ import jakarta.persistence.FlushModeType;
import jakarta.persistence.SynchronizationType;
import org.hibernate.internal.SessionFactoryImpl;
import org.keycloak.Config;
+import org.keycloak.config.DatabaseOptions;
import org.keycloak.connections.jpa.JpaConnectionProviderFactory;
import org.keycloak.connections.jpa.support.EntityManagerProxy;
import org.keycloak.models.KeycloakSession;
@ -59,7 +60,7 @@ public abstract class AbstractJpaConnectionProviderFactory implements JpaConnect
@Override
public String getSchema() {
-String schema = Configuration.getRawValue("kc.db-schema");
+String schema = Configuration.getConfigValue(DatabaseOptions.DB_SCHEMA).getValue();
if (schema != null && schema.contains("-") && ! Boolean.parseBoolean(System.getProperty(GlobalConfiguration.PRESERVE_SCHEMA_CASE.getKey()))) {
System.setProperty(GlobalConfiguration.PRESERVE_SCHEMA_CASE.getKey(), "true");
logger.warnf("The passed schema '%s' contains a dash. Setting liquibase config option PRESERVE_SCHEMA_CASE to true. See https://github.com/keycloak/keycloak/issues/20870 for more information.", schema);

FilesKeystoreVaultProviderFactory.java

@ -18,6 +18,7 @@
package org.keycloak.quarkus.runtime.vault;
import org.keycloak.Config;
+import org.keycloak.config.VaultOptions;
import org.keycloak.provider.EnvironmentDependentProviderFactory;
import org.keycloak.quarkus.runtime.configuration.Configuration;
@ -33,6 +34,6 @@ public class FilesKeystoreVaultProviderFactory extends org.keycloak.vault.FilesK
@Override
public boolean isSupported(Config.Scope config) {
-return getId().equals(Configuration.getRawValue("kc.vault"));
+return getId().equals(Configuration.getConfigValue(VaultOptions.VAULT).getValue());
}
}

FilesPlainTextVaultProviderFactory.java

@ -18,6 +18,7 @@
package org.keycloak.quarkus.runtime.vault;
import org.keycloak.Config;
+import org.keycloak.config.VaultOptions;
import org.keycloak.provider.EnvironmentDependentProviderFactory;
import org.keycloak.quarkus.runtime.configuration.Configuration;
@ -33,6 +34,6 @@ public class FilesPlainTextVaultProviderFactory extends org.keycloak.vault.Files
@Override
public boolean isSupported(Config.Scope config) {
-return getId().equals(Configuration.getRawValue("kc.vault"));
+return getId().equals(Configuration.getConfigValue(VaultOptions.VAULT).getValue());
}
}

PicocliTest.java

@ -266,6 +266,13 @@ public class PicocliTest extends AbstractConfigurationTest {
});
}
+@Test
+public void testShowConfigDisplaysPrimaryValue() {
+build("build", "--db=postgres");
+NonRunningPicocli nonRunningPicocli = pseudoLaunch("show-config");
+assertThat(nonRunningPicocli.getOutString(), containsString("postgres (Persisted)"));
+}
@Test
public void failSingleParamWithSpace() {
NonRunningPicocli nonRunningPicocli = pseudoLaunch("start", "--db postgres");
@ -449,6 +456,9 @@ public class PicocliTest extends AbstractConfigurationTest {
var value = nonRunningPicocli.config.getConfigValue("quarkus.log.category.\"org.keycloak\".level");
assertEquals("quarkus.log.category.\"org.keycloak\".level", value.getName());
assertEquals("WARN", value.getValue());
+value = nonRunningPicocli.config.getConfigValue("quarkus.log.category.\"org.keycloak1\".level");
+assertEquals("quarkus.log.category.\"org.keycloak1\".level", value.getName());
+assertNull(value.getValue());
}
@Test
@ -458,6 +468,9 @@ public class PicocliTest extends AbstractConfigurationTest {
var value = nonRunningPicocli.config.getConfigValue("quarkus.log.category.\"org.keycloak\".level");
assertEquals("quarkus.log.category.\"org.keycloak\".level", value.getName());
assertEquals("WARN", value.getValue());
+value = nonRunningPicocli.config.getConfigValue("quarkus.log.category.\"org.keycloak1\".level");
+assertEquals("quarkus.log.category.\"org.keycloak1\".level", value.getName());
+assertNull(value.getValue());
}
@Test

View File

@ -149,4 +149,32 @@ public class StartAutoBuildDistTest {
cliResult.assertStartedDevMode();
}
+@Test
+@Order(11)
+void testLogLevelNotPersisted(KeycloakDistribution dist) {
+CLIResult cliResult = dist.run("start", "--db=dev-file", "--log-level=org.hibernate.SQL:debug", "--http-enabled=true", "--hostname-strict=false");
+cliResult.assertMessage("DEBUG [org.hibernate.SQL]");
+cliResult.assertStarted();
+dist.stop();
+// logging runtime defaults should not be used
+cliResult = dist.run("start", "--db=dev-file", "--http-enabled=true", "--hostname-strict=false");
+cliResult.assertNoMessage("DEBUG [org.hibernate.SQL]");
+cliResult.assertStarted();
+}
+@Test
+@Order(12)
+void testLogLevelWildcardNotPersisted(KeycloakDistribution dist) {
+CLIResult cliResult = dist.run("start-dev", "--log-level-org.hibernate.SQL=debug");
+cliResult.assertMessage("DEBUG [org.hibernate.SQL]");
+cliResult.assertStartedDevMode();
+dist.stop();
+// logging runtime defaults should not be used
+cliResult = dist.run("start-dev");
+cliResult.assertNoMessage("DEBUG [org.hibernate.SQL]");
+cliResult.assertStartedDevMode();
+}
}