fix(core): skip existing items with null ids in StorageServiceSupport.fetchAllItemsOptimized (#1279)

It seems like the existing filters on items coming back from the database should make this change unnecessary, since existingItems starts empty and, from what I can tell, only contains (filtered) items from the database.

We've seen items with null ids show up in production, though. With any luck the logging will help us figure out why.
dbyron-sf committed Jul 1, 2023
1 parent 69141df commit 162c8fb
Showing 2 changed files with 118 additions and 11 deletions.
StorageServiceSupport.java
@@ -17,6 +17,7 @@
 
 import static net.logstash.logback.argument.StructuredArguments.value;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.Lists;
 import com.netflix.spectator.api.Counter;
 import com.netflix.spectator.api.Registry;
@@ -527,7 +528,8 @@ private Set<T> fetchAllItems(Set<T> existingItems) {
     return result;
   }
 
-  private Set<T> fetchAllItemsOptimized(Set<T> existingItems) {
+  @VisibleForTesting
+  Set<T> fetchAllItemsOptimized(Set<T> existingItems) {
     if (existingItems == null) {
       existingItems = new HashSet<>();
     }
@@ -555,16 +557,18 @@ private Set<T> fetchAllItemsOptimized(Set<T> existingItems) {
     Map<String, T> resultMap = new HashMap<>();
     existingItems.forEach(
         existingItem -> {
-          String existingItemId = buildObjectKey(existingItem.getId());
-          if (deletedItems.containsKey(existingItemId)) {
-            // item was deleted, skip it
-            log.debug(
-                "Item with id {} deleted from the store. Will not add to the result.",
-                existingItemId);
-            numRemoved.getAndIncrement();
-          } else if (!modifiedItems.containsKey(existingItemId)) {
-            // item was unchanged, add it back as is
-            resultMap.put(existingItemId, existingItem);
+          if (isIdNotNull(existingItem)) {
+            String existingItemId = buildObjectKey(existingItem.getId());
+            if (deletedItems.containsKey(existingItemId)) {
+              // item was deleted, skip it
+              log.debug(
+                  "Item with id {} deleted from the store. Will not add to the result.",
+                  existingItemId);
+              numRemoved.getAndIncrement();
+            } else if (!modifiedItems.containsKey(existingItemId)) {
+              // item was unchanged, add it back as is
+              resultMap.put(existingItemId, existingItem);
+            }
           }
         });
 
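The new guard calls an isIdNotNull helper that already lives elsewhere in StorageServiceSupport and is not part of this diff. A minimal sketch of what such a helper presumably looks like (the body, the log message, and the exact signature are assumptions; only the name and the call site above come from the commit):

  // Hypothetical sketch -- not the code from this commit.
  private boolean isIdNotNull(T item) {
    if (item.getId() == null) {
      // The commit message hopes logging will reveal where null-id items come
      // from; the exact message and level here are assumptions.
      log.warn("Skipping item with null id: {}", item);
      return false;
    }
    return true;
  }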
StorageServiceSupportTest.java
@@ -0,0 +1,103 @@
/*
 * Copyright 2023 Salesforce, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License")
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.spinnaker.front50.model;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import com.netflix.spectator.api.NoopRegistry;
import com.netflix.spectator.api.Registry;
import com.netflix.spinnaker.front50.api.model.pipeline.Pipeline;
import com.netflix.spinnaker.front50.config.StorageServiceConfigurationProperties;
import io.github.resilience4j.circuitbreaker.CircuitBreakerRegistry;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.junit.jupiter.api.Test;
import rx.Scheduler;

class StorageServiceSupportTest {

  /** Pipeline is an arbitrary choice of an object type. */
  class TestDAO extends StorageServiceSupport<Pipeline> {
    public TestDAO(
        StorageService service,
        Scheduler scheduler,
        ObjectKeyLoader objectKeyLoader,
        StorageServiceConfigurationProperties.PerObjectType configurationProperties,
        Registry registry,
        CircuitBreakerRegistry circuitBreakerRegistry) {
      super(
          ObjectType.PIPELINE,
          service,
          scheduler,
          objectKeyLoader,
          configurationProperties,
          registry,
          circuitBreakerRegistry);
    }
  }

  private StorageService storageService = mock(StorageService.class);

  private Scheduler scheduler = mock(Scheduler.class);

  private StorageServiceConfigurationProperties.PerObjectType testDAOConfigProperties =
      new StorageServiceConfigurationProperties.PerObjectType();

  private TestDAO testDAO =
      new TestDAO(
          storageService,
          scheduler,
          new DefaultObjectKeyLoader(storageService),
          testDAOConfigProperties,
          new NoopRegistry(),
          CircuitBreakerRegistry.ofDefaults());

  @Test
  void fetchAllItemsOptimizedWithNullId() {
    // If presented with an existing item with a null id, make sure
    // fetchAllItemsOptimized ignores it, as opposed to trying to build an
    // object key for a null id, which results in a NullPointerException.

    // No need for any items from the data store for this
    List<Pipeline> modifiedItems = Collections.emptyList();
    List<Pipeline> deletedItems = Collections.emptyList();
    Map<String, List<Pipeline>> newerItems =
        Map.of("not_deleted", modifiedItems, "deleted", deletedItems);
    doReturn(newerItems)
        .when(storageService)
        .loadObjectsNewerThan(eq(ObjectType.PIPELINE), anyLong());

    HashSet<Pipeline> existingItems = new HashSet<>();
    Pipeline itemWithNullId = new Pipeline();
    itemWithNullId.setName("pipeline1");
    itemWithNullId.setId(null);
    existingItems.add(itemWithNullId);

    Set<Pipeline> resultingItems = testDAO.fetchAllItemsOptimized(existingItems);
    assertThat(resultingItems).isNotNull();
    assertThat(resultingItems).isEmpty();

    verify(storageService).loadObjectsNewerThan(eq(ObjectType.PIPELINE), anyLong());
  }
}
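To run just this test locally, something like the following should work, assuming the standard Gradle wrapper and that the test lives in the front50-core module (the module name is an assumption based on where StorageServiceSupport is defined):

  ./gradlew :front50-core:test --tests '*StorageServiceSupportTest'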
