-
Notifications
You must be signed in to change notification settings - Fork 84
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
[compat][server][controller] Introduced options to speed up KafkaProd…
…ucer in Server for nearline workload (#1258) * [compat][server][controller] Introduced options to speed up KafkaProducer in Server for nearline workload During benchmarking, we noticed that producing is slow, and it can sometimes take 10+ms to produce a single record of medium size: 20-30KB. There are two issues we discovered during the benchmarking: 1. Kafka producer compression takes a lot of time. Even if we disable KafkaProducer compression, the Kafka broker will compress the data according to the target compression configured in the broker. 2. The Kafka Producer is not scalable: when multiple threads invoke the same producer, there is a lot of contention. These optimizations have overheads: 1. By disabling compression, the Kafka workload will increase; considering the optimization is only for nearline workload, we hope the workload increase is not too much. There is also a store-level control. 2. More producers in Venice Server mean more memory consumption, as each producer has its own buffer and its own producer threads. Ideally, we would like to enable these optimizations for a few critical stores that have heavy AA/WC nearline workload, to improve the E2E write latency. New Server Config: server.nearline.workload.producer.throughput.optimization.enabled: default true. This setting can override all the store-level configs to disable the optimization entirely. Two new store-level configs: NearlineProducerCountPerWriter: default 1; NearlineProducerCompressionEnabled: default true. We can use the admin tool to update these store-level configs. This PR also changes the admin operation protocol, so we need to deploy child controllers first and then the parent controller.
- Loading branch information
Showing
35 changed files
with
2,329 additions
and
58 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
203 changes: 171 additions & 32 deletions
203
...t/src/main/java/com/linkedin/davinci/kafka/consumer/LeaderFollowerStoreIngestionTask.java
Large diffs are not rendered by default.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
116 changes: 116 additions & 0 deletions
116
...c/test/java/com/linkedin/davinci/kafka/consumer/LeaderFollowerStoreIngestionTaskTest.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,116 @@ | ||
package com.linkedin.davinci.kafka.consumer; | ||
|
||
import static org.mockito.Mockito.doReturn; | ||
import static org.mockito.Mockito.mock; | ||
import static org.mockito.Mockito.never; | ||
import static org.mockito.Mockito.verify; | ||
import static org.testng.Assert.assertFalse; | ||
import static org.testng.Assert.assertTrue; | ||
|
||
import com.linkedin.venice.utils.lazy.Lazy; | ||
import com.linkedin.venice.writer.VeniceWriter; | ||
import java.util.HashMap; | ||
import java.util.Map; | ||
import org.testng.annotations.Test; | ||
|
||
|
||
public class LeaderFollowerStoreIngestionTaskTest { | ||
@Test | ||
public void testCheckWhetherToCloseUnusedVeniceWriter() { | ||
VeniceWriter<byte[], byte[], byte[]> writer1 = mock(VeniceWriter.class); | ||
VeniceWriter<byte[], byte[], byte[]> writer2 = mock(VeniceWriter.class); | ||
PartitionConsumptionState pcsForLeaderBeforeEOP = mock(PartitionConsumptionState.class); | ||
doReturn(LeaderFollowerStateType.LEADER).when(pcsForLeaderBeforeEOP).getLeaderFollowerState(); | ||
doReturn(false).when(pcsForLeaderBeforeEOP).isEndOfPushReceived(); | ||
PartitionConsumptionState pcsForLeaderAfterEOP = mock(PartitionConsumptionState.class); | ||
doReturn(LeaderFollowerStateType.LEADER).when(pcsForLeaderAfterEOP).getLeaderFollowerState(); | ||
doReturn(true).when(pcsForLeaderAfterEOP).isEndOfPushReceived(); | ||
PartitionConsumptionState pcsForFollowerBeforeEOP = mock(PartitionConsumptionState.class); | ||
doReturn(LeaderFollowerStateType.STANDBY).when(pcsForFollowerBeforeEOP).getLeaderFollowerState(); | ||
doReturn(false).when(pcsForFollowerBeforeEOP).isEndOfPushReceived(); | ||
PartitionConsumptionState pcsForFollowerAfterEOP = mock(PartitionConsumptionState.class); | ||
doReturn(LeaderFollowerStateType.STANDBY).when(pcsForFollowerAfterEOP).getLeaderFollowerState(); | ||
doReturn(true).when(pcsForLeaderAfterEOP).isEndOfPushReceived(); | ||
|
||
String versionTopicName = "store_v1"; | ||
// Some writers are not available. | ||
assertFalse( | ||
LeaderFollowerStoreIngestionTask.checkWhetherToCloseUnusedVeniceWriter( | ||
Lazy.of(() -> writer1), | ||
Lazy.of(() -> writer1), | ||
mock(Map.class), | ||
() -> {}, | ||
versionTopicName)); | ||
Lazy<VeniceWriter<byte[], byte[], byte[]>> veniceWriterWithInitializedValue1 = Lazy.of(() -> writer1); | ||
veniceWriterWithInitializedValue1.get(); | ||
assertFalse( | ||
LeaderFollowerStoreIngestionTask.checkWhetherToCloseUnusedVeniceWriter( | ||
veniceWriterWithInitializedValue1, | ||
Lazy.of(() -> writer1), | ||
mock(Map.class), | ||
() -> {}, | ||
versionTopicName)); | ||
assertFalse( | ||
LeaderFollowerStoreIngestionTask.checkWhetherToCloseUnusedVeniceWriter( | ||
Lazy.of(() -> writer1), | ||
veniceWriterWithInitializedValue1, | ||
mock(Map.class), | ||
() -> {}, | ||
versionTopicName)); | ||
|
||
// Same writers | ||
assertFalse( | ||
LeaderFollowerStoreIngestionTask.checkWhetherToCloseUnusedVeniceWriter( | ||
veniceWriterWithInitializedValue1, | ||
veniceWriterWithInitializedValue1, | ||
mock(Map.class), | ||
() -> {}, | ||
versionTopicName)); | ||
|
||
Lazy<VeniceWriter<byte[], byte[], byte[]>> veniceWriterWithInitializedValue2 = Lazy.of(() -> writer2); | ||
veniceWriterWithInitializedValue2.get(); | ||
// No leader | ||
Map<Integer, PartitionConsumptionState> noLeaderPCSMap = new HashMap<>(); | ||
noLeaderPCSMap.put(0, pcsForFollowerAfterEOP); | ||
noLeaderPCSMap.put(1, pcsForFollowerBeforeEOP); | ||
Runnable runnable = mock(Runnable.class); | ||
|
||
assertTrue( | ||
LeaderFollowerStoreIngestionTask.checkWhetherToCloseUnusedVeniceWriter( | ||
veniceWriterWithInitializedValue1, | ||
veniceWriterWithInitializedValue2, | ||
noLeaderPCSMap, | ||
runnable, | ||
versionTopicName)); | ||
verify(runnable).run(); | ||
|
||
// One leader before EOP and some follower | ||
Map<Integer, PartitionConsumptionState> oneLeaderBeforeEOPPCSMap = new HashMap<>(); | ||
oneLeaderBeforeEOPPCSMap.put(0, pcsForLeaderBeforeEOP); | ||
oneLeaderBeforeEOPPCSMap.put(1, pcsForFollowerBeforeEOP); | ||
runnable = mock(Runnable.class); | ||
assertFalse( | ||
LeaderFollowerStoreIngestionTask.checkWhetherToCloseUnusedVeniceWriter( | ||
veniceWriterWithInitializedValue1, | ||
veniceWriterWithInitializedValue2, | ||
oneLeaderBeforeEOPPCSMap, | ||
runnable, | ||
versionTopicName)); | ||
verify(runnable, never()).run(); | ||
|
||
// One leader before EOP and one leader after EOP and some follower | ||
Map<Integer, PartitionConsumptionState> oneLeaderBeforeEOPAndOneLeaderAfterEOPPCSMap = new HashMap<>(); | ||
oneLeaderBeforeEOPAndOneLeaderAfterEOPPCSMap.put(0, pcsForLeaderBeforeEOP); | ||
oneLeaderBeforeEOPAndOneLeaderAfterEOPPCSMap.put(1, pcsForLeaderAfterEOP); | ||
oneLeaderBeforeEOPAndOneLeaderAfterEOPPCSMap.put(2, pcsForFollowerAfterEOP); | ||
runnable = mock(Runnable.class); | ||
assertFalse( | ||
LeaderFollowerStoreIngestionTask.checkWhetherToCloseUnusedVeniceWriter( | ||
veniceWriterWithInitializedValue1, | ||
veniceWriterWithInitializedValue2, | ||
oneLeaderBeforeEOPAndOneLeaderAfterEOPPCSMap, | ||
runnable, | ||
versionTopicName)); | ||
verify(runnable, never()).run(); | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.