Option to delay fetching bulk data between batches of new clients
moshegood committed Feb 16, 2024
1 parent 52ff6a5 commit f517e61
Showing 2 changed files with 17 additions and 14 deletions.
docs/configuration.md: 2 changes (1 addition, 1 deletion)
@@ -65,7 +65,7 @@ For **Duration** settings, the value should be an integer followed by `ms`, `
| `logLevel` | `LOG_LEVEL` | String | `info` | Should be `debug`, `info`, `warn`, `error`, or `none`. To learn more, read [Logging](./logging.md). |
| `bigSegmentsStaleAsDegraded` | `BIG_SEGMENTS_STALE_AS_DEGRADED` | Boolean | `false` | Indicates if environments should be considered degraded if Big Segments are not fully synchronized. |
| `bigSegmentsStaleThreshold` | `BIG_SEGMENTS_STALE_THRESHOLD` | Duration | `5m` | Indicates how long until Big Segments should be considered stale. |
- | n/a | `STREAMING_MIN_DELAY` | Duration | `0` | The minimum latency of responding to a new client connection. Used only in proxy mode for streaming clients. Useful for reducing memory when under heavy load, as many clients can share a single data fetch. |
+ | n/a | `BATCH_FETCH_PERIOD` | Duration | `0` | The minimum latency between bulk fetching all data for a batch of new clients. Used only in proxy mode for streaming clients. Useful for reducing memory when under heavy load, as many clients can share a single data fetch. |

_(1)_ The default values for `streamUri`, `baseUri`, and `clientSideBaseUri` are `https://stream.launchdarkly.com`, `https://sdk.launchdarkly.com`, and `https://clientsdk.launchdarkly.com`, respectively. You should never need to change these URIs unless you are either using a special instance of the LaunchDarkly service, in which case Support will tell you how to set them, or you are accessing LaunchDarkly using a reverse proxy or some other mechanism that rewrites URLs.

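For illustration, here is a minimal sketch of how a Duration-typed setting such as `BATCH_FETCH_PERIOD` is interpreted: an integer followed by a unit suffix, parsed with Go's `time.ParseDuration`. This is a hypothetical standalone program, not the relay's actual configuration loader.

```go
package main

import (
	"fmt"
	"os"
	"time"
)

func main() {
	raw, ok := os.LookupEnv("BATCH_FETCH_PERIOD")
	if !ok {
		fmt.Println("BATCH_FETCH_PERIOD not set; bulk fetches are not spaced out")
		return
	}
	// Accepts values such as "500ms", "2s", or "1m", matching the Duration
	// format described in the table above.
	d, err := time.ParseDuration(raw)
	if err != nil {
		fmt.Printf("ignoring invalid BATCH_FETCH_PERIOD %q: %v\n", raw, err)
		return
	}
	fmt.Printf("bulk data fetches will start at least %v apart\n", d)
}
```

With the default of `0`, no additional delay is applied.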
internal/streams/stream_provider_server_side.go: 29 changes (16 additions, 13 deletions)
@@ -33,7 +33,8 @@ type serverSideEnvStreamRepository struct {
	store   EnvStoreQueries
	loggers ldlog.Loggers

-	flightGroup singleflight.Group
+	flightGroup    singleflight.Group
+	previousFlight time.Time
}

func (s *serverSideStreamProvider) Handler(credential sdkauth.ScopedCredential) http.HandlerFunc {
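The repository struct gains a `previousFlight` timestamp next to the existing `singleflight.Group`. The flight group is what lets many newly connected clients share one bulk data fetch: concurrent callers of `Do` with the same key all wait on a single execution and receive the same result. The sketch below (hypothetical names, not relay code) demonstrates that coalescing with the same `golang.org/x/sync/singleflight` package.

```go
package main

import (
	"fmt"
	"sync"
	"sync/atomic"
	"time"

	"golang.org/x/sync/singleflight"
)

func main() {
	var group singleflight.Group
	var executions atomic.Int64 // how many times the expensive function actually ran

	var wg sync.WaitGroup
	for i := 0; i < 10; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			v, _, shared := group.Do("getReplayEvent", func() (interface{}, error) {
				executions.Add(1)
				time.Sleep(100 * time.Millisecond) // stand-in for the bulk data fetch
				return "put-event", nil
			})
			fmt.Printf("got %v (shared=%v)\n", v, shared)
		}()
	}
	wg.Wait()
	fmt.Println("expensive function ran", executions.Load(), "time(s)") // typically 1
}
```

Delaying the start of each fetch, as the rest of this commit does, widens the window during which new clients can pile onto an already pending call.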
@@ -111,7 +112,20 @@ func (r *serverSideEnvStreamRepository) Replay(channel, id string) chan eventsou
// getReplayEvent will return a ServerSidePutEvent with all the data needed for a Replay.
func (r *serverSideEnvStreamRepository) getReplayEvent() (eventsource.Event, error) {
	data, err, _ := r.flightGroup.Do("getReplayEvent", func() (interface{}, error) {
-		start := time.Now()
+		// We do not want to call this flight group too often, as it can use a lot of RAM.
+		// This will ensure that we don't call it more than once every BATCH_FETCH_PERIOD.
+		delayS, has := os.LookupEnv("BATCH_FETCH_PERIOD")
+		if has {
+			if delay, err := time.ParseDuration(delayS); err == nil {
+				if time.Since(r.previousFlight) < delay {
+					time.Sleep(delay - time.Since(r.previousFlight))
+				}
+			} else {
+				r.loggers.Warnf("Ignoring invalid BATCH_FETCH_PERIOD: %s\n", delayS)
+			}
+			r.previousFlight = time.Now()
+		}

		flags, err := r.store.GetAll(ldstoreimpl.Features())

		if err != nil {
@@ -131,17 +145,6 @@ func (r *serverSideEnvStreamRepository) getReplayEvent() (eventsource.Event, err

		// This call uses a lot of system resources (RAM in particular).
		event := MakeServerSidePutEvent(allData)
-		// So we sleep for a bit to allow a bunch of concurrent calls to
-		// all make use of this same flightGroup.
-		delayS, has := os.LookupEnv("STREAMING_MIN_DELAY")
-		if has {
-			if delay, err := time.ParseDuration(delayS); err == nil {
-				time.Sleep(delay - time.Since(start))
-			} else {
-				r.loggers.Warnf("Ignoring invalid STREAMING_MIN_DELAY: %s\n", delayS)
-			}
-		}

		return event, nil
	})
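Taken together, the two hunks replace the old behavior (sleep for `STREAMING_MIN_DELAY` after building the put event, measured from the start of the current call) with a minimum spacing between fetches: before fetching, the code sleeps until at least `BATCH_FETCH_PERIOD` has elapsed since the previous fetch began. The standalone sketch below (hypothetical names, not relay code) isolates that spacing pattern.

```go
package main

import (
	"fmt"
	"time"
)

// batcher spaces successive runs of an expensive operation at least minInterval
// apart; a call that arrives too soon sleeps for the remainder of the interval.
// Its zero-value previous timestamp means the first call runs immediately.
type batcher struct {
	minInterval time.Duration
	previous    time.Time
}

func (b *batcher) run(op func()) {
	if elapsed := time.Since(b.previous); elapsed < b.minInterval {
		time.Sleep(b.minInterval - elapsed)
	}
	b.previous = time.Now()
	op()
}

func main() {
	b := &batcher{minInterval: 500 * time.Millisecond}
	for i := 0; i < 3; i++ {
		b.run(func() {
			fmt.Println("bulk fetch started at", time.Now().Format("15:04:05.000"))
		})
	}
}
```

In the committed code this check runs inside the flight group, with the timestamp stored on the repository struct, so any clients that connect during the sleep simply join the fetch that is already pending.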

