diff --git a/ui/api/kafka/topic.promql.ts b/ui/api/kafka/topic.promql.ts
deleted file mode 100644
index 0f883aed1..000000000
--- a/ui/api/kafka/topic.promql.ts
+++ /dev/null
@@ -1,31 +0,0 @@
-export const incomingByteRate = (
- namespace: string,
- cluster: string,
- nodePools: string,
-) => `
- sum by (__console_metric_name__) (
- label_replace(
- irate(kafka_server_brokertopicmetrics_bytesin_total{topic!="",namespace="${namespace}",pod=~"${cluster}-(kafka|${nodePools})-\\\\d+",strimzi_io_kind="Kafka"}[5m]),
- "__console_metric_name__",
- "incoming_byte_rate",
- "",
- ""
- )
- )
-`;
-
-export const outgoingByteRate = (
- namespace: string,
- cluster: string,
- nodePools: string,
-) => `
- sum by (__console_metric_name__) (
- label_replace(
- irate(kafka_server_brokertopicmetrics_bytesout_total{topic!="",namespace="${namespace}",pod=~"${cluster}-(kafka|${nodePools})-\\\\d+",strimzi_io_kind="Kafka"}[5m]),
- "__console_metric_name__",
- "outgoing_byte_rate",
- "",
- ""
- )
- )
-`;
diff --git a/ui/app/[locale]/(authorized)/kafka/[kafkaId]/layout.tsx b/ui/app/[locale]/(authorized)/kafka/[kafkaId]/layout.tsx
index 132a132ff..75f24c23c 100644
--- a/ui/app/[locale]/(authorized)/kafka/[kafkaId]/layout.tsx
+++ b/ui/app/[locale]/(authorized)/kafka/[kafkaId]/layout.tsx
@@ -1,5 +1,5 @@
import { ClusterLinks } from "@/app/[locale]/(authorized)/kafka/[kafkaId]/ClusterLinks";
-import { getAuthOptions } from "@/app/api/auth/[...nextauth]/route";
+import { getAuthOptions } from "@/app/api/auth/[...nextauth]/auth-options";
import { AppLayout } from "@/components/AppLayout";
import { AppLayoutProvider } from "@/components/AppLayoutProvider";
import {
diff --git a/ui/app/[locale]/(authorized)/kafka/[kafkaId]/nodes/NodesTable.tsx b/ui/app/[locale]/(authorized)/kafka/[kafkaId]/nodes/NodesTable.tsx
index aa1b4f23a..75d2523ac 100644
--- a/ui/app/[locale]/(authorized)/kafka/[kafkaId]/nodes/NodesTable.tsx
+++ b/ui/app/[locale]/(authorized)/kafka/[kafkaId]/nodes/NodesTable.tsx
@@ -100,7 +100,7 @@ export function NodesTable({ nodes }: { nodes: Node[] }) {
);
case "status":
- const isStable = row.status == "Stable";
+ const isStable = row.status == "Running";
return (
diff --git a/ui/app/[locale]/(authorized)/kafka/[kafkaId]/nodes/page.tsx b/ui/app/[locale]/(authorized)/kafka/[kafkaId]/nodes/page.tsx
index 3efca94d2..a819c9028 100644
--- a/ui/app/[locale]/(authorized)/kafka/[kafkaId]/nodes/page.tsx
+++ b/ui/app/[locale]/(authorized)/kafka/[kafkaId]/nodes/page.tsx
@@ -1,4 +1,4 @@
-import { getKafkaCluster, getKafkaClusterKpis } from "@/api/kafka/actions";
+import { getKafkaCluster } from "@/api/kafka/actions";
import { KafkaParams } from "@/app/[locale]/(authorized)/kafka/[kafkaId]/kafka.params";
import { DistributionChart } from "@/app/[locale]/(authorized)/kafka/[kafkaId]/nodes/DistributionChart";
import {
@@ -6,17 +6,25 @@ import {
NodesTable,
} from "@/app/[locale]/(authorized)/kafka/[kafkaId]/nodes/NodesTable";
import { Alert, PageSection } from "@/libs/patternfly/react-core";
-import { redirect } from "@/i18n/routing";
import { getTranslations } from "next-intl/server";
import { Suspense } from "react";
function nodeMetric(
- metrics: Record<string, number> | undefined,
+ metrics: { value: string, nodeId: string }[] | undefined,
nodeId: number,
): number {
- return metrics ? (metrics[nodeId.toString()] ?? 0) : 0;
+ return parseFloat(metrics?.find(e => e.nodeId == nodeId.toString())?.value ?? "0");
}
+function nodeRangeMetric(
+ metrics: { range: string[][], nodeId?: string }[] | undefined,
+ nodeId: number,
+): number {
+ let range = metrics?.find(e => e.nodeId == nodeId.toString())?.range;
+ return parseFloat(range?.[range?.length - 1]?.[1] ?? "0");
+}
+
+
export default function NodesPage({ params }: { params: KafkaParams }) {
return (
@@ -27,29 +35,60 @@ export default function NodesPage({ params }: { params: KafkaParams }) {
async function ConnectedNodes({ params }: { params: KafkaParams }) {
const t = await getTranslations();
- const res = await getKafkaClusterKpis(params.kafkaId);
+ const cluster = await getKafkaCluster(params.kafkaId, {
+ fields: 'name,namespace,creationTimestamp,status,kafkaVersion,nodes,controller,authorizedOperations,listeners,conditions,metrics'
+ });
+ const metrics = cluster?.attributes.metrics;
- let { cluster, kpis } = res || {};
+ const nodes: Node[] = (cluster?.attributes.nodes ?? []).map((node) => {
+ let brokerState = metrics && nodeMetric(metrics.values?.["broker_state"], node.id);
+ let status;
- const nodes: Node[] = (cluster?.attributes.nodes || []).map((node) => {
- const status = kpis
- ? nodeMetric(kpis.broker_state, node.id) === 3
- ? "Stable"
- : "Unstable"
- : "Unknown";
- const leaders = kpis
- ? nodeMetric(kpis.leader_count?.byNode, node.id)
+ /*
+ * https://github.com/apache/kafka/blob/3.8.0/metadata/src/main/java/org/apache/kafka/metadata/BrokerState.java
+ */
+ switch (brokerState ?? 127) {
+ case 0:
+ status = "Not Running";
+ break;
+ case 1:
+ status = "Starting";
+ break;
+ case 2:
+ status = "Recovery";
+ break;
+ case 3:
+ status = "Running";
+ break;
+ case 6:
+ status = "Pending Controlled Shutdown";
+ break;
+ case 7:
+ status = "Shutting Down";
+ break;
+ case 127:
+ default:
+ status = "Unknown";
+ break;
+ }
+
+ const leaders = metrics
+ ? nodeMetric(metrics.values?.["leader_count"], node.id)
: undefined;
+
const followers =
- kpis && leaders
- ? nodeMetric(kpis.replica_count?.byNode, node.id) - leaders
+ metrics && leaders
+ ? nodeMetric(metrics.values?.["replica_count"], node.id) - leaders
: undefined;
- const diskCapacity = kpis
- ? nodeMetric(kpis.volume_stats_capacity_bytes?.byNode, node.id)
+
+ const diskCapacity = metrics
+ ? nodeRangeMetric(metrics.ranges?.["volume_stats_capacity_bytes"], node.id)
: undefined;
- const diskUsage = kpis
- ? nodeMetric(kpis.volume_stats_used_bytes?.byNode, node.id)
+
+ const diskUsage = metrics
+ ? nodeRangeMetric(metrics.ranges?.["volume_stats_used_bytes"], node.id)
: undefined;
+
return {
id: node.id,
status,
@@ -71,7 +110,7 @@ async function ConnectedNodes({ params }: { params: KafkaParams }) {
return (
<>
- {!kpis && (
+ {!metrics && (
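A minimal usage sketch of the `nodeMetric` and `nodeRangeMetric` helpers introduced above, with a hypothetical metrics payload shaped like their parameter types (node ids and values are illustrative only):

```typescript
// Hypothetical cluster.attributes.metrics payload, matching the helper signatures.
const metrics = {
  values: {
    broker_state: [
      { nodeId: "0", value: "3" }, // Running
      { nodeId: "1", value: "2" }, // Recovery
    ],
  },
  ranges: {
    volume_stats_used_bytes: [
      {
        nodeId: "0",
        range: [
          ["2024-11-01T10:00:00Z", "1024"],
          ["2024-11-01T10:01:00Z", "2048"],
        ],
      },
    ],
  },
};

nodeMetric(metrics.values["broker_state"], 0);                 // => 3 (scalar value for node 0)
nodeRangeMetric(metrics.ranges["volume_stats_used_bytes"], 0); // => 2048 (last sample of the range)
```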
diff --git a/ui/app/[locale]/(authorized)/kafka/[kafkaId]/overview/ConnectedClusterCard.tsx b/ui/app/[locale]/(authorized)/kafka/[kafkaId]/overview/ConnectedClusterCard.tsx
index 9f29a655c..8bc078cf8 100644
--- a/ui/app/[locale]/(authorized)/kafka/[kafkaId]/overview/ConnectedClusterCard.tsx
+++ b/ui/app/[locale]/(authorized)/kafka/[kafkaId]/overview/ConnectedClusterCard.tsx
@@ -1,46 +1,49 @@
import { ConsumerGroupsResponse } from "@/api/consumerGroups/schema";
-import { ClusterDetail, ClusterKpis } from "@/api/kafka/schema";
+import { ClusterDetail } from "@/api/kafka/schema";
import { ClusterCard } from "@/components/ClusterOverview/ClusterCard";
export async function ConnectedClusterCard({
- data,
+ cluster,
consumerGroups,
}: {
- data: Promise<{ cluster: ClusterDetail; kpis: ClusterKpis | null } | null>;
+ cluster: Promise<ClusterDetail | null>;
consumerGroups: Promise<ConsumerGroupsResponse | null>;
}) {
- const res = await data;
- if (!res?.kpis) {
+ const res = await cluster;
+
+ if (!res?.attributes?.metrics) {
return (
);
}
const groupCount = await consumerGroups.then(
(grpResp) => grpResp?.meta.page.total ?? 0,
);
- const brokersTotal = Object.keys(res?.kpis.broker_state ?? {}).length;
- const brokersOnline =
- Object.values(res?.kpis.broker_state ?? {}).filter((s) => s === 3).length ||
- 0;
- const messages = res?.cluster.attributes.conditions
+
+ const brokersTotal = res?.attributes.metrics?.values?.["broker_state"]?.length ?? 0;
+ const brokersOnline = (res?.attributes.metrics?.values?.["broker_state"] ?? [])
+ .filter((s) => s.value === "3")
+ .length;
+
+ const messages = res?.attributes.conditions
?.filter((c) => "Ready" !== c.type)
.map((c) => ({
variant:
c.type === "Error" ? "danger" : ("warning" as "danger" | "warning"),
subject: {
type: c.type!,
- name: res?.cluster.attributes.name ?? "",
- id: res?.cluster.id ?? "",
+ name: res?.attributes.name ?? "",
+ id: res?.id ?? "",
},
message: c.message ?? "",
date: c.lastTransitionTime ?? "",
@@ -49,14 +52,14 @@ export async function ConnectedClusterCard({
return (
);
}
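Broker totals in the cluster card are now counted from the `broker_state` entries embedded in the cluster's `metrics` attribute; a small sketch of that counting with hypothetical values (state `"3"` corresponds to a running broker):

```typescript
// Hypothetical broker_state entries, one per node.
const brokerState = [
  { nodeId: "0", value: "3" }, // Running
  { nodeId: "1", value: "3" }, // Running
  { nodeId: "2", value: "0" }, // Not Running
];

const brokersTotal = brokerState.length;                                 // 3
const brokersOnline = brokerState.filter((s) => s.value === "3").length; // 2
```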
diff --git a/ui/app/[locale]/(authorized)/kafka/[kafkaId]/overview/ConnectedClusterChartsCard.tsx b/ui/app/[locale]/(authorized)/kafka/[kafkaId]/overview/ConnectedClusterChartsCard.tsx
index fc76246ba..9e37c0d7a 100644
--- a/ui/app/[locale]/(authorized)/kafka/[kafkaId]/overview/ConnectedClusterChartsCard.tsx
+++ b/ui/app/[locale]/(authorized)/kafka/[kafkaId]/overview/ConnectedClusterChartsCard.tsx
@@ -1,32 +1,35 @@
-import { ClusterMetric } from "@/api/kafka/actions";
-import { ClusterDetail, MetricRange } from "@/api/kafka/schema";
+import { ClusterDetail } from "@/api/kafka/schema";
import { ClusterChartsCard } from "@/components/ClusterOverview/ClusterChartsCard";
function timeSeriesMetrics(
- ranges: Record<ClusterMetric, MetricRange> | null | undefined,
- rangeName: ClusterMetric,
-): TimeSeriesMetrics[] {
- return ranges
- ? Object.values(ranges[rangeName] ?? {}).map((val) => val ?? {})
- : [];
+ ranges: Record<string, { range: string[][], nodeId?: string }[]> | undefined,
+ rangeName: string,
+): Record<string, TimeSeriesMetrics> {
+ const series: Record<string, TimeSeriesMetrics> = {};
+
+ if (ranges) {
+ Object.values(ranges[rangeName] ?? {}).forEach((r) => {
+ series[r.nodeId!] = r.range.reduce((a, v) => ({ ...a, [v[0]]: parseFloat(v[1]) }), {} as TimeSeriesMetrics);
+ });
+ }
+
+ return series;
}
export async function ConnectedClusterChartsCard({
- data,
+ cluster,
}: {
- data: Promise<{
- cluster: ClusterDetail;
- ranges: Record<ClusterMetric, MetricRange> | null;
- } | null>;
+ cluster: Promise<ClusterDetail | null>;
}) {
- const res = await data;
+ const res = await cluster;
+
return (
);
}
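The `timeSeriesMetrics` helper above groups range samples by node id; a sketch of the expected input and output, assuming `TimeSeriesMetrics` maps timestamp strings to numeric values:

```typescript
// Assumed shape: TimeSeriesMetrics = Record<string, number> (timestamp -> value).
const ranges = {
  cpu: [
    { nodeId: "0", range: [["2024-11-01T10:00:00Z", "0.25"], ["2024-11-01T10:01:00Z", "0.40"]] },
    { nodeId: "1", range: [["2024-11-01T10:00:00Z", "0.10"]] },
  ],
};

// timeSeriesMetrics(ranges, "cpu") would produce:
// {
//   "0": { "2024-11-01T10:00:00Z": 0.25, "2024-11-01T10:01:00Z": 0.4 },
//   "1": { "2024-11-01T10:00:00Z": 0.1 },
// }
```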
diff --git a/ui/app/[locale]/(authorized)/kafka/[kafkaId]/overview/ConnectedTopicChartsCard.tsx b/ui/app/[locale]/(authorized)/kafka/[kafkaId]/overview/ConnectedTopicChartsCard.tsx
index aa3b412c0..323c253df 100644
--- a/ui/app/[locale]/(authorized)/kafka/[kafkaId]/overview/ConnectedTopicChartsCard.tsx
+++ b/ui/app/[locale]/(authorized)/kafka/[kafkaId]/overview/ConnectedTopicChartsCard.tsx
@@ -1,21 +1,33 @@
-import { TopicMetric } from "@/api/kafka/actions";
-import { ClusterDetail, MetricRange } from "@/api/kafka/schema";
+import { ClusterDetail } from "@/api/kafka/schema";
import { TopicChartsCard } from "@/components/ClusterOverview/TopicChartsCard";
+function timeSeriesMetrics(
+ ranges: Record<string, { range: string[][], nodeId?: string }[]> | undefined,
+ rangeName: string,
+): TimeSeriesMetrics {
+ let series: TimeSeriesMetrics = {};
+
+ if (ranges) {
+ Object.values(ranges[rangeName] ?? {}).forEach((r) => {
+ series = r.range.reduce((a, v) => ({ ...a, [v[0]]: parseFloat(v[1]) }), series);
+ });
+ }
+
+ return series;
+}
+
export async function ConnectedTopicChartsCard({
- data,
+ cluster,
}: {
- data: Promise<{
- cluster: ClusterDetail;
- ranges: Record<TopicMetric, MetricRange> | null;
- } | null>;
+ cluster: Promise<ClusterDetail | null>;
}) {
- const res = await data;
+ const res = await cluster;
+
return (
);
}
diff --git a/ui/app/[locale]/(authorized)/kafka/[kafkaId]/overview/page.tsx b/ui/app/[locale]/(authorized)/kafka/[kafkaId]/overview/page.tsx
index 6054ce526..a591bd6b9 100644
--- a/ui/app/[locale]/(authorized)/kafka/[kafkaId]/overview/page.tsx
+++ b/ui/app/[locale]/(authorized)/kafka/[kafkaId]/overview/page.tsx
@@ -1,9 +1,5 @@
import { getConsumerGroups } from "@/api/consumerGroups/actions";
-import {
- getKafkaClusterKpis,
- getKafkaClusterMetrics,
- getKafkaTopicMetrics,
-} from "@/api/kafka/actions";
+import { getKafkaCluster } from "@/api/kafka/actions";
import { getTopics, getViewedTopics } from "@/api/topics/actions";
import { KafkaParams } from "@/app/[locale]/(authorized)/kafka/[kafkaId]/kafka.params";
import { ConnectedClusterCard } from "@/app/[locale]/(authorized)/kafka/[kafkaId]/overview/ConnectedClusterCard";
@@ -14,17 +10,9 @@ import { PageLayout } from "@/components/ClusterOverview/PageLayout";
import { ConnectedRecentTopics } from "./ConnectedRecentTopics";
export default function OverviewPage({ params }: { params: KafkaParams }) {
- const kpi = getKafkaClusterKpis(params.kafkaId);
- const cluster = getKafkaClusterMetrics(params.kafkaId, [
- "volumeUsed",
- "volumeCapacity",
- "memory",
- "cpu",
- ]);
- const topic = getKafkaTopicMetrics(params.kafkaId, [
- "outgoingByteRate",
- "incomingByteRate",
- ]);
+ const kafkaCluster = getKafkaCluster(params.kafkaId, {
+ fields: 'name,namespace,creationTimestamp,status,kafkaVersion,nodes,controller,authorizedOperations,listeners,conditions,metrics'
+ });
const topics = getTopics(params.kafkaId, { fields: "status", pageSize: 1 });
const consumerGroups = getConsumerGroups(params.kafkaId, { fields: "state" });
const viewedTopics = getViewedTopics().then((topics) =>
@@ -34,11 +22,11 @@ export default function OverviewPage({ params }: { params: KafkaParams }) {
return (
+
}
topicsPartitions={}
- clusterCharts={}
- topicCharts={}
+ clusterCharts={}
+ topicCharts={}
recentTopics={}
/>
);
diff --git a/ui/app/[locale]/(authorized)/layout.tsx b/ui/app/[locale]/(authorized)/layout.tsx
index a7d7e232c..b9a1f5cfc 100644
--- a/ui/app/[locale]/(authorized)/layout.tsx
+++ b/ui/app/[locale]/(authorized)/layout.tsx
@@ -1,4 +1,4 @@
-import { getAuthOptions } from "@/app/api/auth/[...nextauth]/route";
+import { getAuthOptions } from "@/app/api/auth/[...nextauth]/auth-options";
import { getServerSession } from "next-auth";
import { ReactNode } from "react";
diff --git a/ui/app/api/auth/[...nextauth]/auth-options.ts b/ui/app/api/auth/[...nextauth]/auth-options.ts
new file mode 100644
index 000000000..5c70fcf43
--- /dev/null
+++ b/ui/app/api/auth/[...nextauth]/auth-options.ts
@@ -0,0 +1,48 @@
+import { getKafkaClusters } from "@/api/kafka/actions";
+import { ClusterList } from "@/api/kafka/schema";
+import { logger } from "@/utils/logger";
+import { AuthOptions } from "next-auth";
+import { Provider } from "next-auth/providers/index";
+import { makeAnonymous } from "./anonymous";
+import { makeOauthTokenProvider } from "./oauth-token";
+import { makeScramShaProvider } from "./scram";
+
+const log = logger.child({ module: "auth" });
+
+function makeAuthOption(cluster: ClusterList): Provider {
+ switch (cluster.meta.authentication?.method) {
+ case "oauth": {
+ const { tokenUrl } = cluster.meta.authentication;
+ return makeOauthTokenProvider(tokenUrl ?? "TODO");
+ }
+ case "basic":
+ return makeScramShaProvider(cluster.id);
+ case "anonymous":
+ default:
+ return makeAnonymous();
+ }
+}
+
+export async function getAuthOptions(): Promise<AuthOptions> {
+ // retrieve the authentication method required by the default Kafka cluster
+ const clusters = await getKafkaClusters();
+ const providers = clusters.map(makeAuthOption);
+ log.trace({ providers }, "getAuthOptions");
+ return {
+ providers,
+ callbacks: {
+ async jwt({ token, user }) {
+ if (user) {
+ token.authorization = user.authorization;
+ }
+ return token;
+ },
+ async session({ session, token, user }) {
+ // Send properties to the client, like an access_token and user id from a provider.
+ session.authorization = token.authorization;
+
+ return session;
+ },
+ },
+ };
+}
\ No newline at end of file
diff --git a/ui/app/api/auth/[...nextauth]/route.ts b/ui/app/api/auth/[...nextauth]/route.ts
index 812db7da4..731ad900c 100644
--- a/ui/app/api/auth/[...nextauth]/route.ts
+++ b/ui/app/api/auth/[...nextauth]/route.ts
@@ -1,54 +1,7 @@
-import { getKafkaClusters } from "@/api/kafka/actions";
-import { ClusterList } from "@/api/kafka/schema";
-import { logger } from "@/utils/logger";
-import NextAuth, { AuthOptions } from "next-auth";
-import { Provider } from "next-auth/providers/index";
+import NextAuth from "next-auth";
import { NextRequest, NextResponse } from "next/server";
-import { makeAnonymous } from "./anonymous";
-import { makeOauthTokenProvider } from "./oauth-token";
-import { makeScramShaProvider } from "./scram";
+import { getAuthOptions } from "./auth-options";
-const log = logger.child({ module: "auth" });
-
-export async function getAuthOptions(): Promise<AuthOptions> {
- // retrieve the authentication method required by the default Kafka cluster
- const clusters = await getKafkaClusters();
- const providers = clusters.map(makeAuthOption);
- log.trace({ providers }, "getAuthOptions");
- return {
- providers,
- callbacks: {
- async jwt({ token, user }) {
- if (user) {
- token.authorization = user.authorization;
- }
- return token;
- },
- async session({ session, token, user }) {
- // Send properties to the client, like an access_token and user id from a provider.
- session.authorization = token.authorization;
-
- return session;
- },
- },
- };
-}
-
-function makeAuthOption(cluster: ClusterList): Provider {
- switch (cluster.meta.authentication?.method) {
- case "oauth": {
- const { tokenUrl } = cluster.meta.authentication;
- return makeOauthTokenProvider(tokenUrl ?? "TODO");
- }
- case "basic":
- return makeScramShaProvider(cluster.id);
- case "anonymous":
- default:
- return makeAnonymous();
- }
-}
-
-// const handler = NextAuth(authOptions);
async function handler(req: NextRequest, res: NextResponse) {
const authOptions = await getAuthOptions();
if (authOptions) {
diff --git a/ui/components/ClusterOverview/ClusterChartsCard.tsx b/ui/components/ClusterOverview/ClusterChartsCard.tsx
index c09e6d91b..aa4a799d5 100644
--- a/ui/components/ClusterOverview/ClusterChartsCard.tsx
+++ b/ui/components/ClusterOverview/ClusterChartsCard.tsx
@@ -17,10 +17,10 @@ import { HelpIcon } from "@/libs/patternfly/react-icons";
import { useTranslations } from "next-intl";
type ClusterChartsCardProps = {
- usedDiskSpace: TimeSeriesMetrics[];
- availableDiskSpace: TimeSeriesMetrics[];
- memoryUsage: TimeSeriesMetrics[];
- cpuUsage: TimeSeriesMetrics[];
+ usedDiskSpace: Record<string, TimeSeriesMetrics>;
+ availableDiskSpace: Record<string, TimeSeriesMetrics>;
+ memoryUsage: Record<string, TimeSeriesMetrics>;
+ cpuUsage: Record<string, TimeSeriesMetrics>;
};
export function ClusterChartsCard({
diff --git a/ui/components/ClusterOverview/TopicChartsCard.tsx b/ui/components/ClusterOverview/TopicChartsCard.tsx
index b33303584..a6d1c2ca7 100644
--- a/ui/components/ClusterOverview/TopicChartsCard.tsx
+++ b/ui/components/ClusterOverview/TopicChartsCard.tsx
@@ -1,5 +1,4 @@
"use client";
-import { MetricRange } from "@/api/kafka/schema";
import {
Card,
CardBody,
@@ -15,8 +14,8 @@ import { ChartSkeletonLoader } from "./components/ChartSkeletonLoader";
import { useTranslations } from "next-intl";
type TopicChartsCardProps = {
- incoming: MetricRange;
- outgoing: MetricRange;
+ incoming: TimeSeriesMetrics;
+ outgoing: TimeSeriesMetrics;
};
export function TopicChartsCard({
diff --git a/ui/components/ClusterOverview/components/ChartCpuUsage.tsx b/ui/components/ClusterOverview/components/ChartCpuUsage.tsx
index 32eb9520b..e6ca7618e 100644
--- a/ui/components/ClusterOverview/components/ChartCpuUsage.tsx
+++ b/ui/components/ClusterOverview/components/ChartCpuUsage.tsx
@@ -15,7 +15,7 @@ import { getHeight, getPadding } from "./chartConsts";
import { useChartWidth } from "./useChartWidth";
type ChartCpuUsageProps = {
- usages: TimeSeriesMetrics[];
+ usages: Record<string, TimeSeriesMetrics>;
};
type Datum = {
@@ -29,7 +29,15 @@ export function ChartCpuUsage({ usages }: ChartCpuUsageProps) {
const format = useFormatter();
const [containerRef, width] = useChartWidth();
- const itemsPerRow = width > 650 ? 6 : width > 300 ? 3 : 1;
+ let itemsPerRow;
+
+ if (width > 650) {
+ itemsPerRow = 6;
+ } else if (width > 300) {
+ itemsPerRow = 3;
+ } else {
+ itemsPerRow = 1;
+ }
const hasMetrics = Object.keys(usages).length > 0;
if (!hasMetrics) {
@@ -42,11 +50,11 @@ export function ChartCpuUsage({ usages }: ChartCpuUsageProps) {
/>
);
}
- // const showDate = shouldShowDate(duration);
+
const CursorVoronoiContainer = createContainer("voronoi", "cursor");
- const legendData = usages.map((_, idx) => ({
- name: `Node ${idx}`,
- childName: `node ${idx}`,
+ const legendData = Object.keys(usages).map((nodeId) => ({
+ name: `Node ${nodeId}`,
+ childName: `node ${nodeId}`,
}));
const padding = getPadding(legendData.length / itemsPerRow);
return (
@@ -112,17 +120,18 @@ export function ChartCpuUsage({ usages }: ChartCpuUsageProps) {
}}
/>
- {usages.map((usage, idx) => {
- const usageArray = Object.entries(usage);
+ {Object.entries(usages).map(([nodeId, series]) => {
return (
({
- name: `Node ${idx + 1}`,
- x,
- y,
- }))}
- name={`node ${idx}`}
+ key={ `cpu-usage-${nodeId}` }
+ data={ Object.entries(series).map(([k, v]) => {
+ return ({
+ name: `Node ${nodeId}`,
+ x: Date.parse(k),
+ y: v,
+ })
+ })}
+ name={ `node ${nodeId}` }
/>
);
})}
diff --git a/ui/components/ClusterOverview/components/ChartDiskUsage.tsx b/ui/components/ClusterOverview/components/ChartDiskUsage.tsx
index 831f142cd..40646e084 100644
--- a/ui/components/ClusterOverview/components/ChartDiskUsage.tsx
+++ b/ui/components/ClusterOverview/components/ChartDiskUsage.tsx
@@ -17,8 +17,8 @@ import { getHeight, getPadding } from "./chartConsts";
import { useChartWidth } from "./useChartWidth";
type ChartDiskUsageProps = {
- usages: TimeSeriesMetrics[];
- available: TimeSeriesMetrics[];
+ usages: Record<string, TimeSeriesMetrics>;
+ available: Record<string, TimeSeriesMetrics>;
};
type Datum = {
x: number;
@@ -46,17 +46,23 @@ export function ChartDiskUsage({ usages, available }: ChartDiskUsageProps) {
);
}
const CursorVoronoiContainer = createContainer("voronoi", "cursor");
- const legendData = [
- ...usages.map((_, idx) => ({
- name: `Node ${idx}`,
- childName: `node ${idx}`,
- })),
- ...usages.map((_, idx) => ({
- name: `Available storage threshold (node ${idx})`,
- childName: `threshold ${idx}`,
+ const legendData: { name: string, childName: string, symbol?: { type: string } }[] = [];
+
+ Object.entries(usages).forEach(([nodeId, _]) => {
+ legendData.push({
+ name: `Node ${nodeId}`,
+ childName: `node ${nodeId}`,
+ });
+ });
+
+ Object.entries(usages).forEach(([nodeId, _]) => {
+ legendData.push({
+ name: `Available storage threshold (node ${nodeId})`,
+ childName: `threshold ${nodeId}`,
symbol: { type: "threshold" },
- })),
- ];
+ });
+ });
+
const padding = getPadding(legendData.length / itemsPerRow);
return (
@@ -117,36 +123,38 @@ export function ChartDiskUsage({ usages, available }: ChartDiskUsageProps) {
dependentAxis
showGrid={true}
tickFormat={(d) => {
- return formatBytes(d, { maximumFractionDigits: 0 });
+ return formatBytes(d);
}}
/>
- {usages.map((usage, idx) => {
- const usageArray = Object.entries(usage);
+ {Object.entries(usages).map(([nodeId, series]) => {
return (
({
- name: `Node ${idx + 1}`,
- x,
- y,
- }))}
- name={`node ${idx}`}
+ key={ `usage-area-${nodeId}` }
+ data={ Object.entries(series).map(([k, v]) => {
+ return ({
+ name: `Node ${nodeId}`,
+ x: Date.parse(k),
+ y: v,
+ })
+ })}
+ name={ `node ${nodeId}` }
/>
);
})}
- {usages.map((usage, idx) => {
- const usageArray = Object.entries(usage);
- const data = Object.entries(available[idx]);
+
+ {Object.entries(usages).map(([nodeId, _]) => {
+ const availableSeries = available[nodeId];
+
return (
({
- name: `Available storage threshold (node ${idx + 1})`,
- x: usageArray[x][0],
- y,
+ key={ `chart-softlimit-${nodeId}` }
+ data={ Object.entries(availableSeries).map(([k, v]) => ({
+ name: `Available storage threshold (node ${nodeId})`,
+ x: Date.parse(k),
+ y: v,
}))}
- name={`threshold ${idx}`}
+ name={`threshold ${nodeId}`}
/>
);
})}
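Each chart now iterates `Object.entries(...)` keyed by node id and converts that node's series into chart datum points; a minimal sketch of the mapping, assuming the series keys are ISO timestamps:

```typescript
// Per-node series keyed by ISO timestamp (hypothetical values).
const series: Record<string, number> = {
  "2024-11-01T10:00:00Z": 1024,
  "2024-11-01T10:01:00Z": 2048,
};

const data = Object.entries(series).map(([k, v]) => ({
  name: "Node 0",
  x: Date.parse(k), // millisecond epoch for the time axis
  y: v,
}));
```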
diff --git a/ui/components/ClusterOverview/components/ChartIncomingOutgoing.tsx b/ui/components/ClusterOverview/components/ChartIncomingOutgoing.tsx
index aec906093..4097cad78 100644
--- a/ui/components/ClusterOverview/components/ChartIncomingOutgoing.tsx
+++ b/ui/components/ClusterOverview/components/ChartIncomingOutgoing.tsx
@@ -1,6 +1,7 @@
"use client";
import {
Chart,
+ ChartArea,
ChartAxis,
ChartGroup,
ChartLegend,
@@ -9,15 +10,14 @@ import {
createContainer,
} from "@/libs/patternfly/react-charts";
import { useFormatBytes } from "@/utils/useFormatBytes";
-import { ChartArea } from "@/libs/patternfly/react-charts";
import { Alert } from "@patternfly/react-core";
import { useFormatter, useTranslations } from "next-intl";
import { getHeight, getPadding } from "./chartConsts";
import { useChartWidth } from "./useChartWidth";
type ChartIncomingOutgoingProps = {
- incoming: Record<string, TimeSeriesMetrics>;
- outgoing: Record<string, TimeSeriesMetrics>;
+ incoming: TimeSeriesMetrics;
+ outgoing: TimeSeriesMetrics;
};
type Datum = {
@@ -52,16 +52,14 @@ export function ChartIncomingOutgoing({
}
// const showDate = shouldShowDate(duration);
const CursorVoronoiContainer = createContainer("voronoi", "cursor");
- const legendData = [
- ...Object.keys(incoming).map((name) => ({
- name: `Incoming bytes (${name})`,
- childName: `incoming ${name}`,
- })),
- ...Object.keys(outgoing).map((name) => ({
- name: `Outgoing bytes (${name})`,
- childName: `outgoing ${name}`,
- })),
- ];
+ const legendData = [ {
+ name: "Incoming bytes (all topics)",
+ childName: "incoming"
+ }, {
+ name: "Outgoing bytes (all topics)",
+ childName: "outgoing"
+ } ];
+
const padding = getPadding(legendData.length / itemsPerRow);
return (
@@ -125,43 +123,36 @@ export function ChartIncomingOutgoing({
{
- return formatBytes(Math.abs(d), { maximumFractionDigits: 0 });
+ return formatBytes(Math.abs(d));
}}
/>
- {Object.entries(incoming).map(([name, entries], idx) => {
- const entriesArray = Object.entries(entries ?? {});
- return (
- ({
- name: `Incoming (${name})`,
- x,
- y,
- value: y,
- }))}
- name={`incoming ${name}`}
- interpolation={"stepAfter"}
- />
- );
- })}
- {Object.entries(outgoing).map(([name, entries], idx) => {
- const entriesArray = Object.entries(entries ?? {});
- const incomingArray = Object.keys(incoming[name] ?? {});
- return (
- ({
- name: `Outgoing (${name})`,
- x: incomingArray[idx],
- y: -1 * y,
- value: y,
- }))}
- name={`outgoing ${name}`}
- interpolation={"stepAfter"}
- />
- );
- })}
+ {
+ return ({
+ name: `Incoming`,
+ x: Date.parse(k),
+ y: v,
+ value: v,
+ })
+ })}
+ name={ `incoming` }
+ interpolation={"stepAfter"}
+ />
+ {
+ return ({
+ name: `Outgoing`,
+ x: Date.parse(k),
+ y: v * -1,
+ value: v,
+ })
+ })}
+ name={ `outgoing` }
+ interpolation={"stepAfter"}
+ />
diff --git a/ui/components/ClusterOverview/components/ChartMemoryUsage.tsx b/ui/components/ClusterOverview/components/ChartMemoryUsage.tsx
index fe741647b..77024f7a0 100644
--- a/ui/components/ClusterOverview/components/ChartMemoryUsage.tsx
+++ b/ui/components/ClusterOverview/components/ChartMemoryUsage.tsx
@@ -16,7 +16,7 @@ import { getHeight, getPadding } from "./chartConsts";
import { useChartWidth } from "./useChartWidth";
type ChartDiskUsageProps = {
- usages: TimeSeriesMetrics[];
+ usages: Record<string, TimeSeriesMetrics>;
};
type Datum = {
@@ -46,11 +46,12 @@ export function ChartMemoryUsage({ usages }: ChartDiskUsageProps) {
}
const CursorVoronoiContainer = createContainer("voronoi", "cursor");
- const legendData = usages.map((_, idx) => ({
- name: `Node ${idx}`,
- childName: `node ${idx}`,
+ const legendData = Object.keys(usages).map((nodeId) => ({
+ name: `Node ${nodeId}`,
+ childName: `node ${nodeId}`,
}));
const padding = getPadding(legendData.length / itemsPerRow);
+
return (
{
- return formatBytes(d, { maximumFractionDigits: 0 });
+ return formatBytes(d);
}}
/>
- {usages.map((usage, idx) => {
- const usageArray = Object.entries(usage);
+ {Object.entries(usages).map(([nodeId, series]) => {
return (
({
- name: `Node ${idx + 1}`,
- x,
- y,
- }))}
- name={`node ${idx}`}
+ key={ `memory-usage-${nodeId}` }
+ data={ Object.entries(series).map(([k, v]) => {
+ return ({
+ name: `Node ${nodeId}`,
+ x: Date.parse(k),
+ y: v,
+ })
+ })}
+ name={ `node ${nodeId}` }
/>
);
})}
diff --git a/ui/components/ClusterOverview/components/chartConsts.ts b/ui/components/ClusterOverview/components/chartConsts.ts
index 98d7e9891..0998a0afa 100644
--- a/ui/components/ClusterOverview/components/chartConsts.ts
+++ b/ui/components/ClusterOverview/components/chartConsts.ts
@@ -3,7 +3,7 @@ export const getHeight = (legendEntriesCount: number) => {
return 150 + bottom;
};
export const getPadding = (legendEntriesCount: number) => ({
- bottom: 35 + 32 * legendEntriesCount,
+ bottom: 50 + 32 * legendEntriesCount,
top: 5,
left: 70,
right: 30,
diff --git a/ui/components/Format/Bytes.stories.tsx b/ui/components/Format/Bytes.stories.tsx
index c093d5e9d..7f22bbe8e 100644
--- a/ui/components/Format/Bytes.stories.tsx
+++ b/ui/components/Format/Bytes.stories.tsx
@@ -26,7 +26,7 @@ type Story = StoryObj;
export const Default: Story = {
play: async ({ canvasElement }) => {
const canvas = within(canvasElement);
- await waitFor(() => expect(canvas.getByText("1 KiB")).toBeInTheDocument());
+ await waitFor(() => expect(canvas.getByText("1.00 KiB")).toBeInTheDocument());
},
};
@@ -47,7 +47,7 @@ export const KilobytesWithDecimal: Story = {
play: async ({ canvasElement }) => {
const canvas = within(canvasElement);
await waitFor(() =>
- expect(canvas.getByText("1.5 KiB")).toBeInTheDocument(),
+ expect(canvas.getByText("1.50 KiB")).toBeInTheDocument(),
);
},
};
@@ -59,7 +59,7 @@ export const MegabytesWithDecimal: Story = {
play: async ({ canvasElement }) => {
const canvas = within(canvasElement);
await waitFor(() =>
- expect(canvas.getByText("1.5 MiB")).toBeInTheDocument(),
+ expect(canvas.getByText("1.50 MiB")).toBeInTheDocument(),
);
},
};
@@ -71,7 +71,7 @@ export const GigabytesWithDecimal: Story = {
play: async ({ canvasElement }) => {
const canvas = within(canvasElement);
await waitFor(() =>
- expect(canvas.getByText("1.5 GiB")).toBeInTheDocument(),
+ expect(canvas.getByText("1.50 GiB")).toBeInTheDocument(),
);
},
};
@@ -83,7 +83,7 @@ export const TerabytesWithDecimal: Story = {
play: async ({ canvasElement }) => {
const canvas = within(canvasElement);
await waitFor(() =>
- expect(canvas.getByText("1.5 TiB")).toBeInTheDocument(),
+ expect(canvas.getByText("1.50 TiB")).toBeInTheDocument(),
);
},
};
diff --git a/ui/environment.d.ts b/ui/environment.d.ts
index 06950d380..af85ca3a9 100644
--- a/ui/environment.d.ts
+++ b/ui/environment.d.ts
@@ -7,7 +7,6 @@ namespace NodeJS {
KEYCLOAK_CLIENTSECRET?: string;
NEXT_PUBLIC_KEYCLOAK_URL?: string;
NEXT_PUBLIC_PRODUCTIZED_BUILD?: "true" | "false";
- CONSOLE_METRICS_PROMETHEUS_URL?: string;
LOG_LEVEL?: "fatal" | "error" | "warn" | "info" | "debug" | "trace";
CONSOLE_MODE?: "read-only" | "read-write";
}
diff --git a/ui/package.json b/ui/package.json
index 8a9751152..0b79fdff7 100644
--- a/ui/package.json
+++ b/ui/package.json
@@ -26,7 +26,6 @@
"@stdlib/string-truncate": "^0.2.2",
"@stdlib/string-truncate-middle": "^0.2.2",
"@tanstack/react-virtual": "^3.10.8",
- "@types/lodash.groupby": "^4.6.9",
"@types/node": "22.8.4",
"@types/react": "18.3.12",
"@types/react-dom": "18.3.1",
@@ -41,13 +40,11 @@
"eslint-import-resolver-typescript": "^3.6.3",
"eslint-plugin-storybook": "^0.10.1",
"iron-session": "^8.0.3",
- "lodash.groupby": "^4.6.0",
"next": "^14.2.15",
"next-auth": "^4.24.10",
"next-intl": "^3.23.5",
"next-logger": "^5.0.1",
"pino": "^9.5.0",
- "prometheus-query": "^3.4.0",
"react": "18.3.1",
"react-csv-downloader": "^3.1.1",
"react-dom": "18.3.1",
diff --git a/ui/utils/session.ts b/ui/utils/session.ts
index c185cf7bf..3268b3022 100644
--- a/ui/utils/session.ts
+++ b/ui/utils/session.ts
@@ -1,6 +1,6 @@
"use server";
-import { getAuthOptions } from "@/app/api/auth/[...nextauth]/route";
+import { getAuthOptions } from "@/app/api/auth/[...nextauth]/auth-options";
import { logger } from "@/utils/logger";
import { sealData, unsealData } from "iron-session";
import { getServerSession } from "next-auth";
diff --git a/ui/utils/useFormatBytes.ts b/ui/utils/useFormatBytes.ts
index 04e90a273..be6f97548 100644
--- a/ui/utils/useFormatBytes.ts
+++ b/ui/utils/useFormatBytes.ts
@@ -11,8 +11,34 @@ export function useFormatBytes() {
return "0 B";
}
const res = convert(bytes, "bytes").to("best", "imperial");
+ let minimumFractionDigits = undefined;
+
+ if (maximumFractionDigits === undefined) {
+ switch (res.unit) {
+ case "PiB":
+ case "TiB":
+ case "GiB":
+ case "MiB":
+ case "KiB":
+ if (res.quantity >= 100) {
+ maximumFractionDigits = 0;
+ } else if (res.quantity >= 10) {
+ minimumFractionDigits = 1;
+ maximumFractionDigits = 1;
+ } else {
+ minimumFractionDigits = 2;
+ maximumFractionDigits = 2;
+ }
+ break;
+ default:
+ maximumFractionDigits = 0;
+ break;
+ }
+ }
+
return `${format.number(res.quantity, {
style: "decimal",
+ minimumFractionDigits,
maximumFractionDigits,
})} ${res.unit}`;
};
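With the fraction-digit rules above, the formatter pads small quantities to a fixed number of decimals, matching the updated Bytes stories; illustrative outputs (inside a component, after `const formatBytes = useFormatBytes()`):

```typescript
formatBytes(1024);        // "1.00 KiB" (quantity < 10: two fraction digits)
formatBytes(15 * 1024);   // "15.0 KiB" (quantity >= 10: one fraction digit)
formatBytes(512 * 1024);  // "512 KiB"  (quantity >= 100: no fraction digits)
formatBytes(500);         // "500 B"    (non-binary-prefixed unit: no fraction digits)
```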