From 8893ea03c7000a3cf63a6bace22bd0050ce1efe8 Mon Sep 17 00:00:00 2001
From: LinaLam <65078897+LinaLam@users.noreply.github.com>
Date: Wed, 15 Jan 2025 14:53:11 -0800
Subject: [PATCH] landing page changes (#3132)
---
bifrost/components/home/CTA.tsx | 11 ++++-
bifrost/components/home/Companies.tsx | 19 ++++----
bifrost/components/home/Evaluate.tsx | 10 ++---
bifrost/components/home/Experiment.tsx | 2 +-
bifrost/components/home/FAQ.tsx | 4 ++
bifrost/components/home/LLMLifecycle.tsx | 2 +-
bifrost/components/home/Log.tsx | 2 +-
bifrost/components/home/OpenSource.tsx | 6 +--
bifrost/components/home/Production.tsx | 2 +-
bifrost/components/home/Prototype.tsx | 26 ++++++++---
bifrost/components/home/Quote.tsx | 34 +++++++--------
bifrost/components/home/Quote2.tsx | 8 ++--
bifrost/components/home/Quote3.tsx | 31 ++++++-------
bifrost/components/home/Stats.tsx | 2 +-
.../templates/landing/integrations.tsx | 43 +++++++------------
15 files changed, 110 insertions(+), 92 deletions(-)
diff --git a/bifrost/components/home/CTA.tsx b/bifrost/components/home/CTA.tsx
index a37d4d4a33..69ca776126 100644
--- a/bifrost/components/home/CTA.tsx
+++ b/bifrost/components/home/CTA.tsx
@@ -7,6 +7,7 @@ import LogoBox from "./LogoBox";
import { useState } from "react";
import { ISLAND_WIDTH } from "@/lib/utils";
import Image from "next/image";
+import { ChevronRight as ChevronRightIcon } from "lucide-react";
const CTA = () => {
const [isHovered, setIsHovered] = useState(false);
@@ -46,7 +47,7 @@ const CTA = () => {
@@ -59,11 +60,17 @@ const CTA = () => {
onMouseEnter={() => setIsHovered(true)}
onMouseLeave={() => setIsHovered(false)}
>
Try Helicone for free
+              {isHovered && (
+                <ChevronRightIcon />
+              )}
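For context on the hunk above, a minimal sketch of the hover-reveal pattern it adds to the CTA button. Class names, copy, and the bare ChevronRightIcon are illustrative stand-ins for markup the hunk elides:

// Sketch only: hover state reveals a trailing chevron on the CTA button.
"use client";

import { useState } from "react";
import { ChevronRight as ChevronRightIcon } from "lucide-react";

const TryButton = () => {
  const [isHovered, setIsHovered] = useState(false);

  return (
    <button
      onMouseEnter={() => setIsHovered(true)}
      onMouseLeave={() => setIsHovered(false)}
    >
      Try Helicone for free
      {/* chevron renders only while the pointer is over the button */}
      {isHovered && <ChevronRightIcon className="h-4 w-4" />}
    </button>
  );
};

export default TryButton;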
diff --git a/bifrost/components/home/Companies.tsx b/bifrost/components/home/Companies.tsx
index e0d7e38846..d437fe6f25 100644
--- a/bifrost/components/home/Companies.tsx
+++ b/bifrost/components/home/Companies.tsx
@@ -6,17 +6,18 @@ const Companies = () => {
return (
-
+ {/* */}
+
{(
[
- ["/static/qawolf.webp", "qawolf", 99, 33],
- ["/static/sunrun.webp", "sunrun", 83, 33],
- ["/static/filevine.webp", "filevine", 81, 33],
- ["/static/slate.webp", "slate", 65, 33],
- ["/static/mintlify.svg", "mintlify", 94, 33],
- ["/static/upenn.webp", "upenn", 83, 33],
- ["/static/togetherai.webp", "togetherai", 106, 33],
- ["/static/swiss.webp", "swiss red cross", 150, 33],
+ ["/static/togetherai.webp", "togetherai", 169, 53],
+ ["/static/qawolf.webp", "qawolf", 157, 53],
+ ["/static/sunrun.webp", "sunrun", 132, 53],
+ ["/static/filevine.webp", "filevine", 130, 53],
+ ["/static/slate.webp", "slate", 92, 53],
+ ["/static/mintlify.svg", "mintlify", 150, 53],
+ ["/static/upenn.webp", "upenn", 132, 53],
+ ["/static/swiss.webp", "swiss red cross", 240, 53],
] as const
).map((src, index) => (
diff --git a/bifrost/components/home/Evaluate.tsx b/bifrost/components/home/Evaluate.tsx
index 78cf2e0520..d014d03374 100644
--- a/bifrost/components/home/Evaluate.tsx
+++ b/bifrost/components/home/Evaluate.tsx
@@ -24,7 +24,7 @@ const Evaluate = () => {
Prevent regression and improve
quality over-time
-
+
Monitor performance in real-time and catch regressions
pre-deployment with LLM-as-a-judge or custom evals
@@ -58,7 +58,7 @@ const Evaluate = () => {
setIsQuestionOpen(!isQuestionOpen)}
>
@@ -85,10 +85,10 @@ const Evaluate = () => {
)}
>
-
+
Online evaluation tests systems in real-time using live data
- and actual user interactions. It’s useful to capture dynamic
- real-world scenarios.
+ and actual user interactions. It's useful to capture
+ dynamic real-world scenarios.
In contrast, offline evaluation occurs in controlled,
simulated environments using previous requests or synthetic
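The copy above contrasts online and offline evaluation and mentions LLM-as-a-judge. As a rough, generic illustration of that idea (not Helicone's evals API; the judge prompt, model, and 1-5 rubric are assumptions):

// Generic LLM-as-a-judge sketch; scores one answer for one prompt.
import OpenAI from "openai";

const openai = new OpenAI();

async function judgeResponse(prompt: string, answer: string): Promise<number> {
  const completion = await openai.chat.completions.create({
    model: "gpt-4o-mini",
    messages: [
      {
        role: "system",
        content:
          "You are a strict evaluator. Rate the assistant answer to the given prompt " +
          "on a 1-5 scale. Reply with the number only.",
      },
      { role: "user", content: `Prompt:\n${prompt}\n\nAnswer:\n${answer}` },
    ],
  });
  // Parse the single-number reply; defaults to 0 if the judge returns nothing.
  return Number(completion.choices[0].message.content?.trim() ?? "0");
}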
diff --git a/bifrost/components/home/Experiment.tsx b/bifrost/components/home/Experiment.tsx
index deb8dae7b3..e1162468f5 100644
--- a/bifrost/components/home/Experiment.tsx
+++ b/bifrost/components/home/Experiment.tsx
@@ -321,7 +321,7 @@ const Experiment = () => {
Push high-quality prompt
changes to production
-
+
Tune your prompts and justify your iterations with quantifiable
data, not just “vibes”.
diff --git a/bifrost/components/home/FAQ.tsx b/bifrost/components/home/FAQ.tsx
index 912f365c39..bf150274e7 100644
--- a/bifrost/components/home/FAQ.tsx
+++ b/bifrost/components/home/FAQ.tsx
@@ -28,6 +28,8 @@ const FAQ = () => {
limiting, API key management.
+
+
I don't want to use Helicone's Proxy, can I still use
@@ -45,6 +47,7 @@ const FAQ = () => {
infrastructure.
+
How do you calculate the cost of LLM requests?{" "}
@@ -60,6 +63,7 @@ const FAQ = () => {
Pricing Calculator.
+
diff --git a/bifrost/components/home/LLMLifecycle.tsx b/bifrost/components/home/LLMLifecycle.tsx
index 749fba259e..0f797da96a 100644
--- a/bifrost/components/home/LLMLifecycle.tsx
+++ b/bifrost/components/home/LLMLifecycle.tsx
@@ -2412,7 +2412,7 @@ const LLMLifecycle = () => {
Designed for the{" "}
entire LLM lifecycle
-
+
The CI workflow to take your LLM application from MVP to production,
and from production to perfection.
diff --git a/bifrost/components/home/Log.tsx b/bifrost/components/home/Log.tsx
index 3fa8b0b4f4..3e730283f4 100644
--- a/bifrost/components/home/Log.tsx
+++ b/bifrost/components/home/Log.tsx
@@ -16,7 +16,7 @@ const Log = () => {
Dive deep into each trace and debug your agent with ease
-
+
Visualize your multi-step LLM interactions, log requests in
real-time and pinpoint root cause of errors.
diff --git a/bifrost/components/home/OpenSource.tsx b/bifrost/components/home/OpenSource.tsx
index c66b331aa3..ff21c8465e 100644
--- a/bifrost/components/home/OpenSource.tsx
+++ b/bifrost/components/home/OpenSource.tsx
@@ -194,7 +194,7 @@ const OpenSource = async () => {
Proudly open-source
-
+
We value transparency and we believe in the power of community.
@@ -284,9 +284,9 @@ const OpenSource = async () => {
-
+
Built by Helicone
diff --git a/bifrost/components/home/Production.tsx b/bifrost/components/home/Production.tsx
index 7e8693879f..6f8f4bf49d 100644
--- a/bifrost/components/home/Production.tsx
+++ b/bifrost/components/home/Production.tsx
@@ -18,7 +18,7 @@ const Production = () => {
Turn complexity and abstraction to{" "}
actionable insights
-
+
Unified insights across all providers to quickly detect
hallucinations, abuse and performance issues.
diff --git a/bifrost/components/home/Prototype.tsx b/bifrost/components/home/Prototype.tsx
index a0651d3a57..c23931cd9f 100644
--- a/bifrost/components/home/Prototype.tsx
+++ b/bifrost/components/home/Prototype.tsx
@@ -104,7 +104,15 @@ const PrototypeSidebar = ({
>
- Dashboard
+
+ Dashboard
+ {openedPage !== "dashboard" && (
+ <>
+
+
+ >
+ )}
+
Requests
-
-
+ {openedPage !== "requests" && (
+ <>
+
+
+ >
+ )}
@@ -148,8 +160,12 @@ const PrototypeSidebar = ({
Sessions
-
-
+ {openedPage !== "sessions" && (
+ <>
+
+
+ >
+ )}
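The Prototype sidebar hunks above gate extra elements behind an active-page check. A condensed sketch of that conditional render; the hidden elements are elided in the hunk, so the ExternalLink icon and props here are assumptions:

// Sketch of the sidebar-item pattern used in PrototypeSidebar.
import { ExternalLink } from "lucide-react";

type SidebarItemProps = {
  label: string;
  page: string;
  openedPage: string;
};

const SidebarItem = ({ label, page, openedPage }: SidebarItemProps) => (
  <div>
    {label}
    {/* only show the extra affordance when this item is not the active page */}
    {openedPage !== page && (
      <>
        <ExternalLink className="h-3 w-3" />
      </>
    )}
  </div>
);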
diff --git a/bifrost/components/home/Quote.tsx b/bifrost/components/home/Quote.tsx
index 8f6216ee3e..ca2ecacff1 100644
--- a/bifrost/components/home/Quote.tsx
+++ b/bifrost/components/home/Quote.tsx
@@ -5,32 +5,32 @@ const Quote = () => {
return (
-
-
+
+
The ability to test prompt variations on production traffic without
touching a line of code is magical.{" "}
- It feels like we’re cheating; it’s just that good!
+ It feels like we're cheating; it's just that good!
-
+
-
+
-
-
- Nishant Shukla
-
-
Sr. Director of AI
-
+
+ Nishant Shukla
+
+
+ Sr. Director of AI
+
diff --git a/bifrost/components/home/Quote2.tsx b/bifrost/components/home/Quote2.tsx
index b0a1f94c0f..c7252475c1 100644
--- a/bifrost/components/home/Quote2.tsx
+++ b/bifrost/components/home/Quote2.tsx
@@ -18,8 +18,8 @@ const Quote2 = () => {
setIsQuestionOpen(!isQuestionOpen)}
>
@@ -29,7 +29,7 @@ const Quote2 = () => {
)}
>
- What if I don’t want Helicone to be in my critical path.{" "}
+ What if I don't want Helicone to be in my critical path?
{isQuestionOpen ? (
@@ -48,7 +48,7 @@ const Quote2 = () => {
)}
>
-
+
There are two ways to interface with Helicone - Proxy and Async.
You can integrate with Helicone using the async integration to
ensure zero propagation delay, or choose proxy for the simplest
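For the proxy option named in this answer, a minimal sketch of routing an OpenAI client through Helicone's gateway, following the documented proxy setup (the async path uses a separate logging SDK and is not shown):

// Proxy integration sketch: requests flow through Helicone's gateway.
import OpenAI from "openai";

const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
  baseURL: "https://oai.helicone.ai/v1",
  defaultHeaders: {
    // Authenticates the request against your Helicone account.
    "Helicone-Auth": `Bearer ${process.env.HELICONE_API_KEY}`,
  },
});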
diff --git a/bifrost/components/home/Quote3.tsx b/bifrost/components/home/Quote3.tsx
index 6554a8780f..9ceea6d10a 100644
--- a/bifrost/components/home/Quote3.tsx
+++ b/bifrost/components/home/Quote3.tsx
@@ -6,28 +6,29 @@ const Quote3 = () => {
-
- {" "}
+
Thank you for an{" "}
excellent observability platform!
{" "}
- . I pretty much use it for all my AI apps now.
+ I pretty much use it for all my AI apps now.
-
-
-
+
+
+
-
-
- Hassan El Mghari
-
-
Devrel Lead
-
+
+ Hassan El Mghari
+
+
DevRel Lead
diff --git a/bifrost/components/home/Stats.tsx b/bifrost/components/home/Stats.tsx
index c94f117083..59e2847894 100644
--- a/bifrost/components/home/Stats.tsx
+++ b/bifrost/components/home/Stats.tsx
@@ -62,7 +62,7 @@ const Stats = ({
diff --git a/bifrost/components/templates/landing/integrations.tsx b/bifrost/components/templates/landing/integrations.tsx
index 3646f062cc..c731082091 100644
--- a/bifrost/components/templates/landing/integrations.tsx
+++ b/bifrost/components/templates/landing/integrations.tsx
@@ -46,7 +46,7 @@ const Integrations = (props: IntegrationsProps) => {
),
href: "https://docs.helicone.ai/integrations/openai/javascript#openai-javascript-sdk-integration",
integrations: {
- "node.js": {
+ javascript: {
language: "tsx",
code: `import OpenAI from "openai";
@@ -95,31 +95,23 @@ client = OpenAI(
),
href: "https://docs.helicone.ai/integrations/anthropic/javascript",
integrations: {
- "node.js": {
+ javascript: {
language: "tsx",
code: `import Anthropic from "@anthropic-ai/sdk";
const anthropic = new Anthropic({
- baseURL: "https://anthropic.helicone.ai/",
- apiKey: process.env.ANTHROPIC_API_KEY,
- defaultHeaders: {
- "Helicone-Auth": ,
- },
-});
-`,
+ apiKey: ANTHROPIC_API_KEY,
+ baseURL: "https://anthropic.helicone.ai/\$\{HELICONE_API_KEY\}/\",
+});`,
},
python: {
language: "python",
code: `import anthropic
client = anthropic.Anthropic(
- api_key=os.environ.get("ANTHROPIC_API_KEY"),
- base_url="https://anthropic.helicone.ai"
- defaultHeaders={
- "Helicone-Auth": ,
- },
-)
-`,
+ api_key=ANTHROPIC_API_KEY,
+ base_url="https://anthropic.helicone.ai/{HELICONE_API_KEY}/"
+)`,
},
langchain: {
language: "python",
@@ -127,10 +119,7 @@ client = anthropic.Anthropic(
modelName: "claude-2",
anthropicApiKey: "ANTHROPIC_API_KEY",
clientOptions: {
- baseURL: "https://anthropic.helicone.ai/",
- defaultHeaders: {
- "Helicone-Auth": Bearer ,
- },
+ baseURL: "https://anthropic.helicone.ai/{HELICONE_API_KEY}/",
},
});
`,
@@ -151,7 +140,7 @@ client = anthropic.Anthropic(
),
href: "https://docs.helicone.ai/integrations/azure/javascript",
integrations: {
- "node.js": {
+ javascript: {
language: "tsx",
code: `import OpenAI from "openai";
@@ -270,7 +259,7 @@ self.model = AzureChatOpenAI(
),
integrations: {
- "node.js": {
+ javascript: {
language: "tsx",
code: `fetch("https://openrouter.helicone.ai/api/v1/chat/completions", {
method: "POST",
@@ -317,7 +306,7 @@ self.model = AzureChatOpenAI(
const [currentProvider, setCurrentProvider] = useState("OpenAI");
- const [currentIntegration, setCurrentIntregration] = useState("node.js");
+ const [currentIntegration, setCurrentIntregration] = useState("javascript");
const selectedProvider = PROVIDERS.find(
(provider) => provider.name === currentProvider
@@ -332,7 +321,7 @@ self.model = AzureChatOpenAI(
Get integrated in seconds
-
+
Use any model and monitor applications at any scale.{" "}
@@ -358,7 +347,7 @@ self.model = AzureChatOpenAI(
if (provider.name === "Gemini") {
setCurrentIntregration("curl");
} else {
- setCurrentIntregration("node.js");
+ setCurrentIntregration("javascript");
}
}
}}
@@ -372,9 +361,9 @@ self.model = AzureChatOpenAI(
- Other providers? See docs
+ Other providers? See docs
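The updated snippets drop the Helicone-Auth header in favor of embedding the key in the gateway path. As a visitor would apply the Anthropic example, it resolves to roughly the following; the env-var plumbing is an assumption, since the snippet shows bare placeholders:

// Anthropic through the Helicone gateway, with the key carried in the URL path.
import Anthropic from "@anthropic-ai/sdk";

const anthropic = new Anthropic({
  apiKey: process.env.ANTHROPIC_API_KEY,
  // HELICONE_API_KEY goes in the path instead of a Helicone-Auth header.
  baseURL: `https://anthropic.helicone.ai/${process.env.HELICONE_API_KEY}/`,
});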