Skip to content

Commit

Permalink
feat: setup storybook aws deployment
Browse files Browse the repository at this point in the history
  • Loading branch information
yyyyaaa committed Dec 12, 2023
1 parent 4d297d3 commit 4641313
Show file tree
Hide file tree
Showing 6 changed files with 309 additions and 1 deletion.
3 changes: 3 additions & 0 deletions packages/react/.storybook/main.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,9 @@ const config: StorybookConfig = {
autodocs: "tag",
},
core: {},
features: {
buildStoriesJson: true,
},
async viteFinal(config, { configType }) {
// return the customized config
if (configType === "PRODUCTION") {
Expand Down
38 changes: 38 additions & 0 deletions packages/react/config/seo.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
const siteUrl = "https://storybook.cosmology.zone";
const siteAddress = new URL(siteUrl);
const canonical = siteAddress.href.slice(0, -1);
const title = "Interchain UI storybook";
const description = "UI building blocks for Interchain builders.";
const fbAppId = null;

console.log(canonical);

module.exports = {
title,
canonical,
description,
openGraph: {
type: "website",
url: siteUrl,
title,
description,
site_name: title,
images: [
{
url: canonical + "/og/image.jpg",
width: 942,
height: 466,
alt: title,
},
],
},
twitter: {
handle: "@cosmology_tech",
site: "@cosmology_tech",
},
facebook: fbAppId
? {
appId: fbAppId,
}
: undefined,
};
27 changes: 27 additions & 0 deletions packages/react/config/site.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
const siteUrl = "https://storybook.cosmology.zone";
const siteAddress = new URL(siteUrl);
const canonical = siteAddress.href.slice(0, -1);

module.exports = {
company: {
nick: "Cosmology",
name: "Cosmology",
addr: ["San Francisco, CA"],
legalCounty: "San Francisco",
legalState: "California",
},
site: {
siteUrl,
www: `www.${siteAddress.host}`,
host: siteAddress.host,
},
emails: {
hello: "[email protected]",
support: "[email protected]",
abuse: "[email protected]",
privacy: "[email protected]",
legal: "[email protected]",
copyright: "[email protected]",
arbitrationOptOut: "[email protected]",
},
};
7 changes: 6 additions & 1 deletion packages/react/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,12 @@
"lint": "tsc --noEmit",
"ci": "yarn lint && yarn build",
"storybook": "storybook dev -p 6006",
"build-storybook": "storybook build"
"build-storybook": "storybook build",
"sb:postbuild": "npm run sb:seo",
"sb:seo": "node ./storybook-seo/seo.js",
"sb:deploy": "AWS_PROFILE=webinc aws s3 sync storybook-static/ s3://storybook.cosmology.zone",
"sb:invalidate": "AWS_PROFILE=webinc aws cloudfront create-invalidation --distribution-id E30W90TP7JYGCN --paths \"/*\"",
"sb:deploy:all": "npm run build-storybook && npm run sb:postbuild && npm run sb:deploy && ./storybook-seo/prepare.sh"
},
"devDependencies": {
"@chain-registry/osmosis": "^1.21.0",
Expand Down
22 changes: 22 additions & 0 deletions packages/react/storybook-seo/prepare.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
#!/bin/bash
# Post-deploy step: for every HTML file in the storybook build, create
# extensionless S3 copies (e.g. /about/index.html -> /about/ and
# /page.html -> /page) so pretty URLs resolve on S3 static hosting.
set -euo pipefail

export S3_BUCKET=storybook.cosmology.zone

(
  cd storybook-static &&
  find . -type f -name '*.html' | while IFS= read -r HTMLFILE; do
    # Strip the leading "./" that find prepends.
    HTMLFILESHORT=${HTMLFILE:2}

    # /about/index.html -> /about/   and   /page.html -> /page
    HTMLFILE_WITHOUT_INDEX=${HTMLFILESHORT//index.html/}
    HTMLFILE_WITHOUT_HTML=${HTMLFILE_WITHOUT_INDEX//.html/}

    echo "aws s3 cp s3://$S3_BUCKET/$HTMLFILESHORT s3://$S3_BUCKET/$HTMLFILE_WITHOUT_HTML"
    # Test the cp itself — the original checked $? of the trailing echo,
    # so copy failures were silently ignored.
    if ! aws s3 cp "s3://$S3_BUCKET/$HTMLFILESHORT" "s3://$S3_BUCKET/$HTMLFILE_WITHOUT_HTML"; then
      echo "***** Failed renaming build to $S3_BUCKET (html)" >&2
      exit 1
    fi
  done
)
213 changes: 213 additions & 0 deletions packages/react/storybook-seo/seo.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,213 @@
const fs = require("fs");
const path = require("path");

// Shared SEO/site metadata (canonical URL, title, contact emails).
const seo = require("../config/seo");
const siteInfo = require("../config/site");
const canonical = seo.canonical;
// Storybook's static build output; this script writes sitemap/robots files here.
const STORYBOOK_STATIC_DIR = path.resolve(__dirname, "../storybook-static/");
// stories.json is emitted by the build because .storybook/main.ts enables
// features.buildStoriesJson.
const STORYBOOK_INDEX_JSON = path.resolve(
__dirname,
"../storybook-static/stories.json"
);

// NOTE(review): neither map is ever populated in this script, so the
// page/legal sitemap sections below always render empty — confirm intended.
const pageObjects = {};
const legalPageObjects = {};

/**
 * Build an absolute storybook URL for a story or its docs page.
 * URL formats:
 *   story: <canonical>/?path=/story/nft-nftprofile--primary
 *   docs:  <canonical>/?path=/docs/nft-nftprofile--docs
 *
 * @param {"docs"|"story"} kind which storybook view to link to
 * @param {string} storyId storybook story id, e.g. "nft-nftprofile--docs"
 * @returns {string} absolute URL under the canonical site origin
 * @throws {Error} when kind is neither "docs" nor "story"
 */
function genStoryURL(kind = "docs", storyId) {
  if (kind !== "docs" && kind !== "story") {
    // Fixed: message now matches the accepted values ("docs", not "doc").
    throw new Error('kind must be "docs" or "story"');
  }

  return `${canonical}/?path=/${kind}/${storyId}`;
}

// A docs-page story id carries the "--docs" marker in its id.
function isDocsStory(storyId) {
  return storyId.indexOf("--docs") !== -1;
}

// Build the sitemap.xml content for every story listed in stories.json.
// Logs the generated document; returns "" (after logging the error) when
// the index file cannot be read or parsed.
function genStorybookSitemap() {
  try {
    const indexRaw = fs.readFileSync(STORYBOOK_INDEX_JSON, "utf8");
    const { stories } = JSON.parse(indexRaw);

    const urlEntries = Object.keys(stories)
      .map((storyId) => {
        const loc = genStoryURL(
          isDocsStory(storyId) ? "docs" : "story",
          storyId
        );
        const lastmod = new Date().toISOString().split("T")[0];
        return ` <url>\n <loc>${loc}</loc>\n <lastmod>${lastmod}</lastmod>\n </url>\n`;
      })
      .join("");

    const sitemap =
      '<?xml version="1.0" encoding="UTF-8"?>\n' +
      '<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">\n' +
      urlEntries +
      "</urlset>";

    console.log("generated", sitemap);
    return sitemap;
  } catch (error) {
    console.error(`Error reading file: ${error}`);
    return "";
  }
}

// Page ids that should never appear in generated sitemaps.
// NOTE(review): currently unreferenced in this script — confirm before removing.
const IGNORE = ["404", "_document", "_app"];

// FOR NOW keep this stuff out
// later when you generate a sitemap page, you can categorize this in Legal
// NOTE(review): also currently unreferenced in this script.
const LEGAL_FILES = [
"acceptable-use-policy",
"brand-guidelines",
"cookie-policy",
"copyright-policy",
"corporate-colors",
"credits",
"data-processing-addendum",
"developer-terms-of-use",
"logo-guidelines",
"security-measures",
];

// Format a Date (or anything the Date constructor accepts) as
// zero-padded "YYYY-MM-DD" in local time.
function formatDate(date) {
  const d = new Date(date);
  const year = d.getFullYear();
  const month = String(d.getMonth() + 1).padStart(2, "0");
  const day = String(d.getDate()).padStart(2, "0");
  return `${year}-${month}-${day}`;
}

// const pageSitemapXml = `<?xml version="1.0" encoding="UTF-8"?>
// <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
// ${Object.keys(pageObjects)
// .map(
// (path) => `<url>
// <loc>${canonical}${path}</loc>
// <lastmod>${formatDate(new Date(pageObjects[path].lastModified))}</lastmod>
// </url>`
// )
// .join("\n")}
// </urlset>`;

// Sitemap for legal pages, one <url> per entry in legalPageObjects.
// NOTE(review): legalPageObjects is always empty in this script, so this
// currently renders an empty <urlset> — confirm that is intended.
const legalSitemapXml = `<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
${Object.keys(legalPageObjects)
.map(
(path) => `<url>
<loc>${canonical}${path}</loc>
<lastmod>${formatDate(
new Date(legalPageObjects[path].lastModified)
)}</lastmod>
</url>`
)
.join("\n")}
</urlset>`;

// Sitemap index pointing at the per-section sitemaps. Fixed: the sitemaps.org
// protocol requires exactly one <loc> per <sitemap> entry — the original put
// both <loc> elements inside a single <sitemap>, which is invalid.
const sitemapXml = `<?xml version="1.0" encoding="UTF-8"?>
<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
  <sitemap>
    <loc>${canonical}/sitemaps/legal.xml</loc>
  </sitemap>
  <sitemap>
    <loc>${canonical}/sitemaps/pages.xml</loc>
  </sitemap>
</sitemapindex>
`;

// User-agent deny-list rendered into robots.txt below: each entry becomes a
// commented explanation followed by "User-agent: <bot> / Disallow: /" rules.
const BAD_AGENTS = [
{
text: "Search engines only please :) Thanks for obeying robots.txt",
bots: ["UbiCrawler", "DOC", "Zao", "discobot", "dotbot", "yacybot"],
},
{
text: "Dear bots, we don't appreciate you copying site content and providing very little additional value.",
bots: [
"sitecheck.internetseer.com",
"Zealbot",
"MJ12bot",
"MSIECrawler",
"SiteSnagger",
"WebStripper",
"WebCopier",
"Fetch",
"Offline Explorer",
"Teleport",
"TeleportPro",
"WebZIP",
"linko",
"HTTrack",
"Microsoft.URL.Control",
"Xenu",
"larbin",
"libwww",
"ZyBORG",
"Download Ninja",
],
},
{
text: "Recursive mode wget is not friendly",
bots: ["wget", "grub-client"],
},
{
text: "I realize you don't follow robots.txt, but FYI",
bots: ["k2spider"],
},
{
text: "Abusive bots",
bots: ["NPBot"],
},
];

// robots.txt content: a friendly header, per-agent Disallow blocks generated
// from BAD_AGENTS, Allow rules for pageObjects entries (currently empty — see
// note at the top of the file), sitemap locations, and the host.
const robotsTxt = `
#
# Dear bot, crawler or kind technical person who wishes to crawl ${
siteInfo.site.host
},
# please email ${
siteInfo.emails.support
}. We require whitelisting to access our sitemap.
#
# Thanks in advance! Your friendly Ops Team @ ${seo.title}.
${BAD_AGENTS.map(({ text, bots }) => {
return `
#
# ${text}
#
${bots
.map((bot) => {
return `
User-agent: ${bot}
Disallow: /`;
})
.join("\n")}
`;
}).join("")}
User-agent: *
${Object.keys(pageObjects)
.map((path) => `Allow: ${path}$`)
.join("\n")}
Sitemap: ${canonical}/sitemaps/pages.xml
Sitemap: ${canonical}/sitemaps/legal.xml
Host: ${siteInfo.site.host}
`;

// Emit the generated SEO artifacts into the storybook build output.
fs.writeFileSync("storybook-static/sitemap.xml", sitemapXml);
// Node's built-in fs.mkdirSync with { recursive: true } replaces the
// third-party mkdirp dependency used previously.
fs.mkdirSync("storybook-static/sitemaps", { recursive: true });
fs.writeFileSync("storybook-static/sitemaps/pages.xml", genStorybookSitemap());
fs.writeFileSync("storybook-static/sitemaps/legal.xml", legalSitemapXml);
fs.writeFileSync("storybook-static/robots.txt", robotsTxt);

0 comments on commit 4641313

Please sign in to comment.