diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 47a66543..114953fa 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -33,5 +33,7 @@ jobs:
           path: frontend/.yarn/cache
           key: ${{ runner.os }}-yarn-${{ hashFiles('frontend/yarn.lock') }}
           restore-keys: ${{ runner.os }}-yarn-
-      - run: yarn --cwd frontend
-      - run: make -C frontend lint
+      - run: yarn
+        working-directory: ./frontend
+      - run: make lint
+        working-directory: ./frontend
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index fcb65d96..88ecf059 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -119,8 +119,10 @@ jobs:
       - uses: actions/setup-node@v2
         with:
           node-version: '16'
-      - run: yarn --cwd frontend
-      - run: make -C frontend build
+      - run: yarn
+        working-directory: ./frontend
+      - run: make build
+        working-directory: ./frontend
         env:
           # Don't convert warnings to errors, we love our warnings.
           CI: false
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 2fc4f3ff..10e2180b 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -34,5 +34,7 @@ jobs:
           path: frontend/.yarn/cache
           key: ${{ runner.os }}-yarn-${{ hashFiles('frontend/yarn.lock') }}
           restore-keys: ${{ runner.os }}-yarn-
-      - run: yarn --cwd frontend
-      - run: make -C frontend test
+      - run: yarn
+        working-directory: ./frontend
+      - run: make test
+        working-directory: ./frontend
diff --git a/CHANGELOG.md b/CHANGELOG.md
index ad6f4f49..390b4904 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,8 @@
 
 ## 1.4.0
 
+- Components of a user-provided component library can now be used inside
+  `<Playground>` to create interactive demos of components.
 - Support _Table of Contents_ on documents.
 - Introduced new `` documentation component.
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index cd5ebade..553e7c53 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -33,58 +33,49 @@ export PATH=$PATH:$(go env GOPATH)/bin
 
 ## Setup
 
-Clone the official repository, switch into the directory and checkout the branch
-you want to work on.
+First we'll set up an umbrella directory into which we'll clone the main dsk
+repository and its supporting repositories.
 
 ```
-$ git clone github.com/rundsk/dsk
-$ git checkout 1.2
-$ cd dsk
-```
-
-When using the built-in frontend, the `frontend/build` folder must be
-present and contain the compiled version of the frontend. After a new
-checkout you can create it, using the following command.
+mkdir rundsk
+cd rundsk
+git clone git@github.com:rundsk/dsk.git
+git clone git@github.com:rundsk/js-sdk.git
+git clone git@github.com:rundsk/example-component-library.git @rundsk/example-component-library
+git clone git@github.com:rundsk/example-design-system.git
 ```
-$ make -C frontend build
-```
-
-## Improving the frontend
-There are two ways to work on the built-in frontend and verify changes
-in your browser easily at the same time while you go.
+All the following documentation will assume that you are working from inside the
+main repository checkout.
 
-First start the backend which will also serve the frontend.
 ```
-$ make dev
+cd dsk
 ```
 
-Each time you change the source of the frontend run the following
-command and reload the browser window to see the changes.
+## Developing
 
-```
-$ make -C frontend build
-```
+First we'll start the backend in development mode and after that do the same
+with the frontend.
-Alternatively you can start the frontend (using a development server)
-and the backend separately on different ports, having the frontend use
-backend over its HTTP API.
+On a fresh checkout we'll have to install the dependencies of the frontend
+first, and then start the frontend's development server.
 
-Start the backend, first. By default it'll be reachable over port 8080.
 ```
-$ make dev
+cd frontend
+yarn
+make dev
 ```
 
-Second, start the frontend using a development server. It will be
-reachable over port 3000 and proxy requests through to the backend.
+You should now be able to reach the frontend when opening http://127.0.0.1:3000
+in your browser. You might see some errors as we have not yet started the backend.
+
+Now inside another terminal we'll start the backend.
 
 ```
-$ make -C frontend dev
+make dev
 ```
 
-Now open http://127.0.0.1:3000 in your browser.
-
 ## Debugging
 
 When running the tests the search indexes are store to disk, to ease
diff --git a/Makefile b/Makefile
index 73359994..e09553e9 100644
--- a/Makefile
+++ b/Makefile
@@ -6,6 +6,7 @@
 FRONTEND ?= $(shell pwd)/frontend/build
 
 DDT ?= $(shell pwd)/../example-design-system
+COMPONENTS ?= $(shell pwd)/../@rundsk/example-component-library
 
 VERSION ?= head-$(shell git rev-parse --short HEAD)
 LDFLAGS = -X main.Version=$(VERSION)
@@ -32,7 +33,7 @@ lint: internal/frontend/vfsdata.go
 .PHONY: dev
 dev: internal/frontend/vfsdata.go
 	go build -tags=dev -ldflags "$(LDFLAGS)" $(CMD_PKG)
-	./dsk -frontend $(FRONTEND) "$(DDT)"
+	./dsk -frontend $(FRONTEND) -components $(COMPONENTS) "$(DDT)"
 	rm dsk
 
 .PHONY: clean
@@ -45,7 +46,7 @@ clean:
 	if [ -f ./mem.prof ]; then rm ./mem.prof; fi
 
 .PHONY: dist
-dist: dist/dsk-darwin-amd64 dist/dsk-linux-amd64 dist/dsk-windows-386.exe
+dist: dist/dsk-darwin-amd64 dist/dsk-linux-amd64 dist/dsk-windows-386.exe
 dist: dist/dsk-darwin-amd64.zip dist/dsk-linux-amd64.tar.gz dist/dsk-windows-386.zip
 dist: container-image
 	ls -lh dist
@@ -82,5 +83,5 @@ dist/%-linux-amd64: $(ANY_DEPS) internal/frontend/vfsdata.go
 dist/%-windows-386.exe: $(ANY_DEPS) internal/frontend/vfsdata.go
 	GOOS=windows GOARCH=386 go build -ldflags "$(LDFLAGS) -s -w" -o $@ $(CMD_PKG)
 
-internal/frontend/vfsdata.go: $(shell find $(FRONTEND) -type f)
+internal/frontend/vfsdata.go: $(shell find $(FRONTEND) -type f)
 	FRONTEND=$(FRONTEND) go run cmd/frontend/generate.go
diff --git a/cmd/dsk/main.go b/cmd/dsk/main.go
index 108ee3d8..f67b3d38 100644
--- a/cmd/dsk/main.go
+++ b/cmd/dsk/main.go
@@ -58,10 +58,11 @@ func main() {
 		}
 	}()
 
-	host := flag.String("host", "127.0.0.1", "host IP to bind to")
-	port := flag.String("port", "8080", "port to bind to")
-	version := flag.Bool("version", false, "print DSK version")
-	noColor := flag.Bool("no-color", false, "disables color output")
+	fhost := flag.String("host", "127.0.0.1", "host IP to bind to")
+	fport := flag.String("port", "8080", "port to bind to")
+	fversion := flag.Bool("version", false, "print DSK version")
+	fnoColor := flag.Bool("no-color", false, "disables color output")
+	fcomponents := flag.String("components", "", "path to component library assets")
 	ffrontend := flag.String("frontend", "", "path to a frontend, to use instead of the built-in")
 	fallowOrigin := flag.String("allow-origin", "", "origins from which browsers can access the HTTP API; for multiple origins, use a comma as a separator, the wildcard * is supported; to allow all use *")
 	flag.Parse()
@@ -70,14 +71,14 @@ func main() {
 		log.Fatalf("Too many arguments given, expecting exactly 0 or 1")
 	}
 
-	if *version {
+	if *fversion {
fmt.Println(Version) os.Exit(1) } // Color package automatically disables colors when not a TTY. We // don't need to check for an interactive terminal here again. - if *noColor { + if *fnoColor { color.NoColor = true } whiteOnBlue := color.New(color.FgWhite, color.BgBlue) @@ -114,7 +115,17 @@ func main() { if err != nil { panic(err) } - log.Printf("Detected live path: %s", livePath) + log.Printf("Using live path: %s", livePath) + + var componentsPath string + if *fcomponents != "" { + componentsPath = *fcomponents + } + + var frontendPath string + if *ffrontend != "" { + frontendPath = *ffrontend + } allowOrigins := strings.Split(*fallowOrigin, ",") if len(allowOrigins) != 0 { @@ -124,7 +135,8 @@ func main() { app = plex.NewApp( // assign to global Version, livePath, - *ffrontend, + componentsPath, + frontendPath, ) ctx, cancel := context.WithCancel(context.Background()) app.Teardown.AddCancelFunc(cancel) @@ -133,6 +145,12 @@ func main() { log.Fatal(red.Sprintf("Failed to initialize application: %s", err)) } + if componentsPath != "" { + if err := app.OpenComponents(ctx); err != nil { + log.Fatal(red.Sprintf("Failed to start application: %s", err)) + } + } + if app.HasMultiVersionsSupport() { log.Printf("Detected support for multi-versions") @@ -145,7 +163,7 @@ func main() { apis := map[int]httputil.Mountable{ 1: api.NewV1(app.Sources, app.Version, app.Broker, allowOrigins), - 2: api.NewV2(app.Sources, app.Version, app.Broker, allowOrigins), + 2: api.NewV2(app.Sources, app.Components, app.Version, app.Broker, allowOrigins), } for av, a := range apis { log.Printf("Mounting APIv%d HTTP mux...", av) @@ -159,7 +177,7 @@ func main() { log.Print("Mounting frontend HTTP mux...") mux.Handle("/", app.Frontend.HTTPMux()) - addr := fmt.Sprintf("%s:%s", *host, *port) + addr := fmt.Sprintf("%s:%s", *fhost, *fport) if isTerminal { log.Print("-------------------------------------------") log.Printf("Please visit: %s", green.Sprint("http://"+addr)) diff --git a/frontend/.prettierrc b/frontend/.prettierrc index b139b839..8cf527e8 100644 --- a/frontend/.prettierrc +++ b/frontend/.prettierrc @@ -3,4 +3,5 @@ "printWidth": 120, "bracketSpacing": true, "trailingComma": "es5", + "arrowParens": "always" } diff --git a/frontend/.vscode/extensions.json b/frontend/.vscode/extensions.json new file mode 100644 index 00000000..daaa5ee2 --- /dev/null +++ b/frontend/.vscode/extensions.json @@ -0,0 +1,7 @@ +{ + "recommendations": [ + "arcanis.vscode-zipfs", + "dbaeumer.vscode-eslint", + "esbenp.prettier-vscode" + ] +} diff --git a/frontend/.vscode/settings.json b/frontend/.vscode/settings.json new file mode 100644 index 00000000..5ef3cbaa --- /dev/null +++ b/frontend/.vscode/settings.json @@ -0,0 +1,8 @@ +{ + "search.exclude": { + "**/.yarn": true, + "**/.pnp.*": true + }, + "eslint.nodePath": ".yarn/sdks", + "prettier.prettierPath": ".yarn/sdks/prettier/index.js" +} diff --git a/frontend/.yarn/sdks/eslint/bin/eslint.js b/frontend/.yarn/sdks/eslint/bin/eslint.js new file mode 100755 index 00000000..4d327a49 --- /dev/null +++ b/frontend/.yarn/sdks/eslint/bin/eslint.js @@ -0,0 +1,20 @@ +#!/usr/bin/env node + +const {existsSync} = require(`fs`); +const {createRequire, createRequireFromPath} = require(`module`); +const {resolve} = require(`path`); + +const relPnpApiPath = "../../../../.pnp.cjs"; + +const absPnpApiPath = resolve(__dirname, relPnpApiPath); +const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath); + +if (existsSync(absPnpApiPath)) { + if (!process.versions.pnp) { + // Setup the environment to 
be able to require eslint/bin/eslint.js + require(absPnpApiPath).setup(); + } +} + +// Defer to the real eslint/bin/eslint.js your application uses +module.exports = absRequire(`eslint/bin/eslint.js`); diff --git a/frontend/.yarn/sdks/eslint/lib/api.js b/frontend/.yarn/sdks/eslint/lib/api.js new file mode 100644 index 00000000..97a05244 --- /dev/null +++ b/frontend/.yarn/sdks/eslint/lib/api.js @@ -0,0 +1,20 @@ +#!/usr/bin/env node + +const {existsSync} = require(`fs`); +const {createRequire, createRequireFromPath} = require(`module`); +const {resolve} = require(`path`); + +const relPnpApiPath = "../../../../.pnp.cjs"; + +const absPnpApiPath = resolve(__dirname, relPnpApiPath); +const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath); + +if (existsSync(absPnpApiPath)) { + if (!process.versions.pnp) { + // Setup the environment to be able to require eslint/lib/api.js + require(absPnpApiPath).setup(); + } +} + +// Defer to the real eslint/lib/api.js your application uses +module.exports = absRequire(`eslint/lib/api.js`); diff --git a/frontend/.yarn/sdks/eslint/package.json b/frontend/.yarn/sdks/eslint/package.json new file mode 100644 index 00000000..744a7732 --- /dev/null +++ b/frontend/.yarn/sdks/eslint/package.json @@ -0,0 +1,6 @@ +{ + "name": "eslint", + "version": "7.32.0-sdk", + "main": "./lib/api.js", + "type": "commonjs" +} diff --git a/frontend/.yarn/sdks/integrations.yml b/frontend/.yarn/sdks/integrations.yml new file mode 100644 index 00000000..aa9d0d0a --- /dev/null +++ b/frontend/.yarn/sdks/integrations.yml @@ -0,0 +1,5 @@ +# This file is automatically generated by @yarnpkg/sdks. +# Manual changes might be lost! + +integrations: + - vscode diff --git a/frontend/.yarn/sdks/prettier/index.js b/frontend/.yarn/sdks/prettier/index.js new file mode 100755 index 00000000..f6882d80 --- /dev/null +++ b/frontend/.yarn/sdks/prettier/index.js @@ -0,0 +1,20 @@ +#!/usr/bin/env node + +const {existsSync} = require(`fs`); +const {createRequire, createRequireFromPath} = require(`module`); +const {resolve} = require(`path`); + +const relPnpApiPath = "../../../.pnp.cjs"; + +const absPnpApiPath = resolve(__dirname, relPnpApiPath); +const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath); + +if (existsSync(absPnpApiPath)) { + if (!process.versions.pnp) { + // Setup the environment to be able to require prettier/index.js + require(absPnpApiPath).setup(); + } +} + +// Defer to the real prettier/index.js your application uses +module.exports = absRequire(`prettier/index.js`); diff --git a/frontend/.yarn/sdks/prettier/package.json b/frontend/.yarn/sdks/prettier/package.json new file mode 100644 index 00000000..fdd19b5d --- /dev/null +++ b/frontend/.yarn/sdks/prettier/package.json @@ -0,0 +1,6 @@ +{ + "name": "prettier", + "version": "2.4.1-sdk", + "main": "./index.js", + "type": "commonjs" +} diff --git a/frontend/Makefile b/frontend/Makefile index 32daaa9e..89403c7f 100644 --- a/frontend/Makefile +++ b/frontend/Makefile @@ -10,7 +10,7 @@ dev: .PHONY: test test: - yarn react-scripts test . 
+ yarn test .PHONY: build build: @@ -26,7 +26,7 @@ lint: .PHONY: format format: yarn eslint --fix --cache src - yarn prettier --write src/** + yarn prettier --write 'src/**/*.{js,jsx,css,scss,json}' .PHONY: clean clean: diff --git a/frontend/build/empty b/frontend/build/empty deleted file mode 100644 index e69de29b..00000000 diff --git a/frontend/config/env.js b/frontend/config/env.js new file mode 100644 index 00000000..3d1411bd --- /dev/null +++ b/frontend/config/env.js @@ -0,0 +1,106 @@ +'use strict'; + +const fs = require('fs'); +const path = require('path'); +const paths = require('./paths'); + +// Make sure that including paths.js after env.js will read .env variables. +delete require.cache[require.resolve('./paths')]; + +const NODE_ENV = process.env.NODE_ENV; +if (!NODE_ENV) { + throw new Error( + 'The NODE_ENV environment variable is required but was not specified.' + ); +} + +// https://github.com/bkeepers/dotenv#what-other-env-files-can-i-use +const dotenvFiles = [ + `${paths.dotenv}.${NODE_ENV}.local`, + // Don't include `.env.local` for `test` environment + // since normally you expect tests to produce the same + // results for everyone + NODE_ENV !== 'test' && `${paths.dotenv}.local`, + `${paths.dotenv}.${NODE_ENV}`, + paths.dotenv, +].filter(Boolean); + +// Load environment variables from .env* files. Suppress warnings using silent +// if this file is missing. dotenv will never modify any environment variables +// that have already been set. Variable expansion is supported in .env files. +// https://github.com/motdotla/dotenv +// https://github.com/motdotla/dotenv-expand +dotenvFiles.forEach(dotenvFile => { + if (fs.existsSync(dotenvFile)) { + require('dotenv-expand')( + require('dotenv').config({ + path: dotenvFile, + }) + ); + } +}); + +// We support resolving modules according to `NODE_PATH`. +// This lets you use absolute paths in imports inside large monorepos: +// https://github.com/facebook/create-react-app/issues/253. +// It works similar to `NODE_PATH` in Node itself: +// https://nodejs.org/api/modules.html#modules_loading_from_the_global_folders +// Note that unlike in Node, only *relative* paths from `NODE_PATH` are honored. +// Otherwise, we risk importing Node.js core modules into an app instead of webpack shims. +// https://github.com/facebook/create-react-app/issues/1023#issuecomment-265344421 +// We also resolve them to make sure all tools using them work consistently. +const appDirectory = fs.realpathSync(process.cwd()); +process.env.NODE_PATH = (process.env.NODE_PATH || '') + .split(path.delimiter) + .filter(folder => folder && !path.isAbsolute(folder)) + .map(folder => path.resolve(appDirectory, folder)) + .join(path.delimiter); + +// Grab NODE_ENV and REACT_APP_* environment variables and prepare them to be +// injected into the application via DefinePlugin in webpack configuration. +const REACT_APP = /^REACT_APP_/i; + +function getClientEnvironment(publicUrl) { + const raw = Object.keys(process.env) + .filter(key => REACT_APP.test(key)) + .reduce( + (env, key) => { + env[key] = process.env[key]; + return env; + }, + { + // Useful for determining whether we’re running in production mode. + // Most importantly, it switches React into the correct mode. + NODE_ENV: process.env.NODE_ENV || 'development', + // Useful for resolving the correct path to static assets in `public`. + // For example, . + // This should only be used as an escape hatch. Normally you would put + // images into the `src` and `import` them in code to get their paths. 
+ PUBLIC_URL: publicUrl, + // We support configuring the sockjs pathname during development. + // These settings let a developer run multiple simultaneous projects. + // They are used as the connection `hostname`, `pathname` and `port` + // in webpackHotDevClient. They are used as the `sockHost`, `sockPath` + // and `sockPort` options in webpack-dev-server. + WDS_SOCKET_HOST: process.env.WDS_SOCKET_HOST, + WDS_SOCKET_PATH: process.env.WDS_SOCKET_PATH, + WDS_SOCKET_PORT: process.env.WDS_SOCKET_PORT, + // Whether or not react-refresh is enabled. + // react-refresh is not 100% stable at this time, + // which is why it's disabled by default. + // It is defined here so it is available in the webpackHotDevClient. + FAST_REFRESH: process.env.FAST_REFRESH !== 'false', + } + ); + // Stringify all values so we can feed into webpack DefinePlugin + const stringified = { + 'process.env': Object.keys(raw).reduce((env, key) => { + env[key] = JSON.stringify(raw[key]); + return env; + }, {}), + }; + + return { raw, stringified }; +} + +module.exports = getClientEnvironment; diff --git a/frontend/config/getHttpsConfig.js b/frontend/config/getHttpsConfig.js new file mode 100644 index 00000000..013d493c --- /dev/null +++ b/frontend/config/getHttpsConfig.js @@ -0,0 +1,66 @@ +'use strict'; + +const fs = require('fs'); +const path = require('path'); +const crypto = require('crypto'); +const chalk = require('react-dev-utils/chalk'); +const paths = require('./paths'); + +// Ensure the certificate and key provided are valid and if not +// throw an easy to debug error +function validateKeyAndCerts({ cert, key, keyFile, crtFile }) { + let encrypted; + try { + // publicEncrypt will throw an error with an invalid cert + encrypted = crypto.publicEncrypt(cert, Buffer.from('test')); + } catch (err) { + throw new Error( + `The certificate "${chalk.yellow(crtFile)}" is invalid.\n${err.message}` + ); + } + + try { + // privateDecrypt will throw an error with an invalid key + crypto.privateDecrypt(key, encrypted); + } catch (err) { + throw new Error( + `The certificate key "${chalk.yellow(keyFile)}" is invalid.\n${ + err.message + }` + ); + } +} + +// Read file and throw an error if it doesn't exist +function readEnvFile(file, type) { + if (!fs.existsSync(file)) { + throw new Error( + `You specified ${chalk.cyan( + type + )} in your env, but the file "${chalk.yellow(file)}" can't be found.` + ); + } + return fs.readFileSync(file); +} + +// Get the https config +// Return cert files if provided in env, otherwise just true or false +function getHttpsConfig() { + const { SSL_CRT_FILE, SSL_KEY_FILE, HTTPS } = process.env; + const isHttps = HTTPS === 'true'; + + if (isHttps && SSL_CRT_FILE && SSL_KEY_FILE) { + const crtFile = path.resolve(paths.appPath, SSL_CRT_FILE); + const keyFile = path.resolve(paths.appPath, SSL_KEY_FILE); + const config = { + cert: readEnvFile(crtFile, 'SSL_CRT_FILE'), + key: readEnvFile(keyFile, 'SSL_KEY_FILE'), + }; + + validateKeyAndCerts({ ...config, keyFile, crtFile }); + return config; + } + return isHttps; +} + +module.exports = getHttpsConfig; diff --git a/frontend/config/jest/babelTransform.js b/frontend/config/jest/babelTransform.js new file mode 100644 index 00000000..dabf5a8c --- /dev/null +++ b/frontend/config/jest/babelTransform.js @@ -0,0 +1,29 @@ +'use strict'; + +const babelJest = require('babel-jest'); + +const hasJsxRuntime = (() => { + if (process.env.DISABLE_NEW_JSX_TRANSFORM === 'true') { + return false; + } + + try { + require.resolve('react/jsx-runtime'); + return true; + } catch (e) 
{ + return false; + } +})(); + +module.exports = babelJest.createTransformer({ + presets: [ + [ + require.resolve('babel-preset-react-app'), + { + runtime: hasJsxRuntime ? 'automatic' : 'classic', + }, + ], + ], + babelrc: false, + configFile: false, +}); diff --git a/frontend/config/jest/cssStub.js b/frontend/config/jest/cssStub.js new file mode 100644 index 00000000..f053ebf7 --- /dev/null +++ b/frontend/config/jest/cssStub.js @@ -0,0 +1 @@ +module.exports = {}; diff --git a/frontend/config/jest/cssTransform.js b/frontend/config/jest/cssTransform.js new file mode 100644 index 00000000..8f651148 --- /dev/null +++ b/frontend/config/jest/cssTransform.js @@ -0,0 +1,14 @@ +'use strict'; + +// This is a custom Jest transformer turning style imports into empty objects. +// http://facebook.github.io/jest/docs/en/webpack.html + +module.exports = { + process() { + return 'module.exports = {};'; + }, + getCacheKey() { + // The output is always the same. + return 'cssTransform'; + }, +}; diff --git a/frontend/config/jest/fileTransform.js b/frontend/config/jest/fileTransform.js new file mode 100644 index 00000000..aab67618 --- /dev/null +++ b/frontend/config/jest/fileTransform.js @@ -0,0 +1,40 @@ +'use strict'; + +const path = require('path'); +const camelcase = require('camelcase'); + +// This is a custom Jest transformer turning file imports into filenames. +// http://facebook.github.io/jest/docs/en/webpack.html + +module.exports = { + process(src, filename) { + const assetFilename = JSON.stringify(path.basename(filename)); + + if (filename.match(/\.svg$/)) { + // Based on how SVGR generates a component name: + // https://github.com/smooth-code/svgr/blob/01b194cf967347d43d4cbe6b434404731b87cf27/packages/core/src/state.js#L6 + const pascalCaseFilename = camelcase(path.parse(filename).name, { + pascalCase: true, + }); + const componentName = `Svg${pascalCaseFilename}`; + return `const React = require('react'); + module.exports = { + __esModule: true, + default: ${assetFilename}, + ReactComponent: React.forwardRef(function ${componentName}(props, ref) { + return { + $$typeof: Symbol.for('react.element'), + type: 'svg', + ref: ref, + key: null, + props: Object.assign({}, props, { + children: ${assetFilename} + }) + }; + }), + };`; + } + + return `module.exports = ${assetFilename};`; + }, +}; diff --git a/frontend/config/modules.js b/frontend/config/modules.js new file mode 100644 index 00000000..d63e41d7 --- /dev/null +++ b/frontend/config/modules.js @@ -0,0 +1,134 @@ +'use strict'; + +const fs = require('fs'); +const path = require('path'); +const paths = require('./paths'); +const chalk = require('react-dev-utils/chalk'); +const resolve = require('resolve'); + +/** + * Get additional module paths based on the baseUrl of a compilerOptions object. + * + * @param {Object} options + */ +function getAdditionalModulePaths(options = {}) { + const baseUrl = options.baseUrl; + + if (!baseUrl) { + return ''; + } + + const baseUrlResolved = path.resolve(paths.appPath, baseUrl); + + // We don't need to do anything if `baseUrl` is set to `node_modules`. This is + // the default behavior. + if (path.relative(paths.appNodeModules, baseUrlResolved) === '') { + return null; + } + + // Allow the user set the `baseUrl` to `appSrc`. + if (path.relative(paths.appSrc, baseUrlResolved) === '') { + return [paths.appSrc]; + } + + // If the path is equal to the root directory we ignore it here. + // We don't want to allow importing from the root directly as source files are + // not transpiled outside of `src`. 
We do allow importing them with the + // absolute path (e.g. `src/Components/Button.js`) but we set that up with + // an alias. + if (path.relative(paths.appPath, baseUrlResolved) === '') { + return null; + } + + // Otherwise, throw an error. + throw new Error( + chalk.red.bold( + "Your project's `baseUrl` can only be set to `src` or `node_modules`." + + ' Create React App does not support other values at this time.' + ) + ); +} + +/** + * Get webpack aliases based on the baseUrl of a compilerOptions object. + * + * @param {*} options + */ +function getWebpackAliases(options = {}) { + const baseUrl = options.baseUrl; + + if (!baseUrl) { + return {}; + } + + const baseUrlResolved = path.resolve(paths.appPath, baseUrl); + + if (path.relative(paths.appPath, baseUrlResolved) === '') { + return { + src: paths.appSrc, + }; + } +} + +/** + * Get jest aliases based on the baseUrl of a compilerOptions object. + * + * @param {*} options + */ +function getJestAliases(options = {}) { + const baseUrl = options.baseUrl; + + if (!baseUrl) { + return {}; + } + + const baseUrlResolved = path.resolve(paths.appPath, baseUrl); + + if (path.relative(paths.appPath, baseUrlResolved) === '') { + return { + '^src/(.*)$': '/src/$1', + }; + } +} + +function getModules() { + // Check if TypeScript is setup + const hasTsConfig = fs.existsSync(paths.appTsConfig); + const hasJsConfig = fs.existsSync(paths.appJsConfig); + + if (hasTsConfig && hasJsConfig) { + throw new Error( + 'You have both a tsconfig.json and a jsconfig.json. If you are using TypeScript please remove your jsconfig.json file.' + ); + } + + let config; + + // If there's a tsconfig.json we assume it's a + // TypeScript project and set up the config + // based on tsconfig.json + if (hasTsConfig) { + const ts = require(resolve.sync('typescript', { + basedir: paths.appNodeModules, + })); + config = ts.readConfigFile(paths.appTsConfig, ts.sys.readFile).config; + // Otherwise we'll check if there is jsconfig.json + // for non TS projects. + } else if (hasJsConfig) { + config = require(paths.appJsConfig); + } + + config = config || {}; + const options = config.compilerOptions || {}; + + const additionalModulePaths = getAdditionalModulePaths(options); + + return { + additionalModulePaths: additionalModulePaths, + webpackAliases: getWebpackAliases(options), + jestAliases: getJestAliases(options), + hasTsConfig, + }; +} + +module.exports = getModules(); diff --git a/frontend/config/paths.js b/frontend/config/paths.js new file mode 100644 index 00000000..4e3d30e2 --- /dev/null +++ b/frontend/config/paths.js @@ -0,0 +1,75 @@ +'use strict'; + +const path = require('path'); +const fs = require('fs'); +const getPublicUrlOrPath = require('react-dev-utils/getPublicUrlOrPath'); + +// Make sure any symlinks in the project folder are resolved: +// https://github.com/facebook/create-react-app/issues/637 +const appDirectory = fs.realpathSync(process.cwd()); +const resolveApp = relativePath => path.resolve(appDirectory, relativePath); + +// We use `PUBLIC_URL` environment variable or "homepage" field to infer +// "public path" at which the app is served. +// webpack needs to know it to put the right + + + + + + + +
+ + + diff --git a/internal/api/v2_playground_index.jsx.tmpl b/internal/api/v2_playground_index.jsx.tmpl new file mode 100644 index 00000000..70095fc2 --- /dev/null +++ b/internal/api/v2_playground_index.jsx.tmpl @@ -0,0 +1,3 @@ +import ThePlaygroundInQuestion from '{{ .ImportPath }}'; + +{{ .RuntimeJS }} diff --git a/internal/api/v2_playground_runtime.jsx b/internal/api/v2_playground_runtime.jsx new file mode 100644 index 00000000..172fee54 --- /dev/null +++ b/internal/api/v2_playground_runtime.jsx @@ -0,0 +1,64 @@ +/** + * Copyright 2021 Marius Wilms, Christoph Labacher. All rights reserved. + * + * Use of this source code is governed by a BSD-style + * license that can be found in the LICENSE file. + */ + +import React, { useEffect, useLayoutEffect } from 'react'; +import ReactDOM from 'react-dom'; + +const handleOnLoad = () => { + const id = document.querySelector('body').getAttribute('data-id'); + window.parent.postMessage( + { + id, + contentHeight: document.querySelector('html').offsetHeight, + }, + '*' + ); +}; + +const PlaygroundWrapper = () => { + useLayoutEffect(handleOnLoad); + + useEffect(() => { + // This is called after all images loaded + window.addEventListener('load', handleOnLoad); + + return () => { + window.removeEventListener('load', handleOnLoad); + }; + }, []); + + useEffect(() => { + const resizeObserver = new ResizeObserver(handleOnLoad); + resizeObserver.observe(document.body); + + return () => { + resizeObserver.unobserve(document.body); + }; + }, []); + + const noPadding = window.frameElement.attributes.nopadding; + + /* eslint-disable react/jsx-no-undef */ + return ( +
+      <ThePlaygroundInQuestion />
+    </div>
+ ); + /* eslint-enable */ +}; + +document.addEventListener('DOMContentLoaded', () => { + ReactDOM.render(, document.getElementById('root')); +}); diff --git a/internal/ddt/node.go b/internal/ddt/node.go index 3c2586ef..103e7d35 100644 --- a/internal/ddt/node.go +++ b/internal/ddt/node.go @@ -9,6 +9,7 @@ package ddt import ( "crypto/sha1" "fmt" + "hash/adler32" "io/ioutil" "log" "os" @@ -195,6 +196,10 @@ func (n *Node) CalculateHash() (string, error) { return n.hash, nil } +func (n *Node) Id() string { + return strings.ToUpper(fmt.Sprintf("N%x", adler32.Checksum([]byte(n.Path)))) +} + // Returns the normalized URL path fragment, that can be used to // address this node i.e Input/Password. func (n *Node) URL() string { @@ -441,6 +446,19 @@ func (n *Node) Docs() ([]*NodeDoc, error) { return docs, nil } +func (n *Node) GetDoc(id string) (bool, *NodeDoc, error) { + docs, err := n.Docs() + if err != nil { + return false, nil, err + } + for _, doc := range docs { + if doc.Id() == id { + return true, doc, nil + } + } + return false, nil, nil +} + // Returns a list of crumbs. The last element is the current active // one. Does not include a root ddt. func (n *Node) Crumbs(get NodeGetter) []*Node { diff --git a/internal/ddt/node_doc.go b/internal/ddt/node_doc.go index d07a24ca..5556f774 100644 --- a/internal/ddt/node_doc.go +++ b/internal/ddt/node_doc.go @@ -9,6 +9,7 @@ package ddt import ( "bytes" "fmt" + "hash/adler32" "io/ioutil" "path/filepath" "strconv" @@ -26,6 +27,14 @@ type NodeDoc struct { path string } +func (d NodeDoc) Id() string { + return strings.ToUpper(fmt.Sprintf("D%x", adler32.Checksum([]byte(d.path)))) +} + +func (d NodeDoc) URL() string { + return filepath.Base(d.path) +} + // Order is a hint for outside sorting mechanisms. func (d NodeDoc) Order() uint64 { return orderNumber(filepath.Base(d.path)) @@ -105,6 +114,19 @@ func (d NodeDoc) Raw() ([]byte, error) { return ioutil.ReadFile(d.path) } +func (d NodeDoc) GetPlayground(id string) (bool, *NodeDocComponent, error) { + cmps, err := d.Components() + if err != nil { + return false, nil, err + } + for _, cmp := range cmps { + if cmp.Id() == id { + return true, cmp, nil + } + } + return false, nil, nil +} + // Components as found in the raw document. func (d NodeDoc) Components() ([]*NodeDocComponent, error) { components := make([]*NodeDocComponent, 0) @@ -249,7 +271,16 @@ func extractComponents(contents []byte, components []*NodeDocComponent) []byte { // Replaces placeholders with components. func insertComponents(contents []byte, components []*NodeDocComponent) []byte { for _, component := range components { - contents = bytes.ReplaceAll(contents, []byte(component.Placeholder()), []byte(component.Raw)) + // Add the components ID as data-component attribute. We cannot use + // NodeDocTransformer as component code cannot run through it, as the + // DOM parser will destroy casing of the component code. 
+ raw := strings.Replace( + component.Raw, + fmt.Sprintf("<%s", component.Name), + fmt.Sprintf("<%s data-component=\"%s\"", component.Name, component.Id()), + 1, + ) + contents = bytes.ReplaceAll(contents, []byte(component.Placeholder()), []byte(raw)) } return contents } diff --git a/internal/ddt/node_doc_component.go b/internal/ddt/node_doc_component.go index b7f3b6b7..7b193c32 100644 --- a/internal/ddt/node_doc_component.go +++ b/internal/ddt/node_doc_component.go @@ -7,10 +7,11 @@ package ddt import ( + "crypto/sha1" "fmt" - "math/rand" "regexp" "strings" + "strconv" ) const ( @@ -18,25 +19,26 @@ const ( ) type NodeDocComponent struct { - Id int // Unique ID + Name string // i.e. CodeBlock + Raw string + RawInner string + Level int // Nesting level Position int // Start position inside document. Length int // Length of the component code. } -func NewNodeDocComponent(raw string, level int, position int) *NodeDocComponent { - return &NodeDocComponent{ - Id: rand.Intn(maxComponentsPerNodeDoc), - Raw: raw, - Level: level, - Position: position, - Length: len(raw), - } +func (c *NodeDocComponent) Id() string { + cleaner := regexp.MustCompile(`\s`) + + content := strings.ToLower(cleaner.ReplaceAllString(c.RawInner, "")) + content += strconv.Itoa(c.Position) + return fmt.Sprintf("%x", sha1.Sum([]byte(content))) } func (c *NodeDocComponent) Placeholder() string { - return fmt.Sprintf("dsk+component+%d", c.Id) + return fmt.Sprintf("dsk+component+%s", c.Id()) } // TODO: Implement @@ -57,13 +59,16 @@ func findComponentsInMarkdown(contents []byte) []*NodeDocComponent { var isCode bool var current strings.Builder + var tagName string var openingTag string var closingTag string var openingTagPosition int + tagNameRegexp := regexp.MustCompile(`^<([a-zA-Z0-9]+)`) + for i := 0; i < len(c); i++ { - if c[i] == '`' && (i-1 < 0 || c[i-1] != '\\') { + if !isConsuming && c[i] == '`' && (i-1 < 0 || c[i-1] != '\\') { if i+2 < len(c) && c[i+1] == '`' && c[i+2] == '`' { i += 2 } @@ -81,23 +86,30 @@ func findComponentsInMarkdown(contents []byte) []*NodeDocComponent { // need to check if we need to end consumption. if c[i] == '>' { if isLookingForTag { - re := regexp.MustCompile(`^<[a-zA-Z0-9]+`) + tagName = tagNameRegexp.FindStringSubmatch(current.String())[1] openingTag = current.String() - closingTag = fmt.Sprintf("%s>", strings.Replace(re.FindString(openingTag), "<", "", tagName) isLookingForTag = false continue } if strings.Contains(current.String(), closingTag) { - found = append(found, NewNodeDocComponent( - current.String(), - 0, // Currently finding only top level components. - openingTagPosition, - )) + cmp := &NodeDocComponent{ + Name: tagName, + + Raw: current.String(), + RawInner: strings.TrimSuffix(strings.TrimPrefix(current.String(), openingTag), closingTag), + + Level: 0, // Currently finding only top level components. + Position: openingTagPosition, + Length: current.Len(), + } + found = append(found, cmp) current.Reset() + tagName = "" openingTag = "" closingTag = "" diff --git a/internal/ddt/node_doc_test.go b/internal/ddt/node_doc_test.go index d2486657..db20671b 100644 --- a/internal/ddt/node_doc_test.go +++ b/internal/ddt/node_doc_test.go @@ -43,11 +43,10 @@ func TestAddComponentProtection(t *testing.T) { ` expected0 := ` -dsk+component+23 +dsk+component+356a192b7913b04c54574d18c28d46e6395428ab ` components0 := []*NodeDocComponent{ &NodeDocComponent{ - Id: 23, Raw: raw0[1 : len(raw0)-1], Length: 77, Position: 1, @@ -63,11 +62,10 @@ dsk+component+23 Yellow and green are the colors of spring. 
` expected1 := ` -Yellow and dsk+component+23 are the colors of spring. +Yellow and dsk+component+7b52009b64fd0a2a49e6d8a939753077792b0554 are the colors of spring. ` components1 := []*NodeDocComponent{ &NodeDocComponent{ - Id: 23, Raw: "green", Length: len("green"), Position: 12, @@ -89,13 +87,13 @@ The following visual design has been agreed upon by our team: expected2 := ` The following visual design has been agreed upon by our team: -dsk+component+0 +dsk+component+c66c65175fecc3103b3b587be9b5b230889c8628 -dsk+component+1 +dsk+component+8ee51caaa2c2f4ee2e5b4b7ef5a89db7df1068d7 ` components2 := []*NodeDocComponent{ - &NodeDocComponent{Id: 0, Level: 0, Raw: "Hi there!", Position: 64, Length: 26}, - &NodeDocComponent{Id: 1, Level: 0, Raw: "Don't do this", Position: 92, Length: 32}, + &NodeDocComponent{Level: 0, Raw: "Hi there!", Position: 64, Length: 26}, + &NodeDocComponent{Level: 0, Raw: "Don't do this", Position: 92, Length: 32}, } result2 := extractComponents([]byte(raw2), components2) @@ -110,13 +108,13 @@ func TestAddComponentProtectionOnLastLine(t *testing.T) { Use banners to highlight things people shouldn’t miss.` expected0 := ` -dsk+component+0 +dsk+component+356a192b7913b04c54574d18c28d46e6395428ab -dsk+component+1` +dsk+component+7224f997fc148baa0b7f81c1eda6fcc3fd003db0` components0 := []*NodeDocComponent{ - &NodeDocComponent{Id: 0, Level: 0, Raw: "Use banners to highlight things people shouldn’t miss.", Position: 1, Length: 103}, - &NodeDocComponent{Id: 1, Level: 0, Raw: "Use banners to highlight things people shouldn’t miss.", Position: 106, Length: 103}, + &NodeDocComponent{Level: 0, Raw: "Use banners to highlight things people shouldn’t miss.", Position: 1, Length: 103}, + &NodeDocComponent{Level: 0, Raw: "Use banners to highlight things people shouldn’t miss.", Position: 106, Length: 103}, } result0 := extractComponents([]byte(raw0), components0) @@ -127,14 +125,14 @@ dsk+component+1` func TestRemoveComponentProtection(t *testing.T) { raw0 := ` -Yellow and dsk+component+23 are the colors of spring. +Yellow and dsk+component+356a192b7913b04c54574d18c28d46e6395428ab are the colors of spring. ` expected0 := ` -Yellow and green are the colors of spring. +Yellow and green are the colors of spring. ` components0 := []*NodeDocComponent{ &NodeDocComponent{ - Id: 23, + Name: "ColorSwatch", Raw: "green", Length: len("green") - 2, Position: 1, @@ -212,13 +210,13 @@ hello

The following visual design has been agreed upon by our team:

-

Hi there!

+

Hi there!

-

Don't do this

+

Don't do this

hello

-

+

Hello Headline

` @@ -242,11 +240,9 @@ func TestAddRemoveComponentProtectionSymmetry(t *testing.T) { ` components0 := findComponentsInMarkdown([]byte(raw0)) - components0[0].Id = 0 - components0[1].Id = 1 expectedComponents0 := []*NodeDocComponent{ - &NodeDocComponent{Id: 0, Level: 0, Raw: "Use banners to highlight things people shouldn’t miss.", Position: 1, Length: 103}, - &NodeDocComponent{Id: 1, Level: 0, Raw: "Use banners to highlight things people shouldn’t miss.", Position: 106, Length: 103}, + &NodeDocComponent{Level: 0, Raw: "Use banners to highlight things people shouldn’t miss.", Position: 1, Length: 103}, + &NodeDocComponent{Level: 0, Raw: "Use banners to highlight things people shouldn’t miss.", Position: 106, Length: 103}, } if len(components0) != len(expectedComponents0) { t.Errorf("Failed number of components mismatch, got: %d", len(components0)) @@ -262,18 +258,22 @@ func TestAddRemoveComponentProtectionSymmetry(t *testing.T) { added0 := extractComponents([]byte(raw0), components0) addedExpected0 := ` -dsk+component+0 +dsk+component+a13029de9a7c3e98cab5a4b7643e04c0506e5f87 -dsk+component+1 +dsk+component+6db2e565d313338d2ff3134499686b3198c4a1f4 ` if string(added0) != addedExpected0 { t.Errorf("Failed, got: %s", added0) } removed0 := insertComponents(added0, components0) - removedExpected0 := raw0 + removedExpected0 := ` +Use banners to highlight things people shouldn’t miss. + +Use banners to highlight things people shouldn’t miss. +` if string(removed0) != removedExpected0 { - t.Errorf("Failed, got: %s", removed0) + t.Errorf("Failed, expected\n%s\ngot\n%s", removedExpected0, removed0) } } diff --git a/internal/ddt/node_doc_transformer.go b/internal/ddt/node_doc_transformer.go index b2053cab..a6fc7b8c 100644 --- a/internal/ddt/node_doc_transformer.go +++ b/internal/ddt/node_doc_transformer.go @@ -308,7 +308,9 @@ func (dt NodeDocTransformer) maybeMakeAbsolute(t html.Token, attrName string) (h q := u.Query() // We programatically add to the query, and add // it store it on the dnu after we are finished. 
- q.Set("v", dt.nodeSource) + if (!q.Has("v")) { + q.Set("v", dt.nodeSource) + } dnu.RawQuery = q.Encode() ok, _, dna := dt.attr(t, "data-node-asset") diff --git a/internal/httputil/error.go b/internal/httputil/error.go index 7865a746..1e878559 100644 --- a/internal/httputil/error.go +++ b/internal/httputil/error.go @@ -9,11 +9,13 @@ package httputil import "net/http" var ( - Err = &Error{http.StatusInternalServerError, "Techniker ist informiert"} - ErrUnsafePath = &Error{http.StatusBadRequest, "Directory traversal attempt detected!"} - ErrNotFound = &Error{http.StatusNotFound, "Not found"} - ErrNoSuchNode = &Error{http.StatusNotFound, "No such node"} - ErrNoSuchAsset = &Error{http.StatusNotFound, "No such asset"} + Err = &Error{http.StatusInternalServerError, "Techniker ist informiert"} + ErrUnsafePath = &Error{http.StatusBadRequest, "Directory traversal attempt detected!"} + ErrNotFound = &Error{http.StatusNotFound, "Not found"} + ErrNoSuchNode = &Error{http.StatusNotFound, "No such node"} + ErrNoSuchAsset = &Error{http.StatusNotFound, "No such asset"} + ErrNoSuchDoc = &Error{http.StatusNotFound, "No such document"} + ErrNoSuchPlayground = &Error{http.StatusNotFound, "No such playground"} ) type Error struct { diff --git a/internal/httputil/responder.go b/internal/httputil/responder.go index 0c480c46..8bbfc3ab 100644 --- a/internal/httputil/responder.go +++ b/internal/httputil/responder.go @@ -54,7 +54,9 @@ func (re *Responder) OK(data interface{}) { if re.ContentType != "application/json" { re.w.WriteHeader(http.StatusOK) - re.w.Write(data.([]byte)) + if data != nil { + re.w.Write(data.([]byte)) + } return } diff --git a/internal/notify/watcher.go b/internal/notify/watcher.go index 3a09d824..b0c42bcd 100644 --- a/internal/notify/watcher.go +++ b/internal/notify/watcher.go @@ -12,8 +12,8 @@ import ( "path/filepath" "strings" - "github.com/rundsk/dsk/internal/bus" core "github.com/rjeczalik/notify" + "github.com/rundsk/dsk/internal/bus" ) func NewWatcher(path string) (*Watcher, error) { diff --git a/internal/plex/app.go b/internal/plex/app.go index a0a4deeb..80a081df 100644 --- a/internal/plex/app.go +++ b/internal/plex/app.go @@ -20,14 +20,15 @@ import ( git "gopkg.in/src-d/go-git.v4" ) -func NewApp(version string, livePath string, frontendPath string) *App { +func NewApp(version string, livePath string, componentsPath string, frontendPath string) *App { log.Print("Initializing application...") return &App{ - Teardown: &Teardown{Scope: "app"}, - Version: version, - livePath: livePath, - frontendPath: frontendPath, + Teardown: &Teardown{Scope: "app"}, + Version: version, + livePath: livePath, + componentsPath: componentsPath, + frontendPath: frontendPath, } } @@ -44,6 +45,9 @@ type App struct { // livePath is the absolute path to the live DDT. 
livePath string + // componentsPath is an absolute path to a directory containing (transpiled and bundled) assets of a component library + componentsPath string + LiveConfigDB config.DB Broker *bus.Broker @@ -52,6 +56,8 @@ type App struct { Sources *Sources + Components *Components + Frontend *frontend.Frontend } @@ -208,6 +214,17 @@ func (app *App) OpenVersions(ctx context.Context) error { return nil } +func (app *App) OpenComponents(ctx context.Context) error { + cmps, err := NewComponents(app.componentsPath) + if err != nil { + return err + } + + cmps.Detect() + app.Components = cmps + return err +} + func (app *App) Close() error { return app.Teardown.Close() } diff --git a/internal/plex/components.go b/internal/plex/components.go new file mode 100644 index 00000000..7916b9dc --- /dev/null +++ b/internal/plex/components.go @@ -0,0 +1,115 @@ +// Copyright 2021 Marius Wilms, Christoph Labacher. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package plex + +import ( + "encoding/json" + "io/ioutil" + "log" + "net/http" + "os" + "path/filepath" + "strings" +) + +const packageJson = "package.json" + +var ( + cssEntryNames = []string{ + "index.css", + "styles.css", + "style.css", + "main.css", + } +) + +type packageJsonFields struct { + Name string `json:"name"` +} + +// NewComponents sniffs around to make sure that a package.json is found +// at the path provided by env var. If it is, and its name doesn't match +// the path (in node module's resolution), we fudge the path by +// dropping it at a symlinked path instead. +func NewComponents(pathEnvVar string) (*Components, error) { + log.Printf("Initializing components from path %s...", pathEnvVar) + nodePath := filepath.Clean(pathEnvVar) + + rawPkgJson, err := ioutil.ReadFile(filepath.Join(nodePath, packageJson)) + if err != nil { + return nil, err + } + + var pkgJson packageJsonFields + json.Unmarshal(rawPkgJson, &pkgJson) + + if err != nil { + return nil, err + } + + if filepath.Base(nodePath) != pkgJson.Name { + dir := filepath.Join("dist", pkgJson.Name) + + nodePath = "dist" + if err := os.Remove(dir); err != nil && !os.IsNotExist(err) { + return nil, err + } + + splitName := strings.Split(pkgJson.Name, "/") + + if len(splitName) > 1 { + os.MkdirAll(filepath.Dir(dir), os.ModePerm) + } + + err = os.Symlink(filepath.Clean(pathEnvVar), dir) + + if err != nil { + return nil, err + } + } + + path, err := filepath.Abs(nodePath) + log.Printf("Using path %s as JS entry point", path) + return &Components{ + FS: http.Dir(pathEnvVar), + // This is different because the CSS entrypoint doesn't use ESBuild's package.json lookup and NODE_ENV. 
+ Path: filepath.Clean(pathEnvVar), + JSEntryPoint: path, + }, err +} + +type Components struct { + FS http.FileSystem + + Path string + + PackageName string + JSEntryPoint string + CSSEntryPoint string +} + +func (cmps *Components) Detect() { + hasFile := func(path string) bool { + normalizedPath := filepath.Join(cmps.Path, path) + if _, err := os.Stat(normalizedPath); err == nil { + return true + } + log.Printf("Failed to load %s components at %s", path, normalizedPath) + return false + } + + // We could potentially use https://stackoverflow.com/questions/32037150/style-field-in-package-json#comment73005816_32042285, but other than bits of postcss, I haven't seen this approach used in the wild + for _, prefix := range []string{"build", "dist"} { + for _, f := range cssEntryNames { + curr := filepath.Join(prefix, f) + if hasFile(curr) { + log.Printf("Using path %s as CSS entry point", curr) + cmps.CSSEntryPoint = curr + return + } + } + } +}
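
To make the bootstrap above easier to follow, here is a minimal sketch of how `NewComponents` and `Detect` are meant to be driven. It is not part of the diff; `exampleOpenComponents` is a hypothetical helper used only for illustration, mirroring what `App.OpenComponents` in `internal/plex/app.go` does with the value of the new `-components` flag.

```go
package plex

import "log"

// exampleOpenComponents is a sketch only: open a component library from the
// path given via the -components flag, then detect its entry points.
func exampleOpenComponents(componentsPath string) (*Components, error) {
	// NewComponents reads the library's package.json and, if the directory
	// name doesn't match the package name, symlinks it under dist/ so that
	// Node-style module resolution still works.
	cmps, err := NewComponents(componentsPath)
	if err != nil {
		return nil, err
	}

	// Detect walks build/ and dist/ looking for a CSS entry point such as
	// index.css, styles.css, style.css or main.css.
	cmps.Detect()

	log.Printf("JS entry point: %s", cmps.JSEntryPoint)
	log.Printf("CSS entry point: %s", cmps.CSSEntryPoint)
	return cmps, nil
}
```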