Created
August 29, 2021 21:17
-
-
Save gaggle/2aa62e176b1e9159481af95d283ce1bb to your computer and use it in GitHub Desktop.
Prototyping pnpm & Docker
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
$ pnpm --silent --workspace-root pnpm-context -- -p='!.idea/' -p='bin/' -p='tsconfig.json' apps/api/Dockerfile -l
<list of all the files that'll be part of the Docker context>
$ pnpm --silent --workspace-root pnpm-context -- -p='!.idea/' -p='bin/' -p='tsconfig.json' apps/api/Dockerfile | docker build --progress=plain --build-arg PACKAGE_PATH=apps/api - -t mono-api
$ docker run --rm -it --name=mono-api mono-api
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# syntax=docker/dockerfile:1
# ↑ BuildKit syntax is required for the `RUN --mount=type=cache` instructions below.

FROM node:16-alpine AS base
# Pin pnpm so image builds are reproducible.
ARG PNPM_VERSION=6.14.3
RUN npm --global install pnpm@${PNPM_VERSION}
WORKDIR /root/monorepo

# ---------------------------------------------------------------------------
# dev: install every dependency of the target package, then build and test it.
# ---------------------------------------------------------------------------
FROM base AS dev
ARG PACKAGE_PATH
ENV CI=true

COPY ./meta .
RUN --mount=type=cache,id=pnpm-store,target=/root/.pnpm-store \
  # ↑ By caching the content-addressable store we stop downloading the same packages again and again
  PRISMA_SKIP_POSTINSTALL_GENERATE=true \
  pnpm install --filter "{${PACKAGE_PATH}}..." --frozen-lockfile \
  --unsafe-perm \
  # ↑ Docker runs pnpm as root and then pnpm won't run package scripts unless we pass this arg
  | grep -v "cross-device link not permitted\|Falling back to copying packages from store"
# ↑ This inverted grep match is because using Docker's 'cache' mount type
# causes Docker to place the pnpm content-addressable store on a different virtual drive,
# which prohibits pnpm from symlinking its content to its virtual store (in node_modules/.pnpm),
# and that causes pnpm to fall back on copying the files. And that's fine,
# except pnpm emits a lot of warnings doing that, so here we filter out those warnings.

COPY ./libs .
RUN pnpm build --if-present --filter "{${PACKAGE_PATH}}^..."
# ↑ First build libs separately, because it gives a chance to cache dependencies if they haven't changed
COPY ./all .
RUN pnpm db:generate --filter "{${PACKAGE_PATH}}"
RUN pnpm test --if-present --filter "{${PACKAGE_PATH}}"
RUN NODE_ENV=production pnpm build --filter "{${PACKAGE_PATH}}"

# ---------------------------------------------------------------------------
# assets: strip sources, tests, and node_modules so prod copies built files only.
# ---------------------------------------------------------------------------
FROM dev AS assets
RUN pnpm recursive exec -- rm -rf ./src ./node_modules && find -name "*.spec.*" -not -path "*/node_modules/*" -delete
# ↑ We need a clean slate of built files to be used in a production-optimised stage

# ---------------------------------------------------------------------------
# prod: production dependencies only, plus the built assets from the stage above.
# ---------------------------------------------------------------------------
FROM base AS prod
ARG PACKAGE_PATH
ENV CI=true

COPY ./meta .
RUN --mount=type=cache,id=pnpm-store,target=/root/.pnpm-store \
  PRISMA_SKIP_POSTINSTALL_GENERATE=true \
  pnpm install --filter "{${PACKAGE_PATH}}..." --frozen-lockfile --prod \
  --unsafe-perm \
  | grep -v "cross-device link not permitted\|Falling back to copying packages from store"
COPY --from=assets /root/monorepo .
RUN pnpm db:generate --filter "{${PACKAGE_PATH}}"

WORKDIR /root/monorepo/${PACKAGE_PATH}
EXPOSE 3002
ENV NODE_ENV=production
ENTRYPOINT ["pnpm", "build:start"]
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env node | |
import meow from 'meow' | |
import os from 'os' | |
import { basename, dirname, join, relative, resolve } from 'path' | |
import { create } from 'tar' | |
import { globby } from 'globby' | |
import { parsePackageSelector, readProjects } from '@pnpm/filter-workspace-packages' | |
import { pipe as rawPipe } from 'mississippi' | |
import { promises as fs } from 'fs' | |
import { promisify } from 'util' | |
// Promise-returning wrapper around mississippi's callback-style pipe,
// used to await the tar stream finishing its write to stdout.
const pipe = promisify(rawPipe)
// Basename of the invoked script; used in help text and as the tmpdir prefix.
const SCRIPT = basename(process.argv[1])
// CLI definition. NOTE: autoHelp is disabled below so unknown-flag
// rejection still runs; --help is handled explicitly after parsing.
const cli = meow(`
Usage
$ ${SCRIPT} [--patterns=regex]... [--list-files] <Dockerfile-path>
Options
--list-files, -l Don't generate tar, just list files. Useful for debugging.
--patterns, -p Additional patterns used to find files (can be specified multiple times)
Examples
$ ${SCRIPT} packages/app/Dockerfile
`, {
  allowUnknownFlags: false,
  autoHelp: false,
  description: `./${SCRIPT}`,
  flags: {
    help: { type: 'boolean', alias: 'h' },
    listFiles: { type: 'boolean', alias: 'l' },
    patterns: { type: 'string', alias: 'p', isMultiple: true }
  },
  importMeta: import.meta
})
// Manual --help handling (exits with status 0) since autoHelp is off.
if (cli.flags.help) {
  cli.showHelp(0)
}
/**
 * @typedef ParsedCLI
 * @type {object}
 * @property {boolean} listFiles
 * @property {string[]} patterns
 * @property {string} dockerFile
 */

/**
 * Validate the raw CLI parse and merge the default file patterns.
 *
 * Unlike the previous version, this does not mutate the caller-owned
 * `input` array or `flags` object — a fresh result object is returned.
 *
 * @param {{input: string[], flags: object}} cli - raw parse result
 * @returns {Promise<ParsedCLI>}
 * @throws {Error} when no Dockerfile path is given or the file does not exist
 */
async function parseCli ({ input, flags }) {
  const [dockerFile] = input
  if (!dockerFile) throw new Error('Must specify path to Dockerfile')
  if (!await fileExists(dockerFile)) throw new Error(`Dockerfile not found: ${dockerFile}`)
  // Always include the workspace manifests and exclude .git; user patterns come last.
  const patterns = ['!.git/', 'package.json', 'pnpm-*.yaml'].concat(flags.patterns ?? [])
  return { ...flags, patterns, dockerFile }
}
// Manifest file names that must always land in the "meta" (install) layer.
const META_FILE_NAMES = ['package.json', 'pnpm-lock.yaml', 'pnpm-workspace.yaml']

/**
 * Resolve a pnpm package selector to workspace-relative project directories.
 * @param {string} cwd
 * @param {string} selector - pnpm filter selector, e.g. `{apps/api}` or `{apps/api}^...`
 * @returns {Promise<string[]>}
 */
async function resolveProjectPaths (cwd, selector) {
  const { selectedProjectsGraph } = await readProjects(cwd, [parsePackageSelector(selector, cwd)])
  return Object.keys(selectedProjectsGraph).map(p => relative(cwd, p))
}

/**
 * Copy file `f` (a cwd-relative path) into `<tmpdir>/<subdir>/<f>`,
 * creating parent directories as needed.
 * @param {string} tmpdir
 * @param {string} subdir - 'meta', 'all', or 'libs'
 * @param {string} f
 */
async function copyToContext (tmpdir, subdir, f) {
  const dest = join(tmpdir, subdir, f)
  await fs.mkdir(dirname(dest), { recursive: true })
  await fs.copyFile(f, dest)
}

/**
 * Build the Docker context for the project that owns `cli.dockerFile`:
 * stage the Dockerfile, the manifests ('meta'), the dependency projects
 * ('libs'), and the full project sources ('all') in a tmpdir, then either
 * list the staged files or stream them to stdout as a gzipped tar.
 * @param {ParsedCLI} cli
 */
async function main (cli) {
  const cwd = process.cwd()
  const projectName = dirname(cli.dockerFile)
  // The target project and its workspace dependencies are independent
  // lookups, so resolve them in parallel.
  const [projects, libProjects] = await Promise.all([
    resolveProjectPaths(cwd, `{${projectName}}`),
    resolveProjectPaths(cwd, `{${projectName}}^...`)
  ])
  const globOptions = { gitignore: true, dot: true }
  // Same for the three glob scans — they only read the filesystem.
  /** @type {[string[], string[], string[]]} */
  const [patternFiles, projectFiles, libFiles] = await Promise.all([
    globby(cli.patterns, globOptions),
    globby(projects.concat(cli.patterns), globOptions),
    globby(libProjects.concat(cli.patterns), globOptions)
  ])
  await withTmpdir(async (tmpdir) => {
    await fs.copyFile(cli.dockerFile, join(tmpdir, 'Dockerfile'))
    // Extra pattern matches feed only the "meta" (install) layer.
    await Promise.all(patternFiles.map(async f => {
      if (f === cli.dockerFile) return
      await copyToContext(tmpdir, 'meta', f)
    }))
    // Project sources go to "all"; their manifests additionally go to "meta".
    await Promise.all(projectFiles.map(async f => {
      if (f === cli.dockerFile) return
      if (META_FILE_NAMES.includes(basename(f))) {
        await copyToContext(tmpdir, 'meta', f)
      }
      await copyToContext(tmpdir, 'all', f)
    }))
    // Dependency sources go to "libs"; their manifests additionally go to "meta".
    await Promise.all(libFiles.map(async f => {
      if (f === cli.dockerFile) return
      if (META_FILE_NAMES.includes(basename(f))) {
        await copyToContext(tmpdir, 'meta', f)
      }
      await copyToContext(tmpdir, 'libs', f)
    }))
    /** @type {string[]} */
    const files = []
    for await (const f of yieldFiles(tmpdir)) {
      files.push(relative(tmpdir, f))
    }
    if (cli.listFiles) {
      // `files` is a plain array here — a synchronous loop is the right idiom
      // (the original `for await` worked but was misleading).
      for (const path of files) {
        console.log(path)
      }
    } else {
      await pipe(create({ gzip: true, cwd: tmpdir }, files), process.stdout)
    }
  })
}
// Run the CLI via top-level await (this file is an ES module). A rejection
// here fails module evaluation, so Node prints the error and exits non-zero.
// The previous `.catch(err => { throw err })` was a no-op that merely turned
// any failure into an unhandled promise rejection.
await main(await parseCli(cli))
/**
 * Recursively walk `dirPath`, yielding the absolute path of every file found.
 * Directories themselves are not yielded, only descended into.
 * @param {string} dirPath
 * @yields {string} absolute file path
 */
async function * yieldFiles (dirPath) {
  for (const entry of await fs.readdir(dirPath, { withFileTypes: true })) {
    const absolute = resolve(dirPath, entry.name)
    if (entry.isDirectory()) {
      yield * yieldFiles(absolute)
    } else {
      yield absolute
    }
  }
}
/**
 * Create a fresh temporary directory, run `callable` with its path, and
 * remove the directory afterwards — whether `callable` resolves or throws.
 * @param {function(string):Promise<*>} callable
 * @returns {Promise<*>} whatever `callable` resolves to
 */
async function withTmpdir (callable) {
  const tmpdir = await fs.mkdtemp(join(os.tmpdir(), SCRIPT))
  try {
    // `return await` keeps the cleanup in `finally` ordered before settling.
    return await callable(tmpdir)
  } finally {
    await fs.rm(tmpdir, { recursive: true })
  }
}
/**
 * Report whether `path` exists on disk (any stat-able entry, including
 * directories). Any stat failure is treated as "does not exist".
 * @param {string} path
 * @returns {Promise<boolean>}
 */
async function fileExists (path) {
  return fs.stat(path).then(() => true, () => false)
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment