diff --git a/.dockerignore b/.dockerignore index 6e5a39cf4aba48..ccb5b2a28906d6 100644 --- a/.dockerignore +++ b/.dockerignore @@ -19,9 +19,18 @@ test process.json app.json .travis.yml +app-minimal +.idea +.env +.editorconfig +Procfile +now.json +jsconfig.json +package-lock.json #git but keep the git commit hash .git/logs .git/objects .git/index .git/info +.git/hooks diff --git a/.github/workflows/docker-release.yml b/.github/workflows/docker-release.yml index f1ccd0d8935b07..3f32daca50c1b3 100644 --- a/.github/workflows/docker-release.yml +++ b/.github/workflows/docker-release.yml @@ -9,7 +9,7 @@ on: - 'lib/**' - '!**/maintainer.js' - '!**/radar.js' - - '!**/radar-rules.js.js' + - '!**/radar-rules.js' - 'Dockerfile' - 'package.json' - 'yarn.lock' @@ -46,8 +46,8 @@ jobs: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} - - name: Extract Docker metadata - id: meta + - name: Extract Docker metadata (ordinary version) + id: meta-ordinary uses: docker/metadata-action@v3 with: images: ${{ secrets.DOCKER_USERNAME }}/rsshub @@ -56,26 +56,54 @@ jobs: type=raw,value={{date 'YYYY-MM-DD'}},enable=true flavor: latest=false - - name: Build and push Docker image + - name: Build and push Docker image (ordinary version) uses: docker/build-push-action@v2 with: context: . push: true - tags: ${{ steps.meta.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} + tags: ${{ steps.meta-ordinary.outputs.tags }} + labels: ${{ steps.meta-ordinary.outputs.labels }} platforms: linux/amd64,linux/arm/v7,linux/arm64 cache-from: type=gha,scope=docker-release cache-to: type=gha,mode=max,scope=docker-release + # cache-from: type=registry,ref=${{ secrets.DOCKER_USERNAME }}/rsshub:buildcache + # cache-to: type=registry,ref=${{ secrets.DOCKER_USERNAME }}/rsshub:buildcache,mode=max + + - name: Extract Docker metadata (Chromium-bundled version) + id: meta-chromium-bundled + uses: docker/metadata-action@v3 + with: + images: ${{ secrets.DOCKER_USERNAME }}/rsshub + tags: | + type=raw,value=chromium-bundled,enable=true + type=raw,value=chromium-bundled-{{date 'YYYY-MM-DD'}},enable=true + flavor: latest=false + + - name: Build and push Docker image (Chromium-bundled version) + uses: docker/build-push-action@v2 + with: + context: . 
+ build-args: PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=0 + push: true + tags: ${{ steps.meta-chromium-bundled.outputs.tags }} + labels: ${{ steps.meta-chromium-bundled.outputs.labels }} + platforms: linux/amd64 # bundled Chromium is only available on amd64 + cache-from: | + type=gha,scope=docker-release + type=registry,ref=${{ secrets.DOCKER_USERNAME }}/rsshub:chromium-bundled + # type=registry,ref=${{ secrets.DOCKER_USERNAME }}/rsshub:buildcache + cache-to: type=inline,ref=${{ secrets.DOCKER_USERNAME }}/rsshub:chromium-bundled # inline cache is enough + description: runs-on: ubuntu-latest needs: check-env if: needs.check-env.outputs.check-docker == 'true' steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v3 - - name: Docker Hub Description - uses: peter-evans/dockerhub-description@v3 - with: - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} - repository: ${{ secrets.DOCKER_USERNAME }}/rsshub + - name: Docker Hub Description + uses: peter-evans/dockerhub-description@v3 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + repository: ${{ secrets.DOCKER_USERNAME }}/rsshub diff --git a/.github/workflows/docker-test.yml b/.github/workflows/docker-test.yml index fced928a7df5e9..b40c6205c8dcea 100644 --- a/.github/workflows/docker-test.yml +++ b/.github/workflows/docker-test.yml @@ -36,13 +36,19 @@ jobs: uses: docker/build-push-action@v2 with: context: . + build-args: PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=0 # also test bundling Chromium load: true tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} + platforms: linux/amd64 # explicit cache-from: | - type=gha,scope=docker-test + type=registry,ref=${{ secrets.DOCKER_USERNAME }}/rsshub:chromium-bundled type=gha,scope=docker-release - cache-to: type=gha,mode=max,scope=docker-test + # ! build on amd64 is fast enough, and cache between PRs never hit, so never waste the 10GB cache limit ! 
+ # cache-from: | + # type=gha,scope=docker-test + # type=gha,scope=docker-release + # cache-to: type=gha,mode=max,scope=docker-test - name: Run dockerfile run: | diff --git a/Dockerfile b/Dockerfile index 5570dbf5d323cc..b742a950f882e1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,65 +1,185 @@ -FROM node:14-buster-slim as dep-builder +FROM node:16-bullseye-slim as dep-builder -LABEL MAINTAINER https://github.com/DIYgod/RSSHub/ +# bash has already been the default shell +#RUN ln -sf /bin/bash /bin/sh -ARG USE_CHINA_NPM_REGISTRY=0 -ARG PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 - -RUN ln -sf /bin/bash /bin/sh -RUN apt-get update && apt-get install -yq libgconf-2-4 apt-transport-https git dumb-init python3 build-essential --no-install-recommends +# these deps are no longer needed since we use yarn instead of npm to install dependencies +# the installation of dumb-init has been moved to the app stage to improve concurrency and speed up builds on arm/arm64 +#RUN \ +# set -ex && \ +# apt-get update && \ +# apt-get install -yq --no-install-recommends \ +# libgconf-2-4 apt-transport-https git dumb-init python3 build-essential \ +# && \ +# rm -rf /var/lib/apt/lists/* WORKDIR /app -COPY ./yarn.lock /app -COPY ./package.json /app +# place ARG statement before RUN statement which need it to avoid cache miss +ARG USE_CHINA_NPM_REGISTRY=0 +RUN \ + set -ex && \ + if [ "$USE_CHINA_NPM_REGISTRY" = 1 ]; then \ + echo 'use npm mirror' && \ + npm config set registry https://registry.npmmirror.com && \ + yarn config set registry https://registry.npmmirror.com ; \ + fi; + +COPY ./yarn.lock /app/ +COPY ./package.json /app/ + +# lazy install Chromium to avoid cache miss, only install production dependencies to minimize the image size +RUN \ + set -ex && \ + export PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true && \ + yarn install --production --frozen-lockfile --network-timeout 1000000 && \ + yarn cache clean + +# --------------------------------------------------------------------------------------------------------------------- + +FROM debian:bullseye-slim as dep-version-parser +# This stage is necessary to limit the cache miss scope. +# With this stage, any modification to package.json won't break the build cache of the next two stages as long as the +# version unchanged. +# node:16-bullseye-slim is based on debian:bullseye-slim so this stage would not cause any additional download. + +WORKDIR /ver +COPY ./package.json /app/ +RUN \ + set -ex && \ + grep -Po '(?<="puppeteer": ")[^\s"]*(?=")' /app/package.json | tee /ver/.puppeteer_version && \ + grep -Po '(?<="@vercel/nft": ")[^\s"]*(?=")' /app/package.json | tee /ver/.nft_version && \ + grep -Po '(?<="fs-extra": ")[^\s"]*(?=")' /app/package.json | tee /ver/.fs_extra_version + +# --------------------------------------------------------------------------------------------------------------------- + +FROM node:16-bullseye-slim as docker-minifier +# The stage is used to further reduce the image size by removing unused files. + +WORKDIR /minifier +COPY --from=dep-version-parser /ver/* /minifier/ + +ARG USE_CHINA_NPM_REGISTRY=0 +RUN \ + set -ex && \ + if [ "$USE_CHINA_NPM_REGISTRY" = 1 ]; then \ + npm config set registry https://registry.npmmirror.com && \ + yarn config set registry https://registry.npmmirror.com ; \ + fi; \ + yarn add @vercel/nft@$(cat .nft_version) fs-extra@$(cat .fs_extra_version) && \ + yarn cache clean -RUN if [ "$USE_CHINA_NPM_REGISTRY" = 1 ]; then \ - echo 'use npm mirror'; npm config set registry https://registry.npmmirror.com; \ - fi; +COPY . 
/app +COPY --from=dep-builder /app /app -RUN npm i -g npm +RUN \ + set -ex && \ + cp /app/scripts/docker/minify-docker.js /minifier/ && \ + export PROJECT_ROOT=/app && \ + node /minifier/minify-docker.js && \ + rm -rf /app/node_modules /app/scripts && \ + mv /app/app-minimal/node_modules /app/ && \ + rm -rf /app/app-minimal && \ + ls -la /app && \ + du -hd1 /app -RUN if [ "$PUPPETEER_SKIP_CHROMIUM_DOWNLOAD" = 0 ]; then \ - unset PUPPETEER_SKIP_CHROMIUM_DOWNLOAD ;\ - else \ - export PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true ;\ - fi; +# --------------------------------------------------------------------------------------------------------------------- -RUN yarn --frozen-lockfile --network-timeout 1000000 -COPY . /app -RUN node scripts/docker/minify-docker.js +FROM node:16-bullseye-slim as chromium-downloader +# This stage is necessary to improve build concurrency and minimize the image size. +# Yeah, downloading Chromium never needs those dependencies below. +WORKDIR /app +COPY --from=dep-version-parser /ver/.puppeteer_version /app/.puppeteer_version -FROM node:14-slim as app +ARG USE_CHINA_NPM_REGISTRY=0 +ARG PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 +# https://github.com/puppeteer/puppeteer#q-why-doesnt-puppeteer-vxxx-work-with-chromium-vyyy +RUN \ + set -ex ; \ + if [ "$PUPPETEER_SKIP_CHROMIUM_DOWNLOAD" = 0 ]; then \ + if [ "$USE_CHINA_NPM_REGISTRY" = 1 ]; then \ + npm config set registry https://registry.npmmirror.com && \ + yarn config set registry https://registry.npmmirror.com ; \ + fi; \ + echo 'Downloading Chromium...' && \ + unset PUPPETEER_SKIP_CHROMIUM_DOWNLOAD && \ + yarn add puppeteer@$(cat /app/.puppeteer_version) && \ + yarn cache clean ; \ + else \ + mkdir -p /app/node_modules/puppeteer ; \ + fi; + +# --------------------------------------------------------------------------------------------------------------------- + +FROM node:16-bullseye-slim as app + +LABEL org.opencontainers.image.authors="https://github.com/DIYgod/RSSHub" ENV NODE_ENV production ENV TZ Asia/Shanghai -ARG PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 WORKDIR /app -COPY . 
/app -COPY --from=dep-builder /app/app-minimal/node_modules /app/node_modules -COPY --from=dep-builder /usr/bin/dumb-init /usr/bin/dumb-init - -RUN if [ "$PUPPETEER_SKIP_CHROMIUM_DOWNLOAD" = 0 ]; then \ - apt-get update \ - && apt-get install -y wget gnupg ca-certificates --no-install-recommends \ - && wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add - \ - && sh -c 'echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list' \ - && set -ex \ - && apt-get update \ - && apt-get install -y google-chrome-unstable fonts-ipafont-gothic fonts-wqy-zenhei fonts-thai-tlwg fonts-kacst fonts-freefont-ttf \ - ca-certificates \ - fonts-liberation libappindicator3-1 libasound2 libatk-bridge2.0-0 libatk1.0-0 libc6 libcairo2 libcups2 libdbus-1-3 libexpat1 \ - libfontconfig1 libgbm1 libgcc1 libglib2.0-0 libgtk-3-0 libnspr4 libnss3 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 \ - libx11-6 libx11-xcb1 libxcb1 libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 \ - libxrender1 libxss1 libxtst6 lsb-release \ - --no-install-recommends \ - && rm -rf /var/lib/apt/lists/* \ - && apt-get purge --auto-remove -y wget gnupg; \ - fi; + +# install deps first to avoid cache miss or disturbing buildkit to build concurrently +ARG PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=1 +# https://github.com/puppeteer/puppeteer/blob/main/docs/troubleshooting.md#chrome-headless-doesnt-launch-on-unix +# https://github.com/puppeteer/puppeteer/issues/7822 +# https://www.debian.org/releases/bullseye/amd64/release-notes/ch-information.en.html#noteworthy-obsolete-packages +RUN \ + set -ex && \ + apt-get update && \ + apt-get install -yq --no-install-recommends \ + dumb-init \ + ; \ + if [ "$PUPPETEER_SKIP_CHROMIUM_DOWNLOAD" = 0 ]; then \ + apt-get install -yq --no-install-recommends \ + ca-certificates fonts-liberation wget xdg-utils \ + libasound2 libatk-bridge2.0-0 libatk1.0-0 libatspi2.0-0 libcairo2 libcups2 libdbus-1-3 libdrm2 libexpat1 \ + libgbm1 libglib2.0-0 libnspr4 libnss3 libpango-1.0-0 libx11-6 libxcb1 libxcomposite1 libxdamage1 libxext6 \ + libxfixes3 libxkbcommon0 libxrandr2 \ + ; \ + fi; \ + rm -rf /var/lib/apt/lists/* + +COPY --from=chromium-downloader /app/node_modules/puppeteer /app/node_modules/puppeteer + +# if grep matches nothing then it will exit with 1, thus, we cannot `set -e` here +RUN \ + set -x && \ + if [ "$PUPPETEER_SKIP_CHROMIUM_DOWNLOAD" = 0 ]; then \ + echo 'Verifying Chromium installation...' && \ + ldd $(find /app/node_modules/puppeteer/ -name chrome) | grep "not found" ; \ + if [ "$?" = 0 ]; then \ + echo "!!! Chromium has unmet shared libs !!!" && \ + exit 1 ; \ + else \ + echo "Awesome! All shared libs are met!" ; \ + fi; \ + fi; + +COPY --from=docker-minifier /app /app EXPOSE 1200 ENTRYPOINT ["dumb-init", "--"] CMD ["npm", "run", "start"] + +# --------------------------------------------------------------------------------------------------------------------- + +# In case Chromium has unmet shared libs, here is some magic to find and install the packages they belong to: +# In most case you can just stop at `grep ^lib` and add those packages to the above stage. 
+#
+# apt-get update && \
+#     apt install -yq --no-install-recommends \
+#         apt-file \
+#     && \
+#     apt-file update && \
+#     ldd $(find /app/node_modules/puppeteer/ -name chrome) | grep -Po "\S+(?= => not found)" | \
+#         sed 's/\./\\./g' | awk '{print $1"$"}' | apt-file search -xlf - | grep ^lib | \
+#         xargs -d '\n' -- \
+#             apt-get install -yq --no-install-recommends \
+#     && \
+#     apt purge -yq --auto-remove \
+#         apt-file \
+#     && \
+#     rm -rf /tmp/.chromium_path /var/lib/apt/lists/*
diff --git a/docker-compose.yml b/docker-compose.yml
index e0d25b02e1c8c5..8e475dfcf4074d 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -2,6 +2,9 @@ version: '3'
 
 services:
     rsshub:
+        # two ways to enable puppeteer:
+        # * (only on amd64/x86_64) comment out marked lines, then use this image instead: diygod/rsshub:chromium-bundled
+        # * (on all supported architectures, but consumes more disk space and memory) leave everything unchanged
         image: diygod/rsshub
         restart: always
         ports:
@@ -10,18 +13,18 @@ services:
             NODE_ENV: production
             CACHE_TYPE: redis
             REDIS_URL: 'redis://redis:6379/'
-            PUPPETEER_WS_ENDPOINT: 'ws://browserless:3000'
+            PUPPETEER_WS_ENDPOINT: 'ws://browserless:3000' # marked
         depends_on:
             - redis
-            - browserless
+            - browserless # marked
 
-    browserless:
-        image: browserless/chrome
-        restart: always
-        ulimits:
-            core:
-                hard: 0
-                soft: 0
+    browserless: # marked
+        image: browserless/chrome # marked
+        restart: always # marked
+        ulimits: # marked
+            core: # marked
+                hard: 0 # marked
+                soft: 0 # marked
 
     redis:
         image: redis:alpine
diff --git a/docs/en/install/README.md b/docs/en/install/README.md
index 93df0ee4028c46..eb54232c1010b3 100644
--- a/docs/en/install/README.md
+++ b/docs/en/install/README.md
@@ -24,6 +24,7 @@ Deploy for public access may require:
 ## Docker Image
 
 We recommend using the latest version `diygod/rsshub` (i.e. `diygod/rsshub:latest`) of the docker image.
+
 When the latest version is unstable, you can use the image with a date tag for temporary use. For example:
 
 ```bash
@@ -32,6 +33,10 @@ $ docker pull diygod/rsshub:2021-06-18
 ```
 
 You can back to the latest version when the code has been fixed and rebuild the image.
 
+If you need to enable puppeteer on x86_64 (amd64), `diygod/rsshub:chromium-bundled` is a good choice. If a date tag is specified, it becomes `diygod/rsshub:chromium-bundled-2021-06-18`.
+
+On all supported architectures, enabling puppeteer via Docker Compose deployment is another good choice, though it consumes more disk space and memory. By modifying the `docker-compose.yml` file, you can use `diygod/rsshub:chromium-bundled` there instead to reduce the disk space and memory consumption.
+
 ## Docker Compose Deployment
 
@@ -39,7 +44,13 @@
 ### Install
 
 Download [docker-compose.yml](https://github.com/DIYgod/RSSHub/blob/master/docker-compose.yml)
 
 ```bash
-wget https://raw.githubusercontent.com/DIYgod/RSSHub/master/docker-compose.yml
+$ wget https://raw.githubusercontent.com/DIYgod/RSSHub/master/docker-compose.yml
+```
+
+Check if any configuration needs to be changed
+
+```bash
+$ vi docker-compose.yml # or your favorite editor
 ```
 
 Create a docker volume to persist Redis caches
@@ -76,6 +87,8 @@ Edit `environment` in [docker-compose.yml](https://github.com/DIYgod/RSSHub/blob
 
 ## Docker Deployment
 
+> **To enable puppeteer, replace `diygod/rsshub` with `diygod/rsshub:chromium-bundled` in EACH command (only on x86_64)**
+
 ### Install
 
 Execute the following command to pull RSSHub's docker image.
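As a concrete instance of the note above: with the Chromium-bundled variant, the pull/run commands in this section keep exactly the same flags and only swap the image tag, e.g.:

```bash
$ docker run -d --name rsshub -p 1200:1200 diygod/rsshub:chromium-bundled
```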
@@ -119,7 +132,7 @@ For example, adding `-e CACHE_EXPIRE=3600` will set the cache time to 1 hour. $ docker run -d --name rsshub -p 1200:1200 -e CACHE_EXPIRE=3600 -e GITHUB_ACCESS_TOKEN=example diygod/rsshub ``` -This deployment method does not include puppeteer and Redis dependencies. Use the Docker Compose deployment method or deploy external dependencies yourself if you need it. +This deployment method does not include puppeteer (unless using `diygod/rsshub:chromium-bundled` instead on x86_64) and Redis dependencies. Use the Docker Compose deployment method or deploy external dependencies yourself if you need it. To configure more options please refer to [Configuration](#configuration). diff --git a/docs/install/README.md b/docs/install/README.md index c4ecaecd78cb55..0785d1a4677d0d 100644 --- a/docs/install/README.md +++ b/docs/install/README.md @@ -4,7 +4,7 @@ sidebar: auto # 部署 -部署 RSSHub 需要基本的计算机编程常识,如果您在部署过程中遇到无法解决的问题请到 [issues](https://github.com/DIYgod/RSSHub/issues) 寻找类似的问题或 [向我们提问](https://github.com/DIYgod/RSSHub/issues/new/choose),我们会尽快给您答复 +部署 RSSHub 需要基本的计算机编程常识,如果您在部署过程中遇到无法解决的问题请到 [issues](https://github.com/DIYgod/RSSHub/issues) 寻找类似的问题或 [向我们提问](https://github.com/DIYgod/RSSHub/issues/new/choose),我们会尽快给您答复。 部署涉及到以下基本编程常识: @@ -23,14 +23,19 @@ sidebar: auto ## Docker 镜像 -默认推荐使用`diygod/rsshub`即`diygod/rsshub:latest`最新版镜像以获取最新路由. -当`diygod/rsshub:latest`存在问题时,可以使用以日期为标签的近期镜像临时使用,例如: +默认推荐使用 `diygod/rsshub` 即 `diygod/rsshub:latest` 最新版镜像以获取最新路由。 + +当 `diygod/rsshub:latest` 存在问题时,可以使用以日期为标签的近期镜像临时使用,例如: ```bash $ docker pull diygod/rsshub:2021-06-18 ``` -待最新镜像更新后在切换回`diygod/rsshub:latest`最新版镜像. +待最新镜像更新后再切换回 `diygod/rsshub:latest` 最新版镜像。 + +如需在 x86\_64 (amd64) 架构上部署启用 puppeteer 的版本,可使用 `diygod/rsshub:chromium-bundled`,若指定日期则为 `diygod/rsshub:chromium-bundled-2021-06-18`。 + +在所有受支持的架构上,均可使用 Docker Compose 部署以启用 puppeteer,但更消耗磁盘空间和内存。通过修改 `docker-compose.yml` 文件,也可以使用 `diygod/rsshub:chromium-bundled`,这样就没有更消耗资源的问题了。 ## Docker Compose 部署 @@ -39,7 +44,13 @@ $ docker pull diygod/rsshub:2021-06-18 下载 [docker-compose.yml](https://github.com/DIYgod/RSSHub/blob/master/docker-compose.yml) ```bash -wget https://raw.githubusercontent.com/DIYgod/RSSHub/master/docker-compose.yml +$ wget https://raw.githubusercontent.com/DIYgod/RSSHub/master/docker-compose.yml +``` + +检查有无需要修改的配置 + +```bash +$ vi docker-compose.yml # 也可以是你喜欢的编辑器 ``` 创建 volume 持久化 Redis 缓存 @@ -76,6 +87,8 @@ $ docker pull diygod/rsshub ## Docker 部署 +> **如需启用 puppeteer,请在每条命令中均将 `diygod/rsshub` 替换为 `diygod/rsshub:chromium-bundled` (仅限 x86\_64 架构)** + ### 安装 运行下面的命令下载 RSSHub 镜像 @@ -119,7 +132,7 @@ $ docker rm rsshub $ docker run -d --name rsshub -p 1200:1200 -e CACHE_EXPIRE=3600 -e GITHUB_ACCESS_TOKEN=example diygod/rsshub ``` -该部署方式不包括 puppeteer 和 redis 依赖,如有需要请改用 Docker Compose 部署方式或自行部署外部依赖 +该部署方式不包括 puppeteer (除非在 x86\_64 架构上改用 `diygod/rsshub:chromium-bundled`) 和 redis 依赖,如有需要请改用 Docker Compose 部署方式或自行部署外部依赖 更多配置项请看 [#配置](#pei-zhi) diff --git a/scripts/docker/minify-docker.js b/scripts/docker/minify-docker.js index 412a6a797ab89b..908eb490124a22 100644 --- a/scripts/docker/minify-docker.js +++ b/scripts/docker/minify-docker.js @@ -2,15 +2,25 @@ const fs = require('fs-extra'); const path = require('path'); const { nodeFileTrace } = require('@vercel/nft'); -const files = ['lib/index.js', 'api/now.js']; -const resultFolder = 'app-minimal'; +// !!! if any new dependencies are added, update the Dockerfile !!! 
+
+const projectRoot = path.resolve(process.env.PROJECT_ROOT || path.join(__dirname, '../..'));
+const resultFolder = path.join(projectRoot, 'app-minimal'); // no need to resolve, projectRoot is always absolute
+const files = ['lib/index.js', 'api/now.js'].map((file) => path.join(projectRoot, file));
 
 (async () => {
-    console.log('Start analyizing...');
+    console.log('Start analyzing, project root:', projectRoot);
     const { fileList: fileSet } = await nodeFileTrace(files, {
-        base: path.resolve(path.join(__dirname, '../..')),
+        base: projectRoot,
     });
-    const fileList = Array.from(fileSet);
-    console.log('Total files need to be copy: ' + fileList.length);
-    return Promise.all(fileList.map((e) => fs.copy(e, path.resolve(path.join(resultFolder, e)))));
-})();
+    let fileList = Array.from(fileSet);
+    console.log('Total touchable files:', fileList.length);
+    fileList = fileList.filter((file) => file.startsWith('node_modules/')); // only need node_modules
+    console.log('Total files to be copied (touchable files in node_modules/):', fileList.length);
+    console.log('Start copying files, destination:', resultFolder);
+    return Promise.all(fileList.map((e) => fs.copy(path.join(projectRoot, e), path.join(resultFolder, e))));
+})().catch((err) => {
+    // surface unhandled promise rejections and fail the build
+    console.error(err, err.stack);
+    process.exit(1);
+});
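A note on the reworked `scripts/docker/minify-docker.js`: the docker-minifier stage drives it with `PROJECT_ROOT=/app`, but the same trace-and-copy step can be exercised outside Docker. A minimal sketch, assuming dependencies (including `@vercel/nft` and `fs-extra`) are already installed and the current directory is the repository root:

```bash
# Trace lib/index.js and api/now.js with @vercel/nft, then copy only the
# node_modules files they actually touch into ./app-minimal.
$ PROJECT_ROOT="$(pwd)" node scripts/docker/minify-docker.js

# Compare the full and minified dependency trees.
$ du -sh node_modules app-minimal/node_modules
```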
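The dep-version-parser stage exists purely to keep the build cache stable: only the extracted version strings, not the whole `package.json`, are copied into the chromium-downloader and docker-minifier stages, so unrelated edits to `package.json` no longer invalidate their caches. For illustration, the Perl-regex lookarounds strip the key and the quotes and leave a bare version string (the version shown here is hypothetical):

```bash
$ grep -Po '(?<="puppeteer": ")[^\s"]*(?=")' package.json
13.1.3
```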
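Finally, the Chromium-bundled path that the workflows exercise can be reproduced locally. A hedged sketch, assuming an amd64 host and an arbitrary local tag `rsshub:chromium-bundled-local`:

```bash
# Build with bundled Chromium, mirroring the build-arg used in docker-release.yml and docker-test.yml.
$ docker build --build-arg PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=0 -t rsshub:chromium-bundled-local .

# Start a container and check that the server answers on port 1200.
$ docker run -d --name rsshub-test -p 1200:1200 rsshub:chromium-bundled-local
$ curl -sf http://localhost:1200/ > /dev/null && echo 'RSSHub is up'

# Re-run the shared-library check from the app stage inside the built image.
$ docker run --rm --entrypoint sh rsshub:chromium-bundled-local \
      -c 'ldd $(find /app/node_modules/puppeteer/ -name chrome) | grep "not found" || echo "all shared libs are met"'
```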