feat: switch to nodejs action
This commit is contained in:
parent
7bbc71bb94
commit
e141ef57e3
4
.eslintignore
Normal file
4
.eslintignore
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
dist/
|
||||||
|
lib/
|
||||||
|
node_modules/
|
||||||
|
jest.config.js
|
81
.eslintrc.json
Normal file
81
.eslintrc.json
Normal file
@ -0,0 +1,81 @@
|
|||||||
|
{
|
||||||
|
"plugins": [
|
||||||
|
"jest",
|
||||||
|
"@typescript-eslint"
|
||||||
|
],
|
||||||
|
"extends": [
|
||||||
|
"plugin:github/recommended"
|
||||||
|
],
|
||||||
|
"parser": "@typescript-eslint/parser",
|
||||||
|
"parserOptions": {
|
||||||
|
"ecmaVersion": 9,
|
||||||
|
"sourceType": "module",
|
||||||
|
"project": "./tsconfig.json"
|
||||||
|
},
|
||||||
|
"rules": {
|
||||||
|
"i18n-text/no-en": "off",
|
||||||
|
"eslint-comments/no-use": "off",
|
||||||
|
"import/no-namespace": "off",
|
||||||
|
"no-unused-vars": "off",
|
||||||
|
"@typescript-eslint/no-unused-vars": "error",
|
||||||
|
"@typescript-eslint/explicit-member-accessibility": [
|
||||||
|
"error",
|
||||||
|
{
|
||||||
|
"accessibility": "no-public"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"@typescript-eslint/no-require-imports": "error",
|
||||||
|
"@typescript-eslint/array-type": "error",
|
||||||
|
"@typescript-eslint/await-thenable": "error",
|
||||||
|
"@typescript-eslint/ban-ts-comment": "off",
|
||||||
|
"camelcase": "off",
|
||||||
|
"@typescript-eslint/consistent-type-assertions": "error",
|
||||||
|
"@typescript-eslint/explicit-function-return-type": [
|
||||||
|
"error",
|
||||||
|
{
|
||||||
|
"allowExpressions": true
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"@typescript-eslint/func-call-spacing": [
|
||||||
|
"error",
|
||||||
|
"never"
|
||||||
|
],
|
||||||
|
"@typescript-eslint/no-array-constructor": "error",
|
||||||
|
"@typescript-eslint/no-empty-interface": "error",
|
||||||
|
"@typescript-eslint/no-explicit-any": "error",
|
||||||
|
"@typescript-eslint/no-extraneous-class": "error",
|
||||||
|
"@typescript-eslint/no-for-in-array": "error",
|
||||||
|
"@typescript-eslint/no-inferrable-types": "error",
|
||||||
|
"@typescript-eslint/no-misused-new": "error",
|
||||||
|
"@typescript-eslint/no-namespace": "error",
|
||||||
|
"@typescript-eslint/no-non-null-assertion": "warn",
|
||||||
|
"@typescript-eslint/no-unnecessary-qualifier": "error",
|
||||||
|
"@typescript-eslint/no-unnecessary-type-assertion": "error",
|
||||||
|
"@typescript-eslint/no-useless-constructor": "error",
|
||||||
|
"@typescript-eslint/no-var-requires": "error",
|
||||||
|
"@typescript-eslint/prefer-for-of": "warn",
|
||||||
|
"@typescript-eslint/prefer-function-type": "warn",
|
||||||
|
"@typescript-eslint/prefer-includes": "error",
|
||||||
|
"@typescript-eslint/prefer-string-starts-ends-with": "error",
|
||||||
|
"@typescript-eslint/promise-function-async": "error",
|
||||||
|
"@typescript-eslint/require-array-sort-compare": "error",
|
||||||
|
"@typescript-eslint/restrict-plus-operands": "error",
|
||||||
|
"semi": "off",
|
||||||
|
"filenames/match-regex": [
|
||||||
|
"error",
|
||||||
|
"^[a-zA-Z0-9\\-.]+$",
|
||||||
|
true
|
||||||
|
],
|
||||||
|
"@typescript-eslint/semi": [
|
||||||
|
"error",
|
||||||
|
"never"
|
||||||
|
],
|
||||||
|
"@typescript-eslint/type-annotation-spacing": "error",
|
||||||
|
"@typescript-eslint/unbound-method": "error"
|
||||||
|
},
|
||||||
|
"env": {
|
||||||
|
"node": true,
|
||||||
|
"es6": true,
|
||||||
|
"jest/globals": true
|
||||||
|
}
|
||||||
|
}
|
1
.gitattributes
vendored
Normal file
1
.gitattributes
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
dist/** -diff linguist-generated=true
|
5
.github/workflows/codacy-analysis.yml
vendored
5
.github/workflows/codacy-analysis.yml
vendored
@ -19,6 +19,11 @@ on:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
codacy-security-scan:
|
codacy-security-scan:
|
||||||
|
# Cancel other workflows that are running for the same branch
|
||||||
|
# https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#concurrency
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.ref }}
|
||||||
|
cancel-in-progress: true
|
||||||
name: Codacy Security Scan
|
name: Codacy Security Scan
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
|
74
.github/workflows/codeql.yml
vendored
Normal file
74
.github/workflows/codeql.yml
vendored
Normal file
@ -0,0 +1,74 @@
|
|||||||
|
# For most projects, this workflow file will not need changing; you simply need
|
||||||
|
# to commit it to your repository.
|
||||||
|
#
|
||||||
|
# You may wish to alter this file to override the set of languages analyzed,
|
||||||
|
# or to provide custom queries or build logic.
|
||||||
|
#
|
||||||
|
# ******** NOTE ********
|
||||||
|
# We have attempted to detect the languages in your repository. Please check
|
||||||
|
# the `language` matrix defined below to confirm you have the correct set of
|
||||||
|
# supported CodeQL languages.
|
||||||
|
#
|
||||||
|
name: "CodeQL"
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [ "main" ]
|
||||||
|
pull_request:
|
||||||
|
# The branches below must be a subset of the branches above
|
||||||
|
branches: [ "main" ]
|
||||||
|
schedule:
|
||||||
|
- cron: '44 20 * * 0'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
analyze:
|
||||||
|
name: Analyze
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
actions: read
|
||||||
|
contents: read
|
||||||
|
security-events: write
|
||||||
|
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
language: [ 'javascript' ]
|
||||||
|
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
|
||||||
|
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
|
||||||
|
# Initializes the CodeQL tools for scanning.
|
||||||
|
- name: Initialize CodeQL
|
||||||
|
uses: github/codeql-action/init@v2
|
||||||
|
with:
|
||||||
|
languages: ${{ matrix.language }}
|
||||||
|
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||||
|
# By default, queries listed here will override any specified in a config file.
|
||||||
|
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||||
|
|
||||||
|
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
|
||||||
|
# queries: security-extended,security-and-quality
|
||||||
|
|
||||||
|
|
||||||
|
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||||
|
# If this step fails, then you should remove it and run the build manually (see below)
|
||||||
|
- name: Autobuild
|
||||||
|
uses: github/codeql-action/autobuild@v2
|
||||||
|
|
||||||
|
# ℹ️ Command-line programs to run using the OS shell.
|
||||||
|
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
||||||
|
|
||||||
|
# If the Autobuild fails above, remove it and uncomment the following three lines.
|
||||||
|
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
|
||||||
|
|
||||||
|
# - run: |
|
||||||
|
# echo "Run, Build Application using script"
|
||||||
|
# ./location_of_script_within_repo/buildscript.sh
|
||||||
|
|
||||||
|
- name: Perform CodeQL Analysis
|
||||||
|
uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: "/language:${{matrix.language}}"
|
130
.github/workflows/test.yml
vendored
130
.github/workflows/test.yml
vendored
@ -24,9 +24,80 @@ jobs:
|
|||||||
- name: shellcheck
|
- name: shellcheck
|
||||||
uses: reviewdog/action-shellcheck@v1.17
|
uses: reviewdog/action-shellcheck@v1.17
|
||||||
|
|
||||||
|
build:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
persist-credentials: false
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Use Node.js 16.x
|
||||||
|
uses: actions/setup-node@v3.6.0
|
||||||
|
with:
|
||||||
|
cache: 'yarn'
|
||||||
|
node-version: '16.x'
|
||||||
|
|
||||||
|
- name: Create coverage directory and clover.xml
|
||||||
|
run: |
|
||||||
|
mkdir -p coverage
|
||||||
|
touch coverage/clover.xml
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: |
|
||||||
|
yarn install
|
||||||
|
|
||||||
|
- name: Run eslint on changed files
|
||||||
|
uses: tj-actions/eslint-changed-files@v18
|
||||||
|
with:
|
||||||
|
token: ${{ secrets.PAT_TOKEN }}
|
||||||
|
config_path: ".eslintrc.json"
|
||||||
|
ignore_path: ".eslintignore"
|
||||||
|
|
||||||
|
- name: Run build and test
|
||||||
|
run: |
|
||||||
|
yarn all
|
||||||
|
|
||||||
|
- name: Verify Changed files
|
||||||
|
uses: tj-actions/verify-changed-files@v14
|
||||||
|
id: changed_files
|
||||||
|
with:
|
||||||
|
files: |
|
||||||
|
src
|
||||||
|
dist
|
||||||
|
|
||||||
|
- name: Commit files
|
||||||
|
if: steps.changed_files.outputs.files_changed == 'true'
|
||||||
|
run: |
|
||||||
|
git config --local user.email "action@github.com"
|
||||||
|
git config --local user.name "GitHub Action"
|
||||||
|
git add src dist
|
||||||
|
git commit -m "Added missing changes and modified dist assets."
|
||||||
|
|
||||||
|
- name: Push changes
|
||||||
|
if: steps.changed_files.outputs.files_changed == 'true'
|
||||||
|
uses: ad-m/github-push-action@master
|
||||||
|
with:
|
||||||
|
github_token: ${{ secrets.PAT_TOKEN }}
|
||||||
|
branch: ${{ github.head_ref }}
|
||||||
|
|
||||||
|
- name: Upload build assets
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: build-assets
|
||||||
|
path: dist
|
||||||
|
|
||||||
|
- name: Run codacy-coverage-reporter
|
||||||
|
uses: codacy/codacy-coverage-reporter-action@v1
|
||||||
|
continue-on-error: true
|
||||||
|
with:
|
||||||
|
project-token: ${{ secrets.CODACY_PROJECT_TOKEN }}
|
||||||
|
coverage-reports: coverage/lcov.info
|
||||||
|
|
||||||
test-multiple-repositories:
|
test-multiple-repositories:
|
||||||
name: Test with multiple repositories
|
name: Test with multiple repositories
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
needs: build
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout into dir1
|
- name: Checkout into dir1
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v3
|
||||||
@ -34,16 +105,24 @@ jobs:
|
|||||||
submodules: true
|
submodules: true
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
path: dir1
|
path: dir1
|
||||||
|
|
||||||
|
- name: Download build assets
|
||||||
|
uses: actions/download-artifact@v3
|
||||||
|
with:
|
||||||
|
name: build-assets
|
||||||
|
|
||||||
- name: Run changed-files with defaults on the dir1
|
- name: Run changed-files with defaults on the dir1
|
||||||
id: changed-files-dir1
|
id: changed-files-dir1
|
||||||
uses: ./dir1
|
uses: ./dir1
|
||||||
with:
|
with:
|
||||||
path: dir1
|
path: dir1
|
||||||
|
|
||||||
- name: Show output
|
- name: Show output
|
||||||
run: |
|
run: |
|
||||||
echo '${{ toJSON(steps.changed-files-dir1.outputs) }}'
|
echo '${{ toJSON(steps.changed-files-dir1.outputs) }}'
|
||||||
shell:
|
shell:
|
||||||
bash
|
bash
|
||||||
|
|
||||||
- name: List all modified files
|
- name: List all modified files
|
||||||
run: |
|
run: |
|
||||||
for file in ${{ steps.changed-files-dir1.outputs.modified_files }}; do
|
for file in ${{ steps.changed-files-dir1.outputs.modified_files }}; do
|
||||||
@ -51,22 +130,26 @@ jobs:
|
|||||||
done
|
done
|
||||||
shell:
|
shell:
|
||||||
bash
|
bash
|
||||||
|
|
||||||
- name: Checkout into dir2
|
- name: Checkout into dir2
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v3
|
||||||
with:
|
with:
|
||||||
submodules: true
|
submodules: true
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
path: dir2
|
path: dir2
|
||||||
|
|
||||||
- name: Run changed-files with defaults on the dir2
|
- name: Run changed-files with defaults on the dir2
|
||||||
id: changed-files-dir2
|
id: changed-files-dir2
|
||||||
uses: ./dir2
|
uses: ./dir2
|
||||||
with:
|
with:
|
||||||
path: dir2
|
path: dir2
|
||||||
|
|
||||||
- name: Show output
|
- name: Show output
|
||||||
run: |
|
run: |
|
||||||
echo '${{ toJSON(steps.changed-files-dir2.outputs) }}'
|
echo '${{ toJSON(steps.changed-files-dir2.outputs) }}'
|
||||||
shell:
|
shell:
|
||||||
bash
|
bash
|
||||||
|
|
||||||
- name: List all modified files
|
- name: List all modified files
|
||||||
run: |
|
run: |
|
||||||
for file in ${{ steps.changed-files-dir2.outputs.modified_files }}; do
|
for file in ${{ steps.changed-files-dir2.outputs.modified_files }}; do
|
||||||
@ -77,6 +160,7 @@ jobs:
|
|||||||
|
|
||||||
test-using-since-and-until:
|
test-using-since-and-until:
|
||||||
name: Test changed-files using since and until
|
name: Test changed-files using since and until
|
||||||
|
needs: build
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
if: github.event_name == 'push'
|
if: github.event_name == 'push'
|
||||||
|
|
||||||
@ -86,6 +170,11 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Download build assets
|
||||||
|
uses: actions/download-artifact@v3
|
||||||
|
with:
|
||||||
|
name: build-assets
|
||||||
|
|
||||||
- name: Run changed-files since 2022-08-19
|
- name: Run changed-files since 2022-08-19
|
||||||
id: changed-files-since
|
id: changed-files-since
|
||||||
uses: ./
|
uses: ./
|
||||||
@ -129,6 +218,7 @@ jobs:
|
|||||||
test-similar-base-and-commit-sha:
|
test-similar-base-and-commit-sha:
|
||||||
name: Test changed-files similar base and commit sha
|
name: Test changed-files similar base and commit sha
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
needs: build
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout to branch
|
- name: Checkout to branch
|
||||||
@ -136,6 +226,11 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Download build assets
|
||||||
|
uses: actions/download-artifact@v3
|
||||||
|
with:
|
||||||
|
name: build-assets
|
||||||
|
|
||||||
- name: Run changed-files with similar base and commit sha
|
- name: Run changed-files with similar base and commit sha
|
||||||
id: changed-files
|
id: changed-files
|
||||||
continue-on-error: true
|
continue-on-error: true
|
||||||
@ -159,6 +254,7 @@ jobs:
|
|||||||
test-unset-github-output-env:
|
test-unset-github-output-env:
|
||||||
name: Test unset GITHUB_OUTPUT env
|
name: Test unset GITHUB_OUTPUT env
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
needs: build
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout to branch
|
- name: Checkout to branch
|
||||||
@ -166,6 +262,11 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Download build assets
|
||||||
|
uses: actions/download-artifact@v3
|
||||||
|
with:
|
||||||
|
name: build-assets
|
||||||
|
|
||||||
- name: Run changed-files with unset GITHUB_OUTPUT env
|
- name: Run changed-files with unset GITHUB_OUTPUT env
|
||||||
id: changed-files
|
id: changed-files
|
||||||
continue-on-error: true
|
continue-on-error: true
|
||||||
@ -182,6 +283,7 @@ jobs:
|
|||||||
test-limited-commit-history:
|
test-limited-commit-history:
|
||||||
name: Test changed-files with limited commit history
|
name: Test changed-files with limited commit history
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
needs: build
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
max-parallel: 4
|
max-parallel: 4
|
||||||
@ -195,6 +297,11 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
fetch-depth: ${{ matrix.fetch-depth }}
|
fetch-depth: ${{ matrix.fetch-depth }}
|
||||||
|
|
||||||
|
- name: Download build assets
|
||||||
|
uses: actions/download-artifact@v3
|
||||||
|
with:
|
||||||
|
name: build-assets
|
||||||
|
|
||||||
- name: Run changed-files
|
- name: Run changed-files
|
||||||
id: changed-files
|
id: changed-files
|
||||||
uses: ./
|
uses: ./
|
||||||
@ -210,11 +317,17 @@ jobs:
|
|||||||
test-non-existent-base-sha:
|
test-non-existent-base-sha:
|
||||||
name: Test changed-files non existent base sha
|
name: Test changed-files non existent base sha
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
needs: build
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout to branch
|
- name: Checkout to branch
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Download build assets
|
||||||
|
uses: actions/download-artifact@v3
|
||||||
|
with:
|
||||||
|
name: build-assets
|
||||||
|
|
||||||
- name: Run changed-files with non existent base sha
|
- name: Run changed-files with non existent base sha
|
||||||
id: changed-files
|
id: changed-files
|
||||||
uses: ./
|
uses: ./
|
||||||
@ -257,11 +370,17 @@ jobs:
|
|||||||
test-non-existent-sha:
|
test-non-existent-sha:
|
||||||
name: Test changed-files non existent sha
|
name: Test changed-files non existent sha
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
needs: build
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout to branch
|
- name: Checkout to branch
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Download build assets
|
||||||
|
uses: actions/download-artifact@v3
|
||||||
|
with:
|
||||||
|
name: build-assets
|
||||||
|
|
||||||
- name: Run changed-files with non existent sha
|
- name: Run changed-files with non existent sha
|
||||||
id: changed-files
|
id: changed-files
|
||||||
uses: ./
|
uses: ./
|
||||||
@ -304,6 +423,7 @@ jobs:
|
|||||||
test-submodules:
|
test-submodules:
|
||||||
name: Test changed-files with submodule
|
name: Test changed-files with submodule
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
needs: build
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
max-parallel: 4
|
max-parallel: 4
|
||||||
@ -318,6 +438,11 @@ jobs:
|
|||||||
submodules: recursive
|
submodules: recursive
|
||||||
fetch-depth: ${{ matrix.fetch-depth }}
|
fetch-depth: ${{ matrix.fetch-depth }}
|
||||||
|
|
||||||
|
- name: Download build assets
|
||||||
|
uses: actions/download-artifact@v3
|
||||||
|
with:
|
||||||
|
name: build-assets
|
||||||
|
|
||||||
- name: Run changed-files with submodule
|
- name: Run changed-files with submodule
|
||||||
id: changed-files
|
id: changed-files
|
||||||
uses: ./
|
uses: ./
|
||||||
@ -341,6 +466,7 @@ jobs:
|
|||||||
test:
|
test:
|
||||||
name: Test changed-files
|
name: Test changed-files
|
||||||
runs-on: ${{ matrix.platform }}
|
runs-on: ${{ matrix.platform }}
|
||||||
|
needs: build
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
max-parallel: 4
|
max-parallel: 4
|
||||||
@ -354,6 +480,10 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
submodules: true
|
submodules: true
|
||||||
fetch-depth: ${{ matrix.fetch-depth }}
|
fetch-depth: ${{ matrix.fetch-depth }}
|
||||||
|
- name: Download build assets
|
||||||
|
uses: actions/download-artifact@v3
|
||||||
|
with:
|
||||||
|
name: build-assets
|
||||||
- name: Dump GitHub context
|
- name: Dump GitHub context
|
||||||
env:
|
env:
|
||||||
GITHUB_CONTEXT: ${{ toJson(github) }}
|
GITHUB_CONTEXT: ${{ toJson(github) }}
|
||||||
|
110
.gitignore
vendored
110
.gitignore
vendored
@ -1,5 +1,107 @@
|
|||||||
.idea/
|
# Dependency directory
|
||||||
.envrc
|
node_modules
|
||||||
tag.sh
|
|
||||||
untag.sh
|
# Rest pulled from https://github.com/github/gitignore/blob/master/Node.gitignore
|
||||||
|
# Logs
|
||||||
|
logs
|
||||||
|
*.log
|
||||||
|
npm-debug.log*
|
||||||
|
yarn-debug.log*
|
||||||
|
yarn-error.log*
|
||||||
|
lerna-debug.log*
|
||||||
|
|
||||||
|
# Diagnostic reports (https://nodejs.org/api/report.html)
|
||||||
|
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
|
||||||
|
|
||||||
|
# Runtime data
|
||||||
|
pids
|
||||||
|
*.pid
|
||||||
|
*.seed
|
||||||
|
*.pid.lock
|
||||||
|
|
||||||
|
# Directory for instrumented libs generated by jscoverage/JSCover
|
||||||
|
lib-cov
|
||||||
|
|
||||||
|
# Coverage directory used by tools like istanbul
|
||||||
|
coverage
|
||||||
|
*.lcov
|
||||||
|
|
||||||
|
# nyc test coverage
|
||||||
|
.nyc_output
|
||||||
|
|
||||||
|
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
|
||||||
|
.grunt
|
||||||
|
|
||||||
|
# Bower dependency directory (https://bower.io/)
|
||||||
|
bower_components
|
||||||
|
|
||||||
|
# node-waf configuration
|
||||||
|
.lock-wscript
|
||||||
|
|
||||||
|
# Compiled binary addons (https://nodejs.org/api/addons.html)
|
||||||
|
build/Release
|
||||||
|
|
||||||
|
# Dependency directories
|
||||||
|
jspm_packages/
|
||||||
|
|
||||||
|
# TypeScript v1 declaration files
|
||||||
|
typings/
|
||||||
|
|
||||||
|
# TypeScript cache
|
||||||
|
*.tsbuildinfo
|
||||||
|
|
||||||
|
# Optional npm cache directory
|
||||||
|
.npm
|
||||||
|
|
||||||
|
# Optional eslint cache
|
||||||
|
.eslintcache
|
||||||
|
|
||||||
|
# Optional REPL history
|
||||||
|
.node_repl_history
|
||||||
|
|
||||||
|
# Output of 'npm pack'
|
||||||
|
*.tgz
|
||||||
|
|
||||||
|
# Yarn Integrity file
|
||||||
|
.yarn-integrity
|
||||||
|
|
||||||
|
# dotenv environment variables file
|
||||||
|
./.env
|
||||||
|
.env/../.env
|
||||||
|
./.env.local
|
||||||
|
./.env/../.env.local
|
||||||
|
.env
|
||||||
|
.env.test
|
||||||
|
|
||||||
|
# parcel-bundler cache (https://parceljs.org/)
|
||||||
|
.cache
|
||||||
|
|
||||||
|
# next.js build output
|
||||||
|
.next
|
||||||
|
|
||||||
|
# nuxt.js build output
|
||||||
|
.nuxt
|
||||||
|
|
||||||
|
# vuepress build output
|
||||||
|
.vuepress/dist
|
||||||
|
|
||||||
|
# Serverless directories
|
||||||
|
.serverless/
|
||||||
|
|
||||||
|
# FuseBox cache
|
||||||
|
.fusebox/
|
||||||
|
|
||||||
|
# DynamoDB Local files
|
||||||
|
.dynamodb/
|
||||||
|
|
||||||
|
# OS metadata
|
||||||
.DS_Store
|
.DS_Store
|
||||||
|
Thumbs.db
|
||||||
|
|
||||||
|
# Ignore built ts files
|
||||||
|
__tests__/runner/*
|
||||||
|
lib/**/*
|
||||||
|
|
||||||
|
# IDEA
|
||||||
|
.idea/
|
||||||
|
|
||||||
|
3
.prettierignore
Normal file
3
.prettierignore
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
dist/
|
||||||
|
lib/
|
||||||
|
node_modules/
|
10
.prettierrc.json
Normal file
10
.prettierrc.json
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
{
|
||||||
|
"printWidth": 80,
|
||||||
|
"tabWidth": 2,
|
||||||
|
"useTabs": false,
|
||||||
|
"semi": false,
|
||||||
|
"singleQuote": true,
|
||||||
|
"trailingComma": "none",
|
||||||
|
"bracketSpacing": false,
|
||||||
|
"arrowParens": "avoid"
|
||||||
|
}
|
145
action.yml
145
action.yml
@ -23,6 +23,10 @@ inputs:
|
|||||||
description: "Source file(s) used to populate the `files` input."
|
description: "Source file(s) used to populate the `files` input."
|
||||||
required: false
|
required: false
|
||||||
default: ""
|
default: ""
|
||||||
|
files-from-source-file-separator:
|
||||||
|
description: 'Separator used to split the `files-from-source-file` input'
|
||||||
|
default: "\n"
|
||||||
|
required: false
|
||||||
files:
|
files:
|
||||||
description: "File and directory patterns to detect changes using only these list of file(s) (Defaults to the entire repo) **NOTE:** Multiline file/directory patterns should not include quotes."
|
description: "File and directory patterns to detect changes using only these list of file(s) (Defaults to the entire repo) **NOTE:** Multiline file/directory patterns should not include quotes."
|
||||||
required: false
|
required: false
|
||||||
@ -43,6 +47,10 @@ inputs:
|
|||||||
description: "Source file(s) used to populate the `files_ignore` input"
|
description: "Source file(s) used to populate the `files_ignore` input"
|
||||||
required: false
|
required: false
|
||||||
default: ""
|
default: ""
|
||||||
|
files_ignore_from_source_file_separator:
|
||||||
|
description: 'Separator used to split the `files_ignore_from_source_file` input'
|
||||||
|
default: "\n"
|
||||||
|
required: false
|
||||||
sha:
|
sha:
|
||||||
description: "Specify a different commit SHA used for comparing changes"
|
description: "Specify a different commit SHA used for comparing changes"
|
||||||
required: false
|
required: false
|
||||||
@ -111,179 +119,50 @@ inputs:
|
|||||||
outputs:
|
outputs:
|
||||||
added_files:
|
added_files:
|
||||||
description: "Returns only files that are Added (A)."
|
description: "Returns only files that are Added (A)."
|
||||||
value: ${{ steps.changed-files.outputs.added_files }}
|
|
||||||
copied_files:
|
copied_files:
|
||||||
description: "Returns only files that are Copied (C)."
|
description: "Returns only files that are Copied (C)."
|
||||||
value: ${{ steps.changed-files.outputs.copied_files }}
|
|
||||||
deleted_files:
|
deleted_files:
|
||||||
description: "Returns only files that are Deleted (D)."
|
description: "Returns only files that are Deleted (D)."
|
||||||
value: ${{ steps.changed-files.outputs.deleted_files }}
|
|
||||||
modified_files:
|
modified_files:
|
||||||
description: "Returns only files that are Modified (M)."
|
description: "Returns only files that are Modified (M)."
|
||||||
value: ${{ steps.changed-files.outputs.modified_files }}
|
|
||||||
renamed_files:
|
renamed_files:
|
||||||
description: "Returns only files that are Renamed (R)."
|
description: "Returns only files that are Renamed (R)."
|
||||||
value: ${{ steps.changed-files.outputs.renamed_files }}
|
|
||||||
all_old_new_renamed_files:
|
all_old_new_renamed_files:
|
||||||
description: "Returns only files that are Renamed and list their old and new names. **NOTE:** This requires setting `include_all_old_new_renamed_files` to `true` (R)"
|
description: "Returns only files that are Renamed and list their old and new names. **NOTE:** This requires setting `include_all_old_new_renamed_files` to `true` (R)"
|
||||||
value: ${{ steps.changed-files.outputs.all_old_new_renamed_files }}
|
|
||||||
type_changed_files:
|
type_changed_files:
|
||||||
description: "Returns only files that have their file type changed (T)."
|
description: "Returns only files that have their file type changed (T)."
|
||||||
value: ${{ steps.changed-files.outputs.type_changed_files }}
|
|
||||||
unmerged_files:
|
unmerged_files:
|
||||||
description: "Returns only files that are Unmerged (U)."
|
description: "Returns only files that are Unmerged (U)."
|
||||||
value: ${{ steps.changed-files.outputs.unmerged_files }}
|
|
||||||
unknown_files:
|
unknown_files:
|
||||||
description: "Returns only files that are Unknown (X)."
|
description: "Returns only files that are Unknown (X)."
|
||||||
value: ${{ steps.changed-files.outputs.unknown_files }}
|
|
||||||
all_changed_and_modified_files:
|
all_changed_and_modified_files:
|
||||||
description: "Returns all changed and modified files i.e. *a combination of (ACMRDTUX)*"
|
description: "Returns all changed and modified files i.e. *a combination of (ACMRDTUX)*"
|
||||||
value: ${{ steps.changed-files.outputs.all_changed_and_modified_files }}
|
|
||||||
all_changed_files:
|
all_changed_files:
|
||||||
description: "Returns all changed files i.e. *a combination of all added, copied, modified and renamed files (ACMR)*"
|
description: "Returns all changed files i.e. *a combination of all added, copied, modified and renamed files (ACMR)*"
|
||||||
value: ${{ steps.changed-files.outputs.all_changed_files }}
|
|
||||||
any_changed:
|
any_changed:
|
||||||
description: "Returns `true` when any of the filenames provided using the `files` input has changed. If no `files` have been specified,an empty string `''` is returned. i.e. *using a combination of all added, copied, modified and renamed files (ACMR)*."
|
description: "Returns `true` when any of the filenames provided using the `files` input has changed. i.e. *using a combination of all added, copied, modified and renamed files (ACMR)*."
|
||||||
value: ${{ steps.changed-files.outputs.any_changed }}
|
|
||||||
only_changed:
|
only_changed:
|
||||||
description: "Returns `true` when only files provided using the `files` input has changed. If no `files` have been specified,an empty string `''` is returned. i.e. *using a combination of all added, copied, modified and renamed files (ACMR)*."
|
description: "Returns `true` when only files provided using the `files` input has changed. i.e. *using a combination of all added, copied, modified and renamed files (ACMR)*."
|
||||||
value: ${{ steps.changed-files.outputs.only_changed }}
|
|
||||||
other_changed_files:
|
other_changed_files:
|
||||||
description: "Returns all other changed files not listed in the files input i.e. *using a combination of all added, copied, modified and renamed files (ACMR)*."
|
description: "Returns all other changed files not listed in the files input i.e. *using a combination of all added, copied, modified and renamed files (ACMR)*."
|
||||||
value: ${{ steps.changed-files.outputs.other_changed_files }}
|
|
||||||
all_modified_files:
|
all_modified_files:
|
||||||
description: "Returns all changed files i.e. *a combination of all added, copied, modified, renamed and deleted files (ACMRD)*."
|
description: "Returns all changed files i.e. *a combination of all added, copied, modified, renamed and deleted files (ACMRD)*."
|
||||||
value: ${{ steps.changed-files.outputs.all_modified_files }}
|
|
||||||
any_modified:
|
any_modified:
|
||||||
description: "Returns `true` when any of the filenames provided using the `files` input has been modified. If no `files` have been specified,an empty string `''` is returned. i.e. *using a combination of all added, copied, modified, renamed, and deleted files (ACMRD)*."
|
description: "Returns `true` when any of the filenames provided using the `files` input has been modified. If no `files` have been specified,an empty string `''` is returned. i.e. *using a combination of all added, copied, modified, renamed, and deleted files (ACMRD)*."
|
||||||
value: ${{ steps.changed-files.outputs.any_modified }}
|
|
||||||
only_modified:
|
only_modified:
|
||||||
description: "Returns `true` when only files provided using the `files` input has been modified. If no `files` have been specified,an empty string `''` is returned.(ACMRD)."
|
description: "Returns `true` when only files provided using the `files` input has been modified. If no `files` have been specified,an empty string `''` is returned.(ACMRD)."
|
||||||
value: ${{ steps.changed-files.outputs.only_modified }}
|
|
||||||
other_modified_files:
|
other_modified_files:
|
||||||
description: "Returns all other modified files not listed in the files input i.e. *a combination of all added, copied, modified, and deleted files (ACMRD)*"
|
description: "Returns all other modified files not listed in the files input i.e. *a combination of all added, copied, modified, and deleted files (ACMRD)*"
|
||||||
value: ${{ steps.changed-files.outputs.other_modified_files }}
|
|
||||||
any_deleted:
|
any_deleted:
|
||||||
description: "Returns `true` when any of the filenames provided using the `files` input has been deleted. If no `files` have been specified,an empty string `''` is returned. (D)"
|
description: "Returns `true` when any of the filenames provided using the `files` input has been deleted. If no `files` have been specified,an empty string `''` is returned. (D)"
|
||||||
value: ${{ steps.changed-files.outputs.any_deleted }}
|
|
||||||
only_deleted:
|
only_deleted:
|
||||||
description: "Returns `true` when only files provided using the `files` input has been deleted. If no `files` have been specified,an empty string `''` is returned. (D)"
|
description: "Returns `true` when only files provided using the `files` input has been deleted. If no `files` have been specified,an empty string `''` is returned. (D)"
|
||||||
value: ${{ steps.changed-files.outputs.only_deleted }}
|
|
||||||
other_deleted_files:
|
other_deleted_files:
|
||||||
description: "Returns all other deleted files not listed in the files input i.e. *a combination of all deleted files (D)*"
|
description: "Returns all other deleted files not listed in the files input i.e. *a combination of all deleted files (D)*"
|
||||||
value: ${{ steps.changed-files.outputs.other_deleted_files }}
|
|
||||||
|
|
||||||
runs:
|
runs:
|
||||||
using: "composite"
|
using: 'node16'
|
||||||
steps:
|
main: 'dist/index.js'
|
||||||
- run: |
|
|
||||||
# "Calculating the previous and current SHA..."
|
|
||||||
bash $GITHUB_ACTION_PATH/diff-sha.sh
|
|
||||||
id: changed-files-diff-sha
|
|
||||||
shell: bash
|
|
||||||
env:
|
|
||||||
GITHUB_SERVER_URL: ${{ github.server_url }}
|
|
||||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
|
||||||
GITHUB_REF: ${{ github.ref }}
|
|
||||||
GITHUB_SHA: ${{ github.sha }}
|
|
||||||
GITHUB_WORKSPACE: ${{ github.workspace }}
|
|
||||||
GITHUB_EVENT_BASE_REF: ${{ github.event.base_ref }}
|
|
||||||
GITHUB_EVENT_HEAD_REPO_FORK: ${{ github.event.pull_request.head.repo.fork }}
|
|
||||||
GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }}
|
|
||||||
GITHUB_EVENT_PULL_REQUEST_BASE_REF: ${{ github.event.pull_request.base.ref }}
|
|
||||||
GITHUB_EVENT_PULL_REQUEST_HEAD_REF: ${{ github.event.pull_request.head.ref }}
|
|
||||||
GITHUB_EVENT_PULL_REQUEST_BASE_SHA: ${{ github.event.pull_request.base.sha }}
|
|
||||||
GITHUB_EVENT_PULL_REQUEST_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
|
|
||||||
GITHUB_EVENT_PULL_REQUEST_COMMITS: ${{ github.event.pull_request.commits }}
|
|
||||||
GITHUB_EVENT_BEFORE: ${{ github.event.before }}
|
|
||||||
GITHUB_EVENT_FORCED: ${{ github.event.forced }}
|
|
||||||
GITHUB_REFNAME: ${{ github.ref_name }}
|
|
||||||
# INPUT_<VARIABLE_NAME> is not available in Composite run steps
|
|
||||||
# https://docs.github.com/en/actions/creating-actions/metadata-syntax-for-github-actions#example-specifying-inputs
|
|
||||||
INPUT_SHA: ${{ inputs.sha }}
|
|
||||||
INPUT_BASE_SHA: ${{ inputs.base_sha }}
|
|
||||||
INPUT_SINCE: ${{ inputs.since }}
|
|
||||||
INPUT_UNTIL: ${{ inputs.until }}
|
|
||||||
INPUT_PATH: ${{ inputs.path }}
|
|
||||||
INPUT_FETCH_DEPTH: ${{ inputs.fetch_depth }}
|
|
||||||
INPUT_SINCE_LAST_REMOTE_COMMIT: ${{ inputs.since_last_remote_commit }}
|
|
||||||
- name: Glob match
|
|
||||||
uses: tj-actions/glob@v17.2.6
|
|
||||||
id: glob
|
|
||||||
with:
|
|
||||||
files: ${{ inputs.files }}
|
|
||||||
files-separator: ${{ inputs.files_separator }}
|
|
||||||
excluded-files: ${{ inputs.files_ignore }}
|
|
||||||
excluded-files-separator: ${{ inputs.files_ignore_separator }}
|
|
||||||
files-from-source-file: ${{ inputs.files_from_source_file }}
|
|
||||||
excluded-files-from-source-file: ${{ inputs.files_ignore_from_source_file}}
|
|
||||||
escape-paths: true
|
|
||||||
working-directory: ${{ inputs.path }}
|
|
||||||
base-sha: ${{ steps.changed-files-diff-sha.outputs.previous_sha }}
|
|
||||||
sha: ${{ steps.changed-files-diff-sha.outputs.current_sha }}
|
|
||||||
diff: ${{ steps.changed-files-diff-sha.outputs.diff }}
|
|
||||||
match-directories: ${{ inputs.match_directories }}
|
|
||||||
include-deleted-files: true
|
|
||||||
separator: "|"
|
|
||||||
- run: |
|
|
||||||
bash $GITHUB_ACTION_PATH/get-changed-paths.sh
|
|
||||||
id: changed-files
|
|
||||||
shell: bash
|
|
||||||
env:
|
|
||||||
GITHUB_WORKSPACE: ${{ github.workspace }}
|
|
||||||
GITHUB_EVENT_PULL_REQUEST_BASE_REF: ${{ github.event.pull_request.base.ref }}
|
|
||||||
GITHUB_EVENT_PULL_REQUEST_HEAD_REPO_FORK: ${{ github.event.pull_request.head.repo.fork }}
|
|
||||||
# INPUT_<VARIABLE_NAME> is not available in Composite run steps
|
|
||||||
# https://docs.github.com/en/actions/creating-actions/metadata-syntax-for-github-actions#example-specifying-inputs
|
|
||||||
INPUT_FILES_PATTERN_FILE: ${{ steps.glob.outputs.paths-output-file }}
|
|
||||||
INPUT_SEPARATOR: ${{ inputs.separator }}
|
|
||||||
INPUT_PATH: ${{ inputs.path }}
|
|
||||||
INPUT_PREVIOUS_SHA: ${{ steps.changed-files-diff-sha.outputs.previous_sha }}
|
|
||||||
INPUT_CURRENT_SHA: ${{ steps.changed-files-diff-sha.outputs.current_sha }}
|
|
||||||
INPUT_TARGET_BRANCH: ${{ steps.changed-files-diff-sha.outputs.target_branch }}
|
|
||||||
INPUT_CURRENT_BRANCH: ${{ steps.changed-files-diff-sha.outputs.current_branch }}
|
|
||||||
INPUT_DIFF: ${{ steps.changed-files-diff-sha.outputs.diff }}
|
|
||||||
INPUT_QUOTEPATH: ${{ inputs.quotepath }}
|
|
||||||
INPUT_INCLUDE_ALL_OLD_NEW_RENAMED_FILES: ${{ inputs.include_all_old_new_renamed_files }}
|
|
||||||
INPUT_OLD_NEW_SEPARATOR: ${{ inputs.old_new_separator }}
|
|
||||||
INPUT_OLD_NEW_FILES_SEPARATOR: ${{ inputs.old_new_files_separator }}
|
|
||||||
INPUT_DIFF_RELATIVE: ${{ inputs.diff_relative }}
|
|
||||||
INPUT_DIR_NAMES: ${{ inputs.dir_names }}
|
|
||||||
INPUT_DIR_NAMES_MAX_DEPTH: ${{ inputs.dir_names_max_depth }}
|
|
||||||
INPUT_DIR_NAMES_EXCLUDE_ROOT: ${{ inputs.dir_names_exclude_root }}
|
|
||||||
INPUT_JSON: ${{ inputs.json }}
|
|
||||||
INPUT_HAS_CUSTOM_PATTERNS: ${{ steps.glob.outputs.has-custom-patterns }}
|
|
||||||
INPUT_JSON_RAW_FORMAT: ${{ inputs.json_raw_format }}
|
|
||||||
- name: Generate output files
|
|
||||||
uses: tj-actions/json2file@v1.7.2
|
|
||||||
if: inputs.write_output_files == 'true'
|
|
||||||
with:
|
|
||||||
outputs: ${{ toJSON(steps.changed-files.outputs) }}
|
|
||||||
directory: ${{ inputs.output_dir }}
|
|
||||||
skip_missing_keys: true
|
|
||||||
keys: |
|
|
||||||
added_files
|
|
||||||
copied_files
|
|
||||||
deleted_files
|
|
||||||
modified_files
|
|
||||||
renamed_files
|
|
||||||
all_old_new_renamed_files
|
|
||||||
type_changed_files
|
|
||||||
unmerged_files
|
|
||||||
unknown_files
|
|
||||||
all_changed_and_modified_files
|
|
||||||
all_changed_files
|
|
||||||
any_changed
|
|
||||||
only_changed
|
|
||||||
other_changed_files
|
|
||||||
all_modified_files
|
|
||||||
any_modified
|
|
||||||
only_modified
|
|
||||||
other_modified_files
|
|
||||||
any_deleted
|
|
||||||
only_deleted
|
|
||||||
other_deleted_files
|
|
||||||
extension: ${{ steps.changed-files.outputs.outputs_extension }}
|
|
||||||
|
|
||||||
branding:
|
branding:
|
||||||
icon: file-text
|
icon: file-text
|
||||||
|
323
diff-sha.sh
323
diff-sha.sh
@ -1,323 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
|
|
||||||
set -euo pipefail
|
|
||||||
|
|
||||||
INITIAL_COMMIT="false"
|
|
||||||
GITHUB_OUTPUT=${GITHUB_OUTPUT:-""}
|
|
||||||
EXTRA_ARGS="--no-tags --prune --recurse-submodules"
|
|
||||||
PREVIOUS_SHA=""
|
|
||||||
CURRENT_SHA=""
|
|
||||||
DIFF="..."
|
|
||||||
IS_TAG="false"
|
|
||||||
SOURCE_BRANCH=""
|
|
||||||
|
|
||||||
if [[ "$GITHUB_REF" == "refs/tags/"* ]]; then
|
|
||||||
IS_TAG="true"
|
|
||||||
EXTRA_ARGS="--prune --no-recurse-submodules"
|
|
||||||
SOURCE_BRANCH=${GITHUB_EVENT_BASE_REF#refs/heads/}
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ -z $GITHUB_EVENT_PULL_REQUEST_BASE_REF || "$GITHUB_EVENT_HEAD_REPO_FORK" == "true" ]]; then
|
|
||||||
DIFF=".."
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "::group::changed-files-diff-sha"
|
|
||||||
|
|
||||||
if [[ -n $INPUT_PATH ]]; then
|
|
||||||
REPO_DIR="$GITHUB_WORKSPACE/$INPUT_PATH"
|
|
||||||
|
|
||||||
echo "::debug::Resolving repository path: $REPO_DIR"
|
|
||||||
if [[ ! -d "$REPO_DIR" ]]; then
|
|
||||||
echo "::error::Invalid repository path: $REPO_DIR"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
cd "$REPO_DIR"
|
|
||||||
fi
|
|
||||||
|
|
||||||
function __version() {
|
|
||||||
echo "$@" | awk -F. '{ printf("%d%03d%03d%03d\n", $1,$2,$3,$4); }';
|
|
||||||
}
|
|
||||||
|
|
||||||
echo "Verifying git version..."
|
|
||||||
|
|
||||||
GIT_VERSION=$(git --version | awk '{print $3}') && exit_status=$? || exit_status=$?
|
|
||||||
|
|
||||||
if [[ $exit_status -ne 0 ]]; then
|
|
||||||
echo "::error::git not installed"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ $(__version "$GIT_VERSION") -lt $(__version "2.18.0") ]]; then
|
|
||||||
echo "::error::Invalid git version. Please upgrade ($GIT_VERSION) to >= (2.18.0)"
|
|
||||||
exit 1
|
|
||||||
else
|
|
||||||
echo "Valid git version found: ($GIT_VERSION)"
|
|
||||||
fi
|
|
||||||
|
|
||||||
IS_SHALLOW=$(git rev-parse --is-shallow-repository) && exit_status=$? || exit_status=$?
|
|
||||||
|
|
||||||
if [[ $exit_status -ne 0 ]]; then
|
|
||||||
echo "::error::Unable to determine if the repository is shallow"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ -z $GITHUB_EVENT_PULL_REQUEST_BASE_REF ]]; then
|
|
||||||
echo "Running on a push event..."
|
|
||||||
TARGET_BRANCH=$GITHUB_REFNAME
|
|
||||||
CURRENT_BRANCH=$TARGET_BRANCH
|
|
||||||
|
|
||||||
if [[ "$IS_SHALLOW" == "true" ]]; then
|
|
||||||
echo "Fetching remote refs..."
|
|
||||||
if [[ "$IS_TAG" == "false" ]]; then
|
|
||||||
# shellcheck disable=SC2086
|
|
||||||
git fetch $EXTRA_ARGS -u --progress --deepen="$INPUT_FETCH_DEPTH" origin +refs/heads/"$CURRENT_BRANCH":refs/remotes/origin/"$CURRENT_BRANCH" 1>/dev/null || true
|
|
||||||
elif [[ "$SOURCE_BRANCH" != "" ]]; then
|
|
||||||
# shellcheck disable=SC2086
|
|
||||||
git fetch $EXTRA_ARGS -u --progress --deepen="$INPUT_FETCH_DEPTH" origin +refs/heads/"$SOURCE_BRANCH":refs/remotes/origin/"$SOURCE_BRANCH" 1>/dev/null || true
|
|
||||||
fi
|
|
||||||
|
|
||||||
if git submodule status &>/dev/null; then
|
|
||||||
# shellcheck disable=SC2086
|
|
||||||
git submodule foreach git fetch $EXTRA_ARGS -u --progress --deepen="$INPUT_FETCH_DEPTH" || true
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "::debug::Getting HEAD SHA..."
|
|
||||||
if [[ -n "$INPUT_UNTIL" ]]; then
|
|
||||||
echo "::debug::Getting HEAD SHA for '$INPUT_UNTIL'..."
|
|
||||||
CURRENT_SHA=$(git log -1 --format="%H" --date=local --until="$INPUT_UNTIL") && exit_status=$? || exit_status=$?
|
|
||||||
|
|
||||||
if [[ $exit_status -ne 0 ]]; then
|
|
||||||
echo "::error::Invalid until date: $INPUT_UNTIL"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
if [[ -z $INPUT_SHA ]]; then
|
|
||||||
CURRENT_SHA=$(git rev-list -n 1 HEAD) && exit_status=$? || exit_status=$?
|
|
||||||
else
|
|
||||||
CURRENT_SHA=$INPUT_SHA; exit_status=$?
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "::debug::Verifying the current commit SHA: $CURRENT_SHA"
|
|
||||||
git rev-parse --quiet --verify "$CURRENT_SHA^{commit}" 1>/dev/null 2>&1 && exit_status=$? || exit_status=$?
|
|
||||||
|
|
||||||
if [[ $exit_status -ne 0 ]]; then
|
|
||||||
echo "::error::Unable to locate the current sha: $CURRENT_SHA"
|
|
||||||
echo "::error::Please verify that current sha is valid, and increase the fetch_depth to a number higher than $INPUT_FETCH_DEPTH."
|
|
||||||
exit 1
|
|
||||||
else
|
|
||||||
echo "::debug::Current SHA: $CURRENT_SHA"
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ -z $INPUT_BASE_SHA ]]; then
|
|
||||||
if [[ -n "$INPUT_SINCE" ]]; then
|
|
||||||
echo "::debug::Getting base SHA for '$INPUT_SINCE'..."
|
|
||||||
PREVIOUS_SHA=$(git log --format="%H" --date=local --since="$INPUT_SINCE" | tail -1) && exit_status=$? || exit_status=$?
|
|
||||||
|
|
||||||
if [[ -z "$PREVIOUS_SHA" ]]; then
|
|
||||||
echo "::error::Unable to locate a previous commit for the specified date: $INPUT_SINCE"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
elif [[ "$IS_TAG" == "true" ]]; then
|
|
||||||
PREVIOUS_SHA=$(git rev-parse "$(git tag --sort=-v:refname | head -n 2 | tail -n 1)") && exit_status=$? || exit_status=$?
|
|
||||||
|
|
||||||
if [[ -z "$PREVIOUS_SHA" ]]; then
|
|
||||||
echo "::error::Unable to locate a previous commit for the specified tag: $GITHUB_REF"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
if [[ "$INPUT_SINCE_LAST_REMOTE_COMMIT" == "true" ]]; then
|
|
||||||
PREVIOUS_SHA=""
|
|
||||||
|
|
||||||
if [[ "$GITHUB_EVENT_FORCED" == "false" || -z "$GITHUB_EVENT_FORCED" ]]; then
|
|
||||||
PREVIOUS_SHA=$GITHUB_EVENT_BEFORE && exit_status=$? || exit_status=$?
|
|
||||||
else
|
|
||||||
PREVIOUS_SHA=$(git rev-list -n 1 "HEAD^") && exit_status=$? || exit_status=$?
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
PREVIOUS_SHA=$(git rev-list -n 1 "HEAD^") && exit_status=$? || exit_status=$?
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ -z "$PREVIOUS_SHA" || "$PREVIOUS_SHA" == "0000000000000000000000000000000000000000" ]]; then
|
|
||||||
PREVIOUS_SHA=$(git rev-list -n 1 "HEAD^") && exit_status=$? || exit_status=$?
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ "$PREVIOUS_SHA" == "$CURRENT_SHA" ]]; then
|
|
||||||
if ! git rev-parse "$PREVIOUS_SHA^1" &>/dev/null; then
|
|
||||||
INITIAL_COMMIT="true"
|
|
||||||
PREVIOUS_SHA=$(git rev-parse "$CURRENT_SHA")
|
|
||||||
echo "::warning::Initial commit detected no previous commit found."
|
|
||||||
else
|
|
||||||
PREVIOUS_SHA=$(git rev-parse "$PREVIOUS_SHA^1")
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
if [[ -z "$PREVIOUS_SHA" ]]; then
|
|
||||||
echo "::error::Unable to locate a previous commit."
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
PREVIOUS_SHA=$INPUT_BASE_SHA
|
|
||||||
|
|
||||||
if [[ "$IS_TAG" == "true" ]]; then
|
|
||||||
TARGET_BRANCH=$(git describe --tags "$PREVIOUS_SHA")
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "::debug::Target branch $TARGET_BRANCH..."
|
|
||||||
echo "::debug::Current branch $CURRENT_BRANCH..."
|
|
||||||
|
|
||||||
echo "::debug::Verifying the previous commit SHA: $PREVIOUS_SHA"
|
|
||||||
git rev-parse --quiet --verify "$PREVIOUS_SHA^{commit}" 1>/dev/null 2>&1 && exit_status=$? || exit_status=$?
|
|
||||||
|
|
||||||
if [[ $exit_status -ne 0 ]]; then
|
|
||||||
echo "::error::Unable to locate the previous sha: $PREVIOUS_SHA"
|
|
||||||
echo "::error::Please verify that the previous sha commit is valid, and increase the fetch_depth to a number higher than $INPUT_FETCH_DEPTH."
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
echo "Running on a pull request event..."
|
|
||||||
TARGET_BRANCH=$GITHUB_EVENT_PULL_REQUEST_BASE_REF
|
|
||||||
CURRENT_BRANCH=$GITHUB_EVENT_PULL_REQUEST_HEAD_REF
|
|
||||||
|
|
||||||
if [[ "$INPUT_SINCE_LAST_REMOTE_COMMIT" == "true" ]]; then
|
|
||||||
TARGET_BRANCH=$CURRENT_BRANCH
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ "$IS_SHALLOW" == "true" ]]; then
|
|
||||||
echo "Fetching remote refs..."
|
|
||||||
|
|
||||||
# shellcheck disable=SC2086
|
|
||||||
if git fetch $EXTRA_ARGS -u --progress origin pull/"$GITHUB_EVENT_PULL_REQUEST_NUMBER"/head:"$CURRENT_BRANCH" 1>/dev/null; then
|
|
||||||
echo "First fetch succeeded"
|
|
||||||
else
|
|
||||||
echo "First fetch failed, falling back to second fetch"
|
|
||||||
# shellcheck disable=SC2086
|
|
||||||
git fetch $EXTRA_ARGS -u --progress --deepen="$INPUT_FETCH_DEPTH" origin +refs/heads/"$CURRENT_BRANCH"*:refs/remotes/origin/"$CURRENT_BRANCH"* 1>/dev/null || true
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ "$INPUT_SINCE_LAST_REMOTE_COMMIT" != "true" ]]; then
|
|
||||||
echo "::debug::Fetching remote target branch..."
|
|
||||||
# shellcheck disable=SC2086
|
|
||||||
git fetch $EXTRA_ARGS -u --progress --deepen="$INPUT_FETCH_DEPTH" origin +refs/heads/"$TARGET_BRANCH":refs/remotes/origin/"$TARGET_BRANCH" 1>/dev/null
|
|
||||||
git branch --track "$TARGET_BRANCH" origin/"$TARGET_BRANCH" 1>/dev/null || true
|
|
||||||
fi
|
|
||||||
|
|
||||||
if git submodule status &>/dev/null; then
|
|
||||||
# shellcheck disable=SC2086
|
|
||||||
git submodule foreach git fetch $EXTRA_ARGS -u --progress --deepen="$INPUT_FETCH_DEPTH" || true
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "::debug::Getting HEAD SHA..."
|
|
||||||
if [[ -n "$INPUT_UNTIL" ]]; then
|
|
||||||
echo "::debug::Getting HEAD SHA for '$INPUT_UNTIL'..."
|
|
||||||
CURRENT_SHA=$(git log -1 --format="%H" --date=local --until="$INPUT_UNTIL") && exit_status=$? || exit_status=$?
|
|
||||||
|
|
||||||
if [[ $exit_status -ne 0 ]]; then
|
|
||||||
echo "::error::Invalid until date: $INPUT_UNTIL"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
if [[ -z $INPUT_SHA ]]; then
|
|
||||||
CURRENT_SHA=$(git rev-list -n 1 HEAD) && exit_status=$? || exit_status=$?
|
|
||||||
else
|
|
||||||
CURRENT_SHA=$INPUT_SHA; exit_status=$?
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "::debug::Verifying the current commit SHA: $CURRENT_SHA"
|
|
||||||
git rev-parse --quiet --verify "$CURRENT_SHA^{commit}" 1>/dev/null 2>&1 && exit_status=$? || exit_status=$?
|
|
||||||
|
|
||||||
if [[ $exit_status -ne 0 ]]; then
|
|
||||||
echo "::error::Unable to locate the current sha: $CURRENT_SHA"
|
|
||||||
echo "::error::Please verify that current sha is valid, and increase the fetch_depth to a number higher than $INPUT_FETCH_DEPTH."
|
|
||||||
exit 1
|
|
||||||
else
|
|
||||||
echo "::debug::Current SHA: $CURRENT_SHA"
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ -z $INPUT_BASE_SHA ]]; then
|
|
||||||
if [[ "$INPUT_SINCE_LAST_REMOTE_COMMIT" == "true" ]]; then
|
|
||||||
PREVIOUS_SHA=$GITHUB_EVENT_BEFORE
|
|
||||||
|
|
||||||
if ! git rev-parse --quiet --verify "$PREVIOUS_SHA^{commit}" 1>/dev/null 2>&1; then
|
|
||||||
PREVIOUS_SHA=$GITHUB_EVENT_PULL_REQUEST_BASE_SHA
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
PREVIOUS_SHA=$(git rev-parse origin/"$TARGET_BRANCH") && exit_status=$? || exit_status=$?
|
|
||||||
|
|
||||||
if [[ "$IS_SHALLOW" == "true" ]]; then
|
|
||||||
# check if the merge base is in the local history
|
|
||||||
if ! git diff --name-only --ignore-submodules=all "$PREVIOUS_SHA$DIFF$CURRENT_SHA" 1>/dev/null 2>&1; then
|
|
||||||
echo "::debug::Merge base is not in the local history, fetching remote target branch..."
|
|
||||||
# Fetch more of the target branch history until the merge base is found
|
|
||||||
for i in {1..10}; do
|
|
||||||
# shellcheck disable=SC2086
|
|
||||||
git fetch $EXTRA_ARGS -u --progress --deepen="$INPUT_FETCH_DEPTH" origin +refs/heads/"$TARGET_BRANCH":refs/remotes/origin/"$TARGET_BRANCH" 1>/dev/null
|
|
||||||
if git diff --name-only --ignore-submodules=all "$PREVIOUS_SHA$DIFF$CURRENT_SHA" 1>/dev/null 2>&1; then
|
|
||||||
break
|
|
||||||
fi
|
|
||||||
echo "::debug::Merge base is not in the local history, fetching remote target branch again..."
|
|
||||||
echo "::debug::Attempt $i/10"
|
|
||||||
done
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ -z "$PREVIOUS_SHA" || "$PREVIOUS_SHA" == "$CURRENT_SHA" ]]; then
|
|
||||||
PREVIOUS_SHA=$GITHUB_EVENT_PULL_REQUEST_BASE_SHA && exit_status=$? || exit_status=$?
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "::debug::Previous SHA: $PREVIOUS_SHA"
|
|
||||||
else
|
|
||||||
PREVIOUS_SHA=$INPUT_BASE_SHA && exit_status=$? || exit_status=$?
|
|
||||||
fi
|
|
||||||
|
|
||||||
if ! git diff --name-only --ignore-submodules=all "$PREVIOUS_SHA$DIFF$CURRENT_SHA" 1>/dev/null 2>&1; then
|
|
||||||
DIFF=".."
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "::debug::Target branch: $TARGET_BRANCH"
|
|
||||||
echo "::debug::Current branch: $CURRENT_BRANCH"
|
|
||||||
|
|
||||||
echo "::debug::Verifying the previous commit SHA: $PREVIOUS_SHA"
|
|
||||||
git rev-parse --quiet --verify "$PREVIOUS_SHA^{commit}" 1>/dev/null 2>&1 && exit_status=$? || exit_status=$?
|
|
||||||
|
|
||||||
if [[ $exit_status -ne 0 ]]; then
|
|
||||||
echo "::error::Unable to locate the previous sha: $PREVIOUS_SHA"
|
|
||||||
echo "::error::Please verify that the previous sha is valid, and increase the fetch_depth to a number higher than $INPUT_FETCH_DEPTH."
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
if ! git diff --name-only --ignore-submodules=all "$PREVIOUS_SHA$DIFF$CURRENT_SHA" 1>/dev/null 2>&1; then
|
|
||||||
echo "::error::Unable to determine a difference between $PREVIOUS_SHA$DIFF$CURRENT_SHA"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ "$PREVIOUS_SHA" == "$CURRENT_SHA" && "$INITIAL_COMMIT" == "false" ]]; then
|
|
||||||
echo "::error::Similar commit hashes detected: previous sha: $PREVIOUS_SHA is equivalent to the current sha: $CURRENT_SHA."
|
|
||||||
echo "::error::Please verify that both commits are valid, and increase the fetch_depth to a number higher than $INPUT_FETCH_DEPTH."
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
|
||||||
echo "::set-output name=target_branch::$TARGET_BRANCH"
|
|
||||||
echo "::set-output name=current_branch::$CURRENT_BRANCH"
|
|
||||||
echo "::set-output name=previous_sha::$PREVIOUS_SHA"
|
|
||||||
echo "::set-output name=current_sha::$CURRENT_SHA"
|
|
||||||
echo "::set-output name=diff::$DIFF"
|
|
||||||
else
|
|
||||||
cat <<EOF >> "$GITHUB_OUTPUT"
|
|
||||||
target_branch=$TARGET_BRANCH
|
|
||||||
current_branch=$CURRENT_BRANCH
|
|
||||||
previous_sha=$PREVIOUS_SHA
|
|
||||||
current_sha=$CURRENT_SHA
|
|
||||||
diff=$DIFF
|
|
||||||
EOF
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "::endgroup::"
|
|
7609
dist/index.js
generated
vendored
Normal file
7609
dist/index.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1
dist/index.js.map
generated
vendored
Normal file
1
dist/index.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
200
dist/licenses.txt
generated
vendored
Normal file
200
dist/licenses.txt
generated
vendored
Normal file
@ -0,0 +1,200 @@
|
|||||||
|
@actions/core
|
||||||
|
MIT
|
||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright 2019 GitHub
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
|
|
||||||
|
@actions/exec
|
||||||
|
MIT
|
||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright 2019 GitHub
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
|
|
||||||
|
@actions/glob
|
||||||
|
MIT
|
||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright 2019 GitHub
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
|
|
||||||
|
@actions/http-client
|
||||||
|
MIT
|
||||||
|
Actions Http Client for Node.js
|
||||||
|
|
||||||
|
Copyright (c) GitHub, Inc.
|
||||||
|
|
||||||
|
All rights reserved.
|
||||||
|
|
||||||
|
MIT License
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
|
||||||
|
associated documentation files (the "Software"), to deal in the Software without restriction,
|
||||||
|
including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
|
||||||
|
and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
|
||||||
|
subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
|
||||||
|
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
|
||||||
|
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||||
|
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||||
|
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
|
|
||||||
|
|
||||||
|
@actions/io
|
||||||
|
MIT
|
||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright 2019 GitHub
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
|
|
||||||
|
balanced-match
|
||||||
|
MIT
|
||||||
|
(MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||||
|
this software and associated documentation files (the "Software"), to deal in
|
||||||
|
the Software without restriction, including without limitation the rights to
|
||||||
|
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||||
|
of the Software, and to permit persons to whom the Software is furnished to do
|
||||||
|
so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
|
|
||||||
|
|
||||||
|
brace-expansion
|
||||||
|
MIT
|
||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
|
|
||||||
|
|
||||||
|
concat-map
|
||||||
|
MIT
|
||||||
|
This software is released under the MIT license:
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||||
|
this software and associated documentation files (the "Software"), to deal in
|
||||||
|
the Software without restriction, including without limitation the rights to
|
||||||
|
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||||
|
the Software, and to permit persons to whom the Software is furnished to do so,
|
||||||
|
subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||||
|
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||||
|
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||||
|
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||||
|
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
|
|
||||||
|
|
||||||
|
minimatch
|
||||||
|
ISC
|
||||||
|
The ISC License
|
||||||
|
|
||||||
|
Copyright (c) Isaac Z. Schlueter and Contributors
|
||||||
|
|
||||||
|
Permission to use, copy, modify, and/or distribute this software for any
|
||||||
|
purpose with or without fee is hereby granted, provided that the above
|
||||||
|
copyright notice and this permission notice appear in all copies.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||||
|
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||||
|
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||||
|
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||||
|
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||||
|
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||||
|
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
|
||||||
|
|
||||||
|
tunnel
|
||||||
|
MIT
|
||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2012 Koichi Kobayashi
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
|
|
||||||
|
|
||||||
|
uuid
|
||||||
|
MIT
|
||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2010-2020 Robert Kieffer and other contributors
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
1
dist/sourcemap-register.js
generated
vendored
Normal file
1
dist/sourcemap-register.js
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
@ -1,528 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
|
|
||||||
set -euo pipefail
|
|
||||||
|
|
||||||
INPUT_SEPARATOR="${INPUT_SEPARATOR//'%'/'%25'}"
|
|
||||||
INPUT_SEPARATOR="${INPUT_SEPARATOR//'.'/'%2E'}"
|
|
||||||
INPUT_SEPARATOR="${INPUT_SEPARATOR//$'\n'/'%0A'}"
|
|
||||||
INPUT_SEPARATOR="${INPUT_SEPARATOR//$'\r'/'%0D'}"
|
|
||||||
|
|
||||||
GITHUB_OUTPUT=${GITHUB_OUTPUT:-""}
|
|
||||||
DIFF=$INPUT_DIFF
|
|
||||||
|
|
||||||
OUTPUTS_EXTENSION="txt"
|
|
||||||
|
|
||||||
if [[ "$INPUT_JSON" == "true" ]]; then
|
|
||||||
OUTPUTS_EXTENSION="json"
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ $INPUT_QUOTEPATH == "false" ]]; then
|
|
||||||
git config --global core.quotepath off
|
|
||||||
else
|
|
||||||
git config --global core.quotepath on
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ -n $INPUT_DIFF_RELATIVE ]]; then
|
|
||||||
git config --global diff.relative "$INPUT_DIFF_RELATIVE"
|
|
||||||
fi
|
|
||||||
|
|
||||||
function get_dirname_max_depth() {
|
|
||||||
while IFS='' read -r line; do
|
|
||||||
local dir="$line"
|
|
||||||
local dirs=()
|
|
||||||
IFS='/' read -ra dirs <<<"$dir"
|
|
||||||
|
|
||||||
local max_depth=${#dirs[@]}
|
|
||||||
local input_dir_names_max_depth="${INPUT_DIR_NAMES_MAX_DEPTH:-$max_depth}"
|
|
||||||
|
|
||||||
if [[ -n "$input_dir_names_max_depth" && "$input_dir_names_max_depth" -lt "$max_depth" ]]; then
|
|
||||||
max_depth="$input_dir_names_max_depth"
|
|
||||||
fi
|
|
||||||
|
|
||||||
local output="${dirs[0]}"
|
|
||||||
local depth="1"
|
|
||||||
|
|
||||||
while [ "$depth" -lt "$max_depth" ]; do
|
|
||||||
output="$output/${dirs[${depth}]}"
|
|
||||||
depth=$((depth + 1))
|
|
||||||
done
|
|
||||||
|
|
||||||
if [[ "$INPUT_DIR_NAMES_EXCLUDE_ROOT" == "true" && "$output" == "." ]]; then
|
|
||||||
continue
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "$output"
|
|
||||||
done < <(uniq)
|
|
||||||
}
|
|
||||||
|
|
||||||
function json_output() {
|
|
||||||
local jq_args="-sR"
|
|
||||||
if [[ "$INPUT_JSON_RAW_FORMAT" == "true" ]]; then
|
|
||||||
jq_args="$jq_args -r"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# shellcheck disable=SC2086
|
|
||||||
jq $jq_args 'split("\n") | map(select(. != "")) | @json' | sed -r 's/^"|"$//g' | tr -s /
|
|
||||||
}
|
|
||||||
|
|
||||||
function get_diff() {
|
|
||||||
local base="$1"
|
|
||||||
local sha="$2"
|
|
||||||
local filter="$3"
|
|
||||||
|
|
||||||
while IFS='' read -r sub; do
|
|
||||||
sub_commit_pre="$(git diff "$base" "$sha" -- "$sub" | { grep '^[-]Subproject commit' || true; } | awk '{print $3}')" && exit_status=$? || exit_status=$?
|
|
||||||
if [[ $exit_status -ne 0 ]]; then
|
|
||||||
echo "::warning::Failed to get previous commit for submodule ($sub) between: $base $sha. Please ensure that submodules are initialized and up to date. See: https://github.com/actions/checkout#usage" >&2
|
|
||||||
fi
|
|
||||||
|
|
||||||
sub_commit_cur="$(git diff "$base" "$sha" -- "$sub" | { grep '^[+]Subproject commit' || true; } | awk '{print $3}')" && exit_status=$? || exit_status=$?
|
|
||||||
if [[ $exit_status -ne 0 ]]; then
|
|
||||||
echo "::warning::Failed to get current commit for submodule ($sub) between: $base $sha. Please ensure that submodules are initialized and up to date. See: https://github.com/actions/checkout#usage" >&2
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ -n "$sub_commit_cur" ]; then
|
|
||||||
(
|
|
||||||
cd "$sub" && (
|
|
||||||
# the strange magic number is a hardcoded "empty tree" commit sha
|
|
||||||
git diff --diff-filter="$filter" --name-only --ignore-submodules=all "${sub_commit_pre:-4b825dc642cb6eb9a060e54bf8d69288fbee4904}" "${sub_commit_cur}" | awk -v r="$sub" '{ print "" r "/" $0}' 2>/dev/null
|
|
||||||
)
|
|
||||||
) || {
|
|
||||||
echo "::warning::Failed to get changed files for submodule ($sub) between: ${sub_commit_pre:-4b825dc642cb6eb9a060e54bf8d69288fbee4904} ${sub_commit_cur}. Please ensure that submodules are initialized and up to date. See: https://github.com/actions/checkout#usage" >&2
|
|
||||||
}
|
|
||||||
fi
|
|
||||||
done < <(git submodule status --recursive | grep -v "^-" | awk '{print $2}')
|
|
||||||
|
|
||||||
if [[ "$filter" == "D" ]]; then
|
|
||||||
while read -r sub; do
|
|
||||||
echo "$sub"
|
|
||||||
done < <(git submodule status --recursive | grep -e "^-" | awk '{print $2}')
|
|
||||||
fi
|
|
||||||
|
|
||||||
git diff --diff-filter="$filter" --name-only --ignore-submodules=all "$base$DIFF$sha" && exit_status=$? || exit_status=$?
|
|
||||||
|
|
||||||
if [[ $exit_status -ne 0 ]]; then
|
|
||||||
echo "::error::Failed to get changed files between: $base$DIFF$sha" >&2
|
|
||||||
return 1
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
function get_renames() {
|
|
||||||
local base="$1"
|
|
||||||
local sha="$2"
|
|
||||||
|
|
||||||
while IFS='' read -r sub; do
|
|
||||||
sub_commit_pre="$(git diff "$base" "$sha" -- "$sub" | { grep '^[-]Subproject commit' || true; } | awk '{print $3}')" && exit_status=$? || exit_status=$?
|
|
||||||
if [[ $exit_status -ne 0 ]]; then
|
|
||||||
echo "::warning::Failed to get previous commit for submodule ($sub) between: $base $sha. Please ensure that submodules are initialized and up to date. See: https://github.com/actions/checkout#usage" >&2
|
|
||||||
fi
|
|
||||||
|
|
||||||
sub_commit_cur="$(git diff "$base" "$sha" -- "$sub" | { grep '^[+]Subproject commit' || true; } | awk '{print $3}')" && exit_status=$? || exit_status=$?
|
|
||||||
if [[ $exit_status -ne 0 ]]; then
|
|
||||||
echo "::warning::Failed to get current commit for submodule ($sub) between: $base $sha. Please ensure that submodules are initialized and up to date. See: https://github.com/actions/checkout#usage" >&2
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ -n "$sub_commit_cur" ]; then
|
|
||||||
(
|
|
||||||
cd "$sub" && (
|
|
||||||
# the strange magic number is a hardcoded "empty tree" commit sha
|
|
||||||
get_renames "${sub_commit_pre:-4b825dc642cb6eb9a060e54bf8d69288fbee4904}" "${sub_commit_cur}" | awk -v r="$sub" '{ print "" r "/" $0}'
|
|
||||||
)
|
|
||||||
) || {
|
|
||||||
echo "::warning::Failed to get renamed files for submodule ($sub) between: ${sub_commit_pre:-4b825dc642cb6eb9a060e54bf8d69288fbee4904} ${sub_commit_cur}. Please ensure that submodules are initialized and up to date. See: https://github.com/actions/checkout#usage" >&2
|
|
||||||
}
|
|
||||||
fi
|
|
||||||
done < <(git submodule | awk '{print $2}')
|
|
||||||
|
|
||||||
git diff --name-status --ignore-submodules=all --diff-filter=R "$base$DIFF$sha" | { grep -E "^R" || true; } | awk -F '\t' -v d="$INPUT_OLD_NEW_SEPARATOR" '{print $2d$3}' && exit_status=$? || exit_status=$?
|
|
||||||
|
|
||||||
if [[ $exit_status -ne 0 ]]; then
|
|
||||||
echo "::error::Failed to get renamed files between: $base → $sha" >&2
|
|
||||||
return 1
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
echo "::group::changed-files"
|
|
||||||
|
|
||||||
if [[ -n $INPUT_PATH ]]; then
|
|
||||||
REPO_DIR="$GITHUB_WORKSPACE/$INPUT_PATH"
|
|
||||||
|
|
||||||
echo "Resolving repository path: $REPO_DIR"
|
|
||||||
if [[ ! -d "$REPO_DIR" ]]; then
|
|
||||||
echo "::error::Invalid repository path: $REPO_DIR"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
cd "$REPO_DIR"
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "Retrieving changes between $INPUT_PREVIOUS_SHA ($INPUT_TARGET_BRANCH) → $INPUT_CURRENT_SHA ($INPUT_CURRENT_BRANCH)"
|
|
||||||
|
|
||||||
if [[ "$INPUT_HAS_CUSTOM_PATTERNS" == "false" || -z "$INPUT_FILES_PATTERN_FILE" ]]; then
|
|
||||||
if [[ "$INPUT_JSON" == "false" ]]; then
|
|
||||||
if [[ "$INPUT_DIR_NAMES" == "true" ]]; then
|
|
||||||
ADDED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" A | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
COPIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" C | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
DELETED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" D | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" M | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
RENAMED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" R | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
TYPE_CHANGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" T | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
UNMERGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" U | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
UNKNOWN=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" X | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
ALL_CHANGED_AND_MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "*ACDMRTUX" | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
ALL_CHANGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "ACMR" | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
ALL_MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "ACMRD" | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
else
|
|
||||||
ADDED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" A | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
COPIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" C | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
DELETED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" D | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" M | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
RENAMED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" R | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
TYPE_CHANGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" T | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
UNMERGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" U | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
UNKNOWN=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" X | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
ALL_CHANGED_AND_MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "*ACDMRTUX" | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
ALL_CHANGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "ACMR" | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
ALL_MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "ACMRD" | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
fi
|
|
||||||
if [[ $INPUT_INCLUDE_ALL_OLD_NEW_RENAMED_FILES == "true" ]]; then
|
|
||||||
if [[ "$INPUT_DIR_NAMES" == "true" ]]; then
|
|
||||||
ALL_OLD_NEW_RENAMED=$(get_renames "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_OLD_NEW_FILES_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
else
|
|
||||||
ALL_OLD_NEW_RENAMED=$(get_renames "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" | awk -v d="$INPUT_OLD_NEW_FILES_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
if [[ "$INPUT_DIR_NAMES" == "true" ]]; then
|
|
||||||
ADDED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" A | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
|
||||||
COPIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" C | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
|
||||||
DELETED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" D | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
|
||||||
MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" M | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
|
||||||
RENAMED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" R | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
|
||||||
TYPE_CHANGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" T | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
|
||||||
UNMERGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" U | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
|
||||||
UNKNOWN=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" X | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
|
||||||
ALL_CHANGED_AND_MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "*ACDMRTUX" | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
|
||||||
ALL_CHANGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "ACMR" | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
|
||||||
ALL_MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "ACMRD" | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
|
||||||
else
|
|
||||||
ADDED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" A | json_output)
|
|
||||||
COPIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" C | json_output)
|
|
||||||
DELETED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" D | json_output)
|
|
||||||
MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" M | json_output)
|
|
||||||
RENAMED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" R | json_output)
|
|
||||||
TYPE_CHANGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" T | json_output)
|
|
||||||
UNMERGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" U | json_output)
|
|
||||||
UNKNOWN=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" X | json_output)
|
|
||||||
ALL_CHANGED_AND_MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "*ACDMRTUX" | json_output)
|
|
||||||
ALL_CHANGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "ACMR" | json_output)
|
|
||||||
ALL_MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "ACMRD" | json_output)
|
|
||||||
fi
|
|
||||||
if [[ $INPUT_INCLUDE_ALL_OLD_NEW_RENAMED_FILES == "true" ]]; then
|
|
||||||
if [[ "$INPUT_DIR_NAMES" == "true" ]]; then
|
|
||||||
ALL_OLD_NEW_RENAMED=$(get_renames "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
|
||||||
else
|
|
||||||
ALL_OLD_NEW_RENAMED=$(get_renames "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" | json_output)
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
ADDED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" A | { grep -x -E -f "$INPUT_FILES_PATTERN_FILE" || true; } | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
COPIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" C | { grep -x -E -f "$INPUT_FILES_PATTERN_FILE" || true; } | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
DELETED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" D | { grep -x -E -f "$INPUT_FILES_PATTERN_FILE" || true; } | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" M | { grep -x -E -f "$INPUT_FILES_PATTERN_FILE" || true; } | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
RENAMED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" R | { grep -x -E -f "$INPUT_FILES_PATTERN_FILE" || true; } | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
TYPE_CHANGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" T | { grep -x -E -f "$INPUT_FILES_PATTERN_FILE" || true; } | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
UNMERGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" U | { grep -x -E -f "$INPUT_FILES_PATTERN_FILE" || true; } | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
UNKNOWN=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" X | { grep -x -E -f "$INPUT_FILES_PATTERN_FILE" || true; } | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
ALL_CHANGED_AND_MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "*ACDMRTUX" | { grep -x -E -f "$INPUT_FILES_PATTERN_FILE" || true; } | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
ALL_CHANGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "ACMR" | { grep -x -E -f "$INPUT_FILES_PATTERN_FILE" || true; } | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
ALL_MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "ACMRD" | { grep -x -E -f "$INPUT_FILES_PATTERN_FILE" || true; } | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
if [[ $INPUT_INCLUDE_ALL_OLD_NEW_RENAMED_FILES == "true" ]]; then
|
|
||||||
ALL_OLD_NEW_RENAMED=$(get_renames "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" | { grep -w -E -f "$INPUT_FILES_PATTERN_FILE" || true; } | awk -v d="$INPUT_OLD_NEW_FILES_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
fi
|
|
||||||
|
|
||||||
ALL_OTHER_CHANGED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "ACMR" | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
|
|
||||||
if [[ -n "${ALL_CHANGED}" ]]; then
|
|
||||||
echo "::debug::Matching changed files: ${ALL_CHANGED}"
|
|
||||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
|
||||||
echo "::set-output name=any_changed::true"
|
|
||||||
else
|
|
||||||
echo "any_changed=true" >>"$GITHUB_OUTPUT"
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
|
||||||
echo "::set-output name=any_changed::false"
|
|
||||||
else
|
|
||||||
echo "any_changed=false" >>"$GITHUB_OUTPUT"
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
OTHER_CHANGED=""
|
|
||||||
|
|
||||||
if [[ -n $ALL_OTHER_CHANGED ]]; then
|
|
||||||
if [[ -n "$ALL_CHANGED" ]]; then
|
|
||||||
OTHER_CHANGED=$(echo "${ALL_OTHER_CHANGED}|${ALL_CHANGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | sort | uniq -u | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
else
|
|
||||||
OTHER_CHANGED=$ALL_OTHER_CHANGED
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ "$INPUT_JSON" == "false" ]]; then
|
|
||||||
if [[ "$INPUT_DIR_NAMES" == "true" ]]; then
|
|
||||||
OTHER_CHANGED=$(echo "${OTHER_CHANGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
else
|
|
||||||
OTHER_CHANGED=$(echo "${OTHER_CHANGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
if [[ "$INPUT_DIR_NAMES" == "true" ]]; then
|
|
||||||
OTHER_CHANGED=$(echo "${OTHER_CHANGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
|
||||||
else
|
|
||||||
OTHER_CHANGED=$(echo "${OTHER_CHANGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ -n "${OTHER_CHANGED}" && "${OTHER_CHANGED}" != "[]" ]]; then
|
|
||||||
echo "::debug::Non Matching changed files: ${OTHER_CHANGED}"
|
|
||||||
|
|
||||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
|
||||||
echo "::set-output name=only_changed::false"
|
|
||||||
echo "::set-output name=other_changed_files::$OTHER_CHANGED"
|
|
||||||
else
|
|
||||||
echo "only_changed=false" >>"$GITHUB_OUTPUT"
|
|
||||||
echo "other_changed_files=$OTHER_CHANGED" >>"$GITHUB_OUTPUT"
|
|
||||||
fi
|
|
||||||
|
|
||||||
elif [[ -n "${ALL_CHANGED}" ]]; then
|
|
||||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
|
||||||
echo "::set-output name=only_changed::true"
|
|
||||||
else
|
|
||||||
echo "only_changed=true" >>"$GITHUB_OUTPUT"
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
ALL_OTHER_MODIFIED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" "ACMRD" | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
|
|
||||||
if [[ -n "${ALL_MODIFIED}" ]]; then
|
|
||||||
echo "::debug::Matching modified files: ${ALL_MODIFIED}"
|
|
||||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
|
||||||
echo "::set-output name=any_modified::true"
|
|
||||||
else
|
|
||||||
echo "any_modified=true" >>"$GITHUB_OUTPUT"
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
|
||||||
echo "::set-output name=any_modified::false"
|
|
||||||
else
|
|
||||||
echo "any_modified=false" >>"$GITHUB_OUTPUT"
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
OTHER_MODIFIED=""
|
|
||||||
|
|
||||||
if [[ -n $ALL_OTHER_MODIFIED ]]; then
|
|
||||||
if [[ -n "$ALL_MODIFIED" ]]; then
|
|
||||||
OTHER_MODIFIED=$(echo "${ALL_OTHER_MODIFIED}|${ALL_MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | sort | uniq -u | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
else
|
|
||||||
OTHER_MODIFIED=$ALL_OTHER_MODIFIED
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ "$INPUT_JSON" == "false" ]]; then
|
|
||||||
if [[ "$INPUT_DIR_NAMES" == "true" ]]; then
|
|
||||||
OTHER_MODIFIED=$(echo "${OTHER_MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
else
|
|
||||||
OTHER_MODIFIED=$(echo "${OTHER_MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
if [[ "$INPUT_DIR_NAMES" == "true" ]]; then
|
|
||||||
OTHER_MODIFIED=$(echo "${OTHER_MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
|
||||||
else
|
|
||||||
OTHER_MODIFIED=$(echo "${OTHER_MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ -n "${OTHER_MODIFIED}" && "$OTHER_MODIFIED" != "[]" ]]; then
|
|
||||||
echo "::debug::Non Matching modified files: ${OTHER_MODIFIED}"
|
|
||||||
|
|
||||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
|
||||||
echo "::set-output name=only_modified::false"
|
|
||||||
echo "::set-output name=other_modified_files::$OTHER_MODIFIED"
|
|
||||||
else
|
|
||||||
echo "only_modified=false" >>"$GITHUB_OUTPUT"
|
|
||||||
echo "other_modified_files=$OTHER_MODIFIED" >>"$GITHUB_OUTPUT"
|
|
||||||
fi
|
|
||||||
elif [[ -n "${ALL_MODIFIED}" ]]; then
|
|
||||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
|
||||||
echo "::set-output name=only_modified::true"
|
|
||||||
else
|
|
||||||
echo "only_modified=true" >>"$GITHUB_OUTPUT"
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
ALL_OTHER_DELETED=$(get_diff "$INPUT_PREVIOUS_SHA" "$INPUT_CURRENT_SHA" D | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
|
|
||||||
if [[ -n "${DELETED}" ]]; then
|
|
||||||
echo "::debug::Matching deleted files: ${DELETED}"
|
|
||||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
|
||||||
echo "::set-output name=any_deleted::true"
|
|
||||||
else
|
|
||||||
echo "any_deleted=true" >>"$GITHUB_OUTPUT"
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
|
||||||
echo "::set-output name=any_deleted::false"
|
|
||||||
else
|
|
||||||
echo "any_deleted=false" >>"$GITHUB_OUTPUT"
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
OTHER_DELETED=""
|
|
||||||
|
|
||||||
if [[ -n $ALL_OTHER_DELETED ]]; then
|
|
||||||
if [[ -n "$DELETED" ]]; then
|
|
||||||
OTHER_DELETED=$(echo "${ALL_OTHER_DELETED}|${DELETED}" | awk '{gsub(/\|/,"\n"); print $0;}' | sort | uniq -u | awk -v d="|" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
else
|
|
||||||
OTHER_DELETED=$ALL_OTHER_DELETED
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ "$INPUT_JSON" == "false" ]]; then
|
|
||||||
if [[ "$INPUT_DIR_NAMES" == "true" ]]; then
|
|
||||||
OTHER_DELETED=$(echo "${OTHER_DELETED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
else
|
|
||||||
OTHER_DELETED=$(echo "${OTHER_DELETED}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
if [[ "$INPUT_DIR_NAMES" == "true" ]]; then
|
|
||||||
OTHER_DELETED=$(echo "${OTHER_DELETED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
|
||||||
else
|
|
||||||
OTHER_DELETED=$(echo "${OTHER_DELETED}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ -n "${OTHER_DELETED}" && "${OTHER_DELETED}" != "[]" ]]; then
|
|
||||||
echo "::debug::Non Matching deleted files: ${OTHER_DELETED}"
|
|
||||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
|
||||||
echo "::set-output name=only_deleted::false"
|
|
||||||
echo "::set-output name=other_deleted_files::$OTHER_DELETED"
|
|
||||||
else
|
|
||||||
echo "only_deleted=false" >>"$GITHUB_OUTPUT"
|
|
||||||
echo "other_deleted_files=$OTHER_DELETED" >>"$GITHUB_OUTPUT"
|
|
||||||
fi
|
|
||||||
elif [[ -n "${DELETED}" ]]; then
|
|
||||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
|
||||||
echo "::set-output name=only_deleted::true"
|
|
||||||
else
|
|
||||||
echo "only_deleted=true" >>"$GITHUB_OUTPUT"
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
if [[ "$INPUT_JSON" == "false" ]]; then
|
|
||||||
if [[ "$INPUT_DIR_NAMES" == "true" ]]; then
|
|
||||||
ADDED=$(echo "${ADDED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
COPIED=$(echo "${COPIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
DELETED=$(echo "${DELETED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
MODIFIED=$(echo "${MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
RENAMED=$(echo "${RENAMED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
TYPE_CHANGED=$(echo "${TYPE_CHANGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
UNMERGED=$(echo "${UNMERGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
UNKNOWN=$(echo "${UNKNOWN}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
ALL_CHANGED_AND_MODIFIED=$(echo "${ALL_CHANGED_AND_MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
ALL_CHANGED=$(echo "${ALL_CHANGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
ALL_MODIFIED=$(echo "${ALL_MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
else
|
|
||||||
ADDED=$(echo "${ADDED}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
COPIED=$(echo "${COPIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
DELETED=$(echo "${DELETED}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
MODIFIED=$(echo "${MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
RENAMED=$(echo "${RENAMED}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
TYPE_CHANGED=$(echo "${TYPE_CHANGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
UNMERGED=$(echo "${UNMERGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
UNKNOWN=$(echo "${UNKNOWN}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
ALL_CHANGED_AND_MODIFIED=$(echo "${ALL_CHANGED_AND_MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
ALL_CHANGED=$(echo "${ALL_CHANGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
ALL_MODIFIED=$(echo "${ALL_MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | awk -v d="$INPUT_SEPARATOR" '{s=(NR==1?s:s d)$0}END{print s}')
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
if [[ "$INPUT_DIR_NAMES" == "true" ]]; then
|
|
||||||
ADDED=$(echo "${ADDED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
|
||||||
COPIED=$(echo "${COPIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
|
||||||
DELETED=$(echo "${DELETED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
|
||||||
MODIFIED=$(echo "${MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
|
||||||
RENAMED=$(echo "${RENAMED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
|
||||||
TYPE_CHANGED=$(echo "${TYPE_CHANGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
|
||||||
UNMERGED=$(echo "${UNMERGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
|
||||||
UNKNOWN=$(echo "${UNKNOWN}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
|
||||||
ALL_CHANGED_AND_MODIFIED=$(echo "${ALL_CHANGED_AND_MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
|
||||||
ALL_CHANGED=$(echo "${ALL_CHANGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
|
||||||
ALL_MODIFIED=$(echo "${ALL_MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | xargs -I {} dirname {} | get_dirname_max_depth | uniq | json_output)
|
|
||||||
else
|
|
||||||
ADDED=$(echo "${ADDED}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
|
||||||
COPIED=$(echo "${COPIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
|
||||||
DELETED=$(echo "${DELETED}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
|
||||||
MODIFIED=$(echo "${MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
|
||||||
RENAMED=$(echo "${RENAMED}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
|
||||||
TYPE_CHANGED=$(echo "${TYPE_CHANGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
|
||||||
UNMERGED=$(echo "${UNMERGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
|
||||||
UNKNOWN=$(echo "${UNKNOWN}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
|
||||||
ALL_CHANGED_AND_MODIFIED=$(echo "${ALL_CHANGED_AND_MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
|
||||||
ALL_CHANGED=$(echo "${ALL_CHANGED}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
|
||||||
ALL_MODIFIED=$(echo "${ALL_MODIFIED}" | awk '{gsub(/\|/,"\n"); print $0;}' | json_output)
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "::debug::Added files: $ADDED"
|
|
||||||
echo "::debug::Copied files: $COPIED"
|
|
||||||
echo "::debug::Deleted files: $DELETED"
|
|
||||||
echo "::debug::Modified files: $MODIFIED"
|
|
||||||
echo "::debug::Renamed files: $RENAMED"
|
|
||||||
echo "::debug::Type Changed files: $TYPE_CHANGED"
|
|
||||||
echo "::debug::Unmerged files: $UNMERGED"
|
|
||||||
echo "::debug::Unknown files: $UNKNOWN"
|
|
||||||
echo "::debug::All changed and modified files: $ALL_CHANGED_AND_MODIFIED"
|
|
||||||
echo "::debug::All changed files: $ALL_CHANGED"
|
|
||||||
echo "::debug::All modified files: $ALL_MODIFIED"
|
|
||||||
if [[ $INPUT_INCLUDE_ALL_OLD_NEW_RENAMED_FILES == "true" ]]; then
|
|
||||||
echo "::debug::All old & new renamed files: $ALL_OLD_NEW_RENAMED"
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
|
||||||
echo "::set-output name=added_files::$ADDED"
|
|
||||||
echo "::set-output name=copied_files::$COPIED"
|
|
||||||
echo "::set-output name=deleted_files::$DELETED"
|
|
||||||
echo "::set-output name=modified_files::$MODIFIED"
|
|
||||||
echo "::set-output name=renamed_files::$RENAMED"
|
|
||||||
echo "::set-output name=type_changed_files::$TYPE_CHANGED"
|
|
||||||
echo "::set-output name=unmerged_files::$UNMERGED"
|
|
||||||
echo "::set-output name=unknown_files::$UNKNOWN"
|
|
||||||
echo "::set-output name=all_changed_and_modified_files::$ALL_CHANGED_AND_MODIFIED"
|
|
||||||
echo "::set-output name=all_changed_files::$ALL_CHANGED"
|
|
||||||
echo "::set-output name=all_modified_files::$ALL_MODIFIED"
|
|
||||||
echo "::set-output name=outputs_extension::$OUTPUTS_EXTENSION"
|
|
||||||
else
|
|
||||||
cat <<EOF >>"$GITHUB_OUTPUT"
|
|
||||||
added_files=$ADDED
|
|
||||||
copied_files=$COPIED
|
|
||||||
deleted_files=$DELETED
|
|
||||||
modified_files=$MODIFIED
|
|
||||||
renamed_files=$RENAMED
|
|
||||||
type_changed_files=$TYPE_CHANGED
|
|
||||||
unmerged_files=$UNMERGED
|
|
||||||
unknown_files=$UNKNOWN
|
|
||||||
all_changed_and_modified_files=$ALL_CHANGED_AND_MODIFIED
|
|
||||||
all_changed_files=$ALL_CHANGED
|
|
||||||
all_modified_files=$ALL_MODIFIED
|
|
||||||
outputs_extension=$OUTPUTS_EXTENSION
|
|
||||||
EOF
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ $INPUT_INCLUDE_ALL_OLD_NEW_RENAMED_FILES == "true" ]]; then
|
|
||||||
if [[ -z "$GITHUB_OUTPUT" ]]; then
|
|
||||||
echo "::set-output name=all_old_new_renamed_files::$ALL_OLD_NEW_RENAMED"
|
|
||||||
else
|
|
||||||
echo "all_old_new_renamed_files=$ALL_OLD_NEW_RENAMED" >>"$GITHUB_OUTPUT"
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "::endgroup::"
|
|
13
jest.config.js
Normal file
13
jest.config.js
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
module.exports = {
|
||||||
|
clearMocks: true,
|
||||||
|
moduleFileExtensions: ['js', 'ts'],
|
||||||
|
testMatch: ['**/*.test.ts'],
|
||||||
|
transform: {
|
||||||
|
'^.+\\.ts$': 'ts-jest'
|
||||||
|
},
|
||||||
|
verbose: true,
|
||||||
|
testTimeout: 10000,
|
||||||
|
setupFiles: [
|
||||||
|
"<rootDir>/jest/setupEnv.cjs"
|
||||||
|
]
|
||||||
|
}
|
9
jest/setupEnv.cjs
Normal file
9
jest/setupEnv.cjs
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
const path = require('path')
|
||||||
|
|
||||||
|
process.env.TESTING = "1"
|
||||||
|
process.env.GITHUB_WORKSPACE = path.join(
|
||||||
|
path.resolve(__dirname, '..'), '.'
|
||||||
|
)
|
||||||
|
process.env.GITHUB_ACTION_PATH = path.join(
|
||||||
|
path.resolve(__dirname, '..'), '.'
|
||||||
|
)
|
54
package.json
Normal file
54
package.json
Normal file
@ -0,0 +1,54 @@
|
|||||||
|
{
|
||||||
|
"name": "@tj-actions/glob",
|
||||||
|
"version": "17.2.5",
|
||||||
|
"description": "Glob pattern matching github action",
|
||||||
|
"main": "lib/main.js",
|
||||||
|
"publishConfig": {
|
||||||
|
"registry": "https://npm.pkg.github.com"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"build": "tsc",
|
||||||
|
"format": "prettier --write **/*.ts",
|
||||||
|
"format-check": "prettier --check **/*.ts",
|
||||||
|
"lint": "eslint src/**/*.ts",
|
||||||
|
"package": "ncc build lib/main.js --source-map --license licenses.txt",
|
||||||
|
"test": "jest --coverage",
|
||||||
|
"all": "yarn build && yarn format && yarn lint && yarn package && yarn test"
|
||||||
|
},
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://github.com/tj-actions/glob.git"
|
||||||
|
},
|
||||||
|
"keywords": [
|
||||||
|
"actions",
|
||||||
|
"glob",
|
||||||
|
"github-actions"
|
||||||
|
],
|
||||||
|
"author": "Tonye Jack",
|
||||||
|
"license": "MIT",
|
||||||
|
"bugs": {
|
||||||
|
"url": "https://github.com/tj-actions/glob/issues"
|
||||||
|
},
|
||||||
|
"homepage": "https://github.com/tj-actions/glob#readme",
|
||||||
|
"dependencies": {
|
||||||
|
"@actions/core": "1.10.0",
|
||||||
|
"@actions/exec": "1.1.1",
|
||||||
|
"@actions/glob": "0.4.0"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/jest": "29.5.1",
|
||||||
|
"@types/node": "20.2.1",
|
||||||
|
"@types/uuid": "9.0.1",
|
||||||
|
"@typescript-eslint/eslint-plugin": "5.59.6",
|
||||||
|
"@typescript-eslint/parser": "5.59.6",
|
||||||
|
"@vercel/ncc": "0.36.1",
|
||||||
|
"eslint": "8.41.0",
|
||||||
|
"eslint-plugin-github": "4.7.0",
|
||||||
|
"eslint-plugin-jest": "27.2.1",
|
||||||
|
"eslint-plugin-prettier": "^4.2.1",
|
||||||
|
"jest": "29.5.0",
|
||||||
|
"prettier": "2.8.8",
|
||||||
|
"ts-jest": "29.1.0",
|
||||||
|
"typescript": "5.0.4"
|
||||||
|
}
|
||||||
|
}
|
5
src/__tests__/main.test.ts
Normal file
5
src/__tests__/main.test.ts
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
describe('main test', () => {
|
||||||
|
it('adds two numbers', async () => {
|
||||||
|
expect(1 + 1).toEqual(2)
|
||||||
|
})
|
||||||
|
})
|
153
src/changedFiles.ts
Normal file
153
src/changedFiles.ts
Normal file
@ -0,0 +1,153 @@
|
|||||||
|
import {Pattern} from '@actions/glob/lib/internal-pattern'
|
||||||
|
import * as path from 'path'
|
||||||
|
|
||||||
|
import {SHAResult} from './commitSha'
|
||||||
|
import {Inputs} from './inputs'
|
||||||
|
import {
|
||||||
|
getDirnameMaxDepth,
|
||||||
|
getSubmodulePath,
|
||||||
|
gitDiff,
|
||||||
|
gitRenamedFiles,
|
||||||
|
gitSubmoduleDiffSHA,
|
||||||
|
jsonOutput
|
||||||
|
} from './utils'
|
||||||
|
|
||||||
|
export const getRenamedFiles = async ({
|
||||||
|
inputs,
|
||||||
|
workingDirectory,
|
||||||
|
hasSubmodule,
|
||||||
|
shaResult
|
||||||
|
}: {
|
||||||
|
inputs: Inputs
|
||||||
|
workingDirectory: string
|
||||||
|
hasSubmodule: boolean
|
||||||
|
shaResult: SHAResult
|
||||||
|
}): Promise<string> => {
|
||||||
|
let renamedFiles = await gitRenamedFiles({
|
||||||
|
cwd: workingDirectory,
|
||||||
|
sha1: shaResult.previousSha,
|
||||||
|
sha2: shaResult.currentSha,
|
||||||
|
diff: shaResult.diff,
|
||||||
|
oldNewSeparator: inputs.oldNewSeparator
|
||||||
|
})
|
||||||
|
|
||||||
|
if (hasSubmodule) {
|
||||||
|
for (const submodulePath of await getSubmodulePath({
|
||||||
|
cwd: workingDirectory
|
||||||
|
})) {
|
||||||
|
const submoduleShaResult = await gitSubmoduleDiffSHA({
|
||||||
|
cwd: workingDirectory,
|
||||||
|
parentSha1: shaResult.previousSha,
|
||||||
|
parentSha2: shaResult.currentSha,
|
||||||
|
submodulePath,
|
||||||
|
diff: shaResult.diff
|
||||||
|
})
|
||||||
|
|
||||||
|
const submoduleWorkingDirectory = path.join(
|
||||||
|
workingDirectory,
|
||||||
|
submodulePath
|
||||||
|
)
|
||||||
|
|
||||||
|
if (submoduleShaResult.currentSha && submoduleShaResult.previousSha) {
|
||||||
|
const submoduleRenamedFiles = await gitRenamedFiles({
|
||||||
|
cwd: submoduleWorkingDirectory,
|
||||||
|
sha1: submoduleShaResult.previousSha,
|
||||||
|
sha2: submoduleShaResult.currentSha,
|
||||||
|
diff: shaResult.diff,
|
||||||
|
oldNewSeparator: inputs.oldNewSeparator,
|
||||||
|
isSubmodule: true
|
||||||
|
})
|
||||||
|
renamedFiles.push(...submoduleRenamedFiles)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (inputs.dirNames) {
|
||||||
|
renamedFiles = renamedFiles.map(renamedFile =>
|
||||||
|
getDirnameMaxDepth({
|
||||||
|
pathStr: renamedFile,
|
||||||
|
dirNamesMaxDepth: inputs.dirNamesMaxDepth,
|
||||||
|
excludeRoot: inputs.dirNamesExcludeRoot
|
||||||
|
})
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (inputs.json) {
|
||||||
|
return jsonOutput({value: renamedFiles, rawFormat: inputs.jsonRawFormat})
|
||||||
|
}
|
||||||
|
|
||||||
|
return renamedFiles.join(inputs.oldNewFilesSeparator)
|
||||||
|
}
|
||||||
|
|
||||||
|
export const getDiffFiles = async ({
|
||||||
|
inputs,
|
||||||
|
workingDirectory,
|
||||||
|
hasSubmodule,
|
||||||
|
shaResult,
|
||||||
|
diffFilter,
|
||||||
|
filePatterns = []
|
||||||
|
}: {
|
||||||
|
inputs: Inputs
|
||||||
|
workingDirectory: string
|
||||||
|
hasSubmodule: boolean
|
||||||
|
shaResult: SHAResult
|
||||||
|
diffFilter: string
|
||||||
|
filePatterns?: Pattern[]
|
||||||
|
}): Promise<string> => {
|
||||||
|
let files = await gitDiff({
|
||||||
|
cwd: workingDirectory,
|
||||||
|
sha1: shaResult.previousSha,
|
||||||
|
sha2: shaResult.currentSha,
|
||||||
|
diff: shaResult.diff,
|
||||||
|
diffFilter,
|
||||||
|
filePatterns
|
||||||
|
})
|
||||||
|
|
||||||
|
if (hasSubmodule) {
|
||||||
|
for (const submodulePath of await getSubmodulePath({
|
||||||
|
cwd: workingDirectory
|
||||||
|
})) {
|
||||||
|
const submoduleShaResult = await gitSubmoduleDiffSHA({
|
||||||
|
cwd: workingDirectory,
|
||||||
|
parentSha1: shaResult.previousSha,
|
||||||
|
parentSha2: shaResult.currentSha,
|
||||||
|
submodulePath,
|
||||||
|
diff: shaResult.diff
|
||||||
|
})
|
||||||
|
|
||||||
|
const submoduleWorkingDirectory = path.join(
|
||||||
|
workingDirectory,
|
||||||
|
submodulePath
|
||||||
|
)
|
||||||
|
|
||||||
|
if (submoduleShaResult.currentSha && submoduleShaResult.previousSha) {
|
||||||
|
const submoduleFiles = await gitDiff({
|
||||||
|
cwd: submoduleWorkingDirectory,
|
||||||
|
sha1: submoduleShaResult.previousSha,
|
||||||
|
sha2: submoduleShaResult.currentSha,
|
||||||
|
diff: shaResult.diff,
|
||||||
|
diffFilter,
|
||||||
|
isSubmodule: true,
|
||||||
|
filePatterns
|
||||||
|
})
|
||||||
|
files.push(...submoduleFiles)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (inputs.dirNames) {
|
||||||
|
files = files.map(file =>
|
||||||
|
getDirnameMaxDepth({
|
||||||
|
pathStr: file,
|
||||||
|
dirNamesMaxDepth: inputs.dirNamesMaxDepth,
|
||||||
|
excludeRoot: inputs.dirNamesExcludeRoot
|
||||||
|
})
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (inputs.json) {
|
||||||
|
return jsonOutput({value: files, rawFormat: inputs.jsonRawFormat})
|
||||||
|
}
|
||||||
|
|
||||||
|
return files.join(inputs.separator)
|
||||||
|
}
|
443
src/commitSha.ts
Normal file
443
src/commitSha.ts
Normal file
@ -0,0 +1,443 @@
|
|||||||
|
import * as core from '@actions/core'
|
||||||
|
|
||||||
|
import {Env} from './env'
|
||||||
|
import {Inputs} from './inputs'
|
||||||
|
import {
|
||||||
|
canDiffCommits,
|
||||||
|
getBranchHeadSha,
|
||||||
|
getHeadSha,
|
||||||
|
getParentHeadSha,
|
||||||
|
getPreviousGitTag,
|
||||||
|
gitFetch,
|
||||||
|
gitFetchSubmodules,
|
||||||
|
gitLog,
|
||||||
|
verifyCommitSha
|
||||||
|
} from './utils'
|
||||||
|
|
||||||
|
const getCurrentSHA = async ({
|
||||||
|
inputs,
|
||||||
|
workingDirectory
|
||||||
|
}: {
|
||||||
|
inputs: Inputs
|
||||||
|
workingDirectory: string
|
||||||
|
}): Promise<string> => {
|
||||||
|
let currentSha = ''
|
||||||
|
core.debug('Getting current SHA...')
|
||||||
|
|
||||||
|
if (inputs.until) {
|
||||||
|
core.debug(`Getting base SHA for '${inputs.until}'...`)
|
||||||
|
try {
|
||||||
|
currentSha = await gitLog({
|
||||||
|
cwd: workingDirectory,
|
||||||
|
args: [
|
||||||
|
'--format',
|
||||||
|
'"%H"',
|
||||||
|
'-n',
|
||||||
|
'1',
|
||||||
|
'--date',
|
||||||
|
'local',
|
||||||
|
'--until',
|
||||||
|
inputs.until
|
||||||
|
]
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
core.error(
|
||||||
|
'Invalid until date: ' + inputs.until + '. ' + (error as Error).message
|
||||||
|
)
|
||||||
|
throw error
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (!currentSha) {
|
||||||
|
currentSha = await getHeadSha({cwd: workingDirectory})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
await verifyCommitSha({sha: currentSha, cwd: workingDirectory})
|
||||||
|
core.debug(`Current SHA: ${currentSha}`)
|
||||||
|
|
||||||
|
return currentSha
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SHAResult {
|
||||||
|
previousSha: string
|
||||||
|
currentSha: string
|
||||||
|
currentBranch: string
|
||||||
|
targetBranch: string
|
||||||
|
diff: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export const getSHAForPushEvent = async (
|
||||||
|
inputs: Inputs,
|
||||||
|
env: Env,
|
||||||
|
workingDirectory: string,
|
||||||
|
isShallow: boolean,
|
||||||
|
hasSubmodule: boolean,
|
||||||
|
gitExtraArgs: string[],
|
||||||
|
isTag: boolean
|
||||||
|
): Promise<SHAResult> => {
|
||||||
|
let targetBranch = env.GITHUB_REFNAME
|
||||||
|
let currentBranch = targetBranch
|
||||||
|
let initialCommit = false
|
||||||
|
|
||||||
|
let currentSha = inputs.sha
|
||||||
|
let previousSha = inputs.baseSha
|
||||||
|
let diff = '..'
|
||||||
|
|
||||||
|
if (isShallow) {
|
||||||
|
core.info('Repository is shallow, fetching more history...')
|
||||||
|
|
||||||
|
if (isTag) {
|
||||||
|
const sourceBranch = env.GITHUB_EVENT_BASE_REF.replace('refs/heads/', '')
|
||||||
|
await gitFetch({
|
||||||
|
cwd: workingDirectory,
|
||||||
|
args: [
|
||||||
|
...gitExtraArgs,
|
||||||
|
'-u',
|
||||||
|
'--progress',
|
||||||
|
`--deepen=${inputs.fetchDepth}`,
|
||||||
|
'origin',
|
||||||
|
`+refs/tags/${sourceBranch}:refs/remotes/origin/${sourceBranch}`
|
||||||
|
]
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
await gitFetch({
|
||||||
|
cwd: workingDirectory,
|
||||||
|
args: [
|
||||||
|
...gitExtraArgs,
|
||||||
|
'-u',
|
||||||
|
'--progress',
|
||||||
|
`--deepen=${inputs.fetchDepth}`,
|
||||||
|
'origin',
|
||||||
|
`+refs/heads/${targetBranch}:refs/remotes/origin/${targetBranch}`
|
||||||
|
]
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
if (hasSubmodule) {
|
||||||
|
await gitFetchSubmodules({
|
||||||
|
cwd: workingDirectory,
|
||||||
|
args: [
|
||||||
|
...gitExtraArgs,
|
||||||
|
'-u',
|
||||||
|
'--progress',
|
||||||
|
`--deepen=${inputs.fetchDepth}`
|
||||||
|
]
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (previousSha && currentSha && currentBranch && targetBranch) {
|
||||||
|
await verifyCommitSha({sha: currentSha, cwd: workingDirectory})
|
||||||
|
await verifyCommitSha({sha: previousSha, cwd: workingDirectory})
|
||||||
|
|
||||||
|
core.info(`Previous SHA: ${previousSha}`)
|
||||||
|
core.info(`Current SHA: ${currentSha}`)
|
||||||
|
return {
|
||||||
|
previousSha,
|
||||||
|
currentSha,
|
||||||
|
currentBranch,
|
||||||
|
targetBranch,
|
||||||
|
diff
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
currentSha = await getCurrentSHA({inputs, workingDirectory})
|
||||||
|
|
||||||
|
if (!previousSha) {
|
||||||
|
core.debug('Getting previous SHA...')
|
||||||
|
if (inputs.since) {
|
||||||
|
core.debug(`Getting base SHA for '${inputs.since}'...`)
|
||||||
|
try {
|
||||||
|
previousSha = await gitLog({
|
||||||
|
cwd: workingDirectory,
|
||||||
|
args: [
|
||||||
|
'--format',
|
||||||
|
'"%H"',
|
||||||
|
'-n',
|
||||||
|
'1',
|
||||||
|
'--date',
|
||||||
|
'local',
|
||||||
|
'--since',
|
||||||
|
inputs.since
|
||||||
|
]
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
core.error(
|
||||||
|
'Invalid since date: ' +
|
||||||
|
inputs.since +
|
||||||
|
'. ' +
|
||||||
|
(error as Error).message
|
||||||
|
)
|
||||||
|
throw error
|
||||||
|
}
|
||||||
|
} else if (isTag) {
|
||||||
|
core.debug('Getting previous SHA for tag...')
|
||||||
|
const {sha, tag} = await getPreviousGitTag({cwd: workingDirectory})
|
||||||
|
previousSha = sha
|
||||||
|
targetBranch = tag
|
||||||
|
} else {
|
||||||
|
if (inputs.sinceLastRemoteCommit) {
|
||||||
|
core.debug('Getting previous SHA for last remote commit...')
|
||||||
|
|
||||||
|
if (env.GITHUB_EVENT_FORCED === 'false' || !env.GITHUB_EVENT_FORCED) {
|
||||||
|
previousSha = env.GITHUB_EVENT_BEFORE
|
||||||
|
} else {
|
||||||
|
previousSha = await getParentHeadSha({cwd: workingDirectory})
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
core.debug('Getting previous SHA for last commit...')
|
||||||
|
previousSha = await getParentHeadSha({cwd: workingDirectory})
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
!previousSha ||
|
||||||
|
previousSha === '0000000000000000000000000000000000000000'
|
||||||
|
) {
|
||||||
|
previousSha = await getParentHeadSha({cwd: workingDirectory})
|
||||||
|
}
|
||||||
|
|
||||||
|
if (previousSha === currentSha) {
|
||||||
|
if (!(await getParentHeadSha({cwd: workingDirectory}))) {
|
||||||
|
core.warning('Initial commit detected no previous commit found.')
|
||||||
|
initialCommit = true
|
||||||
|
previousSha = currentSha
|
||||||
|
} else {
|
||||||
|
previousSha = await getParentHeadSha({cwd: workingDirectory})
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (!previousSha) {
|
||||||
|
core.error('Unable to locate a previous commit.')
|
||||||
|
throw new Error('Unable to locate a previous commit.')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
await verifyCommitSha({sha: previousSha, cwd: workingDirectory})
|
||||||
|
core.debug(`Previous SHA: ${previousSha}`)
|
||||||
|
|
||||||
|
core.debug(`Target branch: ${targetBranch}`)
|
||||||
|
core.debug(`Current branch: ${currentBranch}`)
|
||||||
|
|
||||||
|
if (!initialCommit && previousSha === currentSha) {
|
||||||
|
core.error(
|
||||||
|
`Similar commit hashes detected: previous sha: ${previousSha} is equivalent to the current sha: ${currentSha}.`
|
||||||
|
)
|
||||||
|
core.error(
|
||||||
|
`Please verify that both commits are valid, and increase the fetch_depth to a number higher than ${inputs.fetchDepth}.`
|
||||||
|
)
|
||||||
|
throw new Error('Similar commit hashes detected.')
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
previousSha,
|
||||||
|
currentSha,
|
||||||
|
currentBranch,
|
||||||
|
targetBranch,
|
||||||
|
diff
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const getSHAForPullRequestEvent = async (
|
||||||
|
inputs: Inputs,
|
||||||
|
env: Env,
|
||||||
|
workingDirectory: string,
|
||||||
|
isShallow: boolean,
|
||||||
|
hasSubmodule: boolean,
|
||||||
|
gitExtraArgs: string[]
|
||||||
|
): Promise<SHAResult> => {
|
||||||
|
let targetBranch = env.GITHUB_EVENT_PULL_REQUEST_BASE_REF
|
||||||
|
let currentBranch = env.GITHUB_EVENT_PULL_REQUEST_HEAD_REF
|
||||||
|
let currentSha = inputs.sha
|
||||||
|
let previousSha = inputs.baseSha
|
||||||
|
let diff = '...'
|
||||||
|
|
||||||
|
if (inputs.sinceLastRemoteCommit) {
|
||||||
|
targetBranch = currentBranch
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isShallow) {
|
||||||
|
core.info('Repository is shallow, fetching more history...')
|
||||||
|
|
||||||
|
const prFetchExitCode = await gitFetch({
|
||||||
|
cwd: workingDirectory,
|
||||||
|
args: [
|
||||||
|
...gitExtraArgs,
|
||||||
|
'-u',
|
||||||
|
'--progress',
|
||||||
|
'origin',
|
||||||
|
`pull/${env.GITHUB_EVENT_PULL_REQUEST_NUMBER}/head:${currentBranch}`
|
||||||
|
]
|
||||||
|
})
|
||||||
|
|
||||||
|
if (prFetchExitCode !== 0) {
|
||||||
|
await gitFetch({
|
||||||
|
cwd: workingDirectory,
|
||||||
|
args: [
|
||||||
|
...gitExtraArgs,
|
||||||
|
'-u',
|
||||||
|
'--progress',
|
||||||
|
`--deepen=${inputs.fetchDepth}`,
|
||||||
|
'origin',
|
||||||
|
`+refs/heads/${currentBranch}*:refs/remotes/origin/${currentBranch}*`
|
||||||
|
]
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!inputs.sinceLastRemoteCommit) {
|
||||||
|
core.debug('Fetching target branch...')
|
||||||
|
await gitFetch({
|
||||||
|
cwd: workingDirectory,
|
||||||
|
args: [
|
||||||
|
...gitExtraArgs,
|
||||||
|
'-u',
|
||||||
|
'--progress',
|
||||||
|
`--deepen=${inputs.fetchDepth}`,
|
||||||
|
'origin',
|
||||||
|
`+refs/heads/${targetBranch}:refs/remotes/origin/${targetBranch}`
|
||||||
|
]
|
||||||
|
})
|
||||||
|
|
||||||
|
if (hasSubmodule) {
|
||||||
|
await gitFetchSubmodules({
|
||||||
|
cwd: workingDirectory,
|
||||||
|
args: [
|
||||||
|
...gitExtraArgs,
|
||||||
|
'-u',
|
||||||
|
'--progress',
|
||||||
|
`--deepen=${inputs.fetchDepth}`
|
||||||
|
]
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (previousSha && currentSha && currentBranch && targetBranch) {
|
||||||
|
await verifyCommitSha({sha: currentSha, cwd: workingDirectory})
|
||||||
|
await verifyCommitSha({sha: previousSha, cwd: workingDirectory})
|
||||||
|
|
||||||
|
core.info(`Previous SHA: ${previousSha}`)
|
||||||
|
core.info(`Current SHA: ${currentSha}`)
|
||||||
|
return {
|
||||||
|
previousSha,
|
||||||
|
currentSha,
|
||||||
|
currentBranch,
|
||||||
|
targetBranch,
|
||||||
|
diff
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
currentSha = await getCurrentSHA({inputs, workingDirectory})
|
||||||
|
|
||||||
|
if (
|
||||||
|
!env.GITHUB_EVENT_PULL_REQUEST_BASE_REF ||
|
||||||
|
env.GITHUB_EVENT_HEAD_REPO_FORK == 'true'
|
||||||
|
) {
|
||||||
|
diff = '..'
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!previousSha) {
|
||||||
|
if (inputs.sinceLastRemoteCommit) {
|
||||||
|
previousSha = env.GITHUB_EVENT_BEFORE
|
||||||
|
|
||||||
|
if (
|
||||||
|
(await verifyCommitSha({
|
||||||
|
sha: currentSha,
|
||||||
|
cwd: workingDirectory,
|
||||||
|
showAsErrorMessage: false
|
||||||
|
})) !== 0
|
||||||
|
) {
|
||||||
|
previousSha = env.GITHUB_EVENT_PULL_REQUEST_BASE_SHA
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
previousSha = await getBranchHeadSha({
|
||||||
|
cwd: workingDirectory,
|
||||||
|
branch: `origin/${targetBranch}`
|
||||||
|
})
|
||||||
|
|
||||||
|
if (isShallow) {
|
||||||
|
if (
|
||||||
|
await canDiffCommits({
|
||||||
|
cwd: workingDirectory,
|
||||||
|
sha1: previousSha,
|
||||||
|
sha2: currentSha,
|
||||||
|
diff
|
||||||
|
})
|
||||||
|
) {
|
||||||
|
core.debug(
|
||||||
|
'Merge base is not in the local history, fetching remote target branch...'
|
||||||
|
)
|
||||||
|
|
||||||
|
for (let i = 1; i <= 10; i++) {
|
||||||
|
await gitFetch({
|
||||||
|
cwd: workingDirectory,
|
||||||
|
args: [
|
||||||
|
...gitExtraArgs,
|
||||||
|
'-u',
|
||||||
|
'--progress',
|
||||||
|
`--deepen=${inputs.fetchDepth}`,
|
||||||
|
'origin',
|
||||||
|
`+refs/heads/${targetBranch}:refs/remotes/origin/${targetBranch}`
|
||||||
|
]
|
||||||
|
})
|
||||||
|
|
||||||
|
if (
|
||||||
|
await canDiffCommits({
|
||||||
|
cwd: workingDirectory,
|
||||||
|
sha1: previousSha,
|
||||||
|
sha2: currentSha,
|
||||||
|
diff
|
||||||
|
})
|
||||||
|
) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
core.debug(
|
||||||
|
'Merge base is not in the local history, fetching remote target branch again...'
|
||||||
|
)
|
||||||
|
core.debug(`Attempt ${i}/10`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!previousSha || previousSha === currentSha) {
|
||||||
|
previousSha = env.GITHUB_EVENT_PULL_REQUEST_BASE_SHA
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
!(await canDiffCommits({
|
||||||
|
cwd: workingDirectory,
|
||||||
|
sha1: previousSha,
|
||||||
|
sha2: currentSha,
|
||||||
|
diff
|
||||||
|
}))
|
||||||
|
) {
|
||||||
|
diff = '..'
|
||||||
|
}
|
||||||
|
|
||||||
|
await verifyCommitSha({sha: previousSha, cwd: workingDirectory})
|
||||||
|
core.debug(`Previous SHA: ${previousSha}`)
|
||||||
|
|
||||||
|
if (
|
||||||
|
!(await canDiffCommits({
|
||||||
|
cwd: workingDirectory,
|
||||||
|
sha1: previousSha,
|
||||||
|
sha2: currentSha,
|
||||||
|
diff
|
||||||
|
}))
|
||||||
|
) {
|
||||||
|
throw new Error(
|
||||||
|
`Unable to determine a difference between ${previousSha}${diff}${currentSha}`
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
previousSha,
|
||||||
|
currentSha,
|
||||||
|
currentBranch,
|
||||||
|
targetBranch,
|
||||||
|
diff
|
||||||
|
}
|
||||||
|
}
|
33
src/env.ts
Normal file
33
src/env.ts
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
export type Env = {
|
||||||
|
GITHUB_EVENT_PULL_REQUEST_HEAD_REF: string
|
||||||
|
GITHUB_EVENT_PULL_REQUEST_BASE_REF: string
|
||||||
|
GITHUB_EVENT_BEFORE: string
|
||||||
|
GITHUB_REFNAME: string
|
||||||
|
GITHUB_REF: string
|
||||||
|
GITHUB_EVENT_BASE_REF: string
|
||||||
|
GITHUB_EVENT_HEAD_REPO_FORK: string
|
||||||
|
GITHUB_WORKSPACE: string
|
||||||
|
GITHUB_EVENT_FORCED: string
|
||||||
|
GITHUB_EVENT_PULL_REQUEST_NUMBER: string
|
||||||
|
GITHUB_EVENT_PULL_REQUEST_BASE_SHA: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export const getEnv = (): Env => {
|
||||||
|
return {
|
||||||
|
GITHUB_EVENT_PULL_REQUEST_HEAD_REF:
|
||||||
|
process.env.GITHUB_EVENT_PULL_REQUEST_HEAD_REF || '',
|
||||||
|
GITHUB_EVENT_PULL_REQUEST_BASE_REF:
|
||||||
|
process.env.GITHUB_EVENT_PULL_REQUEST_BASE_REF || '',
|
||||||
|
GITHUB_EVENT_BEFORE: process.env.GITHUB_EVENT_BEFORE || '',
|
||||||
|
GITHUB_REFNAME: process.env.GITHUB_REFNAME || '',
|
||||||
|
GITHUB_REF: process.env.GITHUB_REF || '',
|
||||||
|
GITHUB_EVENT_BASE_REF: process.env.GITHUB_EVENT_BASE_REF || '',
|
||||||
|
GITHUB_EVENT_HEAD_REPO_FORK: process.env.GITHUB_EVENT_HEAD_REPO_FORK || '',
|
||||||
|
GITHUB_WORKSPACE: process.env.GITHUB_WORKSPACE || '',
|
||||||
|
GITHUB_EVENT_FORCED: process.env.GITHUB_EVENT_FORCED || '',
|
||||||
|
GITHUB_EVENT_PULL_REQUEST_NUMBER:
|
||||||
|
process.env.GITHUB_EVENT_PULL_REQUEST_NUMBER || '',
|
||||||
|
GITHUB_EVENT_PULL_REQUEST_BASE_SHA:
|
||||||
|
process.env.GITHUB_EVENT_PULL_REQUEST_BASE_SHA || ''
|
||||||
|
}
|
||||||
|
}
|
153
src/inputs.ts
Normal file
153
src/inputs.ts
Normal file
@ -0,0 +1,153 @@
|
|||||||
|
import * as core from '@actions/core'
|
||||||
|
|
||||||
|
/**
 * Typed representation of all inputs accepted by the action.
 * The optional numeric fields (`dirNamesMaxDepth`, `fetchDepth`) are only
 * populated when the corresponding input was actually provided.
 */
export type Inputs = {
  files: string
  filesSeparator: string
  filesFromSourceFile: string
  filesFromSourceFileSeparator: string
  filesIgnore: string
  filesIgnoreSeparator: string
  filesIgnoreFromSourceFile: string
  filesIgnoreFromSourceFileSeparator: string
  separator: string
  includeAllOldNewRenamedFiles: boolean
  oldNewSeparator: string
  oldNewFilesSeparator: string
  sha: string
  baseSha: string
  since: string
  until: string
  path: string
  quotePath: boolean
  diffRelative: boolean
  dirNames: boolean
  dirNamesMaxDepth?: number
  dirNamesExcludeRoot: boolean
  json: boolean
  jsonRawFormat: boolean
  fetchDepth?: number
  sinceLastRemoteCommit: boolean
  writeOutputFiles: boolean
  outputDir: string
  matchDirectories: boolean
}

/**
 * Reads and parses every action input from the workflow environment.
 *
 * Separator inputs are read with `trimWhitespace: false` so that purely
 * whitespace separators (a single space, a newline) survive intact.
 *
 * @returns the parsed {@link Inputs}.
 */
export const getInputs = (): Inputs => {
  const files = core.getInput('files', {required: false})
  const filesSeparator = core.getInput('files-separator', {
    required: false,
    trimWhitespace: false
  })
  const filesIgnore = core.getInput('files-ignore', {required: false})
  const filesIgnoreSeparator = core.getInput('files-ignore-separator', {
    required: false,
    trimWhitespace: false
  })
  const filesFromSourceFile = core.getInput('files-from-source-file', {
    required: false
  })
  const filesFromSourceFileSeparator = core.getInput(
    'files-from-source-file-separator',
    {
      required: false,
      trimWhitespace: false
    }
  )
  const filesIgnoreFromSourceFile = core.getInput(
    'files-ignore-from-source-file',
    {required: false}
  )
  const filesIgnoreFromSourceFileSeparator = core.getInput(
    'files-ignore-from-source-file-separator',
    {
      required: false,
      trimWhitespace: false
    }
  )
  // The output separators are required: action.yml supplies defaults.
  const separator = core.getInput('separator', {
    required: true,
    trimWhitespace: false
  })
  const includeAllOldNewRenamedFiles = core.getBooleanInput(
    'include-all-old-new-renamed-files',
    {required: false}
  )
  const oldNewSeparator = core.getInput('old-new-separator', {
    required: true,
    trimWhitespace: false
  })
  const oldNewFilesSeparator = core.getInput('old-new-files-separator', {
    required: true,
    trimWhitespace: false
  })
  const sha = core.getInput('sha', {required: false})
  const baseSha = core.getInput('base-sha', {required: false})
  const since = core.getInput('since', {required: false})
  const until = core.getInput('until', {required: false})
  const path = core.getInput('path', {required: false})
  // NOTE: the workflow input name is 'quotepath' (no hyphen).
  const quotePath = core.getBooleanInput('quotepath', {required: false})
  const diffRelative = core.getBooleanInput('diff-relative', {required: false})
  const dirNames = core.getBooleanInput('dir-names', {required: false})
  const dirNamesMaxDepth = core.getInput('dir-names-max-depth', {
    required: false
  })
  const dirNamesExcludeRoot = core.getBooleanInput('dir-names-exclude-root', {
    required: false
  })
  const json = core.getBooleanInput('json', {required: false})
  const jsonRawFormat = core.getBooleanInput('json-raw-format', {
    required: false
  })
  const fetchDepth = core.getInput('fetch-depth', {required: false})
  const sinceLastRemoteCommit = core.getBooleanInput(
    'since-last-remote-commit',
    {required: false}
  )
  const writeOutputFiles = core.getBooleanInput('write-output-files', {
    required: false
  })
  const outputDir = core.getInput('output-dir', {required: false})
  const matchDirectories = core.getBooleanInput('match-directories', {
    required: false
  })

  const inputs: Inputs = {
    files,
    filesSeparator,
    filesFromSourceFile,
    filesFromSourceFileSeparator,
    filesIgnore,
    filesIgnoreSeparator,
    filesIgnoreFromSourceFile,
    filesIgnoreFromSourceFileSeparator,
    separator,
    includeAllOldNewRenamedFiles,
    oldNewSeparator,
    oldNewFilesSeparator,
    sha,
    baseSha,
    since,
    until,
    path,
    quotePath,
    diffRelative,
    dirNames,
    dirNamesExcludeRoot,
    json,
    jsonRawFormat,
    sinceLastRemoteCommit,
    writeOutputFiles,
    outputDir,
    matchDirectories
  }

  // Numeric inputs are only attached when non-empty, keeping the optional
  // fields `undefined` otherwise.
  if (fetchDepth) {
    inputs.fetchDepth = parseInt(fetchDepth, 10)
  }

  if (dirNamesMaxDepth) {
    inputs.dirNamesMaxDepth = parseInt(dirNamesMaxDepth, 10)
  }

  return inputs
}
|
380
src/main.ts
Normal file
380
src/main.ts
Normal file
@ -0,0 +1,380 @@
|
|||||||
|
import * as core from '@actions/core'
|
||||||
|
import path from 'path'
|
||||||
|
import {getDiffFiles, getRenamedFiles} from './changedFiles'
|
||||||
|
import {
|
||||||
|
getSHAForPullRequestEvent,
|
||||||
|
getSHAForPushEvent,
|
||||||
|
SHAResult
|
||||||
|
} from './commitSha'
|
||||||
|
import {getEnv} from './env'
|
||||||
|
import {getInputs} from './inputs'
|
||||||
|
import {
|
||||||
|
getFilePatterns,
|
||||||
|
isRepoShallow,
|
||||||
|
setOutput,
|
||||||
|
submoduleExists,
|
||||||
|
updateGitGlobalConfig,
|
||||||
|
verifyMinimumGitVersion
|
||||||
|
} from './utils'
|
||||||
|
|
||||||
|
/**
 * Entry point of the action: resolves the commit range for the triggering
 * event, computes each category of changed files via `git diff`, and writes
 * every category as an action output.
 *
 * Errors are not caught here; the module-level bootstrap below reports them
 * via `core.setFailed`.
 */
export async function run(): Promise<void> {
  const env = getEnv()
  const inputs = getInputs()

  await verifyMinimumGitVersion()

  // core.quotepath=off prevents git from escaping non-ASCII path bytes.
  let quotePathValue = 'on'

  if (!inputs.quotePath) {
    quotePathValue = 'off'
  }

  await updateGitGlobalConfig({
    name: 'core.quotepath',
    value: quotePathValue
  })

  if (inputs.diffRelative) {
    await updateGitGlobalConfig({
      name: 'diff.relative',
      value: 'true'
    })
  }

  const workingDirectory = path.resolve(
    env.GITHUB_WORKSPACE || process.cwd(),
    inputs.path
  )
  const isShallow = await isRepoShallow({cwd: workingDirectory})
  const hasSubmodule = await submoduleExists({cwd: workingDirectory})
  let gitExtraArgs = ['--no-tags', '--prune', '--recurse-submodules']
  const isTag = env.GITHUB_REF?.startsWith('refs/tags/')

  if (isTag) {
    // Tag pushes need tag objects and no submodule recursion.
    gitExtraArgs = ['--prune', '--no-recurse-submodules']
  }

  let shaResult: SHAResult

  // A missing PR base ref means this is a push (or tag) event.
  if (!env.GITHUB_EVENT_PULL_REQUEST_BASE_REF) {
    core.info('Running on a push event...')
    shaResult = await getSHAForPushEvent(
      inputs,
      env,
      workingDirectory,
      isShallow,
      hasSubmodule,
      gitExtraArgs,
      isTag
    )
  } else {
    core.info('Running on a pull request event...')
    shaResult = await getSHAForPullRequestEvent(
      inputs,
      env,
      workingDirectory,
      isShallow,
      hasSubmodule,
      gitExtraArgs
    )
  }

  const filePatterns = await getFilePatterns({
    inputs
  })

  // --- One output per git diff-filter letter -----------------------------
  // A=added, C=copied, M=modified, R=renamed, T=type-changed, U=unmerged,
  // D=deleted, X=unknown.
  const addedFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    shaResult,
    diffFilter: 'A',
    filePatterns
  })
  core.debug(`Added files: ${addedFiles}`)
  await setOutput({
    key: 'added_files',
    value: addedFiles,
    inputs
  })

  const copiedFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    shaResult,
    diffFilter: 'C',
    filePatterns
  })
  core.debug(`Copied files: ${copiedFiles}`)
  await setOutput({
    key: 'copied_files',
    value: copiedFiles,
    inputs
  })

  const modifiedFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    shaResult,
    diffFilter: 'M',
    filePatterns
  })
  core.debug(`Modified files: ${modifiedFiles}`)
  await setOutput({
    key: 'modified_files',
    value: modifiedFiles,
    inputs
  })

  const renamedFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    shaResult,
    diffFilter: 'R',
    filePatterns
  })
  core.debug(`Renamed files: ${renamedFiles}`)
  await setOutput({
    key: 'renamed_files',
    value: renamedFiles,
    inputs
  })

  const typeChangedFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    shaResult,
    diffFilter: 'T',
    filePatterns
  })
  core.debug(`Type changed files: ${typeChangedFiles}`)
  await setOutput({
    key: 'type_changed_files',
    value: typeChangedFiles,
    inputs
  })

  const unmergedFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    shaResult,
    diffFilter: 'U',
    filePatterns
  })
  core.debug(`Unmerged files: ${unmergedFiles}`)
  await setOutput({
    key: 'unmerged_files',
    value: unmergedFiles,
    inputs
  })

  const unknownFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    shaResult,
    diffFilter: 'X',
    filePatterns
  })
  core.debug(`Unknown files: ${unknownFiles}`)
  await setOutput({
    key: 'unknown_files',
    value: unknownFiles,
    inputs
  })

  const allChangedAndModifiedFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    shaResult,
    diffFilter: 'ACDMRTUX',
    filePatterns
  })
  core.debug(`All changed and modified files: ${allChangedAndModifiedFiles}`)
  await setOutput({
    key: 'all_changed_and_modified_files',
    value: allChangedAndModifiedFiles,
    inputs
  })

  // --- "changed" aggregate (ACMR) ----------------------------------------
  const allChangedFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    shaResult,
    diffFilter: 'ACMR',
    filePatterns
  })
  core.debug(`All changed files: ${allChangedFiles}`)
  await setOutput({
    key: 'all_changed_files',
    value: allChangedFiles,
    inputs
  })

  // NOTE(review): when `allChangedFiles` is empty this evaluates to '' rather
  // than false — confirm downstream consumers treat '' as falsy.
  await setOutput({
    key: 'any_changed',
    value: allChangedFiles && filePatterns.length > 0,
    inputs
  })

  // Same diff-filter without file patterns: files changed OUTSIDE the
  // configured patterns.
  const allOtherChangedFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    shaResult,
    diffFilter: 'ACMR'
  })
  core.debug(`All other changed files: ${allOtherChangedFiles}`)

  // NOTE(review): assumes getDiffFiles joins its result with
  // `inputs.filesSeparator` — verify against its implementation.
  const otherChangedFiles = allOtherChangedFiles
    .split(inputs.filesSeparator)
    .filter(
      filePath =>
        !allChangedFiles.split(inputs.filesSeparator).includes(filePath)
    )

  const onlyChanged =
    otherChangedFiles.length === 0 && allChangedFiles.length > 0

  await setOutput({
    key: 'only_changed',
    value: onlyChanged,
    inputs
  })

  await setOutput({
    key: 'other_changed_files',
    value: otherChangedFiles.join(inputs.filesSeparator),
    inputs
  })

  // --- "modified" aggregate (ACMRD: changed + deleted) -------------------
  const allModifiedFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    shaResult,
    diffFilter: 'ACMRD',
    filePatterns
  })
  core.debug(`All modified files: ${allModifiedFiles}`)
  await setOutput({
    key: 'all_modified_files',
    value: allModifiedFiles,
    inputs
  })

  await setOutput({
    key: 'any_modified',
    value: allModifiedFiles && filePatterns.length > 0,
    inputs
  })

  const allOtherModifiedFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    shaResult,
    diffFilter: 'ACMRD'
  })

  const otherModifiedFiles = allOtherModifiedFiles
    .split(inputs.filesSeparator)
    .filter(
      filePath =>
        !allModifiedFiles.split(inputs.filesSeparator).includes(filePath)
    )

  const onlyModified =
    otherModifiedFiles.length === 0 && allModifiedFiles.length > 0

  await setOutput({
    key: 'only_modified',
    value: onlyModified,
    inputs
  })

  await setOutput({
    key: 'other_modified_files',
    value: otherModifiedFiles.join(inputs.filesSeparator),
    inputs
  })

  // --- "deleted" outputs (D) ---------------------------------------------
  const deletedFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    shaResult,
    diffFilter: 'D',
    filePatterns
  })
  core.debug(`Deleted files: ${deletedFiles}`)
  await setOutput({
    key: 'deleted_files',
    value: deletedFiles,
    inputs
  })

  await setOutput({
    key: 'any_deleted',
    value: deletedFiles && filePatterns.length > 0,
    inputs
  })

  const allOtherDeletedFiles = await getDiffFiles({
    inputs,
    workingDirectory,
    hasSubmodule,
    shaResult,
    diffFilter: 'D'
  })

  const otherDeletedFiles = allOtherDeletedFiles
    .split(inputs.filesSeparator)
    .filter(
      filePath => !deletedFiles.split(inputs.filesSeparator).includes(filePath)
    )

  const onlyDeleted = otherDeletedFiles.length === 0 && deletedFiles.length > 0

  await setOutput({
    key: 'only_deleted',
    value: onlyDeleted,
    inputs
  })

  await setOutput({
    key: 'other_deleted_files',
    value: otherDeletedFiles.join(inputs.filesSeparator),
    inputs
  })

  // Optional old-path/new-path pairs for renamed files.
  if (inputs.includeAllOldNewRenamedFiles) {
    const allOldNewRenamedFiles = await getRenamedFiles({
      inputs,
      workingDirectory,
      hasSubmodule,
      shaResult
    })
    core.debug(`All old new renamed files: ${allOldNewRenamedFiles}`)
    await setOutput({
      key: 'all_old_new_renamed_files',
      value: allOldNewRenamedFiles,
      inputs
    })
  }
}
|
||||||
|
|
||||||
|
/* istanbul ignore if */
// Execute immediately unless running under the test suite (TESTING env var);
// any rejection fails the workflow step.
if (!process.env.TESTING) {
  // eslint-disable-next-line github/no-then
  run().catch(e => {
    core.setFailed(e.message || e)
  })
}
|
707
src/utils.ts
Normal file
707
src/utils.ts
Normal file
@ -0,0 +1,707 @@
|
|||||||
|
import * as core from '@actions/core'
|
||||||
|
import * as exec from '@actions/exec'
|
||||||
|
import {MatchKind} from '@actions/glob/lib/internal-match-kind'
|
||||||
|
|
||||||
|
import {Pattern} from '@actions/glob/lib/internal-pattern'
|
||||||
|
import * as patternHelper from '@actions/glob/lib/internal-pattern-helper'
|
||||||
|
import * as path from 'path'
|
||||||
|
import {createReadStream, promises as fs} from 'fs'
|
||||||
|
import {createInterface} from 'readline'
|
||||||
|
|
||||||
|
import {Inputs} from './inputs'
|
||||||
|
|
||||||
|
const MINIMUM_GIT_VERSION = '2.18.0'
|
||||||
|
|
||||||
|
const versionToNumber = (version: string): number => {
|
||||||
|
const [major, minor, patch] = version.split('.').map(Number)
|
||||||
|
return major * 1000000 + minor * 1000 + patch
|
||||||
|
}
|
||||||
|
|
||||||
|
export const verifyMinimumGitVersion = async (): Promise<void> => {
|
||||||
|
const {exitCode, stdout, stderr} = await exec.getExecOutput('git', [
|
||||||
|
'--version'
|
||||||
|
])
|
||||||
|
|
||||||
|
if (exitCode !== 0) {
|
||||||
|
throw new Error(stderr || 'An unexpected error occurred')
|
||||||
|
}
|
||||||
|
|
||||||
|
const gitVersion = stdout.trim()
|
||||||
|
|
||||||
|
if (versionToNumber(gitVersion) < versionToNumber(MINIMUM_GIT_VERSION)) {
|
||||||
|
throw new Error(
|
||||||
|
`Minimum required git version is ${MINIMUM_GIT_VERSION}, your version is ${gitVersion}`
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function escapeString(value: string): string {
|
||||||
|
return value.replace(/[|\\{}()[\]^$+*?.]/g, '\\$&')
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function exists(filePath: string): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
await fs.access(filePath)
|
||||||
|
return true
|
||||||
|
} catch {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Parses newline-separated glob patterns into @actions/glob Pattern objects.
 *
 * Blank lines and '#' comments are skipped. For every pattern that ends with
 * a path separator or does not already end in '**', an implicit
 * '<pattern>/**' companion pattern is appended so directory matches also
 * cover their contents.
 *
 * @param filePatterns - raw newline-separated pattern text.
 * @returns the compiled patterns.
 */
export async function getPatterns(filePatterns: string): Promise<Pattern[]> {
  const IS_WINDOWS: boolean = process.platform === 'win32'
  const patterns = []

  if (IS_WINDOWS) {
    // Normalise CRLF/CR line endings before splitting on '\n'.
    filePatterns = filePatterns.replace(/\r\n/g, '\n')
    filePatterns = filePatterns.replace(/\r/g, '\n')
  }

  const lines = filePatterns.split('\n').map(filePattern => filePattern.trim())

  for (let line of lines) {
    // Empty or comment
    if (!(!line || line.startsWith('#'))) {
      line = IS_WINDOWS ? line.replace(/\\/g, '/') : line
      const pattern = new Pattern(line)
      // Keep brace expansion enabled and rebuild the matcher.
      // NOTE(review): this pokes minimatch internals of @actions/glob —
      // confirm against the pinned @actions/glob version on upgrade.
      // @ts-ignore
      pattern.minimatch.options.nobrace = false
      // @ts-ignore
      pattern.minimatch.make()
      patterns.push(pattern)

      if (
        pattern.trailingSeparator ||
        pattern.segments[pattern.segments.length - 1] !== '**'
      ) {
        patterns.push(
          new Pattern(pattern.negate, true, pattern.segments.concat('**'))
        )
      }
    }
  }

  return patterns
}
|
||||||
|
|
||||||
|
async function* lineOfFileGenerator({
|
||||||
|
filePath,
|
||||||
|
excludedFiles
|
||||||
|
}: {
|
||||||
|
filePath: string
|
||||||
|
excludedFiles: boolean
|
||||||
|
}): AsyncIterableIterator<string> {
|
||||||
|
const fileStream = createReadStream(filePath)
|
||||||
|
/* istanbul ignore next */
|
||||||
|
fileStream.on('error', error => {
|
||||||
|
throw error
|
||||||
|
})
|
||||||
|
const rl = createInterface({
|
||||||
|
input: fileStream,
|
||||||
|
crlfDelay: Infinity
|
||||||
|
})
|
||||||
|
for await (const line of rl) {
|
||||||
|
if (!line.startsWith('#') && line !== '') {
|
||||||
|
if (excludedFiles) {
|
||||||
|
if (line.startsWith('!')) {
|
||||||
|
yield line
|
||||||
|
} else {
|
||||||
|
yield `!${line}`
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
yield line
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getFilesFromSourceFile({
|
||||||
|
filePaths,
|
||||||
|
excludedFiles = false
|
||||||
|
}: {
|
||||||
|
filePaths: string[]
|
||||||
|
excludedFiles?: boolean
|
||||||
|
}): Promise<string[]> {
|
||||||
|
const lines = []
|
||||||
|
for (const filePath of filePaths) {
|
||||||
|
for await (const line of lineOfFileGenerator({filePath, excludedFiles})) {
|
||||||
|
lines.push(line)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return lines
|
||||||
|
}
|
||||||
|
|
||||||
|
export const updateGitGlobalConfig = async ({
|
||||||
|
name,
|
||||||
|
value
|
||||||
|
}: {
|
||||||
|
name: string
|
||||||
|
value: string
|
||||||
|
}): Promise<void> => {
|
||||||
|
const {exitCode, stderr} = await exec.getExecOutput(
|
||||||
|
'git',
|
||||||
|
['config', '--global', name, value],
|
||||||
|
{
|
||||||
|
ignoreReturnCode: true
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
/* istanbul ignore if */
|
||||||
|
if (exitCode !== 0 || stderr) {
|
||||||
|
core.warning(stderr || `Couldn't update git global config ${name}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const isRepoShallow = async ({cwd}: {cwd: string}): Promise<boolean> => {
|
||||||
|
const {exitCode, stdout, stderr} = await exec.getExecOutput(
|
||||||
|
'git',
|
||||||
|
['rev-parse', '--is-shallow-repository'],
|
||||||
|
{cwd}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (exitCode !== 0) {
|
||||||
|
throw new Error(stderr || 'An unexpected error occurred')
|
||||||
|
}
|
||||||
|
|
||||||
|
return stdout.trim() === 'true'
|
||||||
|
}
|
||||||
|
|
||||||
|
export const submoduleExists = async ({
|
||||||
|
cwd
|
||||||
|
}: {
|
||||||
|
cwd: string
|
||||||
|
}): Promise<boolean> => {
|
||||||
|
const {exitCode, stdout, stderr} = await exec.getExecOutput(
|
||||||
|
'git',
|
||||||
|
['submodule', 'status'],
|
||||||
|
{cwd}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (exitCode !== 0) {
|
||||||
|
throw new Error(stderr || 'An unexpected error occurred')
|
||||||
|
}
|
||||||
|
|
||||||
|
return stdout.trim() !== ''
|
||||||
|
}
|
||||||
|
|
||||||
|
export const gitFetch = async ({
|
||||||
|
args,
|
||||||
|
cwd
|
||||||
|
}: {
|
||||||
|
args: string[]
|
||||||
|
cwd: string
|
||||||
|
}): Promise<number> => {
|
||||||
|
const {exitCode, stderr} = await exec.getExecOutput(
|
||||||
|
'git',
|
||||||
|
['fetch', ...args],
|
||||||
|
{cwd}
|
||||||
|
)
|
||||||
|
|
||||||
|
/* istanbul ignore if */
|
||||||
|
if (exitCode !== 0 || stderr) {
|
||||||
|
core.warning(stderr || "Couldn't fetch repository")
|
||||||
|
}
|
||||||
|
|
||||||
|
return exitCode
|
||||||
|
}
|
||||||
|
|
||||||
|
export const gitFetchSubmodules = async ({
|
||||||
|
args,
|
||||||
|
cwd
|
||||||
|
}: {
|
||||||
|
args: string[]
|
||||||
|
cwd: string
|
||||||
|
}): Promise<void> => {
|
||||||
|
const {exitCode, stderr} = await exec.getExecOutput(
|
||||||
|
'git',
|
||||||
|
['submodule', 'foreach', 'git', 'fetch', ...args],
|
||||||
|
{cwd}
|
||||||
|
)
|
||||||
|
|
||||||
|
/* istanbul ignore if */
|
||||||
|
if (exitCode !== 0 || stderr) {
|
||||||
|
core.warning(stderr || "Couldn't fetch submodules")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const getSubmodulePath = async ({
|
||||||
|
cwd
|
||||||
|
}: {
|
||||||
|
cwd: string
|
||||||
|
}): Promise<string[]> => {
|
||||||
|
const {exitCode, stdout, stderr} = await exec.getExecOutput(
|
||||||
|
'git',
|
||||||
|
['submodule', 'status'],
|
||||||
|
{cwd}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (exitCode !== 0) {
|
||||||
|
core.warning(stderr || "Couldn't get submodule names")
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
|
||||||
|
return stdout
|
||||||
|
.split('\n')
|
||||||
|
.filter(Boolean)
|
||||||
|
.map(line => line.split(' ')[1])
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Extracts the previous/current commit SHAs recorded for a submodule between
 * two parent-repository commits, by parsing the "Subproject commit <sha>"
 * lines in the output of `git diff`.
 *
 * @returns `{previousSha, currentSha}` when a new submodule commit is found;
 *   an empty object (after a warning) otherwise. `previousSha` falls back to
 *   git's well-known empty-tree hash when the submodule was newly added.
 * @throws when `git diff` itself fails.
 */
export const gitSubmoduleDiffSHA = async ({
  cwd,
  parentSha1,
  parentSha2,
  submodulePath,
  diff
}: {
  cwd: string
  parentSha1: string
  parentSha2: string
  submodulePath: string
  diff: string
}): Promise<{previousSha?: string; currentSha?: string}> => {
  const {exitCode, stdout, stderr} = await exec.getExecOutput(
    'git',
    ['diff', parentSha1, parentSha2, '--', submodulePath],
    {cwd}
  )

  if (exitCode !== 0) {
    throw new Error(stderr || 'An unexpected error occurred')
  }

  // "-Subproject commit <sha>" is the submodule SHA before the change,
  // "+Subproject commit <sha>" the SHA after it.
  const subprojectCommitPreRegex =
    /^(?<preCommit>-)Subproject commit (?<commitHash>.+)$/m
  const subprojectCommitCurRegex =
    /^(?<curCommit>\+)Subproject commit (?<commitHash>.+)$/m

  // 4b825d... is git's constant empty-tree hash: used when the submodule has
  // no previous commit (newly added).
  const previousSha =
    subprojectCommitPreRegex.exec(stdout)?.groups?.commitHash ||
    '4b825dc642cb6eb9a060e54bf8d69288fbee4904'
  const currentSha = subprojectCommitCurRegex.exec(stdout)?.groups?.commitHash

  if (currentSha) {
    return {previousSha, currentSha}
  }

  core.warning(
    `No submodule commit found for ${submodulePath} between ${parentSha1}${diff}${parentSha2}`
  )
  return {}
}
|
||||||
|
|
||||||
|
export const gitRenamedFiles = async ({
|
||||||
|
cwd,
|
||||||
|
sha1,
|
||||||
|
sha2,
|
||||||
|
diff,
|
||||||
|
oldNewSeparator,
|
||||||
|
isSubmodule = false
|
||||||
|
}: {
|
||||||
|
cwd: string
|
||||||
|
sha1: string
|
||||||
|
sha2: string
|
||||||
|
diff: string
|
||||||
|
oldNewSeparator: string
|
||||||
|
isSubmodule?: boolean
|
||||||
|
}): Promise<string[]> => {
|
||||||
|
const {exitCode, stderr, stdout} = await exec.getExecOutput(
|
||||||
|
'git',
|
||||||
|
[
|
||||||
|
'diff',
|
||||||
|
'--name-status',
|
||||||
|
'--ignore-submodules=all',
|
||||||
|
'--diff-filter=R',
|
||||||
|
`${sha1}${diff}${sha2}`
|
||||||
|
],
|
||||||
|
{cwd}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (exitCode !== 0) {
|
||||||
|
if (isSubmodule) {
|
||||||
|
core.warning(
|
||||||
|
stderr ||
|
||||||
|
`Failed to get renamed files for submodule between: ${sha1}${diff}${sha2}`
|
||||||
|
)
|
||||||
|
core.warning(
|
||||||
|
'Please ensure that submodules are initialized and up to date. See: https://github.com/actions/checkout#usage'
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
core.error(
|
||||||
|
stderr || `Failed to get renamed files between: ${sha1}${diff}${sha2}`
|
||||||
|
)
|
||||||
|
throw new Error('Unable to get renamed files')
|
||||||
|
}
|
||||||
|
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
|
||||||
|
return stdout
|
||||||
|
.trim()
|
||||||
|
.split('\n')
|
||||||
|
.map(line => {
|
||||||
|
const [, oldPath, newPath] = line.split('\t')
|
||||||
|
return `${escapeString(oldPath)}${oldNewSeparator}${escapeString(
|
||||||
|
newPath
|
||||||
|
)}`
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
export const gitDiff = async ({
|
||||||
|
cwd,
|
||||||
|
sha1,
|
||||||
|
sha2,
|
||||||
|
diff,
|
||||||
|
diffFilter,
|
||||||
|
filePatterns = [],
|
||||||
|
isSubmodule = false
|
||||||
|
}: {
|
||||||
|
cwd: string
|
||||||
|
sha1: string
|
||||||
|
sha2: string
|
||||||
|
diffFilter: string
|
||||||
|
diff: string
|
||||||
|
filePatterns?: Pattern[]
|
||||||
|
isSubmodule?: boolean
|
||||||
|
}): Promise<string[]> => {
|
||||||
|
const {exitCode, stdout, stderr} = await exec.getExecOutput(
|
||||||
|
'git',
|
||||||
|
[
|
||||||
|
'diff',
|
||||||
|
'--name-only',
|
||||||
|
'--ignore-submodules=all',
|
||||||
|
`--diff-filter=${diffFilter}`,
|
||||||
|
`${sha1}${diff}${sha2}`
|
||||||
|
],
|
||||||
|
{cwd}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (exitCode !== 0) {
|
||||||
|
if (isSubmodule) {
|
||||||
|
core.warning(
|
||||||
|
stderr ||
|
||||||
|
`Failed to get changed files for submodule between: ${sha1}${diff}${sha2}`
|
||||||
|
)
|
||||||
|
core.warning(
|
||||||
|
'Please ensure that submodules are initialized and up to date. See: https://github.com/actions/checkout#usage'
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
core.error(
|
||||||
|
stderr || `Failed to get changed files between: ${sha1}${diff}${sha2}`
|
||||||
|
)
|
||||||
|
throw new Error('Unable to get changed files')
|
||||||
|
}
|
||||||
|
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
|
||||||
|
return stdout
|
||||||
|
.split('\n')
|
||||||
|
.filter(filePath => {
|
||||||
|
if (filePatterns.length === 0) {
|
||||||
|
return filePath !== ''
|
||||||
|
}
|
||||||
|
|
||||||
|
const match = patternHelper.match(filePatterns, filePath)
|
||||||
|
return filePath !== '' && match === MatchKind.All
|
||||||
|
})
|
||||||
|
.map(p => escapeString(p))
|
||||||
|
}
|
||||||
|
|
||||||
|
export const gitLog = async ({
|
||||||
|
args,
|
||||||
|
cwd
|
||||||
|
}: {
|
||||||
|
args: string[]
|
||||||
|
cwd: string
|
||||||
|
}): Promise<string> => {
|
||||||
|
const {exitCode, stdout, stderr} = await exec.getExecOutput(
|
||||||
|
'git',
|
||||||
|
['log', ...args],
|
||||||
|
{cwd}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (exitCode !== 0) {
|
||||||
|
throw new Error(stderr || 'An unexpected error occurred')
|
||||||
|
}
|
||||||
|
|
||||||
|
return stdout.trim()
|
||||||
|
}
|
||||||
|
|
||||||
|
export const getHeadSha = async ({cwd}: {cwd: string}): Promise<string> => {
|
||||||
|
const {exitCode, stdout, stderr} = await exec.getExecOutput(
|
||||||
|
'git',
|
||||||
|
['rev-parse', 'HEAD'],
|
||||||
|
{cwd}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (exitCode !== 0) {
|
||||||
|
throw new Error(stderr || 'Unable to get HEAD sha')
|
||||||
|
}
|
||||||
|
|
||||||
|
return stdout.trim()
|
||||||
|
}
|
||||||
|
|
||||||
|
export const getParentHeadSha = async ({
|
||||||
|
cwd
|
||||||
|
}: {
|
||||||
|
cwd: string
|
||||||
|
}): Promise<string> => {
|
||||||
|
const {exitCode, stdout, stderr} = await exec.getExecOutput(
|
||||||
|
'git',
|
||||||
|
['rev-parse', 'HEAD^'],
|
||||||
|
{cwd}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (exitCode !== 0) {
|
||||||
|
throw new Error(stderr || 'Unable to get HEAD^ sha')
|
||||||
|
}
|
||||||
|
|
||||||
|
return stdout.trim()
|
||||||
|
}
|
||||||
|
|
||||||
|
export const getBranchHeadSha = async ({
|
||||||
|
branch,
|
||||||
|
cwd
|
||||||
|
}: {
|
||||||
|
branch: string
|
||||||
|
cwd: string
|
||||||
|
}): Promise<string> => {
|
||||||
|
const {exitCode, stdout, stderr} = await exec.getExecOutput(
|
||||||
|
'git',
|
||||||
|
['rev-parse', branch],
|
||||||
|
{cwd}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (exitCode !== 0) {
|
||||||
|
throw new Error(stderr || `Unable to get ${branch} head sha`)
|
||||||
|
}
|
||||||
|
|
||||||
|
return stdout.trim()
|
||||||
|
}
|
||||||
|
|
||||||
|
export const verifyCommitSha = async ({
|
||||||
|
sha,
|
||||||
|
cwd,
|
||||||
|
showAsErrorMessage = true
|
||||||
|
}: {
|
||||||
|
sha: string
|
||||||
|
cwd: string
|
||||||
|
showAsErrorMessage?: boolean
|
||||||
|
}): Promise<number> => {
|
||||||
|
const {exitCode, stderr} = await exec.getExecOutput(
|
||||||
|
'git',
|
||||||
|
['rev-parse', '--quiet', '--verify', `${sha}^{commit}`],
|
||||||
|
{cwd}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (exitCode !== 0) {
|
||||||
|
if (showAsErrorMessage) {
|
||||||
|
core.error(`Unable to locate the commit sha: ${sha}`)
|
||||||
|
core.error(
|
||||||
|
"Please verify that the commit sha is correct, and increase the 'fetch_depth' input if needed"
|
||||||
|
)
|
||||||
|
core.debug(stderr)
|
||||||
|
} else {
|
||||||
|
core.warning(`Unable to locate the commit sha: ${sha}`)
|
||||||
|
core.debug(stderr)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return exitCode
|
||||||
|
}
|
||||||
|
|
||||||
|
export const getPreviousGitTag = async ({
|
||||||
|
cwd
|
||||||
|
}: {
|
||||||
|
cwd: string
|
||||||
|
}): Promise<{tag: string; sha: string}> => {
|
||||||
|
const {exitCode, stdout, stderr} = await exec.getExecOutput(
|
||||||
|
'git',
|
||||||
|
['tag', '--sort=-version:refname'],
|
||||||
|
{cwd}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (exitCode !== 0) {
|
||||||
|
throw new Error(stderr || 'Unable to get previous tag')
|
||||||
|
}
|
||||||
|
|
||||||
|
const tags = stdout.trim().split('\n')
|
||||||
|
|
||||||
|
if (tags.length < 2) {
|
||||||
|
core.warning('No previous tag found')
|
||||||
|
return {tag: '', sha: ''}
|
||||||
|
}
|
||||||
|
|
||||||
|
const previousTag = tags[1]
|
||||||
|
|
||||||
|
const {
|
||||||
|
exitCode: exitCode2,
|
||||||
|
stdout: stdout2,
|
||||||
|
stderr: stderr2
|
||||||
|
} = await exec.getExecOutput('git', ['rev-parse', previousTag], {cwd})
|
||||||
|
|
||||||
|
if (exitCode2 !== 0) {
|
||||||
|
throw new Error(stderr2 || 'Unable to get previous tag')
|
||||||
|
}
|
||||||
|
|
||||||
|
const sha = stdout2.trim()
|
||||||
|
|
||||||
|
return {tag: previousTag, sha}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const canDiffCommits = async ({
|
||||||
|
cwd,
|
||||||
|
sha1,
|
||||||
|
sha2,
|
||||||
|
diff
|
||||||
|
}: {
|
||||||
|
cwd: string
|
||||||
|
sha1: string
|
||||||
|
sha2: string
|
||||||
|
diff: string
|
||||||
|
}): Promise<boolean> => {
|
||||||
|
const {exitCode, stderr} = await exec.getExecOutput(
|
||||||
|
'git',
|
||||||
|
['diff', '--name-only', '--ignore-submodules=all', `${sha1}${diff}${sha2}`],
|
||||||
|
{cwd}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (exitCode !== 0) {
|
||||||
|
core.warning(stderr || `Unable find merge base between ${sha1} and ${sha2}`)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
export const getDirnameMaxDepth = ({
|
||||||
|
pathStr,
|
||||||
|
dirNamesMaxDepth,
|
||||||
|
excludeRoot
|
||||||
|
}: {
|
||||||
|
pathStr: string
|
||||||
|
dirNamesMaxDepth?: number
|
||||||
|
excludeRoot?: boolean
|
||||||
|
}): string => {
|
||||||
|
const pathArr = pathStr.split(path.sep)
|
||||||
|
const maxDepth = Math.min(dirNamesMaxDepth || pathArr.length, pathArr.length)
|
||||||
|
let output = pathArr[0]
|
||||||
|
|
||||||
|
for (let i = 1; i < maxDepth; i++) {
|
||||||
|
output = path.join(output, pathArr[i])
|
||||||
|
}
|
||||||
|
|
||||||
|
if (excludeRoot && output === '.') {
|
||||||
|
return ''
|
||||||
|
}
|
||||||
|
|
||||||
|
return output
|
||||||
|
}
|
||||||
|
|
||||||
|
export const jsonOutput = ({
|
||||||
|
value,
|
||||||
|
rawFormat
|
||||||
|
}: {
|
||||||
|
value: any
|
||||||
|
rawFormat: boolean
|
||||||
|
}): string => {
|
||||||
|
if (rawFormat) {
|
||||||
|
return JSON.stringify(value)
|
||||||
|
} else {
|
||||||
|
return JSON.stringify(value, null, 2)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export const getFilePatterns = async ({
|
||||||
|
inputs
|
||||||
|
}: {
|
||||||
|
inputs: Inputs
|
||||||
|
}): Promise<Pattern[]> => {
|
||||||
|
let filesPatterns: string = inputs.files
|
||||||
|
.split(inputs.filesSeparator)
|
||||||
|
.filter(p => p !== '')
|
||||||
|
.join('\n')
|
||||||
|
|
||||||
|
core.debug(`files patterns: ${filesPatterns}`)
|
||||||
|
|
||||||
|
if (inputs.filesFromSourceFile !== '') {
|
||||||
|
const inputFilesFromSourceFile = inputs.filesFromSourceFile
|
||||||
|
.split(inputs.filesFromSourceFileSeparator)
|
||||||
|
.filter(p => p !== '')
|
||||||
|
|
||||||
|
const filesFromSourceFiles = (
|
||||||
|
await getFilesFromSourceFile({filePaths: inputFilesFromSourceFile})
|
||||||
|
).join('\n')
|
||||||
|
|
||||||
|
core.debug(`files from source files patterns: ${filesFromSourceFiles}`)
|
||||||
|
|
||||||
|
filesPatterns = filesPatterns.concat('\n', filesFromSourceFiles)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (inputs.filesIgnore) {
|
||||||
|
const filesIgnorePatterns = inputs.filesIgnore
|
||||||
|
.split(inputs.filesIgnoreSeparator)
|
||||||
|
.filter(p => p !== '')
|
||||||
|
.map(p => {
|
||||||
|
if (!p.startsWith('!')) {
|
||||||
|
p = `!${p}`
|
||||||
|
}
|
||||||
|
return p
|
||||||
|
})
|
||||||
|
.join('\n')
|
||||||
|
|
||||||
|
core.debug(`files ignore patterns: ${filesIgnorePatterns}`)
|
||||||
|
|
||||||
|
filesPatterns = filesPatterns.concat('\n', filesIgnorePatterns)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (inputs.filesIgnoreFromSourceFile) {
|
||||||
|
const inputFilesIgnoreFromSourceFile = inputs.filesIgnoreFromSourceFile
|
||||||
|
.split(inputs.filesIgnoreFromSourceFileSeparator)
|
||||||
|
.filter(p => p !== '')
|
||||||
|
|
||||||
|
const filesIgnoreFromSourceFiles = (
|
||||||
|
await getFilesFromSourceFile({
|
||||||
|
filePaths: inputFilesIgnoreFromSourceFile,
|
||||||
|
excludedFiles: true
|
||||||
|
})
|
||||||
|
).join('\n')
|
||||||
|
|
||||||
|
core.debug(
|
||||||
|
`files ignore from source files patterns: ${filesIgnoreFromSourceFiles}`
|
||||||
|
)
|
||||||
|
|
||||||
|
filesPatterns = filesPatterns.concat('\n', filesIgnoreFromSourceFiles)
|
||||||
|
}
|
||||||
|
|
||||||
|
const patterns = await getPatterns(filesPatterns)
|
||||||
|
|
||||||
|
core.debug(`patterns: ${patterns}`)
|
||||||
|
|
||||||
|
return patterns
|
||||||
|
}
|
||||||
|
|
||||||
|
export const setOutput = async ({
|
||||||
|
key,
|
||||||
|
value,
|
||||||
|
inputs
|
||||||
|
}: {
|
||||||
|
key: string
|
||||||
|
value: string | boolean
|
||||||
|
inputs: Inputs
|
||||||
|
}): Promise<void> => {
|
||||||
|
const cleanedValue = value.toString().trim()
|
||||||
|
|
||||||
|
if (inputs.writeOutputFiles) {
|
||||||
|
const outputDir = inputs.outputDir || '.github/outputs'
|
||||||
|
const extension = inputs.json ? 'json' : 'txt'
|
||||||
|
const outputFilePath = path.join(outputDir, `${key}.${extension}`)
|
||||||
|
|
||||||
|
if (!(await exists(outputDir))) {
|
||||||
|
await fs.mkdir(outputDir, {recursive: true})
|
||||||
|
}
|
||||||
|
await fs.writeFile(outputFilePath, cleanedValue)
|
||||||
|
} else {
|
||||||
|
core.setOutput(key, cleanedValue)
|
||||||
|
}
|
||||||
|
}
|
12
tsconfig.json
Normal file
12
tsconfig.json
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
"target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
|
||||||
|
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
|
||||||
|
"outDir": "./lib", /* Redirect output structure to the directory. */
|
||||||
|
"rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
|
||||||
|
"strict": true, /* Enable all strict type-checking options. */
|
||||||
|
"noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
|
||||||
|
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
|
||||||
|
},
|
||||||
|
"exclude": ["node_modules", "jest/setEnvVars.cjs"]
|
||||||
|
}
|
Loading…
x
Reference in New Issue
Block a user