Compare commits
No commits in common. "main" and "v37.1.1" have entirely different histories.
@@ -187,73 +187,6 @@
       "contributions": [
         "doc"
       ]
-    },
-    {
-      "login": "V0lantis",
-      "name": "Arthur",
-      "avatar_url": "https://avatars.githubusercontent.com/u/37664438?v=4",
-      "profile": "https://arthurvolant.com",
-      "contributions": [
-        "bug",
-        "code"
-      ]
-    },
-    {
-      "login": "rodrigorfk",
-      "name": "Rodrigo Fior Kuntzer",
-      "avatar_url": "https://avatars.githubusercontent.com/u/1995033?v=4",
-      "profile": "https://github.com/rodrigorfk",
-      "contributions": [
-        "code",
-        "test",
-        "bug"
-      ]
-    },
-    {
-      "login": "levenleven",
-      "name": "Aleksey Levenstein",
-      "avatar_url": "https://avatars.githubusercontent.com/u/6463364?v=4",
-      "profile": "https://github.com/levenleven",
-      "contributions": [
-        "doc"
-      ]
-    },
-    {
-      "login": "dan-hill2802",
-      "name": "Daniel Hill",
-      "avatar_url": "https://avatars.githubusercontent.com/u/5046322?v=4",
-      "profile": "https://github.com/dan-hill2802",
-      "contributions": [
-        "doc"
-      ]
-    },
-    {
-      "login": "KeisukeYamashita",
-      "name": "KeisukeYamashita",
-      "avatar_url": "https://avatars.githubusercontent.com/u/23056537?v=4",
-      "profile": "https://keisukeyamashita.com",
-      "contributions": [
-        "doc"
-      ]
-    },
-    {
-      "login": "codesculpture",
-      "name": "Aravind",
-      "avatar_url": "https://avatars.githubusercontent.com/u/63452117?v=4",
-      "profile": "https://github.com/codesculpture",
-      "contributions": [
-        "code",
-        "bug"
-      ]
-    },
-    {
-      "login": "Whadup",
-      "name": "Lukas Pfahler",
-      "avatar_url": "https://avatars.githubusercontent.com/u/2308119?v=4",
-      "profile": "https://lukaspfahler.de",
-      "contributions": [
-        "code"
-      ]
     }
   ],
   "contributorsPerLine": 7,
@@ -262,6 +195,5 @@
   "repoType": "github",
   "repoHost": "https://github.com",
   "skipCi": true,
-  "commitConvention": "angular",
-  "commitType": "docs"
+  "commitConvention": "angular"
 }
@@ -1,4 +0,0 @@
----
-exclude_paths:
-  - "*.md"
-  - "dist/**"
@@ -2,4 +2,3 @@ dist/
 lib/
 node_modules/
 jest.config.js
-coverage/
@@ -5,8 +5,7 @@
     "github"
   ],
  "extends": [
-    "plugin:github/recommended",
-    "plugin:prettier/recommended"
+    "plugin:github/recommended"
  ],
  "parser": "@typescript-eslint/parser",
  "parserOptions": {
.github/FUNDING.yml (new file, +12)
@@ -0,0 +1,12 @@
+# These are supported funding model platforms
+
+github: jackton1
+patreon: # Replace with a single Patreon username
+open_collective: tj-actions
+ko_fi: # Replace with a single Ko-fi username
+tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
+community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
+liberapay: # Replace with a single Liberapay username
+issuehunt: # Replace with a single IssueHunt username
+otechie: # Replace with a single Otechie username
+custom: []
.github/ISSUE_TEMPLATE/bug_report.yaml (new file, +98)
@@ -0,0 +1,98 @@
+name: 🐞 Bug
+description: Create a report to help us improve
+title: "[BUG] <title>"
+labels: [bug, needs triage]
+
+body:
+  - type: markdown
+    attributes:
+      value: |
+        Thanks for taking the time to fill out this bug report!
+  - type: checkboxes
+    attributes:
+      label: Is there an existing issue for this?
+      description: Please search to see if an issue already exists for the bug you encountered.
+      options:
+        - label: I have searched the existing issues
+          required: true
+  - type: checkboxes
+    attributes:
+      label: Does this issue exist in the latest version?
+      description: Please view all releases to confirm that this issue hasn't already been fixed.
+      options:
+        - label: I'm using the latest release
+          required: true
+  - type: textarea
+    id: what-happened
+    attributes:
+      label: Describe the bug?
+      description: A clear and concise description of what the bug is
+      placeholder: Tell us what you see!
+    validations:
+      required: true
+  - type: textarea
+    id: reproduce
+    attributes:
+      label: To Reproduce
+      description: Steps to reproduce the behavior?
+      placeholder: |
+        1. In this environment...
+        2. With this config...
+        3. Run '...'
+        4. See error...
+    validations:
+      required: true
+  - type: dropdown
+    id: os
+    attributes:
+      label: What OS are you seeing the problem on?
+      multiple: true
+      options:
+        - all
+        - ubuntu-latest or ubuntu-20.04
+        - ubuntu-18.04
+        - macos-latest or macos-10.15
+        - macos-11
+        - windows-latest or windows-2019
+        - windows-2016
+    validations:
+      required: true
+  - type: textarea
+    id: expected
+    attributes:
+      label: Expected behavior?
+      description: A clear and concise description of what you expected to happen.
+      placeholder: Tell us what you expected!
+    validations:
+      required: true
+  - type: textarea
+    id: logs
+    attributes:
+      label: Relevant log output
+      description: Please copy and paste any relevant log output which is obtained after enabling debug logging. This will be automatically formatted into code, so no need for backticks.
+      placeholder: |
+        1. Re-running the workflow with debug logging enabled.
+        2. Copy or download the log archive.
+        3. Paste the contents here or upload the file in a subsequent comment.
+      render: shell
+    validations:
+      required: true
+  - type: textarea
+    attributes:
+      label: Anything else?
+      description: |
+        Links? or References?
+
+        Anything that will give us more context about the issue you are encountering!
+
+        Tip: You can attach images or log files by clicking this area to highlight it and then dragging files in.
+    validations:
+      required: false
+  - type: checkboxes
+    id: terms
+    attributes:
+      label: Code of Conduct
+      description: By submitting this issue, you agree to follow our [Code of Conduct](../blob/main/CODE_OF_CONDUCT.md)
+      options:
+        - label: I agree to follow this project's Code of Conduct
+          required: true
.github/ISSUE_TEMPLATE/feature_request.yaml (new file, +59)
@@ -0,0 +1,59 @@
+name: Feature request
+description: Suggest an idea for this project
+title: "[Feature] <title>"
+labels: [enhancement]
+
+body:
+  - type: markdown
+    attributes:
+      value: |
+        Thanks for taking the time to fill out this feature request!
+  - type: checkboxes
+    attributes:
+      label: Is this feature missing in the latest version?
+      description: Please upgrade to the latest version to verify that this feature is still missing.
+      options:
+        - label: I'm using the latest release
+          required: true
+  - type: textarea
+    id: what-happened
+    attributes:
+      label: Is your feature request related to a problem? Please describe.
+      description: |
+        A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+      placeholder: Tell us what you see!
+    validations:
+      required: true
+  - type: textarea
+    id: requests
+    attributes:
+      label: Describe the solution you'd like?
+      description: A clear and concise description of what you want to happen.
+    validations:
+      required: true
+  - type: textarea
+    id: alternative
+    attributes:
+      label: Describe alternatives you've considered?
+      description: A clear and concise description of any alternative solutions or features you've considered.
+    validations:
+      required: false
+  - type: textarea
+    attributes:
+      label: Anything else?
+      description: |
+        Links? or References?
+
+        Add any other context or screenshots about the feature request here.
+
+        Tip: You can attach images or log files by clicking this area to highlight it and then dragging files in.
+    validations:
+      required: false
+  - type: checkboxes
+    id: terms
+    attributes:
+      label: Code of Conduct
+      description: By submitting this issue, you agree to follow our [Code of Conduct](../blob/main/CODE_OF_CONDUCT.md)
+      options:
+        - label: I agree to follow this project's Code of Conduct
+          required: true
.github/dependabot.yml (7 changes)
@@ -15,10 +15,3 @@ updates:
     open-pull-requests-limit: 10
     labels:
       - "merge when passing"
-  - package-ecosystem: gitsubmodule
-    directory: /
-    schedule:
-      interval: daily
-    open-pull-requests-limit: 10
-    labels:
-      - "merge when passing"
.github/workflows/auto-approve.yml (new file, +33)
@@ -0,0 +1,33 @@
+name: Auto approve
+
+on:
+  pull_request_target
+
+
+jobs:
+  auto-approve:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: hmarr/auto-approve-action@v3
+        if: |
+          (
+            github.event.pull_request.user.login == 'dependabot[bot]' ||
+            github.event.pull_request.user.login == 'dependabot' ||
+            github.event.pull_request.user.login == 'dependabot-preview[bot]' ||
+            github.event.pull_request.user.login == 'dependabot-preview' ||
+            github.event.pull_request.user.login == 'renovate[bot]' ||
+            github.event.pull_request.user.login == 'renovate' ||
+            github.event.pull_request.user.login == 'github-actions[bot]'
+          )
+          &&
+          (
+            github.actor == 'dependabot[bot]' ||
+            github.actor == 'dependabot' ||
+            github.actor == 'dependabot-preview[bot]' ||
+            github.actor == 'dependabot-preview' ||
+            github.actor == 'renovate[bot]' ||
+            github.actor == 'renovate' ||
+            github.actor == 'github-actions[bot]'
+          )
+        with:
+          github-token: ${{ secrets.PAT_TOKEN }}
.github/workflows/codacy-analysis.yml (16 changes)
@@ -17,24 +17,24 @@ on:
   schedule:
     - cron: '15 16 * * 2'
 
-permissions:
-  actions: read
-  contents: read
-  security-events: write
-
 jobs:
   codacy-security-scan:
+    # Cancel other workflows that are running for the same branch
+    # https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#concurrency
+    concurrency:
+      group: ${{ github.workflow }}-${{ github.ref }}
+      cancel-in-progress: true
     name: Codacy Security Scan
     runs-on: ubuntu-latest
     steps:
       # Checkout the repository to the GitHub Actions runner
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@v3
 
       # Execute Codacy Analysis CLI and generate a SARIF output with the security issues identified during the analysis
       - name: Run Codacy Analysis CLI
         continue-on-error: true
-        uses: codacy/codacy-analysis-cli-action@v4.4.5
+        uses: codacy/codacy-analysis-cli-action@v4.3.0
         with:
           # Check https://github.com/codacy/codacy-analysis-cli#project-token to get your project token from your Codacy repository
           # You can also omit the token and run the tools that support default configurations
@@ -51,6 +51,6 @@ jobs:
       # Upload the SARIF file generated in the previous step
       - name: Upload SARIF results file
         continue-on-error: true
-        uses: github/codeql-action/upload-sarif@v3
+        uses: github/codeql-action/upload-sarif@v2
         with:
           sarif_file: results.sarif
.github/workflows/codeql.yml (13 changes)
@@ -20,11 +20,6 @@ on:
   schedule:
     - cron: '44 20 * * 0'
 
-permissions:
-  actions: read
-  contents: read
-  security-events: write
-
 jobs:
   analyze:
     name: Analyze
@@ -43,11 +38,11 @@ jobs:
 
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v3
 
       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3
+        uses: github/codeql-action/init@v2
         with:
           languages: ${{ matrix.language }}
           # If you wish to specify custom queries, you can do so here or in a config file.
@@ -61,7 +56,7 @@ jobs:
       # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
       # If this step fails, then you should remove it and run the build manually (see below)
       - name: Autobuild
-        uses: github/codeql-action/autobuild@v3
+        uses: github/codeql-action/autobuild@v2
 
       # ℹ️ Command-line programs to run using the OS shell.
       # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -74,6 +69,6 @@ jobs:
       # ./location_of_script_within_repo/buildscript.sh
 
       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3
+        uses: github/codeql-action/analyze@v2
         with:
           category: "/language:${{matrix.language}}"
.github/workflows/greetings.yml (7 changes)
@@ -2,17 +2,12 @@ name: Greetings
 
 on: [pull_request_target, issues]
 
-permissions:
-  pull-requests: write
-  issues: write
-
 jobs:
   greeting:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/first-interaction@v1
-        continue-on-error: true
         with:
-          repo-token: ${{ secrets.PAT_TOKEN }}
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
           issue-message: "Thanks for reporting this issue, don't forget to star this project if you haven't already to help us reach a wider audience."
           pr-message: "Thanks for implementing a fix, could you ensure that the test covers your changes if applicable."
.github/workflows/issue-comment-job-example.yml (deleted, -178)
@@ -1,178 +0,0 @@
-name: Issue Comment Job Example
-
-permissions:
-  contents: read
-
-on:
-  issue_comment:
-
-jobs:
-  pr_commented:
-    # This job only runs for pull request comments
-    name: PR comment
-    if: ${{ github.event.issue.pull_request }}
-    runs-on: ubuntu-latest
-    steps:
-      - run: |
-          echo A comment on PR $NUMBER
-        env:
-          NUMBER: ${{ github.event.issue.number }}
-
-      - name: Checkout
-        uses: actions/checkout@v4
-        with:
-          submodules: true
-          fetch-depth: 0
-
-      - name: Dump GitHub context
-        env:
-          GITHUB_CONTEXT: ${{ toJson(github) }}
-        run: echo "$GITHUB_CONTEXT"
-
-      - name: Run changed-files with defaults
-        id: changed-files
-        uses: ./
-
-      - name: Show output
-        run: |
-          echo '${{ toJSON(steps.changed-files.outputs) }}'
-        shell:
-          bash
-
-      - name: Run changed-files for old new filenames test rename
-        id: changed-files-all-old-new-renamed-files
-        uses: ./
-        with:
-          base_sha: d1c0ee4
-          sha: 4d04215
-          fetch_depth: 60000
-          include_all_old_new_renamed_files: true
-
-      - name: Show output
-        run: |
-          echo '${{ toJSON(steps.changed-files-all-old-new-renamed-files.outputs) }}'
-        shell:
-          bash
-
-      - name: Show output
-        run: |
-          echo '${{ toJSON(steps.changed-files-all-old-new-renamed-files.outputs) }}'
-        shell:
-          bash
-
-      - name: Check all_old_new_renamed_files output on non windows platform
-        if: "!contains(steps.changed-files-all-old-new-renamed-files.outputs.all_old_new_renamed_files, 'test/test rename 1.txt,test/test rename-1.txt') && runner.os != 'Windows'"
-        run: |
-          echo "Invalid output: Expected to include (test/test rename 1.txt,test/test rename-1.txt) got (${{ steps.changed-files-all-old-new-renamed-files.outputs.all_old_new_renamed_files }})"
-          exit 1
-        shell:
-          bash
-
-      - name: Check all_old_new_renamed_files output on windows platform
-        if: "!contains(steps.changed-files-all-old-new-renamed-files.outputs.all_old_new_renamed_files, 'test\\test rename 1.txt,test\\test rename-1.txt') && runner.os == 'Windows'"
-        run: |
-          echo "Invalid output: Expected to not include (test\\test rename 1.txt,test\\test rename-1.txt) got (${{ steps.changed-files-all-old-new-renamed-files.outputs.all_old_new_renamed_files }})"
-          exit 1
-        shell:
-          bash
-
-      - name: Check the renamed_files output on non windows platform
-        if: "!contains(steps.changed-files-all-old-new-renamed-files.outputs.renamed_files, 'test/test rename-1.txt') && runner.os != 'Windows'"
-        run: |
-          echo "Invalid output: Expected to include (test/test rename-1.txt) got (${{ steps.changed-files-all-old-new-renamed-files.outputs.renamed_files }})"
-          exit 1
-        shell:
-          bash
-
-      - name: Check the renamed_files output on windows platform
-        if: "!contains(steps.changed-files-all-old-new-renamed-files.outputs.renamed_files, 'test\\test rename-1.txt') && runner.os == 'Windows'"
-        run: |
-          echo "Invalid output: Expected to not include (test\\test rename-1.txt) got (${{ steps.changed-files-all-old-new-renamed-files.outputs.renamed_files }})"
-          exit 1
-        shell:
-          bash
-
-  issue_commented:
-    # This job only runs for issue comments
-    name: Issue comment
-    if: ${{ !github.event.issue.pull_request }}
-    runs-on: ubuntu-latest
-    steps:
-      - run: |
-          echo A comment on issue $NUMBER
-        env:
-          NUMBER: ${{ github.event.issue.number }}
-
-      - name: Checkout
-        uses: actions/checkout@v4
-        with:
-          submodules: true
-          fetch-depth: 0
-
-      - name: Dump GitHub context
-        env:
-          GITHUB_CONTEXT: ${{ toJson(github) }}
-        run: echo "$GITHUB_CONTEXT"
-
-      - name: Run changed-files with defaults
-        id: changed-files
-        uses: ./
-
-      - name: Show output
-        run: |
-          echo '${{ toJSON(steps.changed-files.outputs) }}'
-        shell:
-          bash
-
-      - name: Run changed-files for old new filenames test rename
-        id: changed-files-all-old-new-renamed-files
-        uses: ./
-        with:
-          base_sha: d1c0ee4
-          sha: 4d04215
-          fetch_depth: 60000
-          include_all_old_new_renamed_files: true
-
-      - name: Show output
-        run: |
-          echo '${{ toJSON(steps.changed-files-all-old-new-renamed-files.outputs) }}'
-        shell:
-          bash
-
-      - name: Show output
-        run: |
-          echo '${{ toJSON(steps.changed-files-all-old-new-renamed-files.outputs) }}'
-        shell:
-          bash
-
-      - name: Check all_old_new_renamed_files output on non windows platform
-        if: "!contains(steps.changed-files-all-old-new-renamed-files.outputs.all_old_new_renamed_files, 'test/test rename 1.txt,test/test rename-1.txt') && runner.os != 'Windows'"
-        run: |
-          echo "Invalid output: Expected to include (test/test rename 1.txt,test/test rename-1.txt) got (${{ steps.changed-files-all-old-new-renamed-files.outputs.all_old_new_renamed_files }})"
-          exit 1
-        shell:
-          bash
-
-      - name: Check all_old_new_renamed_files output on windows platform
-        if: "!contains(steps.changed-files-all-old-new-renamed-files.outputs.all_old_new_renamed_files, 'test\\test rename 1.txt,test\\test rename-1.txt') && runner.os == 'Windows'"
-        run: |
-          echo "Invalid output: Expected to not include (test\\test rename 1.txt,test\\test rename-1.txt) got (${{ steps.changed-files-all-old-new-renamed-files.outputs.all_old_new_renamed_files }})"
-          exit 1
-        shell:
-          bash
-
-      - name: Check the renamed_files output on non windows platform
-        if: "!contains(steps.changed-files-all-old-new-renamed-files.outputs.renamed_files, 'test/test rename-1.txt') && runner.os != 'Windows'"
-        run: |
-          echo "Invalid output: Expected to include (test/test rename-1.txt) got (${{ steps.changed-files-all-old-new-renamed-files.outputs.renamed_files }})"
-          exit 1
-        shell:
-          bash
-
-      - name: Check the renamed_files output on windows platform
-        if: "!contains(steps.changed-files-all-old-new-renamed-files.outputs.renamed_files, 'test\\test rename-1.txt') && runner.os == 'Windows'"
-        run: |
-          echo "Invalid output: Expected to not include (test\\test rename-1.txt) got (${{ steps.changed-files-all-old-new-renamed-files.outputs.renamed_files }})"
-          exit 1
-        shell:
-          bash
@@ -1,11 +1,9 @@
-name: Manual Triggered Job Example
-
-permissions:
-  contents: read
+name: Manual Test
 
 on:
   workflow_dispatch:
 
 
 jobs:
   test:
     name: Test changed-files
@@ -18,7 +16,7 @@ jobs:
 
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v3
         with:
           submodules: true
           fetch-depth: 0
@@ -1,7 +1,4 @@
-name: Matrix Example
-
-permissions:
-  contents: read
+name: Matrix Test
 
 on:
   workflow_dispatch:
@@ -11,35 +8,37 @@ on:
 
 jobs:
   changed-files:
-    name: Get changed files
+    name: Get changes
     runs-on: ubuntu-latest
     outputs:
-      matrix: ${{ steps.changed-files.outputs.all_changed_files }}
+      matrix: ${{ steps.set-matrix.outputs.matrix }}
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v3
         with:
           fetch-depth: 0
       - name: Get changed files
         id: changed-files
         uses: ./
         with:
-          matrix: true
+          json: true
+          quotepath: false
       - name: List all changed files
         run: echo '${{ steps.changed-files.outputs.all_changed_files }}'
+      - id: set-matrix
+        run: echo "matrix={\"files\":${{ steps.changed-files.outputs.all_changed_files }}}" >> "$GITHUB_OUTPUT"
 
   matrix-job:
     name: Run Matrix Job
     runs-on: ubuntu-latest
     needs: [changed-files]
     strategy:
-      matrix:
-        files: ${{ fromJSON(needs.changed-files.outputs.matrix) }}
+      matrix: ${{ fromJSON(needs.changed-files.outputs.matrix) }}
       max-parallel: 4
       fail-fast: false
    steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v3
       - name: Test
         run: |
           echo ${{ matrix.files }}
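Aside: a minimal sketch, assembled from the main-branch side of the hunks above, of how the `matrix: true` input feeds a matrix job. The release tag below is illustrative (the in-repo workflow references the action as `uses: ./`):

name: Matrix Sketch

on: workflow_dispatch

jobs:
  changed-files:
    runs-on: ubuntu-latest
    outputs:
      matrix: ${{ steps.changed-files.outputs.all_changed_files }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0 # full history so the diff has enough context
      - id: changed-files
        uses: tj-actions/changed-files@v41 # illustrative pin, not from the diff
        with:
          matrix: true # per the action.yml hunks later in this compare: alias for json: true + escape_json: false

  matrix-job:
    runs-on: ubuntu-latest
    needs: [changed-files]
    strategy:
      matrix:
        files: ${{ fromJSON(needs.changed-files.outputs.matrix) }} # JSON array output feeds the matrix
      fail-fast: false
    steps:
      - run: echo "${{ matrix.files }}"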
.github/workflows/multi-job-example.yml (deleted, -67)
@@ -1,67 +0,0 @@
-name: Multi Job Example
-
-permissions:
-  contents: read
-
-on:
-  push:
-    branches:
-      - "**"
-  pull_request:
-    branches:
-      - "**"
-
-jobs:
-  changed-files:
-    name: Get changed files
-    runs-on: ubuntu-latest
-    outputs:
-      all_changed_files: ${{ steps.changed-files.outputs.all_changed_files }}
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-      - name: Get changed files
-        id: changed-files
-        uses: ./
-      - name: List all changed files
-        run: echo '${{ steps.changed-files.outputs.all_changed_files }}'
-
-  view-changed-files:
-    name: View all changed files
-    runs-on: ubuntu-latest
-    needs: [changed-files]
-    steps:
-      - name: List all changed files
-        run: |
-          echo '${{ needs.changed-files.outputs.all_changed_files }}'
-
-
-  changed-files-rest-api:
-    name: Get changed files using REST API
-    runs-on: ubuntu-latest
-    outputs:
-      all_changed_files: ${{ steps.changed-files.outputs.all_changed_files }}
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-      - name: Get changed files
-        id: changed-files
-        continue-on-error: ${{ github.event_name == 'push' }}
-        uses: ./
-        with:
-          use_rest_api: true
-      - name: List all changed files
-        run: echo '${{ steps.changed-files.outputs.all_changed_files }}'
-
-  view-changed-files-rest-api:
-    name: View all changed files using REST API
-    runs-on: ubuntu-latest
-    needs: [changed-files-rest-api]
-    steps:
-      - name: List all changed files
-        run: |
-          echo '${{ needs.changed-files-rest-api.outputs.all_changed_files }}'
.github/workflows/submodule-sync.yml (new file, +29)
@@ -0,0 +1,29 @@
+on:
+  workflow_dispatch:
+
+jobs:
+  sync:
+    name: Submodule Sync
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+          submodules: recursive
+
+      - name: Git Sumbodule Update
+        run: |
+          git pull --recurse-submodules
+          git submodule update --remote --recursive
+
+      - name: Create Pull Request
+        uses: peter-evans/create-pull-request@v5.0.2
+        with:
+          title: "Updated submodule"
+          labels: "merge when passing"
+          branch: "chore/update-submodule"
+          commit-message: "Updated submodule"
+          body: "Updated submodule"
+          token: ${{ secrets.PAT_TOKEN }}
.github/workflows/sync-release-version.yml (21 changes)
@@ -1,9 +1,4 @@
-name: Update release version
-
-permissions:
-  contents: write
-  pull-requests: write
-
+name: Update release version.
 on:
   release:
     types: [published]
@@ -13,11 +8,11 @@ jobs:
   update-version:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
         with:
           fetch-depth: 0
       - name: Run release-tagger
-        uses: tj-actions/release-tagger@v4
+        uses: tj-actions/release-tagger@v3
       - name: Sync release version.
         uses: tj-actions/sync-release-version@v13
         id: sync-release-version
@@ -26,18 +21,10 @@ jobs:
           only_major: true
           paths: |
             README.md
-      - name: Sync release package version.
-        uses: tj-actions/sync-release-version@v13
-        id: sync-release-package-version
-        with:
-          pattern: '"version": "'
-          strip_prefix: "v"
-          paths: |
-            package.json
       - name: Run git-cliff
         uses: tj-actions/git-cliff@v1
       - name: Create Pull Request
-        uses: peter-evans/create-pull-request@v7.0.5
+        uses: peter-evans/create-pull-request@v5.0.2
         with:
           base: "main"
           labels: "merge when passing"
.github/workflows/test.yml (955 changes)
File diff suppressed because it is too large
.github/workflows/update-readme.yml (15 changes)
@@ -1,9 +1,5 @@
 name: Format README.md
 
-permissions:
-  contents: read
-  pull-requests: write
-
 on:
   push:
     branches:
@@ -13,21 +9,18 @@ jobs:
   sync-assets:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
         with:
           fetch-depth: 0
 
       - name: Run auto-doc
-        uses: tj-actions/auto-doc@v3
-        with:
-          use_code_blocks: true
-          use_major_version: true
+        uses: tj-actions/auto-doc@v2
 
       - name: Run remark
         uses: tj-actions/remark@v3
 
       - name: Verify Changed files
-        uses: tj-actions/verify-changed-files@v20
+        uses: tj-actions/verify-changed-files@v16
         id: verify_changed_files
         with:
           files: |
@@ -41,7 +34,7 @@ jobs:
 
       - name: Create Pull Request
         if: failure()
-        uses: peter-evans/create-pull-request@v7
+        uses: peter-evans/create-pull-request@v5
         with:
           base: "main"
           labels: "merge when passing"
.github/workflows/workflow-run-example.yml (deleted, -41)
@@ -1,41 +0,0 @@
-name: Workflow Run Example
-on:
-  workflow_run:
-    workflows: [Matrix Example]
-    types: [completed]
-
-permissions:
-  contents: read
-
-jobs:
-  on-success:
-    runs-on: ubuntu-latest
-    if: ${{ github.event.workflow_run.conclusion == 'success' }}
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-
-      - name: Get changed files
-        id: changed-files
-        uses: ./
-
-      - name: Echo list of changed files on success
-        run: |
-          echo "Changed files on success:"
-          echo "${{ steps.changed-files.outputs.all_changed_files }}"
-
-  on-failure:
-    runs-on: ubuntu-latest
-    if: ${{ github.event.workflow_run.conclusion == 'failure' }}
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-
-      - name: Get changed files
-        id: changed-files
-        uses: ./
-
-      - name: Echo list of changed files on failure
-        run: |
-          echo "Changed files on failure:"
-          echo "${{ steps.changed-files.outputs.all_changed_files }}"
HISTORY.md (2808 changes)
File diff suppressed because it is too large
SECURITY.md (deleted, -32)
@@ -1,32 +0,0 @@
-# Security Policy
-
-## Proactive Security Measures
-
-To proactively detect and address security vulnerabilities, we utilize several robust tools and processes:
-
-- **Dependency Updates:** We use [Renovate](https://renovatebot.com) and [Dependabot](https://docs.github.com/en/code-security/dependabot/dependabot-security-updates/about-dependabot-security-updates) to keep our dependencies updated and promptly patch detected vulnerabilities through automated PRs.
-- **[GitHub's Security Features](https://github.com/features/security):** Our repository and dependencies are continuously monitored via GitHub's security features, which include:
-  - **Code Scanning:** Using GitHub's CodeQL, all pull requests are scanned to identify potential vulnerabilities in our source code.
-  - **Automated Alerts:** Dependabot identifies vulnerabilities based on the GitHub Advisory Database and opens PRs with patches, while automated [secret scanning](https://docs.github.com/en/enterprise-cloud@latest/code-security/secret-scanning/about-secret-scanning#about-secret-scanning-for-partner-patterns) provides alerts for detected secrets.
-- **[GitGuardian Security Checks](https://www.gitguardian.com/):** We employ GitGuardian to ensure security checks are performed on the codebase, enhancing the overall security of our project.
-- **Code Analysis and Security Scanning:** With the help of [Codacy Static Code Analysis](https://www.codacy.com/) and [Codacy Security Scan](https://security.codacy.com/), we conduct thorough analyses and scans of our code for potential security risks.
-
-## Reporting Security Vulnerabilities
-
-Despite our best efforts to deliver secure software, we acknowledge the invaluable role of the community in identifying security breaches.
-
-### Private Vulnerability Disclosures
-
-We request all suspected vulnerabilities to be responsibly and privately disclosed by sending an email to [support@tj-actions.online](mailto:support@tj-actions.online).
-
-### Public Vulnerability Disclosures
-
-For publicly disclosed security vulnerabilities, please **IMMEDIATELY** email [support@tj-actions.online](mailto:support@tj-actions.online) with the details for prompt action.
-
-Upon confirmation of a breach, reporters will receive full credit and recognition for their contribution. Please note, that we do not offer monetary compensation for reporting vulnerabilities.
-
-## Communication of Security Breaches
-
-We will utilize the [GitHub Security Advisory](https://github.com/tj-actions/changed-files/security/advisories) to communicate any security breaches. The advisory will be made public once a patch has been released to rectify the issue.
-
-We appreciate your cooperation and contribution to maintaining the security of our software. Remember, a secure community is a strong community.
150
action.yml
150
action.yml
@ -4,11 +4,11 @@ author: tj-actions
|
|||||||
|
|
||||||
inputs:
|
inputs:
|
||||||
separator:
|
separator:
|
||||||
description: "Split character for output strings."
|
description: "Split character for output strings"
|
||||||
required: false
|
required: false
|
||||||
default: " "
|
default: " "
|
||||||
include_all_old_new_renamed_files:
|
include_all_old_new_renamed_files:
|
||||||
description: "Include `all_old_new_renamed_files` output. Note this can generate a large output See: #501."
|
description: "Include `all_old_new_renamed_files` output. Note this can generate a large output See: [#501](https://github.com/tj-actions/changed-files/issues/501)."
|
||||||
required: false
|
required: false
|
||||||
default: "false"
|
default: "false"
|
||||||
old_new_separator:
|
old_new_separator:
|
||||||
@ -24,13 +24,11 @@ inputs:
|
|||||||
required: false
|
required: false
|
||||||
default: ""
|
default: ""
|
||||||
files_from_source_file_separator:
|
files_from_source_file_separator:
|
||||||
description: "Separator used to split the `files_from_source_file` input."
|
description: 'Separator used to split the `files_from_source_file` input'
|
||||||
default: "\n"
|
default: "\n"
|
||||||
required: false
|
required: false
|
||||||
files:
|
files:
|
||||||
description: |
|
description: "File and directory patterns used to detect changes (Defaults to the entire repo if unset) **NOTE:** Multiline file/directory patterns should not include quotes."
|
||||||
File and directory patterns used to detect changes (Defaults to the entire repo if unset).
|
|
||||||
NOTE: Multiline file/directory patterns should not include quotes.
|
|
||||||
required: false
|
required: false
|
||||||
default: ""
|
default: ""
|
||||||
files_separator:
|
files_separator:
|
||||||
@ -42,7 +40,7 @@ inputs:
|
|||||||
required: false
|
required: false
|
||||||
default: ""
|
default: ""
|
||||||
files_yaml_from_source_file:
|
files_yaml_from_source_file:
|
||||||
description: "Source file(s) used to populate the `files_yaml` input. Example: https://github.com/tj-actions/changed-files/blob/main/test/changed-files.yml"
|
description: "Source file(s) used to populate the `files_yaml` input. [Example](https://github.com/tj-actions/changed-files/blob/main/test/changed-files.yml)"
|
||||||
required: false
|
required: false
|
||||||
default: ""
|
default: ""
|
||||||
files_yaml_from_source_file_separator:
|
files_yaml_from_source_file_separator:
|
||||||
@ -54,7 +52,7 @@ inputs:
|
|||||||
required: false
|
required: false
|
||||||
default: ""
|
default: ""
|
||||||
files_ignore_yaml_from_source_file:
|
files_ignore_yaml_from_source_file:
|
||||||
description: "Source file(s) used to populate the `files_ignore_yaml` input. Example: https://github.com/tj-actions/changed-files/blob/main/test/changed-files.yml"
|
description: "Source file(s) used to populate the `files_ignore_yaml` input. [Example](https://github.com/tj-actions/changed-files/blob/main/test/changed-files.yml)"
|
||||||
required: false
|
required: false
|
||||||
default: ""
|
default: ""
|
||||||
files_ignore_yaml_from_source_file_separator:
|
files_ignore_yaml_from_source_file_separator:
|
||||||
@ -62,7 +60,7 @@ inputs:
|
|||||||
default: "\n"
|
default: "\n"
|
||||||
required: false
|
required: false
|
||||||
files_ignore:
|
files_ignore:
|
||||||
description: "Ignore changes to these file(s). NOTE: Multiline file/directory patterns should not include quotes."
|
description: "Ignore changes to these file(s) **NOTE:** Multiline file/directory patterns should not include quotes."
|
||||||
required: false
|
required: false
|
||||||
default: ""
|
default: ""
|
||||||
files_ignore_separator:
|
files_ignore_separator:
|
||||||
@ -78,10 +76,10 @@ inputs:
|
|||||||
default: "\n"
|
default: "\n"
|
||||||
required: false
|
required: false
|
||||||
sha:
|
sha:
|
||||||
description: "Specify a different commit SHA or branch used for comparing changes"
|
description: "Specify a different commit SHA used for comparing changes"
|
||||||
required: false
|
required: false
|
||||||
base_sha:
|
base_sha:
|
||||||
description: "Specify a different base commit SHA or branch used for comparing changes"
|
description: "Specify a different base commit SHA used for comparing changes"
|
||||||
required: false
|
required: false
|
||||||
since:
|
since:
|
||||||
description: "Get changed files for commits whose timestamp is older than the given time."
|
description: "Get changed files for commits whose timestamp is older than the given time."
|
||||||
@ -96,16 +94,16 @@ inputs:
|
|||||||
required: false
|
required: false
|
||||||
default: "."
|
default: "."
|
||||||
quotepath:
|
quotepath:
|
||||||
description: "Use non-ASCII characters to match files and output the filenames completely verbatim by setting this to `false`"
|
description: "Use non-ascii characters to match files and output the filenames completely verbatim by setting this to `false`"
|
||||||
default: "true"
|
default: "true"
|
||||||
required: false
|
required: false
|
||||||
diff_relative:
|
diff_relative:
|
||||||
description: "Exclude changes outside the current directory and show path names relative to it. NOTE: This requires you to specify the top-level directory via the `path` input."
|
description: "Exclude changes outside the current directory and show path names relative to it. **NOTE:** This requires you to specify the top level directory via the `path` input."
|
||||||
required: false
|
required: false
|
||||||
default: "true"
|
default: "true"
|
||||||
dir_names:
|
dir_names:
|
||||||
default: "false"
|
default: "false"
|
||||||
description: "Output unique changed directories instead of filenames. NOTE: This returns `.` for changed files located in the current working directory which defaults to `$GITHUB_WORKSPACE`."
|
description: "Output unique changed directories instead of filenames. **NOTE:** This returns `.` for changed files located in the current working directory which defaults to `$GITHUB_WORKSPACE`."
|
||||||
required: false
|
required: false
|
||||||
dir_names_max_depth:
|
dir_names_max_depth:
|
||||||
description: "Limit the directory output to a maximum depth e.g `test/test1/test2` with max depth of `2` returns `test/test1`."
|
description: "Limit the directory output to a maximum depth e.g `test/test1/test2` with max depth of `2` returns `test/test1`."
|
||||||
@ -114,42 +112,20 @@ inputs:
|
|||||||
description: "Exclude the current directory represented by `.` from the output when `dir_names` is set to `true`."
|
description: "Exclude the current directory represented by `.` from the output when `dir_names` is set to `true`."
|
||||||
required: false
|
required: false
|
||||||
default: "false"
|
default: "false"
|
||||||
dir_names_include_files:
|
|
||||||
description: "File and directory patterns to include in the output when `dir_names` is set to `true`. NOTE: This returns only the matching files and also the directory names."
|
|
||||||
required: false
|
|
||||||
default: ""
|
|
||||||
dir_names_include_files_separator:
|
|
||||||
description: "Separator used to split the `dir_names_include_files` input"
|
|
||||||
default: "\n"
|
|
||||||
required: false
|
|
||||||
dir_names_deleted_files_include_only_deleted_dirs:
|
|
||||||
description: "Include only directories that have been deleted as opposed to directory names of files that have been deleted in the `deleted_files` output when `dir_names` is set to `true`."
|
|
||||||
required: false
|
|
||||||
default: "false"
|
|
||||||
json:
|
json:
|
||||||
description: "Output list of changed files in a JSON formatted string which can be used for matrix jobs. Example: https://github.com/tj-actions/changed-files/blob/main/.github/workflows/matrix-example.yml"
|
description: "Output list of changed files in a JSON formatted string which can be used for matrix jobs."
|
||||||
required: false
|
required: false
|
||||||
default: "false"
|
default: "false"
|
||||||
escape_json:
|
escape_json:
|
||||||
description: "Escape JSON output."
|
description: "Escape JSON output."
|
||||||
required: false
|
required: false
|
||||||
default: "true"
|
default: "true"
|
||||||
safe_output:
|
|
||||||
description: "Apply sanitization to output filenames before being set as output."
|
|
||||||
required: false
|
|
||||||
default: "true"
|
|
||||||
fetch_depth:
|
fetch_depth:
|
||||||
description: "Depth of additional branch history fetched. NOTE: This can be adjusted to resolve errors with insufficient history."
|
description: "Depth of additional branch history fetched. **NOTE**: This can be adjusted to resolve errors with insufficient history."
|
||||||
required: false
|
required: false
|
||||||
default: "25"
|
default: "50"
|
||||||
skip_initial_fetch:
|
skip_initial_fetch:
|
||||||
description: |
|
description: "Skip the initial fetch to improve performance."
|
||||||
Skip initially fetching additional history to improve performance for shallow repositories.
|
|
||||||
NOTE: This could lead to errors with missing history. It's intended to be used when you've fetched all necessary history to perform the diff.
|
|
||||||
required: false
|
|
||||||
default: "false"
|
|
||||||
fetch_additional_submodule_history:
|
|
||||||
description: "Fetch additional history for submodules."
|
|
||||||
required: false
|
required: false
|
||||||
default: "false"
|
default: "false"
|
||||||
since_last_remote_commit:
|
since_last_remote_commit:
|
||||||
@ -157,7 +133,7 @@ inputs:
|
|||||||
required: false
|
required: false
|
||||||
default: "false"
|
default: "false"
|
||||||
write_output_files:
|
write_output_files:
|
||||||
description: "Write outputs to the `output_dir` defaults to `.github/outputs` folder. NOTE: This creates a `.txt` file by default and a `.json` file if `json` is set to `true`."
|
description: "Write outputs to the `output_dir` defaults to `.github/outputs` folder. **NOTE:** This creates a `.txt` file by default and a `.json` file if `json` is set to `true`."
|
||||||
required: false
|
required: false
|
||||||
default: "false"
|
default: "false"
|
||||||
output_dir:
|
output_dir:
|
||||||
@ -176,74 +152,14 @@ inputs:
|
|||||||
description: "Recover deleted files to a new destination directory, defaults to the original location."
|
description: "Recover deleted files to a new destination directory, defaults to the original location."
|
||||||
required: false
|
required: false
|
||||||
default: ""
|
default: ""
|
||||||
recover_files:
|
|
||||||
description: |
|
|
||||||
File and directory patterns used to recover deleted files,
|
|
||||||
defaults to the patterns provided via the `files`, `files_from_source_file`, `files_ignore` and `files_ignore_from_source_file` inputs
|
|
||||||
or all deleted files if no patterns are provided.
|
|
||||||
required: false
|
|
||||||
default: ""
|
|
||||||
recover_files_separator:
|
|
||||||
description: "Separator used to split the `recover_files` input"
|
|
||||||
default: "\n"
|
|
||||||
required: false
|
|
||||||
recover_files_ignore:
|
|
||||||
description: "File and directory patterns to ignore when recovering deleted files."
|
|
||||||
required: false
|
|
||||||
default: ""
|
|
||||||
recover_files_ignore_separator:
|
|
||||||
description: "Separator used to split the `recover_files_ignore` input"
|
|
||||||
default: "\n"
|
|
||||||
required: false
|
|
||||||
token:
|
token:
|
||||||
description: "GitHub token used to fetch changed files from Github's API."
|
description: "Github token used to fetch changed files from Github's API."
|
||||||
required: false
|
required: false
|
||||||
default: ${{ github.token }}
|
default: ${{ github.token }}
|
||||||
api_url:
|
api_url:
|
||||||
description: "Github API URL."
|
description: "Github API URL."
|
||||||
required: false
|
required: false
|
||||||
default: ${{ github.api_url }}
|
default: ${{ github.api_url }}
|
||||||
use_rest_api:
|
|
||||||
description: "Force the use of Github's REST API even when a local copy of the repository exists"
|
|
||||||
required: false
|
|
||||||
default: "false"
|
|
||||||
fail_on_initial_diff_error:
|
|
||||||
description: "Fail when the initial diff fails."
|
|
||||||
required: false
|
|
||||||
default: "false"
|
|
||||||
fail_on_submodule_diff_error:
|
|
||||||
description: "Fail when the submodule diff fails."
|
|
||||||
required: false
|
|
||||||
default: "false"
|
|
||||||
negation_patterns_first:
|
|
||||||
description: "Apply the negation patterns first. NOTE: This affects how changed files are matched."
|
|
||||||
required: false
|
|
||||||
default: "false"
|
|
||||||
matrix:
|
|
||||||
description: "Output changed files in a format that can be used for matrix jobs. Alias for setting inputs `json` to `true` and `escape_json` to `false`."
|
|
||||||
required: false
|
|
||||||
default: "false"
|
|
||||||
exclude_submodules:
|
|
||||||
description: "Exclude changes to submodules."
|
|
||||||
required: false
|
|
||||||
default: "false"
|
|
||||||
fetch_missing_history_max_retries:
|
|
||||||
description: "Maximum number of retries to fetch missing history."
|
|
||||||
required: false
|
|
||||||
default: "20"
|
|
||||||
use_posix_path_separator:
|
|
||||||
description: "Use POSIX path separator `/` for output file paths on Windows."
|
|
||||||
required: false
|
|
||||||
default: "false"
|
|
||||||
tags_pattern:
|
|
||||||
description: "Tags pattern to include."
|
|
||||||
required: false
|
|
||||||
default: "*"
|
|
||||||
tags_ignore_pattern:
|
|
||||||
description: "Tags pattern to ignore."
|
|
||||||
required: false
|
|
||||||
default: ""
|
|
||||||
|
|
||||||
|
|
||||||
outputs:
|
outputs:
|
||||||
added_files:
|
added_files:
|
||||||
@ -283,48 +199,44 @@ outputs:
  unknown_files_count:
    description: "Returns the number of `unknown_files`"
  all_changed_and_modified_files:
    description: "Returns all changed and modified files i.e. a combination of (ACMRDTUX)"
    description: "Returns all changed and modified files i.e. *a combination of (ACMRDTUX)*"
  all_changed_and_modified_files_count:
    description: "Returns the number of `all_changed_and_modified_files`"
  all_changed_files:
    description: "Returns all changed files i.e. a combination of all added, copied, modified and renamed files (ACMR)"
    description: "Returns all changed files i.e. *a combination of all added, copied, modified and renamed files (ACMR)*"
  all_changed_files_count:
    description: "Returns the number of `all_changed_files`"
  any_changed:
    description: "Returns `true` when any of the filenames provided using the `files*` or `files_ignore*` inputs have changed. This defaults to `true` when no patterns are specified. i.e. *includes a combination of all added, copied, modified and renamed files (ACMR)*."
    description: "Returns `true` when any of the filenames provided using the `files*` or `files_ignore*` inputs has changed. i.e. *using a combination of all added, copied, modified and renamed files (ACMR)*."
  only_changed:
    description: "Returns `true` when only files provided using the `files*` or `files_ignore*` inputs have changed. i.e. *includes a combination of all added, copied, modified and renamed files (ACMR)*."
    description: "Returns `true` when only files provided using the `files*` or `files_ignore*` inputs has changed. i.e. *using a combination of all added, copied, modified and renamed files (ACMR)*."
  other_changed_files:
    description: "Returns all other changed files not listed in the files input i.e. includes a combination of all added, copied, modified and renamed files (ACMR)."
    description: "Returns all other changed files not listed in the files input i.e. *using a combination of all added, copied, modified and renamed files (ACMR)*."
  other_changed_files_count:
    description: "Returns the number of `other_changed_files`"
  all_modified_files:
    description: "Returns all changed files i.e. a combination of all added, copied, modified, renamed and deleted files (ACMRD)."
    description: "Returns all changed files i.e. *a combination of all added, copied, modified, renamed and deleted files (ACMRD)*."
  all_modified_files_count:
    description: "Returns the number of `all_modified_files`"
  any_modified:
    description: "Returns `true` when any of the filenames provided using the `files*` or `files_ignore*` inputs have been modified. This defaults to `true` when no patterns are specified. i.e. *includes a combination of all added, copied, modified, renamed, and deleted files (ACMRD)*."
    description: "Returns `true` when any of the filenames provided using the `files*` or `files_ignore*` inputs has been modified. i.e. *using a combination of all added, copied, modified, renamed, and deleted files (ACMRD)*."
  only_modified:
    description: "Returns `true` when only files provided using the `files*` or `files_ignore*` inputs have been modified. (ACMRD)."
    description: "Returns `true` when only files provided using the `files*` or `files_ignore*` inputs has been modified. (ACMRD)."
  other_modified_files:
    description: "Returns all other modified files not listed in the files input i.e. a combination of all added, copied, modified, and deleted files (ACMRD)"
    description: "Returns all other modified files not listed in the files input i.e. *a combination of all added, copied, modified, and deleted files (ACMRD)*"
  other_modified_files_count:
    description: "Returns the number of `other_modified_files`"
  any_deleted:
    description: "Returns `true` when any of the filenames provided using the `files*` or `files_ignore*` inputs have been deleted. This defaults to `true` when no patterns are specified. (D)"
    description: "Returns `true` when any of the filenames provided using the `files*` or `files_ignore*` inputs has been deleted. (D)"
  only_deleted:
    description: "Returns `true` when only files provided using the `files*` or `files_ignore*` inputs have been deleted. (D)"
    description: "Returns `true` when only files provided using the `files*` or `files_ignore*` inputs has been deleted. (D)"
  other_deleted_files:
    description: "Returns all other deleted files not listed in the files input i.e. a combination of all deleted files (D)"
    description: "Returns all other deleted files not listed in the files input i.e. *a combination of all deleted files (D)*"
  other_deleted_files_count:
    description: "Returns the number of `other_deleted_files`"
  modified_keys:
    description: "Returns all modified YAML keys when the `files_yaml` input is used. i.e. key that contains any path that has either been added, copied, modified, and deleted (ACMRD)"
  changed_keys:
    description: "Returns all changed YAML keys when the `files_yaml` input is used. i.e. key that contains any path that has either been added, copied, modified, and renamed (ACMR)"

runs:
  using: 'node20'
  using: 'node16'
  main: 'dist/index.js'

branding:
43223 dist/index.js generated vendored
File diff suppressed because one or more lines are too long
2 dist/index.js.map generated vendored
File diff suppressed because one or more lines are too long
5408 dist/licenses.txt generated vendored
File diff suppressed because it is too large Load Diff
2 dist/sourcemap-register.js generated vendored
File diff suppressed because one or more lines are too long
@ -10,4 +10,4 @@ module.exports = {
  setupFiles: [
    "<rootDir>/jest/setupEnv.cjs"
  ]
};
}
@ -1,5 +1,6 @@
const path = require('path')

process.env.TESTING = "1"
process.env.GITHUB_WORKSPACE = path.join(
  path.resolve(__dirname, '..'), '.'
)
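The jest.config.js hunk above wires this file in through `setupFiles`, so both variables are in place before any test module loads. A minimal sketch of a test observing them, assuming the same Jest setup (the test names are illustrative, not from the diff):

// Illustrative only: asserts the variables injected by jest/setupEnv.cjs.
describe('setup environment', () => {
  it('exposes the workspace path to tests', () => {
    // GITHUB_WORKSPACE is set on both sides of the diff.
    expect(process.env.GITHUB_WORKSPACE).toBeDefined()
    // TESTING is only set on the v37.1.1 side of this hunk.
    expect(process.env.TESTING).toBe('1')
  })
})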
41 package.json
@ -1,25 +1,24 @@
{
  "name": "@tj-actions/changed-files",
  "name": "@tj-actions/glob",
  "version": "45.0.4",
  "version": "17.2.5",
  "description": "Github action to retrieve all (added, copied, modified, deleted, renamed, type changed, unmerged, unknown) files and directories.",
  "description": "Glob pattern matching github action",
  "main": "lib/main.js",
  "publishConfig": {
    "registry": "https://npm.pkg.github.com"
  },
  "scripts": {
    "build": "tsc",
    "format": "prettier --write src/*.ts src/**/*.ts",
    "format": "prettier --write **/*.ts",
    "format-check": "prettier --check src/*.ts src/**/*.ts",
    "format-check": "prettier --check **/*.ts",
    "lint": "eslint src/*.ts src/**/*.ts --max-warnings 0",
    "lint": "eslint src/**/*.ts",
    "lint:fix": "eslint --fix src/*.ts src/**/*.ts",
    "lint:fix": "eslint --fix src/**/*.ts",
    "package": "ncc build lib/main.js --source-map --license licenses.txt",
    "test": "jest --coverage",
    "update-snapshot": "jest -u",
    "all": "yarn build && yarn format && yarn lint && yarn package && yarn test"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/tj-actions/changed-files.git"
    "url": "git+https://github.com/tj-actions/glob.git"
  },
  "keywords": [
    "actions",
@ -29,15 +28,14 @@
  "author": "Tonye Jack",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/tj-actions/changed-files/issues"
    "url": "https://github.com/tj-actions/glob/issues"
  },
  "homepage": "https://github.com/tj-actions/changed-files#readme",
  "homepage": "https://github.com/tj-actions/glob#readme",
  "dependencies": {
    "@actions/core": "^1.10.0",
    "@actions/exec": "^1.1.1",
    "@actions/github": "^6.0.0",
    "@actions/github": "^5.1.1",
    "@octokit/rest": "^21.0.0",
    "@octokit/rest": "^19.0.13",
    "@stdlib/utils-convert-path": "^0.2.1",
    "lodash": "^4.17.21",
    "micromatch": "^4.0.5",
    "yaml": "^2.3.1"
@ -46,15 +44,14 @@
    "@types/jest": "^29.5.2",
    "@types/lodash": "^4.14.195",
    "@types/micromatch": "^4.0.2",
    "@types/node": "^22.0.0",
    "@types/node": "^20.3.2",
    "@types/uuid": "^10.0.0",
    "@types/uuid": "^9.0.2",
    "@typescript-eslint/eslint-plugin": "^7.0.0",
    "@typescript-eslint/eslint-plugin": "^6.0.0",
    "@typescript-eslint/parser": "^7.0.0",
    "@typescript-eslint/parser": "^6.0.0",
    "@vercel/ncc": "^0.38.0",
    "@vercel/ncc": "^0.36.1",
    "eslint": "^8.43.0",
    "eslint-config-prettier": "^9.0.0",
    "eslint-plugin-github": "^4.8.0",
    "eslint-plugin-github": "^5.0.0",
    "eslint-plugin-jest": "^27.2.2",
    "eslint-plugin-jest": "^28.0.0",
    "eslint-plugin-prettier": "^5.0.0-alpha.2",
    "jest": "^29.5.0",
    "prettier": "^3.0.0",
@ -1,373 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`getInputs should correctly parse boolean inputs 1`] = `
{
  "apiUrl": "",
  "baseSha": "",
  "diffRelative": "false",
  "dirNames": "false",
  "dirNamesDeletedFilesIncludeOnlyDeletedDirs": "false",
  "dirNamesExcludeCurrentDir": "false",
  "dirNamesIncludeFiles": "",
  "dirNamesIncludeFilesSeparator": "",
  "escapeJson": false,
  "excludeSubmodules": "false",
  "failOnInitialDiffError": "false",
  "failOnSubmoduleDiffError": "false",
  "fetchAdditionalSubmoduleHistory": "false",
  "fetchMissingHistoryMaxRetries": 20,
  "files": "",
  "filesFromSourceFile": "",
  "filesFromSourceFileSeparator": "",
  "filesIgnore": "",
  "filesIgnoreFromSourceFile": "",
  "filesIgnoreFromSourceFileSeparator": "",
  "filesIgnoreSeparator": "",
  "filesIgnoreYaml": "",
  "filesIgnoreYamlFromSourceFile": "",
  "filesIgnoreYamlFromSourceFileSeparator": "",
  "filesSeparator": "",
  "filesYaml": "",
  "filesYamlFromSourceFile": "",
  "filesYamlFromSourceFileSeparator": "",
  "includeAllOldNewRenamedFiles": "false",
  "json": true,
  "negationPatternsFirst": "false",
  "oldNewFilesSeparator": " ",
  "oldNewSeparator": ",",
  "outputDir": "",
  "outputRenamedFilesAsDeletedAndAdded": "false",
  "path": ".",
  "quotepath": "false",
  "recoverDeletedFiles": "false",
  "recoverDeletedFilesToDestination": "",
  "recoverFiles": "",
  "recoverFilesIgnore": "",
  "recoverFilesIgnoreSeparator": "
",
  "recoverFilesSeparator": "
",
  "safeOutput": "false",
  "separator": "",
  "sha": "",
  "since": "",
  "sinceLastRemoteCommit": "false",
  "skipInitialFetch": "true",
  "tagsIgnorePattern": "",
  "tagsPattern": "*",
  "token": "",
  "until": "",
  "usePosixPathSeparator": "false",
  "useRestApi": "false",
  "writeOutputFiles": "false",
}
`;

exports[`getInputs should correctly parse numeric inputs 1`] = `
{
  "apiUrl": "",
  "baseSha": "",
  "diffRelative": true,
  "dirNames": false,
  "dirNamesDeletedFilesIncludeOnlyDeletedDirs": false,
  "dirNamesExcludeCurrentDir": false,
  "dirNamesIncludeFiles": "",
  "dirNamesIncludeFilesSeparator": "",
  "dirNamesMaxDepth": 2,
  "escapeJson": false,
  "excludeSubmodules": false,
  "failOnInitialDiffError": false,
  "failOnSubmoduleDiffError": false,
  "fetchAdditionalSubmoduleHistory": false,
  "fetchDepth": 5,
  "files": "",
  "filesFromSourceFile": "",
  "filesFromSourceFileSeparator": "",
  "filesIgnore": "",
  "filesIgnoreFromSourceFile": "",
  "filesIgnoreFromSourceFileSeparator": "",
  "filesIgnoreSeparator": "",
  "filesIgnoreYaml": "",
  "filesIgnoreYamlFromSourceFile": "",
  "filesIgnoreYamlFromSourceFileSeparator": "",
  "filesSeparator": "",
  "filesYaml": "",
  "filesYamlFromSourceFile": "",
  "filesYamlFromSourceFileSeparator": "",
  "includeAllOldNewRenamedFiles": false,
  "json": false,
  "negationPatternsFirst": false,
  "oldNewFilesSeparator": "",
  "oldNewSeparator": "",
  "outputDir": "",
  "outputRenamedFilesAsDeletedAndAdded": false,
  "path": "",
  "quotepath": true,
  "recoverDeletedFiles": false,
  "recoverDeletedFilesToDestination": "",
  "recoverFiles": "",
  "recoverFilesIgnore": "",
  "recoverFilesIgnoreSeparator": "",
  "recoverFilesSeparator": "",
  "safeOutput": false,
  "separator": "",
  "sha": "",
  "since": "",
  "sinceLastRemoteCommit": false,
  "skipInitialFetch": false,
  "tagsIgnorePattern": "",
  "tagsPattern": "",
  "token": "",
  "until": "",
  "usePosixPathSeparator": false,
  "useRestApi": false,
  "writeOutputFiles": false,
}
`;

exports[`getInputs should correctly parse string inputs 1`] = `
{
  "apiUrl": "https://api.github.com",
  "baseSha": "",
  "diffRelative": true,
  "dirNames": false,
  "dirNamesDeletedFilesIncludeOnlyDeletedDirs": false,
  "dirNamesExcludeCurrentDir": false,
  "dirNamesIncludeFiles": "",
  "dirNamesIncludeFilesSeparator": "",
  "escapeJson": false,
  "excludeSubmodules": false,
  "failOnInitialDiffError": false,
  "failOnSubmoduleDiffError": false,
  "fetchAdditionalSubmoduleHistory": false,
  "files": "",
  "filesFromSourceFile": "",
  "filesFromSourceFileSeparator": "",
  "filesIgnore": "",
  "filesIgnoreFromSourceFile": "",
  "filesIgnoreFromSourceFileSeparator": "",
  "filesIgnoreSeparator": "",
  "filesIgnoreYaml": "",
  "filesIgnoreYamlFromSourceFile": "",
  "filesIgnoreYamlFromSourceFileSeparator": "",
  "filesSeparator": "",
  "filesYaml": "",
  "filesYamlFromSourceFile": "",
  "filesYamlFromSourceFileSeparator": "",
  "includeAllOldNewRenamedFiles": false,
  "json": false,
  "negationPatternsFirst": false,
  "oldNewFilesSeparator": "",
  "oldNewSeparator": "",
  "outputDir": "",
  "outputRenamedFilesAsDeletedAndAdded": false,
  "path": "",
  "quotepath": true,
  "recoverDeletedFiles": false,
  "recoverDeletedFilesToDestination": "",
  "recoverFiles": "",
  "recoverFilesIgnore": "",
  "recoverFilesIgnoreSeparator": "",
  "recoverFilesSeparator": "",
  "safeOutput": false,
  "separator": "",
  "sha": "",
  "since": "",
  "sinceLastRemoteCommit": false,
  "skipInitialFetch": false,
  "tagsIgnorePattern": "",
  "tagsPattern": "",
  "token": "token",
  "until": "",
  "usePosixPathSeparator": false,
  "useRestApi": false,
  "writeOutputFiles": false,
}
`;

exports[`getInputs should handle invalid numeric inputs correctly 1`] = `
{
  "apiUrl": "",
  "baseSha": "",
  "diffRelative": true,
  "dirNames": false,
  "dirNamesDeletedFilesIncludeOnlyDeletedDirs": false,
  "dirNamesExcludeCurrentDir": false,
  "dirNamesIncludeFiles": "",
  "dirNamesIncludeFilesSeparator": "",
  "dirNamesMaxDepth": 2,
  "escapeJson": false,
  "excludeSubmodules": false,
  "failOnInitialDiffError": false,
  "failOnSubmoduleDiffError": false,
  "fetchAdditionalSubmoduleHistory": false,
  "fetchDepth": NaN,
  "files": "",
  "filesFromSourceFile": "",
  "filesFromSourceFileSeparator": "",
  "filesIgnore": "",
  "filesIgnoreFromSourceFile": "",
  "filesIgnoreFromSourceFileSeparator": "",
  "filesIgnoreSeparator": "",
  "filesIgnoreYaml": "",
  "filesIgnoreYamlFromSourceFile": "",
  "filesIgnoreYamlFromSourceFileSeparator": "",
  "filesSeparator": "",
  "filesYaml": "",
  "filesYamlFromSourceFile": "",
  "filesYamlFromSourceFileSeparator": "",
  "includeAllOldNewRenamedFiles": false,
  "json": false,
  "negationPatternsFirst": false,
  "oldNewFilesSeparator": "",
  "oldNewSeparator": "",
  "outputDir": "",
  "outputRenamedFilesAsDeletedAndAdded": false,
  "path": "",
  "quotepath": true,
  "recoverDeletedFiles": false,
  "recoverDeletedFilesToDestination": "",
  "recoverFiles": "",
  "recoverFilesIgnore": "",
  "recoverFilesIgnoreSeparator": "",
  "recoverFilesSeparator": "",
  "safeOutput": false,
  "separator": "",
  "sha": "",
  "since": "",
  "sinceLastRemoteCommit": false,
  "skipInitialFetch": false,
  "tagsIgnorePattern": "",
  "tagsPattern": "",
  "token": "",
  "until": "",
  "usePosixPathSeparator": false,
  "useRestApi": false,
  "writeOutputFiles": false,
}
`;

exports[`getInputs should handle negative numeric inputs correctly 1`] = `
{
  "apiUrl": "",
  "baseSha": "",
  "diffRelative": true,
  "dirNames": false,
  "dirNamesDeletedFilesIncludeOnlyDeletedDirs": false,
  "dirNamesExcludeCurrentDir": false,
  "dirNamesIncludeFiles": "",
  "dirNamesIncludeFilesSeparator": "",
  "dirNamesMaxDepth": -2,
  "escapeJson": false,
  "excludeSubmodules": false,
  "failOnInitialDiffError": false,
  "failOnSubmoduleDiffError": false,
  "fetchAdditionalSubmoduleHistory": false,
  "fetchDepth": 2,
  "files": "",
  "filesFromSourceFile": "",
  "filesFromSourceFileSeparator": "",
  "filesIgnore": "",
  "filesIgnoreFromSourceFile": "",
  "filesIgnoreFromSourceFileSeparator": "",
  "filesIgnoreSeparator": "",
  "filesIgnoreYaml": "",
  "filesIgnoreYamlFromSourceFile": "",
  "filesIgnoreYamlFromSourceFileSeparator": "",
  "filesSeparator": "",
  "filesYaml": "",
  "filesYamlFromSourceFile": "",
  "filesYamlFromSourceFileSeparator": "",
  "includeAllOldNewRenamedFiles": false,
  "json": false,
  "negationPatternsFirst": false,
  "oldNewFilesSeparator": "",
  "oldNewSeparator": "",
  "outputDir": "",
  "outputRenamedFilesAsDeletedAndAdded": false,
  "path": "",
  "quotepath": true,
  "recoverDeletedFiles": false,
  "recoverDeletedFilesToDestination": "",
  "recoverFiles": "",
  "recoverFilesIgnore": "",
  "recoverFilesIgnoreSeparator": "",
  "recoverFilesSeparator": "",
  "safeOutput": false,
  "separator": "",
  "sha": "",
  "since": "",
  "sinceLastRemoteCommit": false,
  "skipInitialFetch": false,
  "tagsIgnorePattern": "",
  "tagsPattern": "",
  "token": "",
  "until": "",
  "usePosixPathSeparator": false,
  "useRestApi": false,
  "writeOutputFiles": false,
}
`;

exports[`getInputs should return default values when no inputs are provided 1`] = `
{
  "apiUrl": "",
  "baseSha": "",
  "diffRelative": true,
  "dirNames": false,
  "dirNamesDeletedFilesIncludeOnlyDeletedDirs": false,
  "dirNamesExcludeCurrentDir": false,
  "dirNamesIncludeFiles": "",
  "dirNamesIncludeFilesSeparator": "",
  "escapeJson": false,
  "excludeSubmodules": false,
  "failOnInitialDiffError": false,
  "failOnSubmoduleDiffError": false,
  "fetchAdditionalSubmoduleHistory": false,
  "fetchMissingHistoryMaxRetries": 20,
  "files": "",
  "filesFromSourceFile": "",
  "filesFromSourceFileSeparator": "",
  "filesIgnore": "",
  "filesIgnoreFromSourceFile": "",
  "filesIgnoreFromSourceFileSeparator": "",
  "filesIgnoreSeparator": "",
  "filesIgnoreYaml": "",
  "filesIgnoreYamlFromSourceFile": "",
  "filesIgnoreYamlFromSourceFileSeparator": "",
  "filesSeparator": "",
  "filesYaml": "",
  "filesYamlFromSourceFile": "",
  "filesYamlFromSourceFileSeparator": "",
  "includeAllOldNewRenamedFiles": false,
  "json": false,
  "negationPatternsFirst": false,
  "oldNewFilesSeparator": " ",
  "oldNewSeparator": ",",
  "outputDir": "",
  "outputRenamedFilesAsDeletedAndAdded": false,
  "path": ".",
  "quotepath": true,
  "recoverDeletedFiles": false,
  "recoverDeletedFilesToDestination": "",
  "recoverFiles": "",
  "recoverFilesIgnore": "",
  "recoverFilesIgnoreSeparator": "
",
  "recoverFilesSeparator": "
",
  "safeOutput": false,
  "separator": "",
  "sha": "",
  "since": "",
  "sinceLastRemoteCommit": false,
  "skipInitialFetch": false,
  "tagsIgnorePattern": "",
  "tagsPattern": "*",
  "token": "",
  "until": "",
  "usePosixPathSeparator": false,
  "useRestApi": false,
  "writeOutputFiles": false,
}
`;
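The deleted blocks above are standard Jest snapshots: the first `toMatchSnapshot()` run serializes the received object into a `__snapshots__` file, and later runs diff against the stored block. A minimal sketch of that round trip, with an illustrative object standing in for the real `getInputs()` result:

// Illustrative snapshot round trip; the object stands in for getInputs().
describe('snapshot example', () => {
  it('serializes parsed inputs', () => {
    const inputs = {json: true, escapeJson: false, separator: ''}
    // The first run writes a block like those above; later runs compare to it.
    expect(inputs).toMatchSnapshot()
  })
})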
@ -1,153 +0,0 @@
import * as core from '@actions/core'
import {getInputs, Inputs} from '../inputs'
import {DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS} from '../constant'

jest.mock('@actions/core')

describe('getInputs', () => {
  afterEach(() => {
    jest.clearAllMocks()
  })

  test('should return default values when no inputs are provided', () => {
    ;(core.getInput as jest.Mock).mockImplementation(name => {
      const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => {
        return g[1].toUpperCase()
      }) as keyof Inputs

      return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] ||
        '') as string
    })
    ;(core.getBooleanInput as jest.Mock).mockImplementation(name => {
      const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => {
        return g[1].toUpperCase()
      }) as keyof Inputs

      return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] ||
        false) as boolean
    })
    expect(getInputs()).toMatchSnapshot()
  })

  test('should correctly parse boolean inputs', () => {
    ;(core.getInput as jest.Mock).mockImplementation(name => {
      const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => {
        return g[1].toUpperCase()
      }) as keyof Inputs

      return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] ||
        '') as string
    })
    ;(core.getBooleanInput as jest.Mock).mockImplementation(name => {
      switch (name) {
        case 'matrix':
          return 'true'
        case 'skip_initial_fetch':
          return 'true'
        default:
          return 'false'
      }
    })
    expect(getInputs()).toMatchSnapshot()
  })

  test('should handle matrix alias correctly', () => {
    ;(core.getBooleanInput as jest.Mock).mockImplementation(name => {
      return name === 'matrix' ? 'true' : 'false'
    })

    const inputs = getInputs()
    expect(inputs).toHaveProperty('json', true)
    expect(inputs).toHaveProperty('escapeJson', false)
  })

  test('should correctly parse string inputs', () => {
    ;(core.getInput as jest.Mock).mockImplementation(name => {
      switch (name) {
        case 'token':
          return 'token'
        case 'api_url':
          return 'https://api.github.com'
        default:
          return ''
      }
    })
    ;(core.getBooleanInput as jest.Mock).mockImplementation(name => {
      const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => {
        return g[1].toUpperCase()
      }) as keyof Inputs

      return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] ||
        false) as boolean
    })
    expect(getInputs()).toMatchSnapshot()
  })

  test('should correctly parse numeric inputs', () => {
    ;(core.getInput as jest.Mock).mockImplementation(name => {
      switch (name) {
        case 'fetch_depth':
          return '5'
        case 'dir_names_max_depth':
          return '2'
        default:
          return ''
      }
    })
    ;(core.getBooleanInput as jest.Mock).mockImplementation(name => {
      const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => {
        return g[1].toUpperCase()
      }) as keyof Inputs

      return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] ||
        false) as boolean
    })
    expect(getInputs()).toMatchSnapshot()
  })

  test('should handle invalid numeric inputs correctly', () => {
    ;(core.getInput as jest.Mock).mockImplementation(name => {
      // TODO: Add validation for invalid numbers which should result in an error instead of NaN
      switch (name) {
        case 'fetch_depth':
          return 'invalid'
        case 'dir_names_max_depth':
          return '2'
        default:
          return ''
      }
    })
    ;(core.getBooleanInput as jest.Mock).mockImplementation(name => {
      const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => {
        return g[1].toUpperCase()
      }) as keyof Inputs

      return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] ||
        false) as boolean
    })
    expect(getInputs()).toMatchSnapshot()
  })

  test('should handle negative numeric inputs correctly', () => {
    ;(core.getInput as jest.Mock).mockImplementation(name => {
      // TODO: Add validation for negative numbers which should result in an error
      switch (name) {
        case 'fetch_depth':
          return '-5'
        case 'dir_names_max_depth':
          return '-2'
        default:
          return ''
      }
    })
    ;(core.getBooleanInput as jest.Mock).mockImplementation(name => {
      const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => {
        return g[1].toUpperCase()
      }) as keyof Inputs

      return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] ||
        false) as boolean
    })
    expect(getInputs()).toMatchSnapshot()
  })
})
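The mocks in the deleted test file above repeatedly convert snake_case action input names (as declared in action.yml) into the camelCase keys of the `Inputs` type. Pulled out as a standalone sketch for clarity (the helper name `toCamelCase` is hypothetical; the regex is the one the tests use):

// Hypothetical helper showing the snake_case -> camelCase mapping used by
// the mocked core.getInput/core.getBooleanInput implementations above.
function toCamelCase(name: string): string {
  // Each "_x" becomes "X", e.g. "fetch_depth" -> "fetchDepth".
  return name.replace(/_([a-z])/g, (_match, letter: string) =>
    letter.toUpperCase()
  )
}

console.log(toCamelCase('dir_names_max_depth')) // "dirNamesMaxDepth"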
5 src/__tests__/main.test.ts Normal file
@ -0,0 +1,5 @@
describe('main test', () => {
  it('adds two numbers', async () => {
    expect(1 + 1).toEqual(2)
  })
})
@ -1,761 +0,0 @@
import * as core from '@actions/core'
import * as exec from '@actions/exec'
import {ChangeTypeEnum} from '../changedFiles'
import {Inputs} from '../inputs'
import {
  getDirname,
  getDirnameMaxDepth,
  getFilteredChangedFiles,
  getPreviousGitTag,
  normalizeSeparators,
  warnUnsupportedRESTAPIInputs
} from '../utils'

const originalPlatform = process.platform

function mockedPlatform(platform: string): void {
  Object.defineProperty(process, 'platform', {
    value: platform
  })
}

describe('utils test', () => {
  afterEach(() => {
    Object.defineProperty(process, 'platform', {
      value: originalPlatform
    })
  })

  describe('getDirnameMaxDepth_function', () => {
    // Tests that the function returns the correct dirname when the relative path has multiple directories
    it('test_multiple_directories', () => {
      const result = getDirnameMaxDepth({
        relativePath: 'path/to/some/file',
        dirNamesMaxDepth: 2,
        excludeCurrentDir: false
      })
      expect(result).toEqual('path/to')
    })

    // Tests that the function returns the correct dirname when the relative path has only one directory
    it('test_single_directory', () => {
      const result = getDirnameMaxDepth({
        relativePath: 'path/to',
        dirNamesMaxDepth: 1,
        excludeCurrentDir: false
      })
      expect(result).toEqual('path')
    })

    // Tests that the function returns the correct dirname when the relative path has no directories
    it('test_no_directories', () => {
      const result = getDirnameMaxDepth({
        relativePath: 'file.txt',
        dirNamesMaxDepth: 1,
        excludeCurrentDir: false
      })
      expect(result).toEqual('.')
    })

    // Tests that the function returns the correct dirname when dirNamesMaxDepth is set to a value less than the number of directories in the relative path
    it('test_dirnames_max_depth_less_than_num_directories', () => {
      const result = getDirnameMaxDepth({
        relativePath: 'path/to/some/file',
        dirNamesMaxDepth: 1,
        excludeCurrentDir: false
      })
      expect(result).toEqual('path')
    })

    // Tests that the function returns an empty string when excludeCurrentDir is true and the output is '.'
    it('test_exclude_current_dir_is_true_and_output_is_dot', () => {
      const result = getDirnameMaxDepth({
        relativePath: '.',
        dirNamesMaxDepth: 1,
        excludeCurrentDir: true
      })
      expect(result).toEqual('')
    })

    // Tests that the function returns the correct dirname when the relative path is a Windows drive root and excludeCurrentDir is true
    it('test_windows_drive_root_and_exclude_current_dir_is_true', () => {
      mockedPlatform('win32')
      const result = getDirnameMaxDepth({
        relativePath: 'C:\\',
        dirNamesMaxDepth: 1,
        excludeCurrentDir: true
      })
      expect(result).toEqual('')
    })

    // Tests that getDirnameMaxDepth handles a relative path with a trailing separator correctly
    it('test_trailing_separator', () => {
      const input = {
        relativePath: 'path/to/dir/',
        dirNamesMaxDepth: 2,
        excludeCurrentDir: true
      }
      const expectedOutput = 'path/to'
      const actualOutput = getDirnameMaxDepth(input)
      expect(actualOutput).toEqual(expectedOutput)
    })

    // Tests that getDirnameMaxDepth returns an empty string when excludeCurrentDir is true and the output is '.'
    it('test_trailing_separator_exclude_current_dir', () => {
      const input = {
        relativePath: 'file',
        excludeCurrentDir: true
      }
      const expectedOutput = ''
      const actualOutput = getDirnameMaxDepth(input)
      expect(actualOutput).toEqual(expectedOutput)
    })

    // Tests that getDirnameMaxDepth returns the correct output for a Windows UNC root path
    it('test_windows_unc_root', () => {
      mockedPlatform('win32')
      const input = {
        relativePath: '\\hello',
        dirNamesMaxDepth: 2,
        excludeCurrentDir: true
      }
      const expectedOutput = ''
      expect(getDirnameMaxDepth(input)).toEqual(expectedOutput)
    })

    // Tests that getDirnameMaxDepth returns an empty string when given a Windows UNC root and excludeCurrentDir is true
    it('test_windows_unc_root_exclude_current_dir', () => {
      mockedPlatform('win32')
      const relativePath = '\\hello'
      const result = getDirnameMaxDepth({
        relativePath,
        excludeCurrentDir: true
      })
      expect(result).toEqual('')
    })

    // Tests that getDirnameMaxDepth returns the correct dirname with a relative path that contains both forward and backward slashes
    it('test_relative_path_with_slashes', () => {
      const relativePath = 'path/to\file'
      const expectedOutput = 'path'
      const actualOutput = getDirnameMaxDepth({relativePath})
      expect(actualOutput).toEqual(expectedOutput)
    })

    // Tests that getDirnameMaxDepth returns the correct dirname for a relative path that contains special characters
    it('test_special_characters', () => {
      const relativePath =
        'path/with/special/characters/!@#$%^&*()_+{}|:<>?[];,./'
      const expectedDirname = 'path/with/special/characters'
      const actualDirname = getDirnameMaxDepth({relativePath})
      expect(actualDirname).toEqual(expectedDirname)
    })
  })

  describe('getDirname_function', () => {
    // Tests that the function returns the correct dirname for a valid path
    it('test valid path', () => {
      expect(getDirname('/path/to/file')).toEqual('/path/to')
    })

    // Tests that the function returns the correct dirname for a valid Windows UNC root path
    it('test windows unc root path', () => {
      mockedPlatform('win32')
      expect(getDirname('\\helloworld')).toEqual('.')
    })

    // Tests that the function returns the correct dirname for a path with a trailing slash
    it('test path with trailing slash', () => {
      expect(getDirname('/path/to/file/')).toEqual('/path/to')
    })

    // Tests that the function returns the correct dirname for a Windows UNC root path with a trailing slash
    it('test windows unc root path with trailing slash', () => {
      mockedPlatform('win32')
      expect(getDirname('\\hello\\world\\')).toEqual('.')
    })

    // Tests that the function returns the correct dirname for a path with multiple slashes
    it('test path with multiple slashes', () => {
      expect(getDirname('/path//to/file')).toEqual('/path/to')
    })

    // Tests that the function returns the correct dirname for a Windows UNC root path with multiple slashes
    it('test windows unc root path with multiple slashes', () => {
      mockedPlatform('win32')
      expect(getDirname('\\hello\\world')).toEqual('.')
    })
  })

  describe('normalizeSeparators_function', () => {
    // Tests that forward slashes are normalized on Linux
    it('test forward slashes linux', () => {
      const input = 'path/to/file'
      const expectedOutput = 'path/to/file'
      const actualOutput = normalizeSeparators(input)
      expect(actualOutput).toEqual(expectedOutput)
    })

    // Tests that backslashes are normalized on Windows
    it('test backslashes windows', () => {
      mockedPlatform('win32')
      const input = 'path\\to\\file'
      const expectedOutput = 'path\\to\\file'
      const actualOutput = normalizeSeparators(input)
      expect(actualOutput).toEqual(expectedOutput)
    })

    // Tests that forward slashes are normalized on Windows
    it('test mixed slashes windows', () => {
      mockedPlatform('win32')
      const input = 'path/to/file'
      const expectedOutput = 'path\\to\\file'
      const actualOutput = normalizeSeparators(input)
      expect(actualOutput).toEqual(expectedOutput)
    })

    // Tests that mixed slashes are normalized on Windows
    it('test mixed slashes windows', () => {
      mockedPlatform('win32')
      const input = 'path\\to/file'
      const expectedOutput = 'path\\to\\file'
      const actualOutput = normalizeSeparators(input)
      expect(actualOutput).toEqual(expectedOutput)
    })

    // Tests that an empty string returns an empty string
    it('test empty string', () => {
      const input = ''
      const expectedOutput = ''
      const actualOutput = normalizeSeparators(input)
      expect(actualOutput).toEqual(expectedOutput)
    })

    // Tests that multiple consecutive slashes are removed
    it('test multiple consecutive slashes', () => {
      const input = 'path//to//file'
      const expectedOutput = 'path/to/file'
      const actualOutput = normalizeSeparators(input)
      expect(actualOutput).toEqual(expectedOutput)
    })

    // Tests that UNC format is preserved on Windows
    it('test unc format windows', () => {
      mockedPlatform('win32')
      const input = '\\\\hello\\world'
      const expectedOutput = '\\\\hello\\world'
      const actualOutput = normalizeSeparators(input)
      expect(actualOutput).toEqual(expectedOutput)
    })

    // Tests that a drive root is preserved on Windows
    it('test drive root windows', () => {
      mockedPlatform('win32')
      const input = 'C:\\'
      const expectedOutput = 'C:\\'
      const actualOutput = normalizeSeparators(input)
      expect(actualOutput).toEqual(expectedOutput)
    })
  })

  describe('getFilteredChangedFiles', () => {
    // Tests that the function returns an empty object when allDiffFiles and filePatterns are empty
    it('should return an empty object when allDiffFiles and filePatterns are empty', async () => {
      const result = await getFilteredChangedFiles({
        allDiffFiles: {
          [ChangeTypeEnum.Added]: [],
          [ChangeTypeEnum.Copied]: [],
          [ChangeTypeEnum.Deleted]: [],
          [ChangeTypeEnum.Modified]: [],
          [ChangeTypeEnum.Renamed]: [],
          [ChangeTypeEnum.TypeChanged]: [],
          [ChangeTypeEnum.Unmerged]: [],
          [ChangeTypeEnum.Unknown]: []
        },
        filePatterns: []
      })
      expect(result).toEqual({
        [ChangeTypeEnum.Added]: [],
        [ChangeTypeEnum.Copied]: [],
        [ChangeTypeEnum.Deleted]: [],
        [ChangeTypeEnum.Modified]: [],
        [ChangeTypeEnum.Renamed]: [],
        [ChangeTypeEnum.TypeChanged]: [],
        [ChangeTypeEnum.Unmerged]: [],
        [ChangeTypeEnum.Unknown]: []
      })
    })

    // Tests that the function returns allDiffFiles when filePatterns is empty
    it('should return allDiffFiles when filePatterns is empty', async () => {
      const allDiffFiles = {
        [ChangeTypeEnum.Added]: ['file1.txt'],
        [ChangeTypeEnum.Copied]: [],
        [ChangeTypeEnum.Deleted]: [],
        [ChangeTypeEnum.Modified]: [],
        [ChangeTypeEnum.Renamed]: [],
        [ChangeTypeEnum.TypeChanged]: [],
        [ChangeTypeEnum.Unmerged]: [],
        [ChangeTypeEnum.Unknown]: []
      }
      const result = await getFilteredChangedFiles({
        allDiffFiles,
        filePatterns: []
      })
      expect(result).toEqual(allDiffFiles)
    })

    // Tests that the function returns an empty object when allDiffFiles is empty
    it('should return an empty object when allDiffFiles is empty', async () => {
      const result = await getFilteredChangedFiles({
        allDiffFiles: {
          [ChangeTypeEnum.Added]: [],
          [ChangeTypeEnum.Copied]: [],
          [ChangeTypeEnum.Deleted]: [],
          [ChangeTypeEnum.Modified]: [],
          [ChangeTypeEnum.Renamed]: [],
          [ChangeTypeEnum.TypeChanged]: [],
          [ChangeTypeEnum.Unmerged]: [],
          [ChangeTypeEnum.Unknown]: []
        },
        filePatterns: ['*.txt']
      })
      expect(result).toEqual({
        [ChangeTypeEnum.Added]: [],
        [ChangeTypeEnum.Copied]: [],
        [ChangeTypeEnum.Deleted]: [],
        [ChangeTypeEnum.Modified]: [],
        [ChangeTypeEnum.Renamed]: [],
        [ChangeTypeEnum.TypeChanged]: [],
        [ChangeTypeEnum.Unmerged]: [],
        [ChangeTypeEnum.Unknown]: []
      })
    })

    // Tests that the function returns only the files that match the file patterns on non windows platforms
    it('should return only the files that match the file patterns', async () => {
      const allDiffFiles = {
        [ChangeTypeEnum.Added]: [
          'file1.txt',
          'file2.md',
          'file3.txt',
          'test/dir/file4.txt',
          'test/dir/file5.txt',
          'dir/file6.md'
        ],
        [ChangeTypeEnum.Copied]: [],
        [ChangeTypeEnum.Deleted]: [],
        [ChangeTypeEnum.Modified]: [],
        [ChangeTypeEnum.Renamed]: [],
        [ChangeTypeEnum.TypeChanged]: [],
        [ChangeTypeEnum.Unmerged]: [],
        [ChangeTypeEnum.Unknown]: []
      }
      const result = await getFilteredChangedFiles({
        allDiffFiles,
        filePatterns: ['*.txt']
      })
      expect(result).toEqual({
        [ChangeTypeEnum.Added]: ['file1.txt', 'file3.txt'],
        [ChangeTypeEnum.Copied]: [],
        [ChangeTypeEnum.Deleted]: [],
        [ChangeTypeEnum.Modified]: [],
        [ChangeTypeEnum.Renamed]: [],
        [ChangeTypeEnum.TypeChanged]: [],
        [ChangeTypeEnum.Unmerged]: [],
        [ChangeTypeEnum.Unknown]: []
      })
    })

    // Tests that the function returns only the files that match the file patterns on windows
    it('should return only the files that match the file patterns on windows', async () => {
      mockedPlatform('win32')
      const allDiffFiles = {
        [ChangeTypeEnum.Added]: [
          'file1.txt',
          'file2.md',
          'file3.txt',
          'test\\dir\\file4.txt',
          'test\\dir\\file5.txt',
          'dir\\file6.md'
        ],
        [ChangeTypeEnum.Copied]: [],
        [ChangeTypeEnum.Deleted]: [],
        [ChangeTypeEnum.Modified]: [],
        [ChangeTypeEnum.Renamed]: [],
        [ChangeTypeEnum.TypeChanged]: [],
        [ChangeTypeEnum.Unmerged]: [],
        [ChangeTypeEnum.Unknown]: []
      }
      const result = await getFilteredChangedFiles({
        allDiffFiles,
        filePatterns: ['*.txt']
      })

      expect(result).toEqual({
        [ChangeTypeEnum.Added]: ['file1.txt', 'file3.txt'],
        [ChangeTypeEnum.Copied]: [],
        [ChangeTypeEnum.Deleted]: [],
        [ChangeTypeEnum.Modified]: [],
        [ChangeTypeEnum.Renamed]: [],
        [ChangeTypeEnum.TypeChanged]: [],
        [ChangeTypeEnum.Unmerged]: [],
        [ChangeTypeEnum.Unknown]: []
      })
    })

    // Tests that the function returns only the files that match the file patterns with globstar on non windows platforms
    it('should return only the files that match the file patterns with globstar', async () => {
      const allDiffFiles = {
        [ChangeTypeEnum.Added]: [
          'file1.txt',
          'file2.md',
          'file3.txt',
          'test/dir/file4.txt',
          'test/dir/file5.txt',
          'dir/file6.md'
        ],
        [ChangeTypeEnum.Copied]: [],
        [ChangeTypeEnum.Deleted]: [],
        [ChangeTypeEnum.Modified]: [],
        [ChangeTypeEnum.Renamed]: [],
        [ChangeTypeEnum.TypeChanged]: [],
        [ChangeTypeEnum.Unmerged]: [],
        [ChangeTypeEnum.Unknown]: []
      }
      const result = await getFilteredChangedFiles({
        allDiffFiles,
        filePatterns: ['**.txt']
      })
      expect(result).toEqual({
        [ChangeTypeEnum.Added]: [
          'file1.txt',
          'file3.txt',
          'test/dir/file4.txt',
          'test/dir/file5.txt'
        ],
        [ChangeTypeEnum.Copied]: [],
        [ChangeTypeEnum.Deleted]: [],
        [ChangeTypeEnum.Modified]: [],
        [ChangeTypeEnum.Renamed]: [],
        [ChangeTypeEnum.TypeChanged]: [],
        [ChangeTypeEnum.Unmerged]: [],
        [ChangeTypeEnum.Unknown]: []
      })
    })

    // Tests that the function returns only the files that match the file patterns with globstar on windows
    it('should return only the files that match the file patterns with globstar on windows', async () => {
      mockedPlatform('win32')
      const allDiffFiles = {
        [ChangeTypeEnum.Added]: ['test\\test rename-1.txt'],
        [ChangeTypeEnum.Copied]: [],
        [ChangeTypeEnum.Deleted]: [],
        [ChangeTypeEnum.Modified]: [],
        [ChangeTypeEnum.Renamed]: [],
        [ChangeTypeEnum.TypeChanged]: [],
        [ChangeTypeEnum.Unmerged]: [],
        [ChangeTypeEnum.Unknown]: []
      }
      const result = await getFilteredChangedFiles({
        allDiffFiles,
        filePatterns: ['test/**']
      })
      expect(result).toEqual({
        [ChangeTypeEnum.Added]: ['test\\test rename-1.txt'],
        [ChangeTypeEnum.Copied]: [],
        [ChangeTypeEnum.Deleted]: [],
        [ChangeTypeEnum.Modified]: [],
        [ChangeTypeEnum.Renamed]: [],
        [ChangeTypeEnum.TypeChanged]: [],
        [ChangeTypeEnum.Unmerged]: [],
        [ChangeTypeEnum.Unknown]: []
      })
    })

    // Tests that the function returns an empty object when there are no files that match the file patterns
    it('should return an empty object when there are no files that match the file patterns', async () => {
      const allDiffFiles = {
        [ChangeTypeEnum.Added]: ['file1.md', 'file2.md', 'file3.md'],
        [ChangeTypeEnum.Copied]: [],
        [ChangeTypeEnum.Deleted]: [],
        [ChangeTypeEnum.Modified]: [],
        [ChangeTypeEnum.Renamed]: [],
        [ChangeTypeEnum.TypeChanged]: [],
        [ChangeTypeEnum.Unmerged]: [],
        [ChangeTypeEnum.Unknown]: []
      }
      const result = await getFilteredChangedFiles({
        allDiffFiles,
        filePatterns: ['*.txt']
      })
      expect(result).toEqual({
        [ChangeTypeEnum.Added]: [],
        [ChangeTypeEnum.Copied]: [],
        [ChangeTypeEnum.Deleted]: [],
        [ChangeTypeEnum.Modified]: [],
        [ChangeTypeEnum.Renamed]: [],
        [ChangeTypeEnum.TypeChanged]: [],
        [ChangeTypeEnum.Unmerged]: [],
        [ChangeTypeEnum.Unknown]: []
      })
    })

    // Tests that the function can handle file names with special characters
    it('should handle file names with special characters', async () => {
      const allDiffFiles = {
        [ChangeTypeEnum.Added]: [
          'file1.txt',
          'file2 with spaces.txt',
          'file3$$.txt'
        ],
        [ChangeTypeEnum.Copied]: [],
        [ChangeTypeEnum.Deleted]: [],
        [ChangeTypeEnum.Modified]: [],
        [ChangeTypeEnum.Renamed]: [],
        [ChangeTypeEnum.TypeChanged]: [],
        [ChangeTypeEnum.Unmerged]: [],
        [ChangeTypeEnum.Unknown]: []
      }
      const result = await getFilteredChangedFiles({
        allDiffFiles,
        filePatterns: ['file2*.txt']
      })
      expect(result).toEqual({
        [ChangeTypeEnum.Added]: ['file2 with spaces.txt'],
        [ChangeTypeEnum.Copied]: [],
        [ChangeTypeEnum.Deleted]: [],
        [ChangeTypeEnum.Modified]: [],
        [ChangeTypeEnum.Renamed]: [],
        [ChangeTypeEnum.TypeChanged]: [],
        [ChangeTypeEnum.Unmerged]: [],
        [ChangeTypeEnum.Unknown]: []
      })
    })

    // Tests that getFilteredChangedFiles correctly filters files using glob patterns
    it('should filter files using glob patterns', async () => {
      const allDiffFiles = {
        [ChangeTypeEnum.Added]: ['test/migrations/test.sql'],
        [ChangeTypeEnum.Copied]: [],
        [ChangeTypeEnum.Deleted]: [],
        [ChangeTypeEnum.Modified]: [],
        [ChangeTypeEnum.Renamed]: [],
        [ChangeTypeEnum.TypeChanged]: [],
        [ChangeTypeEnum.Unmerged]: [],
        [ChangeTypeEnum.Unknown]: []
      }
      const filePatterns = ['test/migrations/**']
      const filteredFiles = await getFilteredChangedFiles({
        allDiffFiles,
        filePatterns
      })
      expect(filteredFiles[ChangeTypeEnum.Added]).toEqual([
        'test/migrations/test.sql'
      ])
    })

    // Tests that getFilteredChangedFiles correctly filters files using ignore glob patterns
    it('should filter files using ignore glob patterns', async () => {
      const allDiffFiles = {
        [ChangeTypeEnum.Added]: [],
        [ChangeTypeEnum.Copied]: [],
        [ChangeTypeEnum.Deleted]: [],
        [ChangeTypeEnum.Modified]: [
          'assets/scripts/configure-minikube-linux.sh'
        ],
        [ChangeTypeEnum.Renamed]: [],
        [ChangeTypeEnum.TypeChanged]: [],
        [ChangeTypeEnum.Unmerged]: [],
        [ChangeTypeEnum.Unknown]: []
      }
      const filePatterns = [
        'assets/scripts/**.sh',
        '!assets/scripts/configure-minikube-linux.sh'
      ]
      const filteredFiles = await getFilteredChangedFiles({
        allDiffFiles,
        filePatterns
      })
      expect(filteredFiles[ChangeTypeEnum.Modified]).toEqual([])
    })
  })

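Taken together, the tests above pin down the call shape: getFilteredChangedFiles takes a map keyed by ChangeTypeEnum plus micromatch-style patterns (negations included) and resolves to a filtered map of the same shape. A minimal usage sketch under those assumptions, with illustrative file names and the same relative import paths the deleted test file used:

// Illustrative usage of the API shape exercised by the tests above.
import {ChangeTypeEnum} from '../changedFiles'
import {getFilteredChangedFiles} from '../utils'

const emptyByType = {
  [ChangeTypeEnum.Copied]: [],
  [ChangeTypeEnum.Deleted]: [],
  [ChangeTypeEnum.Modified]: [],
  [ChangeTypeEnum.Renamed]: [],
  [ChangeTypeEnum.TypeChanged]: [],
  [ChangeTypeEnum.Unmerged]: [],
  [ChangeTypeEnum.Unknown]: []
}

async function demo(): Promise<void> {
  const filtered = await getFilteredChangedFiles({
    allDiffFiles: {
      ...emptyByType,
      [ChangeTypeEnum.Added]: ['src/app.ts', 'docs/readme.md']
    },
    // Positive pattern plus a negation, as in the last test above.
    filePatterns: ['**/*.ts', '!docs/**']
  })
  console.log(filtered[ChangeTypeEnum.Added]) // ['src/app.ts']
}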
describe('warnUnsupportedRESTAPIInputs', () => {
|
|
||||||
// Warns about unsupported inputs when using the REST API.
|
|
||||||
it('should warn about unsupported inputs when all inputs are supported', async () => {
|
|
||||||
const inputs: Inputs = {
|
|
||||||
files: '',
|
|
||||||
filesSeparator: '\n',
|
|
||||||
filesFromSourceFile: '',
|
|
||||||
filesFromSourceFileSeparator: '\n',
|
|
||||||
filesYaml: '',
|
|
||||||
filesYamlFromSourceFile: '',
|
|
||||||
filesYamlFromSourceFileSeparator: '\n',
|
|
||||||
filesIgnore: '',
|
|
||||||
filesIgnoreSeparator: '\n',
|
|
||||||
filesIgnoreFromSourceFile: '',
|
|
||||||
filesIgnoreFromSourceFileSeparator: '\n',
|
|
||||||
filesIgnoreYaml: '',
|
|
||||||
filesIgnoreYamlFromSourceFile: '',
|
|
||||||
filesIgnoreYamlFromSourceFileSeparator: '\n',
|
|
||||||
separator: ' ',
|
|
||||||
includeAllOldNewRenamedFiles: false,
|
|
||||||
oldNewSeparator: ',',
|
|
||||||
oldNewFilesSeparator: ' ',
|
|
||||||
sha: '1313123',
|
|
||||||
baseSha: '',
|
|
||||||
since: '',
|
|
||||||
until: '',
|
|
||||||
path: '.',
|
|
||||||
quotepath: true,
|
|
||||||
diffRelative: true,
|
|
||||||
dirNames: false,
|
|
||||||
dirNamesMaxDepth: undefined,
|
|
||||||
dirNamesExcludeCurrentDir: false,
|
|
||||||
dirNamesIncludeFiles: '',
|
|
||||||
-      dirNamesIncludeFilesSeparator: '\n',
-      dirNamesDeletedFilesIncludeOnlyDeletedDirs: false,
-      json: false,
-      escapeJson: true,
-      safeOutput: true,
-      fetchDepth: 50,
-      fetchAdditionalSubmoduleHistory: false,
-      sinceLastRemoteCommit: false,
-      writeOutputFiles: false,
-      outputDir: '.github/outputs',
-      outputRenamedFilesAsDeletedAndAdded: false,
-      recoverDeletedFiles: false,
-      recoverDeletedFilesToDestination: '',
-      recoverFiles: '',
-      recoverFilesSeparator: '\n',
-      recoverFilesIgnore: '',
-      recoverFilesIgnoreSeparator: '\n',
-      token: '${{ github.token }}',
-      apiUrl: '${{ github.api_url }}',
-      skipInitialFetch: false,
-      failOnInitialDiffError: false,
-      failOnSubmoduleDiffError: false,
-      negationPatternsFirst: false,
-      useRestApi: false,
-      excludeSubmodules: false,
-      fetchMissingHistoryMaxRetries: 20,
-      usePosixPathSeparator: false,
-      tagsPattern: '*',
-      tagsIgnorePattern: ''
-    }
-
-    const coreWarningSpy = jest.spyOn(core, 'warning')
-
-    await warnUnsupportedRESTAPIInputs({
-      inputs
-    })
-
-    expect(coreWarningSpy).toHaveBeenCalledWith(
-      'Input "sha" is not supported when using GitHub\'s REST API to get changed files'
-    )
-
-    expect(coreWarningSpy).toHaveBeenCalledTimes(1)
-  })
-})
-describe('getPreviousGitTag', () => {
-  // Skip these tests when the workflow runs from a forked pull request
-  const shouldSkip = !!process.env.GITHUB_EVENT_PULL_REQUEST_HEAD_REPO_FORK
-  // Function returns the second-latest tag and its SHA
-  it('should return the second latest tag and its SHA when multiple tags are present', async () => {
-    if (shouldSkip) {
-      return
-    }
-    const result = await getPreviousGitTag({
-      cwd: '.',
-      tagsPattern: '*',
-      tagsIgnorePattern: '',
-      currentBranch: 'v1.0.1'
-    })
-    expect(result).toEqual({
-      tag: 'v1.0.0',
-      sha: 'f0751de6af436d4e79016e2041cf6400e0833653'
-    })
-  })
-  // Tags are filtered by a specified pattern when 'tagsPattern' is provided
-  it('should filter tags by the specified pattern', async () => {
-    if (shouldSkip) {
-      return
-    }
-    const result = await getPreviousGitTag({
-      cwd: '.',
-      tagsPattern: 'v1.*',
-      tagsIgnorePattern: '',
-      currentBranch: 'v1.0.1'
-    })
-    expect(result).toEqual({
-      tag: 'v1.0.0',
-      sha: 'f0751de6af436d4e79016e2041cf6400e0833653'
-    })
-  })
-  // Tags are excluded by a specified ignore pattern when 'tagsIgnorePattern' is provided
-  it('should exclude tags by the specified ignore pattern', async () => {
-    if (shouldSkip) {
-      return
-    }
-    const result = await getPreviousGitTag({
-      cwd: '.',
-      tagsPattern: '*',
-      tagsIgnorePattern: 'v0.*.*',
-      currentBranch: 'v1.0.1'
-    })
-    expect(result).toEqual({
-      tag: 'v1.0.0',
-      sha: 'f0751de6af436d4e79016e2041cf6400e0833653'
-    })
-  })
-
-  // No tags are available in the repository
-  it('should return empty values when no tags are available in the repository', async () => {
-    jest.spyOn(exec, 'getExecOutput').mockResolvedValueOnce({
-      stdout: '',
-      stderr: '',
-      exitCode: 0
-    })
-    const result = await getPreviousGitTag({
-      cwd: '.',
-      tagsPattern: '*',
-      tagsIgnorePattern: '',
-      currentBranch: ''
-    })
-    expect(result).toEqual({tag: '', sha: ''})
-  })
-
-  // Only one tag is available, making it impossible to find a previous tag
-  it('should return empty values when only one tag is available', async () => {
-    jest.spyOn(exec, 'getExecOutput').mockResolvedValueOnce({
-      stdout:
-        'v1.0.1|f0751de6af436d4e79016e2041cf6400e0833653|2021-01-01T00:00:00Z',
-      stderr: '',
-      exitCode: 0
-    })
-    const result = await getPreviousGitTag({
-      cwd: '.',
-      tagsPattern: '*',
-      tagsIgnorePattern: '',
-      currentBranch: 'v1.0.1'
-    })
-    expect(result).toEqual({tag: '', sha: ''})
-  })
-
-  // Git commands fail and throw errors
-  it('should throw an error when git commands fail', async () => {
-    jest
-      .spyOn(exec, 'getExecOutput')
-      .mockRejectedValue(new Error('git command failed'))
-    await expect(
-      getPreviousGitTag({
-        cwd: '.',
-        tagsPattern: '*',
-        tagsIgnorePattern: '',
-        currentBranch: 'v1.0.1'
-      })
-    ).rejects.toThrow('git command failed')
-  })
-})
-})
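The mocks above pin down the git output contract getPreviousGitTag consumes: one 'tag|sha|date' record per line, newest tag first, so the "previous" tag is the second record. A minimal sketch of that parsing contract, assuming the real implementation (which also applies tagsPattern / tagsIgnorePattern filtering) lives in src/utils; the helper name below is hypothetical and for illustration only:

// Hypothetical helper illustrating the 'tag|sha|date' contract assumed by
// the mocked getExecOutput calls above; not the action's actual code.
interface PreviousTagResult {
  tag: string
  sha: string
}

export function parsePreviousTag(stdout: string): PreviousTagResult {
  const entries = stdout
    .split('\n')
    .filter(Boolean)
    .map(line => {
      // Each record is 'tag|sha|date'; the date field is ignored here.
      const [tag, sha] = line.split('|')
      return {tag, sha}
    })
  // With zero or one tag there is no previous tag to report, which is
  // exactly what the two "empty values" tests above expect.
  if (entries.length < 2) {
    return {tag: '', sha: ''}
  }
  return entries[1]
}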
@ -1,134 +1,30 @@
 import * as core from '@actions/core'
 import * as github from '@actions/github'
 import type {RestEndpointMethodTypes} from '@octokit/rest'
-import flatten from 'lodash/flatten'
-import convertPath from '@stdlib/utils-convert-path'
-import mm from 'micromatch'
 import * as path from 'path'
-import {setOutputsAndGetModifiedAndChangedFilesStatus} from './changedFilesOutput'
 import {DiffResult} from './commitSha'
+import {Env} from './env'
 import {Inputs} from './inputs'
 import {
-  canDiffCommits,
-  getAllChangedFiles,
   getDirnameMaxDepth,
-  getDirNamesIncludeFilesPattern,
-  getFilteredChangedFiles,
   gitRenamedFiles,
   gitSubmoduleDiffSHA,
-  isWindows,
   jsonOutput,
-  setArrayOutput
+  getAllChangedFiles
 } from './utils'
+import flatten from 'lodash/flatten'

-export const processChangedFiles = async ({
-  filePatterns,
-  allDiffFiles,
-  inputs,
-  yamlFilePatterns,
-  workingDirectory
-}: {
-  filePatterns: string[]
-  allDiffFiles: ChangedFiles
-  inputs: Inputs
-  yamlFilePatterns: Record<string, string[]>
-  workingDirectory?: string
-}): Promise<void> => {
-  if (filePatterns.length > 0) {
-    core.startGroup('changed-files-patterns')
-    const allFilteredDiffFiles = await getFilteredChangedFiles({
-      allDiffFiles,
-      filePatterns
-    })
-    core.debug(
-      `All filtered diff files: ${JSON.stringify(allFilteredDiffFiles)}`
-    )
-    await setOutputsAndGetModifiedAndChangedFilesStatus({
-      allDiffFiles,
-      allFilteredDiffFiles,
-      inputs,
-      filePatterns,
-      workingDirectory
-    })
-    core.info('All Done!')
-    core.endGroup()
-  }
-
-  if (Object.keys(yamlFilePatterns).length > 0) {
-    const modifiedKeys: string[] = []
-    const changedKeys: string[] = []
-
-    for (const key of Object.keys(yamlFilePatterns)) {
-      core.startGroup(`changed-files-yaml-${key}`)
-      const allFilteredDiffFiles = await getFilteredChangedFiles({
-        allDiffFiles,
-        filePatterns: yamlFilePatterns[key]
-      })
-      core.debug(
-        `All filtered diff files for ${key}: ${JSON.stringify(
-          allFilteredDiffFiles
-        )}`
-      )
-      const {anyChanged, anyModified} =
-        await setOutputsAndGetModifiedAndChangedFilesStatus({
-          allDiffFiles,
-          allFilteredDiffFiles,
-          inputs,
-          filePatterns: yamlFilePatterns[key],
-          outputPrefix: key,
-          workingDirectory
-        })
-      if (anyModified) {
-        modifiedKeys.push(key)
-      }
-      if (anyChanged) {
-        changedKeys.push(key)
-      }
-
-      core.info('All Done!')
-      core.endGroup()
-    }
-
-    if (modifiedKeys.length > 0) {
-      await setArrayOutput({
-        key: 'modified_keys',
-        inputs,
-        value: modifiedKeys
-      })
-    }
-
-    if (changedKeys.length > 0) {
-      await setArrayOutput({
-        key: 'changed_keys',
-        inputs,
-        value: changedKeys
-      })
-    }
-  }
-
-  if (filePatterns.length === 0 && Object.keys(yamlFilePatterns).length === 0) {
-    core.startGroup('changed-files-all')
-    await setOutputsAndGetModifiedAndChangedFilesStatus({
-      allDiffFiles,
-      allFilteredDiffFiles: allDiffFiles,
-      inputs,
-      workingDirectory
-    })
-    core.info('All Done!')
-    core.endGroup()
-  }
-}
-
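The removed processChangedFiles (main side) dispatches to three mutually exclusive paths: plain file patterns, YAML-grouped patterns (one prefixed output set per key, with matching keys collected into modified_keys / changed_keys), or no patterns at all. A hedged usage sketch, assuming allDiffFiles and inputs are already in scope from getAllDiffFiles and the action's input parsing; the pattern values are illustrative only:

// Illustrative only: the yamlFilePatterns keys and globs below are hypothetical.
const yamlFilePatterns: Record<string, string[]> = {
  src: ['src/**/*.ts'],
  docs: ['**/*.md']
}

await processChangedFiles({
  filePatterns: [], // empty, so the YAML branch runs
  allDiffFiles,     // assumed: ChangedFiles from getAllDiffFiles
  inputs,           // assumed: Inputs from the action's input parsing
  yamlFilePatterns, // emits src_* / docs_* outputs plus modified_keys / changed_keys
  workingDirectory: '.'
})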
 export const getRenamedFiles = async ({
   inputs,
   workingDirectory,
-  diffSubmodule,
+  hasSubmodule,
   diffResult,
   submodulePaths
 }: {
   inputs: Inputs
   workingDirectory: string
-  diffSubmodule: boolean
+  hasSubmodule: boolean
   diffResult: DiffResult
   submodulePaths: string[]
 }): Promise<{paths: string; count: string}> => {
@ -140,7 +36,7 @@ export const getRenamedFiles = async ({
     oldNewSeparator: inputs.oldNewSeparator
   })

-  if (diffSubmodule) {
+  if (hasSubmodule) {
     for (const submodulePath of submodulePaths) {
       const submoduleShaResult = await gitSubmoduleDiffSHA({
         cwd: workingDirectory,
@ -156,29 +52,11 @@ export const getRenamedFiles = async ({
       )

       if (submoduleShaResult.currentSha && submoduleShaResult.previousSha) {
-        let diff = '...'
-
-        if (
-          !(await canDiffCommits({
-            cwd: submoduleWorkingDirectory,
-            sha1: submoduleShaResult.previousSha,
-            sha2: submoduleShaResult.currentSha,
-            diff
-          }))
-        ) {
-          let message = `Unable to use three dot diff for: ${submodulePath} submodule. Falling back to two dot diff. You can set 'fetch_additional_submodule_history: true' to fetch additional submodule history in order to use three dot diff`
-          if (inputs.fetchAdditionalSubmoduleHistory) {
-            message = `To fetch additional submodule history for: ${submodulePath} you can increase history depth using 'fetch_depth' input`
-          }
-          core.info(message)
-          diff = '..'
-        }
-
         const submoduleRenamedFiles = await gitRenamedFiles({
           cwd: submoduleWorkingDirectory,
           sha1: submoduleShaResult.previousSha,
           sha2: submoduleShaResult.currentSha,
-          diff,
+          diff: diffResult.diff,
           oldNewSeparator: inputs.oldNewSeparator,
           isSubmodule: true,
           parentDir: submodulePath
@ -218,33 +96,26 @@ export type ChangedFiles = {

 export const getAllDiffFiles = async ({
   workingDirectory,
-  diffSubmodule,
+  hasSubmodule,
   diffResult,
   submodulePaths,
-  outputRenamedFilesAsDeletedAndAdded,
-  fetchAdditionalSubmoduleHistory,
-  failOnInitialDiffError,
-  failOnSubmoduleDiffError
+  outputRenamedFilesAsDeletedAndAdded
 }: {
   workingDirectory: string
-  diffSubmodule: boolean
+  hasSubmodule: boolean
   diffResult: DiffResult
   submodulePaths: string[]
   outputRenamedFilesAsDeletedAndAdded: boolean
-  fetchAdditionalSubmoduleHistory: boolean
-  failOnInitialDiffError: boolean
-  failOnSubmoduleDiffError: boolean
 }): Promise<ChangedFiles> => {
   const files = await getAllChangedFiles({
     cwd: workingDirectory,
     sha1: diffResult.previousSha,
     sha2: diffResult.currentSha,
     diff: diffResult.diff,
-    outputRenamedFilesAsDeletedAndAdded,
-    failOnInitialDiffError
+    outputRenamedFilesAsDeletedAndAdded
   })

-  if (diffSubmodule) {
+  if (hasSubmodule) {
     for (const submodulePath of submodulePaths) {
       const submoduleShaResult = await gitSubmoduleDiffSHA({
         cwd: workingDirectory,
@ -260,33 +131,14 @@ export const getAllDiffFiles = async ({
       )

       if (submoduleShaResult.currentSha && submoduleShaResult.previousSha) {
-        let diff = '...'
-
-        if (
-          !(await canDiffCommits({
-            cwd: submoduleWorkingDirectory,
-            sha1: submoduleShaResult.previousSha,
-            sha2: submoduleShaResult.currentSha,
-            diff
-          }))
-        ) {
-          let message = `Set 'fetch_additional_submodule_history: true' to fetch additional submodule history for: ${submodulePath}`
-          if (fetchAdditionalSubmoduleHistory) {
-            message = `To fetch additional submodule history for: ${submodulePath} you can increase history depth using 'fetch_depth' input`
-          }
-          core.warning(message)
-          diff = '..'
-        }
-
         const submoduleFiles = await getAllChangedFiles({
           cwd: submoduleWorkingDirectory,
           sha1: submoduleShaResult.previousSha,
           sha2: submoduleShaResult.currentSha,
-          diff,
+          diff: diffResult.diff,
           isSubmodule: true,
           parentDir: submodulePath,
-          outputRenamedFilesAsDeletedAndAdded,
-          failOnSubmoduleDiffError
+          outputRenamedFilesAsDeletedAndAdded
         })

         for (const changeType of Object.keys(
@ -304,35 +156,6 @@ export const getAllDiffFiles = async ({
   return files
 }

-function* getFilePaths({
-  inputs,
-  filePaths,
-  dirNamesIncludeFilePatterns
-}: {
-  inputs: Inputs
-  filePaths: string[]
-  dirNamesIncludeFilePatterns: string[]
-}): Generator<string> {
-  for (const filePath of filePaths) {
-    if (inputs.dirNames) {
-      if (dirNamesIncludeFilePatterns.length > 0) {
-        const isWin = isWindows()
-        const matchOptions = {dot: true, windows: isWin, noext: true}
-        if (mm.isMatch(filePath, dirNamesIncludeFilePatterns, matchOptions)) {
-          yield filePath
-        }
-      }
-      yield getDirnameMaxDepth({
-        relativePath: filePath,
-        dirNamesMaxDepth: inputs.dirNamesMaxDepth,
-        excludeCurrentDir: inputs.dirNamesExcludeCurrentDir
-      })
-    } else {
-      yield filePath
-    }
-  }
-}
-
 function* getChangeTypeFilesGenerator({
   inputs,
   changedFiles,
@ -342,24 +165,17 @@ function* getChangeTypeFilesGenerator({
   changedFiles: ChangedFiles
   changeTypes: ChangeTypeEnum[]
 }): Generator<string> {
-  const dirNamesIncludeFilePatterns = getDirNamesIncludeFilesPattern({inputs})
-  core.debug(
-    `Dir names include file patterns: ${JSON.stringify(
-      dirNamesIncludeFilePatterns
-    )}`
-  )
-
   for (const changeType of changeTypes) {
-    const filePaths = changedFiles[changeType] || []
-    for (const filePath of getFilePaths({
-      inputs,
-      filePaths,
-      dirNamesIncludeFilePatterns
-    })) {
-      if (isWindows() && inputs.usePosixPathSeparator) {
-        yield convertPath(filePath, 'mixed')
+    const files = changedFiles[changeType] || []
+    for (const file of files) {
+      if (inputs.dirNames) {
+        yield getDirnameMaxDepth({
+          pathStr: file,
+          dirNamesMaxDepth: inputs.dirNamesMaxDepth,
+          excludeCurrentDir: inputs.dirNamesExcludeCurrentDir
+        })
       } else {
-        yield filePath
+        yield file
       }
     }
   }
@ -373,15 +189,20 @@ export const getChangeTypeFiles = async ({
   inputs: Inputs
   changedFiles: ChangedFiles
   changeTypes: ChangeTypeEnum[]
-}): Promise<{paths: string[] | string; count: string}> => {
+}): Promise<{paths: string; count: string}> => {
   const files = [
     ...new Set(getChangeTypeFilesGenerator({inputs, changedFiles, changeTypes}))
-  ].filter(Boolean)
+  ]

-  const paths = inputs.json ? files : files.join(inputs.separator)
+  if (inputs.json) {
+    return {
+      paths: jsonOutput({value: files, shouldEscape: inputs.escapeJson}),
+      count: files.length.toString()
+    }
+  }

   return {
-    paths,
+    paths: files.join(inputs.separator),
     count: files.length.toString()
   }
 }
@ -393,24 +214,15 @@ function* getAllChangeTypeFilesGenerator({
   inputs: Inputs
   changedFiles: ChangedFiles
 }): Generator<string> {
-  const dirNamesIncludeFilePatterns = getDirNamesIncludeFilesPattern({inputs})
-  core.debug(
-    `Dir names include file patterns: ${JSON.stringify(
-      dirNamesIncludeFilePatterns
-    )}`
-  )
-
-  const filePaths = flatten(Object.values(changedFiles))
-
-  for (const filePath of getFilePaths({
-    inputs,
-    filePaths,
-    dirNamesIncludeFilePatterns
-  })) {
-    if (isWindows() && inputs.usePosixPathSeparator) {
-      yield convertPath(filePath, 'mixed')
+  for (const file of flatten(Object.values(changedFiles))) {
+    if (inputs.dirNames) {
+      yield getDirnameMaxDepth({
+        pathStr: file,
+        dirNamesMaxDepth: inputs.dirNamesMaxDepth,
+        excludeCurrentDir: inputs.dirNamesExcludeCurrentDir
+      })
     } else {
-      yield filePath
+      yield file
     }
   }
 }
@ -421,23 +233,30 @@ export const getAllChangeTypeFiles = async ({
 }: {
   inputs: Inputs
   changedFiles: ChangedFiles
-}): Promise<{paths: string[] | string; count: string}> => {
+}): Promise<{paths: string; count: string}> => {
   const files = [
     ...new Set(getAllChangeTypeFilesGenerator({inputs, changedFiles}))
-  ].filter(Boolean)
+  ]

-  const paths = inputs.json ? files : files.join(inputs.separator)
+  if (inputs.json) {
+    return {
+      paths: jsonOutput({value: files, shouldEscape: inputs.escapeJson}),
+      count: files.length.toString()
+    }
+  }

   return {
-    paths,
+    paths: files.join(inputs.separator),
     count: files.length.toString()
   }
 }

 export const getChangedFilesFromGithubAPI = async ({
-  inputs
+  inputs,
+  env
 }: {
   inputs: Inputs
+  env: Env
 }): Promise<ChangedFiles> => {
   const octokit = github.getOctokit(inputs.token, {
     baseUrl: inputs.apiUrl
@ -458,14 +277,13 @@ export const getChangedFilesFromGithubAPI = async ({
   const options = octokit.rest.pulls.listFiles.endpoint.merge({
     owner: github.context.repo.owner,
     repo: github.context.repo.repo,
-    pull_number: github.context.payload.pull_request?.number,
+    pull_number: env.GITHUB_EVENT_PULL_REQUEST_NUMBER,
     per_page: 100
   })

-  const paginatedResponse =
-    await octokit.paginate<
-      RestEndpointMethodTypes['pulls']['listFiles']['response']['data'][0]
-    >(options)
+  const paginatedResponse = await octokit.paginate<
+    RestEndpointMethodTypes['pulls']['listFiles']['response']['data'][0]
+  >(options)

   core.info(`Found ${paginatedResponse.length} changed files from GitHub API`)
   const statusMap: Record<string, ChangeTypeEnum> = {
@ -484,7 +302,7 @@ export const getChangedFilesFromGithubAPI = async ({

     if (changeType === ChangeTypeEnum.Renamed) {
       if (inputs.outputRenamedFilesAsDeletedAndAdded) {
-        changedFiles[ChangeTypeEnum.Deleted].push(item.previous_filename || '')
+        changedFiles[ChangeTypeEnum.Deleted].push(item.filename)
         changedFiles[ChangeTypeEnum.Added].push(item.filename)
       } else {
         changedFiles[ChangeTypeEnum.Renamed].push(item.filename)
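The visible signature change in getChangeTypeFiles / getAllChangeTypeFiles, paths: string[] | string on main versus paths: string on v37.1.1, exists because main defers JSON serialization to setOutput while v37.1.1 pre-serialized inside the function via jsonOutput. A small sketch of handling both shapes, assuming inputs, changedFiles, core and ChangeTypeEnum are in scope:

// Sketch only: illustrates the union return shape on the main side.
const {paths, count} = await getChangeTypeFiles({
  inputs,
  changedFiles,
  changeTypes: [ChangeTypeEnum.Added]
})

if (Array.isArray(paths)) {
  // main: with json: true the array is preserved for later serialization
  core.info(`Added (${count}): ${JSON.stringify(paths)}`)
} else {
  // both sides: a separator-joined string when JSON output is off
  core.info(`Added (${count}): ${paths}`)
}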
@ -1,56 +1,63 @@
 import * as core from '@actions/core'
-import path from 'path'
 import {
   ChangedFiles,
   ChangeTypeEnum,
   getAllChangeTypeFiles,
   getChangeTypeFiles
 } from './changedFiles'
+import {DiffResult} from './commitSha'
 import {Inputs} from './inputs'
-import {getOutputKey, setArrayOutput, setOutput, exists} from './utils'
+import {getFilteredChangedFiles, recoverDeletedFiles, setOutput} from './utils'

-const getArrayFromPaths = (
-  paths: string | string[],
-  inputs: Inputs
-): string[] => {
-  return Array.isArray(paths) ? paths : paths.split(inputs.separator)
+const getOutputKey = (key: string, outputPrefix: string): string => {
+  return outputPrefix ? `${outputPrefix}_${key}` : key
 }

-export const setOutputsAndGetModifiedAndChangedFilesStatus = async ({
+export const setChangedFilesOutput = async ({
   allDiffFiles,
-  allFilteredDiffFiles,
   inputs,
+  workingDirectory,
+  diffResult,
   filePatterns = [],
-  outputPrefix = '',
-  workingDirectory
+  outputPrefix = ''
 }: {
   allDiffFiles: ChangedFiles
-  allFilteredDiffFiles: ChangedFiles
   inputs: Inputs
+  workingDirectory: string
+  diffResult?: DiffResult
   filePatterns?: string[]
   outputPrefix?: string
-  workingDirectory?: string
-}): Promise<{anyModified: boolean; anyChanged: boolean}> => {
+}): Promise<void> => {
+  const allFilteredDiffFiles = await getFilteredChangedFiles({
+    allDiffFiles,
+    filePatterns
+  })
+  core.debug(`All filtered diff files: ${JSON.stringify(allFilteredDiffFiles)}`)
+
+  if (diffResult) {
+    await recoverDeletedFiles({
+      inputs,
+      workingDirectory,
+      deletedFiles: allFilteredDiffFiles[ChangeTypeEnum.Deleted],
+      sha: diffResult.previousSha
+    })
+  }

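On the v37.1.1 side above, deleted files can be restored from diffResult.previousSha before any outputs are set. A minimal sketch of what recovering a single deleted file presumably boils down to, reading the blob at that revision and writing it back; the real recoverDeletedFiles helper in src/utils also honours the recover_files / recover_files_ignore patterns and an optional destination directory, so the standalone function below is illustrative only:

// Hypothetical standalone helper, not the action's actual implementation.
import {promises as fs} from 'fs'
import * as path from 'path'
import {getExecOutput} from '@actions/exec'

async function recoverFileAtSha(
  cwd: string,
  sha: string,
  filePath: string
): Promise<void> {
  // `git show <sha>:<path>` prints the file content at that revision.
  const {stdout} = await getExecOutput('git', ['show', `${sha}:${filePath}`], {
    cwd,
    silent: true
  })
  const target = path.join(cwd, filePath)
  // Recreate any missing parent directories before writing the file back.
  await fs.mkdir(path.dirname(target), {recursive: true})
  await fs.writeFile(target, stdout)
}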
   const addedFiles = await getChangeTypeFiles({
     inputs,
     changedFiles: allFilteredDiffFiles,
     changeTypes: [ChangeTypeEnum.Added]
   })
-  core.debug(`Added files: ${JSON.stringify(addedFiles)}`)
+  core.debug(`Added files: ${addedFiles}`)
   await setOutput({
     key: getOutputKey('added_files', outputPrefix),
     value: addedFiles.paths,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir,
-    json: inputs.json,
-    shouldEscape: inputs.escapeJson,
-    safeOutput: inputs.safeOutput
+    inputs
   })
   await setOutput({
     key: getOutputKey('added_files_count', outputPrefix),
     value: addedFiles.count,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir
+    inputs
   })

   const copiedFiles = await getChangeTypeFiles({
@ -58,22 +65,17 @@ export const setOutputsAndGetModifiedAndChangedFilesStatus = async ({
     changedFiles: allFilteredDiffFiles,
     changeTypes: [ChangeTypeEnum.Copied]
   })
-  core.debug(`Copied files: ${JSON.stringify(copiedFiles)}`)
+  core.debug(`Copied files: ${copiedFiles}`)
   await setOutput({
     key: getOutputKey('copied_files', outputPrefix),
     value: copiedFiles.paths,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir,
-    json: inputs.json,
-    shouldEscape: inputs.escapeJson,
-    safeOutput: inputs.safeOutput
+    inputs
   })

   await setOutput({
     key: getOutputKey('copied_files_count', outputPrefix),
     value: copiedFiles.count,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir
+    inputs
   })

   const modifiedFiles = await getChangeTypeFiles({
@ -81,22 +83,17 @@ export const setOutputsAndGetModifiedAndChangedFilesStatus = async ({
     changedFiles: allFilteredDiffFiles,
     changeTypes: [ChangeTypeEnum.Modified]
   })
-  core.debug(`Modified files: ${JSON.stringify(modifiedFiles)}`)
+  core.debug(`Modified files: ${modifiedFiles}`)
   await setOutput({
     key: getOutputKey('modified_files', outputPrefix),
     value: modifiedFiles.paths,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir,
-    json: inputs.json,
-    shouldEscape: inputs.escapeJson,
-    safeOutput: inputs.safeOutput
+    inputs
   })

   await setOutput({
     key: getOutputKey('modified_files_count', outputPrefix),
     value: modifiedFiles.count,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir
+    inputs
   })

   const renamedFiles = await getChangeTypeFiles({
@ -104,22 +101,17 @@ export const setOutputsAndGetModifiedAndChangedFilesStatus = async ({
     changedFiles: allFilteredDiffFiles,
     changeTypes: [ChangeTypeEnum.Renamed]
   })
-  core.debug(`Renamed files: ${JSON.stringify(renamedFiles)}`)
+  core.debug(`Renamed files: ${renamedFiles}`)
   await setOutput({
     key: getOutputKey('renamed_files', outputPrefix),
     value: renamedFiles.paths,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir,
-    json: inputs.json,
-    shouldEscape: inputs.escapeJson,
-    safeOutput: inputs.safeOutput
+    inputs
   })

   await setOutput({
     key: getOutputKey('renamed_files_count', outputPrefix),
     value: renamedFiles.count,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir
+    inputs
   })

   const typeChangedFiles = await getChangeTypeFiles({
@ -127,22 +119,17 @@ export const setOutputsAndGetModifiedAndChangedFilesStatus = async ({
     changedFiles: allFilteredDiffFiles,
     changeTypes: [ChangeTypeEnum.TypeChanged]
   })
-  core.debug(`Type changed files: ${JSON.stringify(typeChangedFiles)}`)
+  core.debug(`Type changed files: ${typeChangedFiles}`)
   await setOutput({
     key: getOutputKey('type_changed_files', outputPrefix),
     value: typeChangedFiles.paths,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir,
-    json: inputs.json,
-    shouldEscape: inputs.escapeJson,
-    safeOutput: inputs.safeOutput
+    inputs
   })

   await setOutput({
     key: getOutputKey('type_changed_files_count', outputPrefix),
     value: typeChangedFiles.count,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir
+    inputs
   })

   const unmergedFiles = await getChangeTypeFiles({
@ -150,22 +137,17 @@ export const setOutputsAndGetModifiedAndChangedFilesStatus = async ({
     changedFiles: allFilteredDiffFiles,
     changeTypes: [ChangeTypeEnum.Unmerged]
   })
-  core.debug(`Unmerged files: ${JSON.stringify(unmergedFiles)}`)
+  core.debug(`Unmerged files: ${unmergedFiles}`)
   await setOutput({
     key: getOutputKey('unmerged_files', outputPrefix),
     value: unmergedFiles.paths,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir,
-    json: inputs.json,
-    shouldEscape: inputs.escapeJson,
-    safeOutput: inputs.safeOutput
+    inputs
   })

   await setOutput({
     key: getOutputKey('unmerged_files_count', outputPrefix),
     value: unmergedFiles.count,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir
+    inputs
   })

   const unknownFiles = await getChangeTypeFiles({
@ -173,48 +155,34 @@ export const setOutputsAndGetModifiedAndChangedFilesStatus = async ({
     changedFiles: allFilteredDiffFiles,
     changeTypes: [ChangeTypeEnum.Unknown]
   })
-  core.debug(`Unknown files: ${JSON.stringify(unknownFiles)}`)
+  core.debug(`Unknown files: ${unknownFiles}`)
   await setOutput({
     key: getOutputKey('unknown_files', outputPrefix),
     value: unknownFiles.paths,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir,
-    json: inputs.json,
-    shouldEscape: inputs.escapeJson,
-    safeOutput: inputs.safeOutput
+    inputs
   })

   await setOutput({
     key: getOutputKey('unknown_files_count', outputPrefix),
     value: unknownFiles.count,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir
+    inputs
   })

   const allChangedAndModifiedFiles = await getAllChangeTypeFiles({
     inputs,
     changedFiles: allFilteredDiffFiles
   })
-  core.debug(
-    `All changed and modified files: ${JSON.stringify(
-      allChangedAndModifiedFiles
-    )}`
-  )
+  core.debug(`All changed and modified files: ${allChangedAndModifiedFiles}`)
   await setOutput({
     key: getOutputKey('all_changed_and_modified_files', outputPrefix),
     value: allChangedAndModifiedFiles.paths,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir,
-    json: inputs.json,
-    shouldEscape: inputs.escapeJson,
-    safeOutput: inputs.safeOutput
+    inputs
   })

   await setOutput({
     key: getOutputKey('all_changed_and_modified_files_count', outputPrefix),
     value: allChangedAndModifiedFiles.count,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir
+    inputs
   })

   const allChangedFiles = await getChangeTypeFiles({
@ -227,30 +195,23 @@ export const setOutputsAndGetModifiedAndChangedFilesStatus = async ({
       ChangeTypeEnum.Renamed
     ]
   })
-  core.debug(`All changed files: ${JSON.stringify(allChangedFiles)}`)
+  core.debug(`All changed files: ${allChangedFiles}`)
   await setOutput({
     key: getOutputKey('all_changed_files', outputPrefix),
     value: allChangedFiles.paths,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir,
-    json: inputs.json,
-    shouldEscape: inputs.escapeJson,
-    safeOutput: inputs.safeOutput
+    inputs
   })

   await setOutput({
     key: getOutputKey('all_changed_files_count', outputPrefix),
     value: allChangedFiles.count,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir
+    inputs
   })

   await setOutput({
     key: getOutputKey('any_changed', outputPrefix),
-    value: allChangedFiles.paths.length > 0,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir,
-    json: inputs.json
+    value: allChangedFiles.paths.length > 0 && filePatterns.length > 0,
+    inputs
   })

   const allOtherChangedFiles = await getChangeTypeFiles({
@ -263,20 +224,14 @@ export const setOutputsAndGetModifiedAndChangedFilesStatus = async ({
       ChangeTypeEnum.Renamed
     ]
   })
-  core.debug(`All other changed files: ${JSON.stringify(allOtherChangedFiles)}`)
+  core.debug(`All other changed files: ${allOtherChangedFiles}`)

-  const allOtherChangedFilesPaths: string[] = getArrayFromPaths(
-    allOtherChangedFiles.paths,
-    inputs
-  )
-  const allChangedFilesPaths: string[] = getArrayFromPaths(
-    allChangedFiles.paths,
-    inputs
-  )
-
-  const otherChangedFiles = allOtherChangedFilesPaths.filter(
-    (filePath: string) => !allChangedFilesPaths.includes(filePath)
-  )
+  const otherChangedFiles = allOtherChangedFiles.paths
+    .split(inputs.separator)
+    .filter(
+      (filePath: string) =>
+        !allChangedFiles.paths.split(inputs.separator).includes(filePath)
+    )

   const onlyChanged =
     otherChangedFiles.length === 0 &&
@ -286,23 +241,19 @@ export const setOutputsAndGetModifiedAndChangedFilesStatus = async ({
   await setOutput({
     key: getOutputKey('only_changed', outputPrefix),
     value: onlyChanged,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir,
-    json: inputs.json
+    inputs
   })

-  await setArrayOutput({
-    key: 'other_changed_files',
-    inputs,
-    value: otherChangedFiles,
-    outputPrefix
-  })
+  await setOutput({
+    key: getOutputKey('other_changed_files', outputPrefix),
+    value: otherChangedFiles.join(inputs.separator),
+    inputs
+  })

   await setOutput({
     key: getOutputKey('other_changed_files_count', outputPrefix),
     value: otherChangedFiles.length.toString(),
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir
+    inputs
   })

   const allModifiedFiles = await getChangeTypeFiles({
@ -316,30 +267,23 @@ export const setOutputsAndGetModifiedAndChangedFilesStatus = async ({
       ChangeTypeEnum.Deleted
     ]
   })
-  core.debug(`All modified files: ${JSON.stringify(allModifiedFiles)}`)
+  core.debug(`All modified files: ${allModifiedFiles}`)
   await setOutput({
     key: getOutputKey('all_modified_files', outputPrefix),
     value: allModifiedFiles.paths,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir,
-    json: inputs.json,
-    shouldEscape: inputs.escapeJson,
-    safeOutput: inputs.safeOutput
+    inputs
   })

   await setOutput({
     key: getOutputKey('all_modified_files_count', outputPrefix),
     value: allModifiedFiles.count,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir
+    inputs
   })

   await setOutput({
     key: getOutputKey('any_modified', outputPrefix),
-    value: allModifiedFiles.paths.length > 0,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir,
-    json: inputs.json
+    value: allModifiedFiles.paths.length > 0 && filePatterns.length > 0,
+    inputs
   })

   const allOtherModifiedFiles = await getChangeTypeFiles({
@ -354,19 +298,12 @@ export const setOutputsAndGetModifiedAndChangedFilesStatus = async ({
     ]
   })

-  const allOtherModifiedFilesPaths: string[] = getArrayFromPaths(
-    allOtherModifiedFiles.paths,
-    inputs
-  )
-
-  const allModifiedFilesPaths: string[] = getArrayFromPaths(
-    allModifiedFiles.paths,
-    inputs
-  )
-
-  const otherModifiedFiles = allOtherModifiedFilesPaths.filter(
-    (filePath: string) => !allModifiedFilesPaths.includes(filePath)
-  )
+  const otherModifiedFiles = allOtherModifiedFiles.paths
+    .split(inputs.separator)
+    .filter(
+      (filePath: string) =>
+        !allModifiedFiles.paths.split(inputs.separator).includes(filePath)
+    )

   const onlyModified =
     otherModifiedFiles.length === 0 &&
@ -376,23 +313,19 @@ export const setOutputsAndGetModifiedAndChangedFilesStatus = async ({
   await setOutput({
     key: getOutputKey('only_modified', outputPrefix),
     value: onlyModified,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir,
-    json: inputs.json
+    inputs
   })

-  await setArrayOutput({
-    key: 'other_modified_files',
-    inputs,
-    value: otherModifiedFiles,
-    outputPrefix
-  })
+  await setOutput({
+    key: getOutputKey('other_modified_files', outputPrefix),
+    value: otherModifiedFiles.join(inputs.separator),
+    inputs
+  })

   await setOutput({
     key: getOutputKey('other_modified_files_count', outputPrefix),
     value: otherModifiedFiles.length.toString(),
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir
+    inputs
   })

   const deletedFiles = await getChangeTypeFiles({
@ -400,52 +333,23 @@ export const setOutputsAndGetModifiedAndChangedFilesStatus = async ({
     changedFiles: allFilteredDiffFiles,
     changeTypes: [ChangeTypeEnum.Deleted]
   })
-  core.debug(`Deleted files: ${JSON.stringify(deletedFiles)}`)
+  core.debug(`Deleted files: ${deletedFiles}`)

-  if (
-    inputs.dirNamesDeletedFilesIncludeOnlyDeletedDirs &&
-    inputs.dirNames &&
-    workingDirectory
-  ) {
-    const newDeletedFilesPaths: string[] = []
-    for (const deletedPath of getArrayFromPaths(deletedFiles.paths, inputs)) {
-      const dirPath = path.join(workingDirectory, deletedPath)
-      core.debug(`Checking if directory exists: ${dirPath}`)
-      if (!(await exists(dirPath))) {
-        core.debug(`Directory not found: ${dirPath}`)
-        newDeletedFilesPaths.push(deletedPath)
-      }
-    }
-    deletedFiles.paths = inputs.json
-      ? newDeletedFilesPaths
-      : newDeletedFilesPaths.join(inputs.separator)
-    deletedFiles.count = newDeletedFilesPaths.length.toString()
-    core.debug(`New deleted files: ${JSON.stringify(deletedFiles)}`)
-  }
-
   await setOutput({
     key: getOutputKey('deleted_files', outputPrefix),
     value: deletedFiles.paths,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir,
-    json: inputs.json,
-    shouldEscape: inputs.escapeJson,
-    safeOutput: inputs.safeOutput
+    inputs
   })

   await setOutput({
     key: getOutputKey('deleted_files_count', outputPrefix),
     value: deletedFiles.count,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir
+    inputs
   })

   await setOutput({
     key: getOutputKey('any_deleted', outputPrefix),
-    value: deletedFiles.paths.length > 0,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir,
-    json: inputs.json
+    value: deletedFiles.paths.length > 0 && filePatterns.length > 0,
+    inputs
   })

   const allOtherDeletedFiles = await getChangeTypeFiles({
@ -454,19 +358,11 @@ export const setOutputsAndGetModifiedAndChangedFilesStatus = async ({
     changeTypes: [ChangeTypeEnum.Deleted]
   })

-  const allOtherDeletedFilesPaths: string[] = getArrayFromPaths(
-    allOtherDeletedFiles.paths,
-    inputs
-  )
-
-  const deletedFilesPaths: string[] = getArrayFromPaths(
-    deletedFiles.paths,
-    inputs
-  )
-
-  const otherDeletedFiles = allOtherDeletedFilesPaths.filter(
-    filePath => !deletedFilesPaths.includes(filePath)
-  )
+  const otherDeletedFiles = allOtherDeletedFiles.paths
+    .split(inputs.separator)
+    .filter(
+      filePath => !deletedFiles.paths.split(inputs.separator).includes(filePath)
+    )

   const onlyDeleted =
     otherDeletedFiles.length === 0 &&
@ -476,27 +372,18 @@ export const setOutputsAndGetModifiedAndChangedFilesStatus = async ({
   await setOutput({
     key: getOutputKey('only_deleted', outputPrefix),
     value: onlyDeleted,
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir,
-    json: inputs.json
+    inputs
   })

-  await setArrayOutput({
-    key: 'other_deleted_files',
-    inputs,
-    value: otherDeletedFiles,
-    outputPrefix
-  })
+  await setOutput({
+    key: getOutputKey('other_deleted_files', outputPrefix),
+    value: otherDeletedFiles.join(inputs.separator),
+    inputs
+  })

   await setOutput({
     key: getOutputKey('other_deleted_files_count', outputPrefix),
     value: otherDeletedFiles.length.toString(),
-    writeOutputFiles: inputs.writeOutputFiles,
-    outputDir: inputs.outputDir
+    inputs
   })

-  return {
-    anyModified: allModifiedFiles.paths.length > 0,
-    anyChanged: allChangedFiles.paths.length > 0
-  }
 }
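Both sides build prefixed output names the same way; v37.1.1 defines getOutputKey locally while main imports it from src/utils. Its behaviour, taken directly from the definition above:

getOutputKey('any_changed', '')    // => 'any_changed'
getOutputKey('any_changed', 'src') // => 'src_any_changed'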
383	src/commitSha.ts
@ -1,12 +1,9 @@
 import * as core from '@actions/core'
-import * as github from '@actions/github'

 import {Env} from './env'
 import {Inputs} from './inputs'
 import {
   canDiffCommits,
-  cleanShaInput,
-  getCurrentBranchName,
   getHeadSha,
   getParentSha,
   getPreviousGitTag,
@ -18,17 +15,15 @@ import {
 } from './utils'

 const getCurrentSHA = async ({
+  env,
   inputs,
   workingDirectory
 }: {
+  env: Env
   inputs: Inputs
   workingDirectory: string
 }): Promise<string> => {
-  let currentSha = await cleanShaInput({
-    sha: inputs.sha,
-    cwd: workingDirectory,
-    token: inputs.token
-  })
+  let currentSha = inputs.sha
   core.debug('Getting current SHA...')

   if (inputs.until) {
@ -55,16 +50,14 @@ const getCurrentSHA = async ({
   } else {
     if (!currentSha) {
       if (
-        github.context.payload.pull_request?.head?.sha &&
+        env.GITHUB_EVENT_PULL_REQUEST_HEAD_SHA &&
         (await verifyCommitSha({
-          sha: github.context.payload.pull_request?.head?.sha,
+          sha: env.GITHUB_EVENT_PULL_REQUEST_HEAD_SHA,
           cwd: workingDirectory,
           showAsErrorMessage: false
         })) === 0
       ) {
-        currentSha = github.context.payload.pull_request?.head?.sha
-      } else if (github.context.eventName === 'merge_group') {
-        currentSha = github.context.payload.merge_group?.head_sha
+        currentSha = env.GITHUB_EVENT_PULL_REQUEST_HEAD_SHA
       } else {
         currentSha = await getHeadSha({cwd: workingDirectory})
       }
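The fallback above accepts the pull request head SHA only when verifyCommitSha confirms it exists locally. A hypothetical standalone check mirroring that guard, under the assumption (from its usage here) that verifyCommitSha wraps something like `git rev-parse --verify`, which exits non-zero for unknown objects; this sketch is not the action's actual helper:

// Hypothetical sketch, not the repository's verifyCommitSha.
import {getExecOutput} from '@actions/exec'

async function shaExistsLocally(cwd: string, sha: string): Promise<boolean> {
  const {exitCode} = await getExecOutput(
    'git',
    ['rev-parse', '--verify', `${sha}^{commit}`],
    {cwd, ignoreReturnCode: true, silent: true}
  )
  return exitCode === 0
}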
@ -86,117 +79,69 @@ export interface DiffResult {
|
|||||||
initialCommit?: boolean
|
initialCommit?: boolean
|
||||||
}
|
}
|
||||||
|
|
||||||
interface SHAForNonPullRequestEvent {
|
export const getSHAForPushEvent = async (
|
||||||
inputs: Inputs
|
inputs: Inputs,
|
||||||
env: Env
|
env: Env,
|
||||||
workingDirectory: string
|
workingDirectory: string,
|
||||||
isShallow: boolean
|
isShallow: boolean,
|
||||||
diffSubmodule: boolean
|
hasSubmodule: boolean,
|
||||||
gitFetchExtraArgs: string[]
|
gitFetchExtraArgs: string[],
|
||||||
isTag: boolean
|
isTag: boolean
|
||||||
remoteName: string
|
): Promise<DiffResult> => {
|
||||||
}
|
|
||||||
|
|
||||||
export const getSHAForNonPullRequestEvent = async ({
|
|
||||||
inputs,
|
|
||||||
env,
|
|
||||||
workingDirectory,
|
|
||||||
isShallow,
|
|
||||||
diffSubmodule,
|
|
||||||
gitFetchExtraArgs,
|
|
||||||
isTag,
|
|
||||||
remoteName
|
|
||||||
}: SHAForNonPullRequestEvent): Promise<DiffResult> => {
|
|
||||||
let targetBranch = env.GITHUB_REF_NAME
|
let targetBranch = env.GITHUB_REF_NAME
|
||||||
let currentBranch = targetBranch
|
const currentBranch = targetBranch
|
||||||
let initialCommit = false
|
let initialCommit = false
|
||||||
|
|
||||||
if (!inputs.skipInitialFetch) {
|
if (isShallow && !inputs.skipInitialFetch) {
|
||||||
if (isShallow) {
|
core.info('Repository is shallow, fetching more history...')
|
||||||
core.info('Repository is shallow, fetching more history...')
|
|
||||||
|
|
||||||
if (isTag) {
|
if (isTag) {
|
||||||
let sourceBranch = ''
|
const sourceBranch =
|
||||||
|
env.GITHUB_EVENT_BASE_REF.replace('refs/heads/', '') ||
|
||||||
if (github.context.payload.base_ref) {
|
env.GITHUB_EVENT_RELEASE_TARGET_COMMITISH
|
||||||
sourceBranch = github.context.payload.base_ref.replace(
|
await gitFetch({
|
||||||
'refs/heads/',
|
cwd: workingDirectory,
|
||||||
''
|
args: [
|
||||||
)
|
...gitFetchExtraArgs,
|
||||||
} else if (github.context.payload.release?.target_commitish) {
|
'-u',
|
||||||
sourceBranch = github.context.payload.release?.target_commitish
|
'--progress',
|
||||||
}
|
`--deepen=${inputs.fetchDepth}`,
|
||||||
|
'origin',
|
||||||
await gitFetch({
|
`+refs/heads/${sourceBranch}:refs/remotes/origin/${sourceBranch}`
|
||||||
cwd: workingDirectory,
|
]
|
||||||
args: [
|
})
|
||||||
...gitFetchExtraArgs,
|
|
||||||
'-u',
|
|
||||||
'--progress',
|
|
||||||
`--deepen=${inputs.fetchDepth}`,
|
|
||||||
remoteName,
|
|
||||||
`+refs/heads/${sourceBranch}:refs/remotes/${remoteName}/${sourceBranch}`
|
|
||||||
]
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
await gitFetch({
|
|
||||||
cwd: workingDirectory,
|
|
||||||
args: [
|
|
||||||
...gitFetchExtraArgs,
|
|
||||||
'-u',
|
|
||||||
'--progress',
|
|
||||||
`--deepen=${inputs.fetchDepth}`,
|
|
||||||
remoteName,
|
|
||||||
`+refs/heads/${targetBranch}:refs/remotes/${remoteName}/${targetBranch}`
|
|
||||||
]
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
if (diffSubmodule) {
|
|
||||||
await gitFetchSubmodules({
|
|
||||||
cwd: workingDirectory,
|
|
||||||
args: [
|
|
||||||
...gitFetchExtraArgs,
|
|
||||||
'-u',
|
|
||||||
'--progress',
|
|
||||||
`--deepen=${inputs.fetchDepth}`
|
|
||||||
]
|
|
||||||
})
|
|
||||||
}
|
|
||||||
} else {
|
} else {
|
||||||
if (diffSubmodule && inputs.fetchAdditionalSubmoduleHistory) {
|
await gitFetch({
|
||||||
await gitFetchSubmodules({
|
cwd: workingDirectory,
|
||||||
cwd: workingDirectory,
|
args: [
|
||||||
args: [
|
...gitFetchExtraArgs,
|
||||||
...gitFetchExtraArgs,
|
'-u',
|
||||||
'-u',
|
'--progress',
|
||||||
'--progress',
|
`--deepen=${inputs.fetchDepth}`,
|
||||||
`--deepen=${inputs.fetchDepth}`
|
'origin',
|
||||||
]
|
`+refs/heads/${targetBranch}:refs/remotes/origin/${targetBranch}`
|
||||||
})
|
]
|
||||||
}
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
if (hasSubmodule) {
|
||||||
|
await gitFetchSubmodules({
|
||||||
|
cwd: workingDirectory,
|
||||||
|
args: [
|
||||||
|
...gitFetchExtraArgs,
|
||||||
|
'-u',
|
||||||
|
'--progress',
|
||||||
|
`--deepen=${inputs.fetchDepth}`
|
||||||
|
]
|
||||||
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const currentSha = await getCurrentSHA({inputs, workingDirectory})
|
const currentSha = await getCurrentSHA({env, inputs, workingDirectory})
|
||||||
let previousSha = await cleanShaInput({
|
let previousSha = inputs.baseSha
|
||||||
sha: inputs.baseSha,
|
|
||||||
cwd: workingDirectory,
|
|
||||||
token: inputs.token
|
|
||||||
})
|
|
||||||
const diff = '..'
|
const diff = '..'
|
||||||
const currentBranchName = await getCurrentBranchName({cwd: workingDirectory})
|
|
||||||
|
|
||||||
if (
|
if (previousSha && currentSha && currentBranch && targetBranch) {
|
||||||
currentBranchName &&
|
|
||||||
currentBranchName !== 'HEAD' &&
|
|
||||||
(currentBranchName !== targetBranch || currentBranchName !== currentBranch)
|
|
||||||
) {
|
|
||||||
targetBranch = currentBranchName
|
|
||||||
currentBranch = currentBranchName
|
|
||||||
}
|
|
||||||
|
|
||||||
if (inputs.baseSha && inputs.sha && currentBranch && targetBranch) {
|
|
||||||
if (previousSha === currentSha) {
|
if (previousSha === currentSha) {
|
||||||
core.error(
|
core.error(
|
||||||
`Similar commit hashes detected: previous sha: ${previousSha} is equivalent to the current sha: ${currentSha}.`
|
`Similar commit hashes detected: previous sha: ${previousSha} is equivalent to the current sha: ${currentSha}.`
|
||||||
@ -207,6 +152,7 @@ export const getSHAForNonPullRequestEvent = async ({
|
|||||||
throw new Error('Similar commit hashes detected.')
|
throw new Error('Similar commit hashes detected.')
|
||||||
}
|
}
|
||||||
|
|
||||||
|
await verifyCommitSha({sha: previousSha, cwd: workingDirectory})
|
||||||
core.debug(`Previous SHA: ${previousSha}`)
|
core.debug(`Previous SHA: ${previousSha}`)
|
||||||
|
|
||||||
return {
|
return {
|
||||||
@ -218,7 +164,7 @@ export const getSHAForNonPullRequestEvent = async ({
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!previousSha || previousSha === currentSha) {
|
if (!previousSha) {
|
||||||
core.debug('Getting previous SHA...')
|
core.debug('Getting previous SHA...')
|
||||||
if (inputs.since) {
|
if (inputs.since) {
|
||||||
core.debug(`Getting base SHA for '${inputs.since}'...`)
|
core.debug(`Getting base SHA for '${inputs.since}'...`)
|
||||||
@@ -240,26 +186,13 @@ export const getSHAForNonPullRequestEvent = async ({
     }
   } else if (isTag) {
     core.debug('Getting previous SHA for tag...')
-    const {sha, tag} = await getPreviousGitTag({
-      cwd: workingDirectory,
-      tagsPattern: inputs.tagsPattern,
-      tagsIgnorePattern: inputs.tagsIgnorePattern,
-      currentBranch
-    })
+    const {sha, tag} = await getPreviousGitTag({cwd: workingDirectory})
     previousSha = sha
     targetBranch = tag
   } else {
-    if (github.context.eventName === 'merge_group') {
-      core.debug('Getting previous SHA for merge group...')
-      previousSha = github.context.payload.merge_group?.base_sha
-    } else {
-      core.debug('Getting previous SHA for last remote commit...')
-      if (
-        github.context.payload.forced === 'false' ||
-        !github.context.payload.forced
-      ) {
-        previousSha = github.context.payload.before
-      }
-    }
+    core.debug('Getting previous SHA for last remote commit...')
+    if (env.GITHUB_EVENT_FORCED === 'false' || !env.GITHUB_EVENT_FORCED) {
+      previousSha = env.GITHUB_EVENT_BEFORE
+    }
   }
 
   if (
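Both sides of this hunk apply the same guard: the push event's `before` SHA is only trusted when the push was not forced, because a force push rewrites history and can leave `before` unreachable. A minimal standalone sketch of that decision, written in the main-branch style of reading the payload (illustrative, not the action's code):

import * as github from '@actions/github'

// Returns the SHA to diff against for a push event, or undefined when a
// force push makes the event's `before` SHA unreliable.
export function previousShaForPush(): string | undefined {
  const payload = github.context.payload
  if (payload.forced === 'false' || !payload.forced) {
    return payload.before as string | undefined
  }
  return undefined // callers fall back to a local-history lookup instead
}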
@@ -324,62 +257,55 @@ export const getSHAForNonPullRequestEvent = async ({
   }
 }
 
-interface SHAForPullRequestEvent {
-  inputs: Inputs
-  workingDirectory: string
-  isShallow: boolean
-  diffSubmodule: boolean
-  gitFetchExtraArgs: string[]
-  remoteName: string
-}
-
-export const getSHAForPullRequestEvent = async ({
-  inputs,
-  workingDirectory,
-  isShallow,
-  diffSubmodule,
-  gitFetchExtraArgs,
-  remoteName
-}: SHAForPullRequestEvent): Promise<DiffResult> => {
-  let targetBranch = github.context.payload.pull_request?.base?.ref
-  const currentBranch = github.context.payload.pull_request?.head?.ref
+export const getSHAForPullRequestEvent = async (
+  inputs: Inputs,
+  env: Env,
+  workingDirectory: string,
+  isShallow: boolean,
+  hasSubmodule: boolean,
+  gitFetchExtraArgs: string[]
+): Promise<DiffResult> => {
+  let targetBranch = env.GITHUB_EVENT_PULL_REQUEST_BASE_REF
+  const currentBranch = env.GITHUB_EVENT_PULL_REQUEST_HEAD_REF
   if (inputs.sinceLastRemoteCommit) {
     targetBranch = currentBranch
   }
@@ (initial fetch for pull request events) @@
-  if (!inputs.skipInitialFetch) {
+  if (isShallow && !inputs.skipInitialFetch) {
     core.info('Repository is shallow, fetching more history...')
-    if (isShallow) {
-      let prFetchExitCode = await gitFetch({
+    let prFetchExitCode = await gitFetch({
+      cwd: workingDirectory,
+      args: [
+        ...gitFetchExtraArgs,
+        '-u',
+        '--progress',
+        'origin',
+        `pull/${env.GITHUB_EVENT_PULL_REQUEST_NUMBER}/head:${currentBranch}`
+      ]
+    })
+
+    if (prFetchExitCode !== 0) {
+      prFetchExitCode = await gitFetch({
         cwd: workingDirectory,
         args: [
           ...gitFetchExtraArgs,
           '-u',
           '--progress',
-          remoteName,
-          `pull/${github.context.payload.pull_request?.number}/head:${currentBranch}`
+          `--deepen=${inputs.fetchDepth}`,
+          'origin',
+          `+refs/heads/${currentBranch}*:refs/remotes/origin/${currentBranch}*`
         ]
       })
+    }
 
-      if (prFetchExitCode !== 0) {
-        prFetchExitCode = await gitFetch({
-          cwd: workingDirectory,
-          args: [
-            ...gitFetchExtraArgs,
-            '-u',
-            '--progress',
-            `--deepen=${inputs.fetchDepth}`,
-            remoteName,
-            `+refs/heads/${currentBranch}*:refs/remotes/${remoteName}/${currentBranch}*`
-          ]
-        })
-      }
-
-      if (prFetchExitCode !== 0) {
-        throw new Error(
-          'Failed to fetch pull request branch. Please ensure "persist-credentials" is set to "true" when checking out the repository. See: https://github.com/actions/checkout#usage'
-        )
-      }
+    if (prFetchExitCode !== 0) {
+      throw new Error(
+        'Failed to fetch pull request branch. Please ensure "persist-credentials" is set to "true" when checking out the repository. See: https://github.com/actions/checkout#usage'
+      )
+    }
+
+    if (!inputs.sinceLastRemoteCommit) {
       core.debug('Fetching target branch...')
       await gitFetch({
         cwd: workingDirectory,
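The block above encodes a two-step fallback that both versions share. A condensed sketch of the strategy, with gitFetch standing in for the action's helper that resolves to the git exit code (the signature here is an assumption for illustration):

type GitFetch = (args: string[]) => Promise<number>

async function fetchPrHead(
  gitFetch: GitFetch,
  prNumber: string,
  branch: string,
  fetchDepth: number
): Promise<void> {
  // 1) Try the pull/<n>/head ref, which exists even for forked repositories.
  let code = await gitFetch(['origin', `pull/${prNumber}/head:${branch}`])
  if (code !== 0) {
    // 2) Fall back to a wildcard branch refspec with a deepened history.
    code = await gitFetch([
      `--deepen=${fetchDepth}`,
      'origin',
      `+refs/heads/${branch}*:refs/remotes/origin/${branch}*`
    ])
  }
  if (code !== 0) {
    throw new Error('Failed to fetch pull request branch.')
  }
}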
@@ -388,24 +314,12 @@ export const getSHAForPullRequestEvent = async ({
           '-u',
           '--progress',
           `--deepen=${inputs.fetchDepth}`,
-          remoteName,
-          `+refs/heads/${github.context.payload.pull_request?.base?.ref}:refs/remotes/${remoteName}/${github.context.payload.pull_request?.base?.ref}`
+          'origin',
+          `+refs/heads/${targetBranch}:refs/remotes/origin/${targetBranch}`
         ]
       })
 
-      if (diffSubmodule) {
-        await gitFetchSubmodules({
-          cwd: workingDirectory,
-          args: [
-            ...gitFetchExtraArgs,
-            '-u',
-            '--progress',
-            `--deepen=${inputs.fetchDepth}`
-          ]
-        })
-      }
-    } else {
-      if (diffSubmodule && inputs.fetchAdditionalSubmoduleHistory) {
+      if (hasSubmodule) {
         await gitFetchSubmodules({
           cwd: workingDirectory,
           args: [
@@ -420,15 +334,11 @@ export const getSHAForPullRequestEvent = async ({
     core.info('Completed fetching more history.')
   }
 
-  const currentSha = await getCurrentSHA({inputs, workingDirectory})
-  let previousSha = await cleanShaInput({
-    sha: inputs.baseSha,
-    cwd: workingDirectory,
-    token: inputs.token
-  })
+  const currentSha = await getCurrentSHA({env, inputs, workingDirectory})
+  let previousSha = inputs.baseSha
   let diff = '...'
 
-  if (inputs.baseSha && inputs.sha && currentBranch && targetBranch) {
+  if (previousSha && currentSha && currentBranch && targetBranch) {
     if (previousSha === currentSha) {
       core.error(
         `Similar commit hashes detected: previous sha: ${previousSha} is equivalent to the current sha: ${currentSha}.`
@@ -439,6 +349,7 @@ export const getSHAForPullRequestEvent = async ({
       throw new Error('Similar commit hashes detected.')
     }
 
+    await verifyCommitSha({sha: previousSha, cwd: workingDirectory})
     core.debug(`Previous SHA: ${previousSha}`)
 
     return {
@@ -450,71 +361,45 @@ export const getSHAForPullRequestEvent = async ({
     }
   }
 
-  if (!github.context.payload.pull_request?.base?.ref) {
+  if (
+    !env.GITHUB_EVENT_PULL_REQUEST_BASE_REF ||
+    env.GITHUB_EVENT_HEAD_REPO_FORK === 'true'
+  ) {
     diff = '..'
   }
 
-  if (!previousSha || previousSha === currentSha) {
+  if (!previousSha) {
     if (inputs.sinceLastRemoteCommit) {
-      previousSha = github.context.payload.before
+      previousSha = env.GITHUB_EVENT_BEFORE
 
       if (
         !previousSha ||
         (previousSha &&
-          (await verifyCommitSha({
-            sha: previousSha,
-            cwd: workingDirectory,
-            showAsErrorMessage: false
-          })) !== 0)
+          (await verifyCommitSha({sha: previousSha, cwd: workingDirectory})) !==
+            0)
       ) {
-        core.info(
-          `Unable to locate the previous commit in the local history for ${github.context.eventName} (${github.context.payload.action}) event. Falling back to the previous commit in the local history.`
-        )
+        core.warning(
+          'Unable to locate the remote branch head sha. Falling back to the previous commit in the local history.'
+        )
 
         previousSha = await getParentSha({
           cwd: workingDirectory
         })
 
-        if (
-          github.context.payload.action &&
-          github.context.payload.action === 'synchronize' &&
-          previousSha &&
-          (!previousSha ||
-            (previousSha &&
-              (await verifyCommitSha({
-                sha: previousSha,
-                cwd: workingDirectory,
-                showAsErrorMessage: false
-              })) !== 0))
-        ) {
-          throw new Error(
-            'Unable to locate the previous commit in the local history. Please ensure to checkout pull request HEAD commit instead of the merge commit. See: https://github.com/actions/checkout/blob/main/README.md#checkout-pull-request-head-commit-instead-of-merge-commit'
-          )
-        }
-
-        if (
-          !previousSha ||
-          (previousSha &&
-            (await verifyCommitSha({
-              sha: previousSha,
-              cwd: workingDirectory,
-              showAsErrorMessage: false
-            })) !== 0)
-        ) {
-          throw new Error(
-            'Unable to locate the previous commit in the local history. Please ensure to checkout pull request HEAD commit instead of the merge commit. See: https://github.com/actions/checkout/blob/main/README.md#checkout-pull-request-head-commit-instead-of-merge-commit'
-          )
+        if (!previousSha) {
+          core.warning(
+            'Unable to locate the previous commit in the local history. Falling back to the pull request base sha.'
+          )
+          previousSha = env.GITHUB_EVENT_PULL_REQUEST_BASE_SHA
         }
       }
     } else {
-      previousSha = github.context.payload.pull_request?.base?.sha
+      previousSha = await getRemoteBranchHeadSha({
+        cwd: workingDirectory,
+        branch: targetBranch
+      })
 
       if (!previousSha) {
-        previousSha = await getRemoteBranchHeadSha({
-          cwd: workingDirectory,
-          remoteName,
-          branch: targetBranch
-        })
+        previousSha = env.GITHUB_EVENT_PULL_REQUEST_BASE_SHA
       }
 
       if (isShallow) {
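In this hunk, v37.1.1 resolves the previous SHA through a three-level fallback, while main throws once the local history cannot confirm a usable commit. A sketch of v37.1.1's chain, with the helpers passed in as callbacks (only the order of fallbacks comes from the diff; the callback signatures are assumptions):

// event `before` SHA -> parent of HEAD -> pull request base SHA
async function resolvePreviousSha(
  before: string,
  getParentSha: () => Promise<string>,
  pullRequestBaseSha: string
): Promise<string> {
  if (before) return before
  const parent = await getParentSha()
  if (parent) return parent
  return pullRequestBaseSha // last resort: the base recorded in the event payload
}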
@@ -530,11 +415,7 @@ export const getSHAForPullRequestEvent = async ({
           'Merge base is not in the local history, fetching remote target branch...'
         )
 
-        for (
-          let i = 1;
-          i <= (inputs.fetchMissingHistoryMaxRetries || 10);
-          i++
-        ) {
+        for (let i = 1; i <= 10; i++) {
           await gitFetch({
             cwd: workingDirectory,
             args: [
@@ -542,8 +423,8 @@ export const getSHAForPullRequestEvent = async ({
               '-u',
               '--progress',
               `--deepen=${inputs.fetchDepth}`,
-              remoteName,
-              `+refs/heads/${targetBranch}:refs/remotes/${remoteName}/${targetBranch}`
+              'origin',
+              `+refs/heads/${targetBranch}:refs/remotes/origin/${targetBranch}`
             ]
           })
 
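Both versions retry the target-branch fetch until the merge base appears in the local history; main makes the retry budget configurable through fetch_missing_history_max_retries (falling back to 10), while v37.1.1 hard-codes 10. A sketch of the loop's shape with the git operations abstracted away (the helper callbacks are assumptions for illustration):

async function fetchUntilMergeBase(
  fetchTargetBranch: () => Promise<void>,
  hasMergeBase: () => Promise<boolean>,
  maxRetries = 10
): Promise<boolean> {
  for (let i = 1; i <= maxRetries; i++) {
    await fetchTargetBranch() // each pass deepens the local history further
    if (await hasMergeBase()) return true
  }
  return false // caller falls back to the recorded base SHA
}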
@@ -568,7 +449,7 @@ export const getSHAForPullRequestEvent = async ({
         }
 
         if (!previousSha || previousSha === currentSha) {
-          previousSha = github.context.payload.pull_request?.base?.sha
+          previousSha = env.GITHUB_EVENT_PULL_REQUEST_BASE_SHA
         }
       }
     }
 
@@ -594,12 +475,6 @@ export const getSHAForPullRequestEvent = async ({
       diff
     }))
   ) {
-    core.warning(
-      'If this pull request is from a forked repository, please set the checkout action `repository` input to the same repository as the pull request.'
-    )
-    core.warning(
-      'This can be done by setting actions/checkout `repository` to ${{ github.event.pull_request.head.repo.full_name }}'
-    )
     throw new Error(
       `Unable to determine a difference between ${previousSha}${diff}${currentSha}`
     )
@@ -619,7 +494,7 @@ export const getSHAForPullRequestEvent = async ({
   //   uses: actions/checkout@v3
   //   with:
   //     repository: ${{ github.event.pull_request.head.repo.full_name }}
-  if (github.context.eventName === 'pull_request_target') {
+  if (env.GITHUB_EVENT_NAME === 'pull_request_target') {
     core.warning(
       'If this pull request is from a forked repository, please set the checkout action `repository` input to the same repository as the pull request.'
     )
Deleted file (present only on main; file name not captured in this view)
@@ -1,29 +0,0 @@
-import {Inputs} from './inputs'
-
-export const DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS: Partial<Inputs> = {
-  sha: '',
-  baseSha: '',
-  since: '',
-  until: '',
-  path: '.',
-  quotepath: true,
-  diffRelative: true,
-  sinceLastRemoteCommit: false,
-  recoverDeletedFiles: false,
-  recoverDeletedFilesToDestination: '',
-  recoverFiles: '',
-  recoverFilesSeparator: '\n',
-  recoverFilesIgnore: '',
-  recoverFilesIgnoreSeparator: '\n',
-  includeAllOldNewRenamedFiles: false,
-  oldNewSeparator: ',',
-  oldNewFilesSeparator: ' ',
-  skipInitialFetch: false,
-  fetchAdditionalSubmoduleHistory: false,
-  dirNamesDeletedFilesIncludeOnlyDeletedDirs: false,
-  excludeSubmodules: false,
-  fetchMissingHistoryMaxRetries: 20,
-  usePosixPathSeparator: false,
-  tagsPattern: '*',
-  tagsIgnorePattern: ''
-}
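main centralizes the REST-API defaults in this table and pairs it with the warnUnsupportedRESTAPIInputs helper imported from src/utils.ts in src/main.ts below, replacing the inline warning loop that v37.1.1 keeps in src/main.ts. A sketch of how such a table can drive the warnings; the helper body here is an assumption, not the actual implementation in src/utils.ts:

import * as core from '@actions/core'

// Excerpt of the defaults table; the full map mirrors the deleted file above.
const DEFAULTS: Record<string, unknown> = {sha: '', baseSha: '', since: '', until: ''}

export function warnIfUnsupported(inputs: Record<string, unknown>): void {
  for (const [key, defaultValue] of Object.entries(DEFAULTS)) {
    if (inputs[key] !== defaultValue) {
      core.warning(
        `Input "${key}" is not supported when using GitHub's REST API to get changed files`
      )
    }
  }
}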
src/env.ts (69 changes)
@@ -1,13 +1,80 @@
+import {promises as fs} from 'fs'
+import * as core from '@actions/core'
+
 export type Env = {
   GITHUB_REF_NAME: string
   GITHUB_REF: string
   GITHUB_WORKSPACE: string
+  GITHUB_EVENT_ACTION: string
+  GITHUB_EVENT_NAME: string
+  GITHUB_EVENT_FORCED: string
+  GITHUB_EVENT_BEFORE: string
+  GITHUB_EVENT_BASE_REF: string
+  GITHUB_EVENT_RELEASE_TARGET_COMMITISH: string
+  GITHUB_EVENT_HEAD_REPO_FORK: string
+  GITHUB_EVENT_PULL_REQUEST_NUMBER: string
+  GITHUB_EVENT_PULL_REQUEST_BASE_SHA: string
+  GITHUB_EVENT_PULL_REQUEST_HEAD_SHA: string
+  GITHUB_EVENT_PULL_REQUEST_HEAD_REF: string
+  GITHUB_EVENT_PULL_REQUEST_BASE_REF: string
+  GITHUB_REPOSITORY_OWNER: string
+  GITHUB_REPOSITORY: string
+}
+
+type GithubEvent = {
+  action?: string
+  forced?: string
+  pull_request?: {
+    head: {
+      ref: string
+      sha: string
+    }
+    base: {
+      ref: string
+      sha: string
+    }
+    number: string
+  }
+  release?: {
+    target_commitish: string
+  }
+  before?: string
+  base_ref?: string
+  head?: {
+    repo?: {
+      fork: string
+    }
+  }
 }
 
 export const getEnv = async (): Promise<Env> => {
+  const eventPath = process.env.GITHUB_EVENT_PATH
+  let eventJson: GithubEvent = {}
+
+  if (eventPath) {
+    eventJson = JSON.parse(await fs.readFile(eventPath, {encoding: 'utf8'}))
+  }
+  core.debug(`Env: ${JSON.stringify(process.env, null, 2)}`)
+  core.debug(`Event: ${JSON.stringify(eventJson, null, 2)}`)
+
   return {
+    GITHUB_EVENT_PULL_REQUEST_HEAD_REF: eventJson.pull_request?.head?.ref || '',
+    GITHUB_EVENT_PULL_REQUEST_BASE_REF: eventJson.pull_request?.base?.ref || '',
+    GITHUB_EVENT_BEFORE: eventJson.before || '',
+    GITHUB_EVENT_BASE_REF: eventJson.base_ref || '',
+    GITHUB_EVENT_RELEASE_TARGET_COMMITISH:
+      eventJson.release?.target_commitish || '',
+    GITHUB_EVENT_HEAD_REPO_FORK: eventJson.head?.repo?.fork || '',
+    GITHUB_EVENT_PULL_REQUEST_NUMBER: eventJson.pull_request?.number || '',
+    GITHUB_EVENT_PULL_REQUEST_BASE_SHA: eventJson.pull_request?.base?.sha || '',
+    GITHUB_EVENT_PULL_REQUEST_HEAD_SHA: eventJson.pull_request?.head?.sha || '',
+    GITHUB_EVENT_FORCED: eventJson.forced || '',
+    GITHUB_EVENT_ACTION: eventJson.action || '',
     GITHUB_REF_NAME: process.env.GITHUB_REF_NAME || '',
     GITHUB_REF: process.env.GITHUB_REF || '',
-    GITHUB_WORKSPACE: process.env.GITHUB_WORKSPACE || ''
+    GITHUB_WORKSPACE: process.env.GITHUB_WORKSPACE || '',
+    GITHUB_EVENT_NAME: process.env.GITHUB_EVENT_NAME || '',
+    GITHUB_REPOSITORY_OWNER: process.env.GITHUB_REPOSITORY_OWNER || '',
+    GITHUB_REPOSITORY: process.env.GITHUB_REPOSITORY || ''
   }
 }
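Every field in v37.1.1's Env resolves to an empty string when it is absent from the event payload, so callers can branch on plain truthiness. A short usage sketch (the logging is illustrative):

import {getEnv} from './env'

async function demo(): Promise<void> {
  const env = await getEnv()
  if (env.GITHUB_EVENT_PULL_REQUEST_BASE_REF) {
    // pull_request-style event: the payload carried a base ref
    console.log(`PR targets ${env.GITHUB_EVENT_PULL_REQUEST_BASE_REF}`)
  } else {
    console.log(`Non-PR event: ${env.GITHUB_EVENT_NAME || 'push'}`)
  }
}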
src/inputs.ts (149 changes)
@@ -24,41 +24,23 @@ export type Inputs = {
   since: string
   until: string
   path: string
-  quotepath: boolean
+  quotePath: boolean
   diffRelative: boolean
   dirNames: boolean
   dirNamesMaxDepth?: number
   dirNamesExcludeCurrentDir: boolean
-  dirNamesIncludeFiles: string
-  dirNamesIncludeFilesSeparator: string
-  dirNamesDeletedFilesIncludeOnlyDeletedDirs: boolean
   json: boolean
   escapeJson: boolean
-  safeOutput: boolean
   fetchDepth?: number
-  fetchAdditionalSubmoduleHistory: boolean
   sinceLastRemoteCommit: boolean
   writeOutputFiles: boolean
   outputDir: string
   outputRenamedFilesAsDeletedAndAdded: boolean
   recoverDeletedFiles: boolean
   recoverDeletedFilesToDestination: string
-  recoverFiles: string
-  recoverFilesSeparator: string
-  recoverFilesIgnore: string
-  recoverFilesIgnoreSeparator: string
   token: string
   apiUrl: string
   skipInitialFetch: boolean
-  failOnInitialDiffError: boolean
-  failOnSubmoduleDiffError: boolean
-  negationPatternsFirst: boolean
-  useRestApi: boolean
-  excludeSubmodules: boolean
-  fetchMissingHistoryMaxRetries?: number
-  usePosixPathSeparator: boolean
-  tagsPattern: string
-  tagsIgnorePattern?: string
 }
 
 export const getInputs = (): Inputs => {
@@ -137,7 +119,7 @@ export const getInputs = (): Inputs => {
   const since = core.getInput('since', {required: false})
   const until = core.getInput('until', {required: false})
   const path = core.getInput('path', {required: false})
-  const quotepath = core.getBooleanInput('quotepath', {required: false})
+  const quotePath = core.getBooleanInput('quotepath', {required: false})
   const diffRelative = core.getBooleanInput('diff_relative', {required: false})
   const dirNames = core.getBooleanInput('dir_names', {required: false})
   const dirNamesMaxDepth = core.getInput('dir_names_max_depth', {
@@ -149,26 +131,8 @@ export const getInputs = (): Inputs => {
       required: false
     }
   )
-  const dirNamesIncludeFiles = core.getInput('dir_names_include_files', {
-    required: false
-  })
-  const dirNamesIncludeFilesSeparator = core.getInput(
-    'dir_names_include_files_separator',
-    {
-      required: false,
-      trimWhitespace: false
-    }
-  )
-  let json = core.getBooleanInput('json', {required: false})
-  let escapeJson = core.getBooleanInput('escape_json', {required: false})
-  const matrix = core.getBooleanInput('matrix', {required: false})
-
-  if (matrix) {
-    json = true
-    escapeJson = false
-  }
-
-  const safeOutput = core.getBooleanInput('safe_output', {required: false})
+  const json = core.getBooleanInput('json', {required: false})
+  const escapeJson = core.getBooleanInput('escape_json', {required: false})
   const fetchDepth = core.getInput('fetch_depth', {required: false})
   const sinceLastRemoteCommit = core.getBooleanInput(
     'since_last_remote_commit',
@@ -189,86 +153,11 @@ export const getInputs = (): Inputs => {
     'recover_deleted_files_to_destination',
     {required: false}
   )
-  const recoverFiles = core.getInput('recover_files', {required: false})
-  const recoverFilesSeparator = core.getInput('recover_files_separator', {
-    required: false,
-    trimWhitespace: false
-  })
-  const recoverFilesIgnore = core.getInput('recover_files_ignore', {
-    required: false
-  })
-  const recoverFilesIgnoreSeparator = core.getInput(
-    'recover_files_ignore_separator',
-    {
-      required: false,
-      trimWhitespace: false
-    }
-  )
   const token = core.getInput('token', {required: false})
   const apiUrl = core.getInput('api_url', {required: false})
   const skipInitialFetch = core.getBooleanInput('skip_initial_fetch', {
     required: false
   })
-  const fetchAdditionalSubmoduleHistory = core.getBooleanInput(
-    'fetch_additional_submodule_history',
-    {
-      required: false
-    }
-  )
-  const failOnInitialDiffError = core.getBooleanInput(
-    'fail_on_initial_diff_error',
-    {
-      required: false
-    }
-  )
-  const failOnSubmoduleDiffError = core.getBooleanInput(
-    'fail_on_submodule_diff_error',
-    {
-      required: false
-    }
-  )
-  const dirNamesDeletedFilesIncludeOnlyDeletedDirs = core.getBooleanInput(
-    'dir_names_deleted_files_include_only_deleted_dirs',
-    {
-      required: false
-    }
-  )
-
-  const negationPatternsFirst = core.getBooleanInput(
-    'negation_patterns_first',
-    {
-      required: false
-    }
-  )
-
-  const useRestApi = core.getBooleanInput('use_rest_api', {
-    required: false
-  })
-
-  const excludeSubmodules = core.getBooleanInput('exclude_submodules', {
-    required: false
-  })
-
-  const fetchMissingHistoryMaxRetries = core.getInput(
-    'fetch_missing_history_max_retries',
-    {required: false}
-  )
-
-  const usePosixPathSeparator = core.getBooleanInput(
-    'use_posix_path_separator',
-    {
-      required: false
-    }
-  )
-
-  const tagsPattern = core.getInput('tags_pattern', {
-    required: false,
-    trimWhitespace: false
-  })
-  const tagsIgnorePattern = core.getInput('tags_ignore_pattern', {
-    required: false,
-    trimWhitespace: false
-  })
 
   const inputs: Inputs = {
     files,
@@ -285,8 +174,6 @@ export const getInputs = (): Inputs => {
     filesIgnoreYaml,
     filesIgnoreYamlFromSourceFile,
     filesIgnoreYamlFromSourceFileSeparator,
-    failOnInitialDiffError,
-    failOnSubmoduleDiffError,
     separator,
     // Not Supported via REST API
     sha,
@@ -294,44 +181,28 @@ export const getInputs = (): Inputs => {
     since,
     until,
     path,
-    quotepath,
+    quotePath,
     diffRelative,
     sinceLastRemoteCommit,
     recoverDeletedFiles,
     recoverDeletedFilesToDestination,
-    recoverFiles,
-    recoverFilesSeparator,
-    recoverFilesIgnore,
-    recoverFilesIgnoreSeparator,
     includeAllOldNewRenamedFiles,
     oldNewSeparator,
     oldNewFilesSeparator,
-    skipInitialFetch,
-    fetchAdditionalSubmoduleHistory,
-    dirNamesDeletedFilesIncludeOnlyDeletedDirs,
-    excludeSubmodules,
-    usePosixPathSeparator,
-    tagsPattern,
-    tagsIgnorePattern,
     // End Not Supported via REST API
     dirNames,
     dirNamesExcludeCurrentDir,
-    dirNamesIncludeFiles,
-    dirNamesIncludeFilesSeparator,
     json,
     escapeJson,
-    safeOutput,
     writeOutputFiles,
     outputDir,
     outputRenamedFilesAsDeletedAndAdded,
     token,
     apiUrl,
-    negationPatternsFirst,
-    useRestApi
+    skipInitialFetch
   }
 
   if (fetchDepth) {
-    // Fallback to at least 2 if the fetch_depth is less than 2
     inputs.fetchDepth = Math.max(parseInt(fetchDepth, 10), 2)
   }
 
@@ -339,13 +210,5 @@ export const getInputs = (): Inputs => {
     inputs.dirNamesMaxDepth = parseInt(dirNamesMaxDepth, 10)
   }
 
-  if (fetchMissingHistoryMaxRetries) {
-    // Fallback to at least 1 if the fetch_missing_history_max_retries is less than 1
-    inputs.fetchMissingHistoryMaxRetries = Math.max(
-      parseInt(fetchMissingHistoryMaxRetries, 10),
-      1
-    )
-  }
-
   return inputs
 }
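Both versions floor fetch_depth at 2, and main additionally floors fetch_missing_history_max_retries at 1, presumably because a shallower depth or a zero retry budget would starve the later diff logic. The shared pattern, as a generic sketch (the function name is illustrative):

function clampedIntInput(raw: string, min: number): number | undefined {
  if (!raw) return undefined // input not provided: leave the field unset
  return Math.max(parseInt(raw, 10), min)
}

// e.g. inputs.fetchDepth = clampedIntInput(core.getInput('fetch_depth'), 2)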
src/main.ts (244 changes)
@@ -1,36 +1,31 @@
 import * as core from '@actions/core'
-import * as github from '@actions/github'
 import path from 'path'
 import {
-  processChangedFiles,
-  ChangeTypeEnum,
   getAllDiffFiles,
   getChangedFilesFromGithubAPI,
   getRenamedFiles
 } from './changedFiles'
+import {setChangedFilesOutput} from './changedFilesOutput'
 import {
   DiffResult,
-  getSHAForNonPullRequestEvent,
-  getSHAForPullRequestEvent
+  getSHAForPullRequestEvent,
+  getSHAForPushEvent
 } from './commitSha'
 import {Env, getEnv} from './env'
 import {getInputs, Inputs} from './inputs'
 import {
   getFilePatterns,
-  getRecoverFilePatterns,
   getSubmodulePath,
   getYamlFilePatterns,
   hasLocalGitDirectory,
   isRepoShallow,
-  recoverDeletedFiles,
   setOutput,
   submoduleExists,
   updateGitGlobalConfig,
-  verifyMinimumGitVersion,
-  warnUnsupportedRESTAPIInputs
+  verifyMinimumGitVersion
 } from './utils'
 
-const getChangedFilesFromLocalGitHistory = async ({
+const getChangedFilesFromLocalGit = async ({
   inputs,
   env,
   workingDirectory,
@@ -45,15 +40,15 @@ const getChangedFilesFromLocalGitHistory = async ({
 }): Promise<void> => {
   await verifyMinimumGitVersion()
 
-  let quotepathValue = 'on'
+  let quotePathValue = 'on'
 
-  if (!inputs.quotepath) {
-    quotepathValue = 'off'
+  if (!inputs.quotePath) {
+    quotePathValue = 'off'
   }
 
   await updateGitGlobalConfig({
     name: 'core.quotepath',
-    value: quotepathValue
+    value: quotePathValue
   })
 
   if (inputs.diffRelative) {
@@ -64,26 +59,14 @@ const getChangedFilesFromLocalGitHistory = async ({
   }
 
   const isShallow = await isRepoShallow({cwd: workingDirectory})
-  let diffSubmodule = false
-  let gitFetchExtraArgs = ['--no-tags', '--prune']
-
-  if (inputs.excludeSubmodules) {
-    core.info('Excluding submodules from the diff')
-  } else {
-    diffSubmodule = await submoduleExists({cwd: workingDirectory})
-  }
-
-  if (diffSubmodule) {
-    gitFetchExtraArgs.push('--recurse-submodules')
-  }
+  const hasSubmodule = await submoduleExists({cwd: workingDirectory})
+  let gitFetchExtraArgs = ['--no-tags', '--prune', '--recurse-submodules']
 
   const isTag = env.GITHUB_REF?.startsWith('refs/tags/')
-  const remoteName = 'origin'
   const outputRenamedFilesAsDeletedAndAdded =
     inputs.outputRenamedFilesAsDeletedAndAdded
   let submodulePaths: string[] = []
 
-  if (diffSubmodule) {
+  if (hasSubmodule) {
     submodulePaths = await getSubmodulePath({cwd: workingDirectory})
   }
 
@@ -93,32 +76,31 @@ const getChangedFilesFromLocalGitHistory = async ({
 
   let diffResult: DiffResult
 
-  if (!github.context.payload.pull_request?.base?.ref) {
-    core.info(`Running on a ${github.context.eventName || 'push'} event...`)
-    diffResult = await getSHAForNonPullRequestEvent({
+  if (!env.GITHUB_EVENT_PULL_REQUEST_BASE_REF) {
+    core.info(`Running on a ${env.GITHUB_EVENT_NAME || 'push'} event...`)
+    diffResult = await getSHAForPushEvent(
       inputs,
       env,
       workingDirectory,
       isShallow,
-      diffSubmodule,
+      hasSubmodule,
       gitFetchExtraArgs,
-      isTag,
-      remoteName
-    })
+      isTag
+    )
   } else {
     core.info(
-      `Running on a ${github.context.eventName || 'pull_request'} (${
-        github.context.payload.action
+      `Running on a ${env.GITHUB_EVENT_NAME || 'pull_request'} (${
+        env.GITHUB_EVENT_ACTION
       }) event...`
     )
-    diffResult = await getSHAForPullRequestEvent({
+    diffResult = await getSHAForPullRequestEvent(
       inputs,
+      env,
       workingDirectory,
       isShallow,
-      diffSubmodule,
-      gitFetchExtraArgs,
-      remoteName
-    })
+      hasSubmodule,
+      gitFetchExtraArgs
+    )
   }
 
   if (diffResult.initialCommit) {
@@ -133,51 +115,62 @@ const getChangedFilesFromLocalGitHistory = async ({
 
   const allDiffFiles = await getAllDiffFiles({
     workingDirectory,
-    diffSubmodule,
+    hasSubmodule,
     diffResult,
     submodulePaths,
-    outputRenamedFilesAsDeletedAndAdded,
-    fetchAdditionalSubmoduleHistory: inputs.fetchAdditionalSubmoduleHistory,
-    failOnInitialDiffError: inputs.failOnInitialDiffError,
-    failOnSubmoduleDiffError: inputs.failOnSubmoduleDiffError
+    outputRenamedFilesAsDeletedAndAdded
   })
   core.debug(`All diff files: ${JSON.stringify(allDiffFiles)}`)
   core.info('All Done!')
   core.endGroup()
 
-  if (inputs.recoverDeletedFiles) {
-    let recoverPatterns = getRecoverFilePatterns({inputs})
-
-    if (recoverPatterns.length === 0 && filePatterns.length > 0) {
-      core.info('No recover patterns found; defaulting to file patterns')
-      recoverPatterns = filePatterns
-    }
-
-    await recoverDeletedFiles({
-      inputs,
-      workingDirectory,
-      deletedFiles: allDiffFiles[ChangeTypeEnum.Deleted],
-      recoverPatterns,
-      diffResult,
-      diffSubmodule,
-      submodulePaths
-    })
-  }
-
-  await processChangedFiles({
-    filePatterns,
-    allDiffFiles,
-    inputs,
-    yamlFilePatterns,
-    workingDirectory
-  })
+  if (filePatterns.length > 0) {
+    core.startGroup('changed-files-patterns')
+    await setChangedFilesOutput({
+      allDiffFiles,
+      filePatterns,
+      inputs,
+      workingDirectory,
+      diffResult
+    })
+    core.info('All Done!')
+    core.endGroup()
+  }
+
+  if (Object.keys(yamlFilePatterns).length > 0) {
+    for (const key of Object.keys(yamlFilePatterns)) {
+      core.startGroup(`changed-files-yaml-${key}`)
+      await setChangedFilesOutput({
+        allDiffFiles,
+        filePatterns: yamlFilePatterns[key],
+        outputPrefix: key,
+        inputs,
+        workingDirectory,
+        diffResult
+      })
+      core.info('All Done!')
+      core.endGroup()
+    }
+  }
+
+  if (filePatterns.length === 0 && Object.keys(yamlFilePatterns).length === 0) {
+    core.startGroup('changed-files-all')
+    await setChangedFilesOutput({
+      allDiffFiles,
+      inputs,
+      workingDirectory,
+      diffResult
+    })
+    core.info('All Done!')
+    core.endGroup()
+  }
 
   if (inputs.includeAllOldNewRenamedFiles) {
     core.startGroup('changed-files-all-old-new-renamed-files')
     const allOldNewRenamedFiles = await getRenamedFiles({
       inputs,
       workingDirectory,
-      diffSubmodule,
+      hasSubmodule,
       diffResult,
       submodulePaths
     })
@@ -185,17 +178,12 @@ const getChangedFilesFromLocalGitHistory = async ({
     await setOutput({
       key: 'all_old_new_renamed_files',
       value: allOldNewRenamedFiles.paths,
-      writeOutputFiles: inputs.writeOutputFiles,
-      outputDir: inputs.outputDir,
-      json: inputs.json,
-      safeOutput: inputs.safeOutput
+      inputs
     })
     await setOutput({
       key: 'all_old_new_renamed_files_count',
       value: allOldNewRenamedFiles.count,
-      writeOutputFiles: inputs.writeOutputFiles,
-      outputDir: inputs.outputDir,
-      json: inputs.json
+      inputs
     })
     core.info('All Done!')
     core.endGroup()
@@ -204,25 +192,61 @@ const getChangedFilesFromLocalGitHistory = async ({
 
 const getChangedFilesFromRESTAPI = async ({
   inputs,
+  env,
+  workingDirectory,
   filePatterns,
   yamlFilePatterns
 }: {
   inputs: Inputs
+  env: Env
+  workingDirectory: string
   filePatterns: string[]
   yamlFilePatterns: Record<string, string[]>
 }): Promise<void> => {
   const allDiffFiles = await getChangedFilesFromGithubAPI({
-    inputs
+    inputs,
+    env
   })
   core.debug(`All diff files: ${JSON.stringify(allDiffFiles)}`)
   core.info('All Done!')
 
-  await processChangedFiles({
-    filePatterns,
-    allDiffFiles,
-    inputs,
-    yamlFilePatterns
-  })
+  if (filePatterns.length > 0) {
+    core.startGroup('changed-files-patterns')
+    await setChangedFilesOutput({
+      allDiffFiles,
+      filePatterns,
+      inputs,
+      workingDirectory
+    })
+    core.info('All Done!')
+    core.endGroup()
+  }
+
+  if (Object.keys(yamlFilePatterns).length > 0) {
+    for (const key of Object.keys(yamlFilePatterns)) {
+      core.startGroup(`changed-files-yaml-${key}`)
+      await setChangedFilesOutput({
+        allDiffFiles,
+        filePatterns: yamlFilePatterns[key],
+        outputPrefix: key,
+        inputs,
+        workingDirectory
+      })
+      core.info('All Done!')
+      core.endGroup()
+    }
+  }
+
+  if (filePatterns.length === 0 && Object.keys(yamlFilePatterns).length === 0) {
+    core.startGroup('changed-files-all')
+    await setChangedFilesOutput({
+      allDiffFiles,
+      inputs,
+      workingDirectory
+    })
+    core.info('All Done!')
+    core.endGroup()
+  }
 }
 
 export async function run(): Promise<void> {
@@ -236,7 +260,7 @@ export async function run(): Promise<void> {
 
   const workingDirectory = path.resolve(
     env.GITHUB_WORKSPACE || process.cwd(),
-    inputs.useRestApi ? '.' : inputs.path
+    inputs.path
   )
   core.debug(`Working directory: ${workingDirectory}`)
 
@@ -255,33 +279,47 @@ export async function run(): Promise<void> {
   })
   core.debug(`Yaml file patterns: ${JSON.stringify(yamlFilePatterns)}`)
 
-  if (inputs.useRestApi && !github.context.payload.pull_request?.number) {
-    throw new Error(
-      "Only pull_request* events are supported when using GitHub's REST API."
-    )
-  }
-
   if (
     inputs.token &&
-    github.context.payload.pull_request?.number &&
-    (!hasGitDirectory || inputs.useRestApi)
+    env.GITHUB_EVENT_PULL_REQUEST_NUMBER &&
+    !hasGitDirectory
   ) {
     core.info("Using GitHub's REST API to get changed files")
-    await warnUnsupportedRESTAPIInputs({inputs})
+    const unsupportedInputs: (keyof Inputs)[] = [
+      'sha',
+      'baseSha',
+      'since',
+      'until',
+      'sinceLastRemoteCommit',
+      'recoverDeletedFiles',
+      'recoverDeletedFilesToDestination',
+      'includeAllOldNewRenamedFiles'
+    ]
+
+    for (const input of unsupportedInputs) {
+      if (inputs[input]) {
+        core.warning(
+          `Input "${input}" is not supported when using GitHub's REST API to get changed files`
+        )
+      }
+    }
     await getChangedFilesFromRESTAPI({
       inputs,
+      env,
+      workingDirectory,
       filePatterns,
       yamlFilePatterns
     })
   } else {
     if (!hasGitDirectory) {
-      throw new Error(
-        `Unable to locate the git repository in the given path: ${workingDirectory}.\n Please run actions/checkout before this action (Make sure the 'path' input is correct).\n If you intend to use Github's REST API note that only pull_request* events are supported. Current event is "${github.context.eventName}".`
-      )
+      core.setFailed(
+        "Can't find local .git directory. Please run actions/checkout before this action"
+      )
+      return
     }
 
     core.info('Using local .git directory')
-    await getChangedFilesFromLocalGitHistory({
+    await getChangedFilesFromLocalGit({
       inputs,
       env,
       workingDirectory,
@@ -291,8 +329,10 @@ export async function run(): Promise<void> {
   }
 }
 
-// eslint-disable-next-line github/no-then
-run().catch(e => {
-  core.setFailed(e.message || e)
-  process.exit(1)
-})
+/* istanbul ignore if */
+if (!process.env.TESTING) {
+  // eslint-disable-next-line github/no-then
+  run().catch(e => {
+    core.setFailed(e.message || e)
+  })
+}
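The entry points differ in failure behavior: main always runs and force-exits non-zero on an unhandled rejection, while v37.1.1 skips execution entirely under a TESTING flag and lets the process end naturally. A self-contained sketch of main's form (the run body is a placeholder):

import * as core from '@actions/core'

async function run(): Promise<void> {
  /* placeholder for the action's main logic */
}

// eslint-disable-next-line github/no-then
run().catch(e => {
  core.setFailed(e instanceof Error ? e.message : String(e))
  process.exit(1) // guarantee a non-zero exit even with pending handles
})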
src/utils.ts (789 changes)
File diff suppressed because it is too large.
Submodule
@@ -1 +1 @@
-Subproject commit 5dfac2e9a7dc53ca33bc4682355193672c97437c
+Subproject commit e168fac86c58062a1054c3a5540cd81577c4e2ba
Deleted test file
@@ -1 +0,0 @@
-This is a test markdown file

Changed test file
@@ -1 +1 @@
-This is a test file...
+This is a test file.
tsconfig.json
@@ -1,6 +1,6 @@
 {
   "compilerOptions": {
-    "target": "ES2018", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
+    "target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
     "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
     "outDir": "./lib", /* Redirect output structure to the directory. */
     "rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */