Mirror of https://github.com/docker/login-action.git (synced 2025-08-15 04:39:38 +08:00)
Compare commits
178 Commits
SHA1:
6af3c118c8, caca3368ce, 17f28ab24d, a875dd0e21, 7948fffc49, 5fcefb941d, 3bb2d084df, 242fb9a356,
fa72313bc3, 088f62a4f2, 7929ac7647, 42d299face, 4858b0b5ea, 1d7d8649e7, 58855695bb, d9927c4142,
b9a4d91ee5, b20b9f5e31, cb21399f71, faae4d6665, 4d84a3c20f, 6f7ca8828b, b776a64ec0, f6476db6e9,
46ab6d5c3c, 1cce1654e0, 9537342dee, 7f47463f56, 8807319764, ebac4bd30d, 499663a42c, 70b0f7898e,
885923496b, ab92432d0b, 1828bf2d51, 25c0ca8bab, f11d2ba650, 3f83d7b89c, c9c0083563, f694e84504,
b30d77254f, 95778bc566, 2c6df6a22f, c41c9a5c65, fc6fe565d2, 10428f39dc, 1b4cf55146, 5bcefc987c,
169057673d, 5d62c58fc3, 73cda5dad9, 5ffec3343b, 305d960cac, 9a9ae26c89, 48af9f2a97, c08e3a84a9,
f12fe5c78d, b566635cc9, b8e54a5ea5, d64238b93b, 763661a124, 41fba5a8c6, f054a8b539, 0644d98afe,
9e433e18cf, 39efbd2c12, 4608add020, 4fd5d8ead6, e2346b6971, 2051808c83, 7c220e1b8d, f4cae19820,
e5ad366574, 45bc0ebd6b, 79068f5240, 8e670da7a1, 76901a9025, 31f1bb4610, 28eb30dcb6, 03b00fbeba,
5934fe3407, 3cba154eb7, 986a54f35b, fdb725ed4b, bab7dcbf29, c0d23108f7, c902ecc709, b612a76a31,
28218f9b04, 7439f8b467, 4b206288bd, 4abf7e9de3, 175e20ba61, 92a2593650, 957a8f0d4a, 971b76aea9,
5b092cf2f7, ab81ae06bb, f4a3bbc2c6, e2302b10cc, c2c723b5d1, ab80d026d4, 9376d24995, 13fa0663e1,
75e7be0db4, d6f5c68835, f3364599c6, 24646ef465, 9f189206e8, aed1d0c0c1, 31722002f5, 2a481b4109,
9bed62818a, c718c795e7, 7b79d7e834, f53ca527f7, d3160f671f, 3b14bab101, 1e75de0e0e, 7c9afe235c,
7dc3c3a70e, b17cf6ab8f, 27c3146301, 12fd63324c, a3de3de177, 9cbd4f95c4, ad9eb3b250, 71b3c789fa,
5e0bc83b38, 1f68ce02d7, 767b2f4b7b, ddf06e1fed, a5aa81adc8, 3b9e1f51cd, adb73476b6, 5df5104555,
39ef12fb7a, 1c402b7c97, 1c2cf9942d, 4b15841c41, 34d5f75b0d, a579245f45, 0f9fb80421, b7800fe6fd,
6e236fe59d, 53f337dbae, f515c5bf8c, 12054aedfd, 893097890f, afed83d63e, 99533eb7f4, 9c109c7824,
6304aa16ae, a82a824586, e36a9c364b, 8c1b8dc260, a526790753, d50edfe977, a5ba64f65e, 64702f4db1,
e56233ce43, 1bd3567034, 52b67bd7c8, d833f7c2ad, 16d491f0ca, 04f461cc60, 25aa6aa30c, 1a211c6f27,
12991b4d6c, 34e505eb5e, 2c57607524, 26618cd0df, da3da99964, b7cd11b1fa, 16b2f90c24, 826c451920,
f37c715508, e6dc03b339
2 .dockerignore (Normal file)
@@ -0,0 +1,2 @@
/coverage
/node_modules
22 .github/CONTRIBUTING.md (vendored)
@@ -2,20 +2,24 @@
Hi there! We're thrilled that you'd like to contribute to this project. Your help is essential for keeping it great.

Contributions to this project are [released](https://help.github.com/articles/github-terms-of-service/#6-contributions-under-repository-license) to the public under the [project's open source license](LICENSE).
Contributions to this project are [released](https://docs.github.com/en/github/site-policy/github-terms-of-service#6-contributions-under-repository-license)
to the public under the [project's open source license](LICENSE).

## Submitting a pull request

1. [Fork](https://github.com/crazy-max/ghaction-docker-login/fork) and clone the repository
1. [Fork](https://github.com/docker/login-action/fork) and clone the repository
2. Configure and install the dependencies: `yarn install`
4. Create a new branch: `git checkout -b my-branch-name`
5. Make your change
6. Run pre-checkin: `yarn run pre-checkin`
7. Push to your fork and [submit a pull request](https://github.com/crazy-max/ghaction-docker-login/compare)
8. Pat yourself on the back and wait for your pull request to be reviewed and merged.
3. Create a new branch: `git checkout -b my-branch-name`
4. Make your changes
5. Make sure the tests pass: `docker buildx bake test`
6. Format code and build javascript artifacts: `docker buildx bake pre-checkin`
7. Validate all code has correctly formatted and built: `docker buildx bake validate`
8. Push to your fork and [submit a pull request](https://github.com/docker/login-action/compare)
9. Pat yourself on the back and wait for your pull request to be reviewed and merged.

Here are a few things you can do that will increase the likelihood of your pull request being accepted:

- Write tests.
- Make sure the `README.md` and any other relevant **documentation are kept up-to-date**.
- We try to follow [SemVer v2.0.0](https://semver.org/). Randomly breaking public APIs is not an option.
- Keep your change as focused as possible. If there are multiple changes you would like to make that are not dependent upon each other, consider submitting them as **separate pull requests**.
@@ -24,5 +28,5 @@ Here are a few things you can do that will increase the likelihood of your pull
## Resources

- [How to Contribute to Open Source](https://opensource.guide/how-to-contribute/)
- [Using Pull Requests](https://help.github.com/articles/about-pull-requests/)
- [GitHub Help](https://help.github.com)
- [Using Pull Requests](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests)
- [GitHub Help](https://docs.github.com/en)
2 .github/FUNDING.yml (vendored)
@@ -1,2 +0,0 @@
github: crazy-max
custom: https://www.paypal.me/crazyws
7 .github/ISSUE_TEMPLATE/bug_report.md (vendored)
@@ -13,11 +13,11 @@ about: Create a report to help us improve
#### Expected behaviour

> Tell me what should happen
> Tell us what should happen

#### Actual behaviour

> Tell me what happens instead
> Tell us what happens instead

### Configuration

@@ -30,4 +30,5 @@ about: Create a report to help us improve
### Logs

> Download the [log file of your build](https://help.github.com/en/actions/configuring-and-managing-workflows/managing-a-workflow-run#downloading-logs) and [attach it](https://help.github.com/en/github/managing-your-work-on-github/file-attachments-on-issues-and-pull-requests) to this issue.
> Download the [log file of your build](https://docs.github.com/en/actions/managing-workflow-runs/using-workflow-run-logs#downloading-logs)
> and [attach it](https://docs.github.com/en/github/managing-your-work-on-github/file-attachments-on-issues-and-pull-requests) to this issue.
8 .github/SUPPORT.md (vendored)
@@ -1,8 +1,8 @@
# Support [](https://isitmaintained.com/project/crazy-max/ghaction-docker-login)
# Support [](https://isitmaintained.com/project/docker/login-action)

## Reporting an issue

Please do a search in [open issues](https://github.com/crazy-max/ghaction-docker-login/issues?utf8=%E2%9C%93&q=) to see if the issue or feature request has already been filed.
Please do a search in [open issues](https://github.com/docker/login-action/issues?utf8=%E2%9C%93&q=) to see if the issue or feature request has already been filed.

If you find your issue already exists, make relevant comments and add your [reaction](https://github.com/blog/2119-add-reactions-to-pull-requests-issues-and-comments). Use a reaction in place of a "+1" comment.

@@ -21,9 +21,9 @@ File a single issue per problem and feature request.
The more information you can provide, the more likely someone will be successful reproducing the issue and finding a fix.

You are now ready to [create a new issue](https://github.com/crazy-max/ghaction-docker-login/issues/new/choose)!
You are now ready to [create a new issue](https://github.com/docker/login-action/issues/new/choose)!

## Closure policy

* Issues that don't have the information requested above (when applicable) will be closed immediately and the poster directed to the support guidelines.
* Issues that go a week without a response from original poster are subject to closure at my discretion.
* Issues that go a week without a response from original poster are subject to closure at our discretion.
12 .github/dependabot.yml (vendored)
@@ -4,19 +4,15 @@ updates:
directory: "/"
schedule:
interval: "daily"
time: "06:00"
timezone: "Europe/Paris"
labels:
- ":game_die: dependencies"
- ":robot: bot"
- "dependencies"
- "bot"
- package-ecosystem: "npm"
directory: "/"
schedule:
interval: "daily"
time: "06:00"
timezone: "Europe/Paris"
allow:
- dependency-type: "production"
labels:
- ":game_die: dependencies"
- ":robot: bot"
- "dependencies"
- "bot"
BIN .github/docker-login.png (vendored, Normal file)
Binary file not shown. After: Size: 4.3 KiB
BIN .github/ghaction-docker-login.png (vendored)
Binary file not shown. Before: Size: 5.0 KiB
77 .github/labels.yml (vendored)
@@ -1,77 +0,0 @@
## more info https://github.com/crazy-max/ghaction-github-labeler
- # automerge
name: ":bell: automerge"
color: "8f4fbc"
description: ""
- # bot
name: ":robot: bot"
color: "69cde9"
description: ""
- # bug
name: ":bug: bug"
color: "b60205"
description: ""
- # dependencies
name: ":game_die: dependencies"
color: "0366d6"
description: ""
- # documentation
name: ":memo: documentation"
color: "c5def5"
description: ""
- # duplicate
name: ":busts_in_silhouette: duplicate"
color: "cccccc"
description: ""
- # enhancement
name: ":sparkles: enhancement"
color: "0054ca"
description: ""
- # feature request
name: ":bulb: feature request"
color: "0e8a16"
description: ""
- # feedback
name: ":mega: feedback"
color: "03a9f4"
description: ""
- # future maybe
name: ":rocket: future maybe"
color: "fef2c0"
description: ""
- # good first issue
name: ":hatching_chick: good first issue"
color: "7057ff"
description: ""
- # help wanted
name: ":pray: help wanted"
color: "4caf50"
description: ""
- # hold
name: ":hand: hold"
color: "24292f"
description: ""
- # invalid
name: ":no_entry_sign: invalid"
color: "e6e6e6"
description: ""
- # maybe bug
name: ":interrobang: maybe bug"
color: "ff5722"
description: ""
- # needs more info
name: ":thinking: needs more info"
color: "795548"
description: ""
- # question
name: ":question: question"
color: "3f51b5"
description: ""
- # upstream
name: ":eyes: upstream"
color: "fbca04"
description: ""
- # wontfix
name: ":coffin: wontfix"
color: "ffffff"
description: ""
289 .github/workflows/ci.yml (vendored)
@@ -1,85 +1,282 @@
name: ci

on:
workflow_dispatch:
schedule:
- cron: '0 10 * * *'
push:
branches:
- master
- releases/v*
- 'master'
- 'releases/v*'

jobs:
dockerhub:
stop-docker:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
logout:
- true
- false
steps:
-
name: Checkout
uses: actions/checkout@v2.3.1
uses: actions/checkout@v2
-
name: Login to DockerHub
uses: ./
with:
username: ${{ secrets.DOCKERHUB_USERNAME_TEST }}
password: ${{ secrets.DOCKERHUB_PASSWORD_TEST }}
logout: ${{ matrix.logout }}
-
name: Clear
if: always()
name: Stop docker
run: |
rm -f ${HOME}/.docker/config.json
sudo systemctl stop docker
-
name: Login to GitHub Container Registry
uses: ./
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}

gpr:
logout:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
logout:
- true
- false
- true
steps:
-
name: Checkout
uses: actions/checkout@v2.3.1
uses: actions/checkout@v2
-
name: Login to GitHub Package Registry
name: Login to GitHub Container Registry
uses: ./
with:
registry: docker.pkg.github.com
username: ${{ github.repository_owner }}
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
logout: ${{ matrix.logout }}
-
name: Clear
if: always()
run: |
rm -f ${HOME}/.docker/config.json

gitlab:
dind:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
logout:
- true
- false
env:
DOCKER_CONFIG: $HOME/.docker
steps:
-
name: Checkout
uses: actions/checkout@v2.3.1
uses: actions/checkout@v2
-
name: Login to GitHub Container Registry
uses: ./
with:
registry: ghcr.io
username: ${{ secrets.GHCR_USERNAME }}
password: ${{ secrets.GHCR_PAT }}
-
name: DinD
uses: docker://docker
with:
entrypoint: docker
args: pull ghcr.io/docker-ghactiontest/test
-
name: Pull private image
run: |
docker image prune -a -f >/dev/null 2>&1
docker pull ghcr.io/docker-ghactiontest/test

acr:
runs-on: ubuntu-latest
steps:
-
name: Checkout
uses: actions/checkout@v2
-
name: Login to ACR
uses: ./
with:
registry: ${{ secrets.AZURE_REGISTRY_NAME }}.azurecr.io
username: ${{ secrets.AZURE_CLIENT_ID }}
password: ${{ secrets.AZURE_CLIENT_SECRET }}

dockerhub:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os:
- ubuntu-latest
- windows-latest
steps:
-
name: Checkout
uses: actions/checkout@v2
-
name: Login to Docker Hub
uses: ./
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}

ecr:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os:
- ubuntu-latest
- windows-latest
steps:
-
name: Checkout
uses: actions/checkout@v2
-
name: Login to ECR
uses: ./
with:
registry: ${{ secrets.AWS_ACCOUNT_NUMBER }}.dkr.ecr.us-east-1.amazonaws.com
username: ${{ secrets.AWS_ACCESS_KEY_ID }}
password: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

ecr-aws-creds:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os:
- ubuntu-latest
- windows-latest
steps:
-
name: Checkout
uses: actions/checkout@v2
-
name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-east-1
-
name: Login to ECR
uses: ./
with:
registry: ${{ secrets.AWS_ACCOUNT_NUMBER }}.dkr.ecr.us-east-1.amazonaws.com

ecr-public:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os:
- ubuntu-latest
- windows-latest
steps:
-
name: Checkout
uses: actions/checkout@v2
-
name: Login to Public ECR
uses: ./
with:
registry: public.ecr.aws
username: ${{ secrets.AWS_ACCESS_KEY_ID }}
password: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
env:
AWS_REGION: us-east-1

ecr-public-aws-creds:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os:
- ubuntu-latest
- windows-latest
steps:
-
name: Checkout
uses: actions/checkout@v2
-
name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-east-1
-
name: Login to ECR
uses: ./
with:
registry: public.ecr.aws

github-container:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os:
- ubuntu-latest
- windows-latest
steps:
-
name: Checkout
uses: actions/checkout@v2
-
name: Login to GitHub Container Registry
uses: ./
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}

gitlab:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os:
- ubuntu-latest
- windows-latest
steps:
-
name: Checkout
uses: actions/checkout@v2
-
name: Login to GitLab
uses: ./
with:
registry: registry.gitlab.com
username: ${{ secrets.GITLAB_USERNAME_TEST }}
password: ${{ secrets.GITLAB_PASSWORD_TEST }}
logout: ${{ matrix.logout }}
username: ${{ secrets.GITLAB_USERNAME }}
password: ${{ secrets.GITLAB_TOKEN }}

google-artifact:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os:
- ubuntu-latest
- windows-latest
steps:
-
name: Clear
if: always()
run: |
rm -f ${HOME}/.docker/config.json
name: Checkout
uses: actions/checkout@v2
-
name: Login to Google Artifact Registry
uses: ./
with:
registry: ${{ secrets.GAR_LOCATION }}-docker.pkg.dev
username: _json_key
password: ${{ secrets.GAR_JSON_KEY }}

google-container:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os:
- ubuntu-latest
- windows-latest
steps:
-
name: Checkout
uses: actions/checkout@v2
-
name: Login to Google Container Registry
uses: ./
with:
registry: gcr.io
username: _json_key
password: ${{ secrets.GCR_JSON_KEY }}
23 .github/workflows/labels.yml (vendored)
@@ -1,23 +0,0 @@
name: labels

on:
push:
branches:
- 'master'
paths:
- '.github/labels.yml'
- '.github/workflows/labels.yml'

jobs:
labeler:
runs-on: ubuntu-latest
steps:
-
name: Checkout
uses: actions/checkout@v2.3.1
-
name: Run Labeler
if: success()
uses: crazy-max/ghaction-github-labeler@v2.1.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
30 .github/workflows/pre-checkin.yml (vendored)
@@ -1,30 +0,0 @@
name: pre-checkin

on:
push:
paths-ignore:
- '**.md'
pull_request:
paths-ignore:
- '**.md'

jobs:
pre-checkin:
runs-on: ubuntu-latest
steps:
-
name: Checkout
uses: actions/checkout@v2.3.1
-
name: Install
run: yarn install
-
name: Pre-checkin
run: yarn run pre-checkin
-
name: Check for uncommitted changes
run: |
if [[ `git status --porcelain` ]]; then
git status --porcelain
echo "::warning::Found changes. Please run 'yarn run pre-checkin' and push"
fi
34 .github/workflows/test.yml (vendored, Normal file)
@@ -0,0 +1,34 @@
name: test

on:
push:
branches:
- 'master'
- 'releases/v*'
pull_request:
branches:
- 'master'
- 'releases/v*'

jobs:
test:
runs-on: ubuntu-latest
steps:
-
name: Checkout
uses: actions/checkout@v2
-
name: Validate
uses: docker/bake-action@v1
with:
targets: validate
-
name: Test
uses: docker/bake-action@v1
with:
targets: test
-
name: Upload coverage
uses: codecov/codecov-action@v2
with:
file: ./coverage/clover.xml
@@ -1,5 +1,5 @@
{
"printWidth": 120,
"printWidth": 240,
"tabWidth": 2,
"useTabs": false,
"semi": true,
17 CHANGELOG.md
@@ -1,17 +0,0 @@
# Changelog

## 1.1.1 (2020/08/16)

* Typo

## 1.1.0 (2020/08/15)

* Add tests and examples for GitLab and GitHub Package Registry

## 1.0.1 (2020/08/15)

* Add LICENSE

## 1.0.0 (2020/08/15)

* Initial version
204 LICENSE
@@ -1,21 +1,191 @@
MIT License

Copyright (c) 2020 CrazyMax
Apache License
Version 2.0, January 2004
https://www.apache.org/licenses/

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
1. Definitions.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.

"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).

"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:

(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and

(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and

(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and

(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.

You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

Copyright 2013-2018 Docker, Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

https://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
449 README.md
@@ -1,82 +1,90 @@
[](https://github.com/crazy-max/ghaction-docker-login/releases/latest)
[](https://github.com/docker/login-action/releases/latest)
[](https://github.com/marketplace/actions/docker-login)
[](https://github.com/crazy-max/ghaction-docker-login/actions?workflow=ci)
[](https://github.com/sponsors/crazy-max)
[](https://www.paypal.me/crazyws)
[](https://github.com/docker/login-action/actions?workflow=ci)
[](https://github.com/docker/login-action/actions?workflow=test)
[](https://codecov.io/gh/docker/login-action)

## About

GitHub Action to login against a Docker registry
GitHub Action to login against a Docker registry.

If you are interested, [check out](https://git.io/Je09Y) my other :octocat: GitHub Actions!

___

* [Usage](#usage)
  * [DockerHub](#dockerhub)
  * [GitHub Package Registry](#github-package-registry)
  * [Docker Hub](#docker-hub)
  * [GitHub Container Registry](#github-container-registry)
  * [GitLab](#gitlab)
  * [Azure Container Registry (ACR)](#azure-container-registry-acr)
  * [Google Container Registry (GCR)](#google-container-registry-gcr)
  * [Google Artifact Registry (GAR)](#google-artifact-registry-gar)
  * [AWS Elastic Container Registry (ECR)](#aws-elastic-container-registry-ecr)
  * [AWS Public Elastic Container Registry (ECR)](#aws-public-elastic-container-registry-ecr)
  * [OCI Oracle Cloud Infrastructure Registry (OCIR)](#oci-oracle-cloud-infrastructure-registry-ocir)
  * [Quay.io](#quayio)
* [Customizing](#customizing)
  * [inputs](#inputs)
* [Limitation](#limitation)
* [How can I help?](#how-can-i-help)
* [License](#license)
* [Keep up-to-date with GitHub Dependabot](#keep-up-to-date-with-github-dependabot)

## Usage

### DockerHub
### Docker Hub

To authenticate against [Docker Hub](https://hub.docker.com) it's strongly recommended to create a
[personal access token](https://docs.docker.com/docker-hub/access-tokens/) as an alternative to your password.

```yaml
name: ci

on:
push:
branches: master
tags:
branches: main

jobs:
login:
runs-on: ubuntu-latest
steps:
-
name: Checkout
uses: actions/checkout@v2
-
name: Login to DockerHub
uses: crazy-max/ghaction-docker-login@v1
name: Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
```

### GitHub Package Registry
### GitHub Container Registry

To authenticate against the [GitHub Container Registry](https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry),
use the [`GITHUB_TOKEN`](https://docs.github.com/en/actions/reference/authentication-in-a-workflow) for the best
security and experience.

```yaml
name: ci

on:
push:
branches: master
tags:
branches: main

jobs:
login:
runs-on: ubuntu-latest
steps:
-
name: Checkout
uses: actions/checkout@v2
-
name: Login to GitHub Package Registry
uses: crazy-max/ghaction-docker-login@v1
name: Login to GitHub Container Registry
uses: docker/login-action@v1
with:
registry: docker.pkg.github.com
username: ${{ github.repository_owner }}
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
```

You may need to [manage write and read access of GitHub Actions](https://docs.github.com/en/packages/managing-github-packages-using-github-actions-workflows/publishing-and-installing-a-package-with-github-actions#upgrading-a-workflow-that-accesses-ghcrio)
for repositories in the container settings.

You can also use a [personal access token (PAT)](https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token)
with the [appropriate scopes](https://docs.github.com/en/packages/getting-started-with-github-container-registry/migrating-to-github-container-registry-for-docker-images#authenticating-with-the-container-registry).
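The step is identical when a PAT is used; only the password source changes. A minimal sketch, assuming the token is stored in a repository secret named `CR_PAT` (the secret name is an assumption, not something this action requires):

```yaml
name: ci

on:
  push:
    branches: main

jobs:
  login:
    runs-on: ubuntu-latest
    steps:
      -
        name: Login to GitHub Container Registry
        uses: docker/login-action@v1
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          # Assumption: CR_PAT is a repository secret holding a personal access
          # token with the appropriate packages scopes.
          password: ${{ secrets.CR_PAT }}
```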
### GitLab

```yaml
@@ -84,25 +92,362 @@ name: ci

on:
push:
branches: master
tags:
branches: main

jobs:
login:
runs-on: ubuntu-latest
steps:
-
name: Checkout
uses: actions/checkout@v2
-
name: Login to GitLab
uses: crazy-max/ghaction-docker-login@v1
uses: docker/login-action@v1
with:
registry: registry.gitlab.com
username: ${{ secrets.GITLAB_USERNAME }}
password: ${{ secrets.GITLAB_PASSWORD }}
```

### Azure Container Registry (ACR)

[Create a service principal](https://docs.microsoft.com/en-us/azure/container-registry/container-registry-auth-service-principal#create-a-service-principal)
with access to your container registry through the [Azure CLI](https://docs.microsoft.com/en-us/cli/azure/install-azure-cli)
and take note of the generated service principal's ID (also called _client ID_) and password (also called _client secret_).

```yaml
name: ci

on:
push:
branches: main

jobs:
login:
runs-on: ubuntu-latest
steps:
-
name: Login to ACR
uses: docker/login-action@v1
with:
registry: <registry-name>.azurecr.io
username: ${{ secrets.AZURE_CLIENT_ID }}
password: ${{ secrets.AZURE_CLIENT_SECRET }}
```

> Replace `<registry-name>` with the name of your registry.

### Google Container Registry (GCR)

> [Google Artifact Registry](#google-artifact-registry-gar) is the evolution of Google Container Registry. It is a
> fully-managed service with support for both container images and non-container artifacts. If you currently use
> Google Container Registry, use the information [on this page](https://cloud.google.com/artifact-registry/docs/transition/transition-from-gcr)
> to learn about transitioning to Google Artifact Registry.

You can use either workload identity federation based keyless authentication or service account based authentication.

#### Workload identity federation based authentication

Configure the workload identity federation for GitHub Actions in gcloud (for steps, [refer here](https://github.com/google-github-actions/auth#setting-up-workload-identity-federation)). In the steps, your service account should have the ability to push to GCR. Then use the google-github-actions/auth action for authentication using workload identity as below:

```yaml
name: ci

on:
push:
branches: main

jobs:
login:
runs-on: ubuntu-latest
steps:
- id: 'auth'
name: 'Authenticate to Google Cloud'
uses: 'google-github-actions/auth@v0'
with:
token_format: 'access_token'
workload_identity_provider: '<workload_identity_provider>'
service_account: '<service_account>'

- name: Login to GCR
uses: docker/login-action@v1
with:
registry: gcr.io
username: oauth2accesstoken
password: ${{ steps.auth.outputs.access_token }}
```

> Replace `<workload_identity_provider>` with the configured workload identity provider. For steps to configure, [refer here](https://github.com/google-github-actions/auth#setting-up-workload-identity-federation).

> Replace `<service_account>` with the configured service account in the workload identity provider which has access to push to GCR.

#### Service account based authentication

Use a service account with the ability to push to GCR and [configure access control](https://cloud.google.com/container-registry/docs/access-control).
Then create and download the JSON key for this service account and save the content of the `.json` file
[as a secret](https://docs.github.com/en/actions/configuring-and-managing-workflows/creating-and-storing-encrypted-secrets#creating-encrypted-secrets-for-a-repository)
called `GCR_JSON_KEY` in your GitHub repo. Ensure you set the username to `_json_key`,
or `_json_key_base64` if you use a base64-encoded key.

```yaml
name: ci

on:
push:
branches: main

jobs:
login:
runs-on: ubuntu-latest
steps:
-
name: Login to GCR
uses: docker/login-action@v1
with:
registry: gcr.io
username: _json_key
password: ${{ secrets.GCR_JSON_KEY }}
```
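The `_json_key_base64` username mentioned above works the same way; a minimal sketch, assuming the base64-encoded key is stored in a secret named `GCR_JSON_KEY_B64` (a hypothetical secret name):

```yaml
name: ci

on:
  push:
    branches: main

jobs:
  login:
    runs-on: ubuntu-latest
    steps:
      -
        name: Login to GCR
        uses: docker/login-action@v1
        with:
          registry: gcr.io
          # Assumption: GCR_JSON_KEY_B64 holds the base64-encoded content of the
          # service account's JSON key file.
          username: _json_key_base64
          password: ${{ secrets.GCR_JSON_KEY_B64 }}
```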
### Google Artifact Registry (GAR)

You can use either workload identity federation based keyless authentication or service account based authentication.

#### Workload identity federation based authentication

Configure the workload identity federation for GitHub Actions in gcloud (for steps, [refer here](https://github.com/google-github-actions/auth#setting-up-workload-identity-federation)). In the steps, your service account should have the ability to push to GAR. Then use the google-github-actions/auth action for authentication using workload identity as below:

```yaml
name: ci

on:
push:
branches: main

jobs:
login:
runs-on: ubuntu-latest
steps:
- id: 'auth'
name: 'Authenticate to Google Cloud'
uses: 'google-github-actions/auth@v0'
with:
token_format: 'access_token'
workload_identity_provider: '<workload_identity_provider>'
service_account: '<service_account>'

- name: Login to GAR
uses: docker/login-action@v1
with:
registry: <location>-docker.pkg.dev
username: oauth2accesstoken
password: ${{ steps.auth.outputs.access_token }}
```

> Replace `<workload_identity_provider>` with the configured workload identity provider.

> Replace `<service_account>` with the configured service account in the workload identity provider which has access to push to GAR.

> Replace `<location>` with the regional or multi-regional [location](https://cloud.google.com/artifact-registry/docs/repo-organize#locations)
> of the repository where the image is stored.

#### Service account based authentication

Use a service account with the ability to push to GAR and [configure access control](https://cloud.google.com/artifact-registry/docs/access-control).
Then create and download the JSON key for this service account and save the content of the `.json` file
[as a secret](https://docs.github.com/en/actions/configuring-and-managing-workflows/creating-and-storing-encrypted-secrets#creating-encrypted-secrets-for-a-repository)
called `GAR_JSON_KEY` in your GitHub repo. Ensure you set the username to `_json_key`,
or `_json_key_base64` if you use a base64-encoded key.

```yaml
name: ci

on:
push:
branches: main

jobs:
login:
runs-on: ubuntu-latest
steps:
-
name: Login to GAR
uses: docker/login-action@v1
with:
registry: <location>-docker.pkg.dev
username: _json_key
password: ${{ secrets.GAR_JSON_KEY }}
```

> Replace `<location>` with the regional or multi-regional [location](https://cloud.google.com/artifact-registry/docs/repo-organize#locations)
> of the repository where the image is stored.
### AWS Elastic Container Registry (ECR)

Use an IAM user with the ability to [push to ECR with `AmazonEC2ContainerRegistryPowerUser` managed policy for example](https://docs.aws.amazon.com/AmazonECR/latest/userguide/ecr_managed_policies.html#AmazonEC2ContainerRegistryPowerUser).
Then create and download access keys and save `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` [as secrets](https://docs.github.com/en/actions/configuring-and-managing-workflows/creating-and-storing-encrypted-secrets#creating-encrypted-secrets-for-a-repository)
in your GitHub repo.

```yaml
name: ci

on:
push:
branches: main

jobs:
login:
runs-on: ubuntu-latest
steps:
-
name: Login to ECR
uses: docker/login-action@v1
with:
registry: <aws-account-number>.dkr.ecr.<region>.amazonaws.com
username: ${{ secrets.AWS_ACCESS_KEY_ID }}
password: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
```

If you need to log in to Amazon ECR registries associated with other accounts, you can use the `AWS_ACCOUNT_IDS`
environment variable:

```yaml
name: ci

on:
push:
branches: main

jobs:
login:
runs-on: ubuntu-latest
steps:
-
name: Login to ECR
uses: docker/login-action@v1
with:
registry: <aws-account-number>.dkr.ecr.<region>.amazonaws.com
username: ${{ secrets.AWS_ACCESS_KEY_ID }}
password: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
env:
AWS_ACCOUNT_IDS: 012345678910,023456789012
```

> Only available with [AWS CLI version 1](https://docs.aws.amazon.com/cli/latest/reference/ecr/get-login.html)

You can also use the [Configure AWS Credentials](https://github.com/aws-actions/configure-aws-credentials) action in
combination with this action:

```yaml
name: ci

on:
push:
branches: main

jobs:
login:
runs-on: ubuntu-latest
steps:
-
name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: <region>
-
name: Login to ECR
uses: docker/login-action@v1
with:
registry: <aws-account-number>.dkr.ecr.<region>.amazonaws.com
```

> Replace `<aws-account-number>` and `<region>` with their respective values.

### AWS Public Elastic Container Registry (ECR)

Use an IAM user with the ability to [push to ECR Public with `AmazonElasticContainerRegistryPublicPowerUser` managed policy for example](https://docs.aws.amazon.com/AmazonECR/latest/public/public-ecr-managed-policies.html#AmazonElasticContainerRegistryPublicPowerUser).
Then create and download access keys and save `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` [as secrets](https://docs.github.com/en/actions/configuring-and-managing-workflows/creating-and-storing-encrypted-secrets#creating-encrypted-secrets-for-a-repository)
in your GitHub repo.

```yaml
name: ci

on:
push:
branches: main

jobs:
login:
runs-on: ubuntu-latest
steps:
-
name: Login to Public ECR
uses: docker/login-action@v1
with:
registry: public.ecr.aws
username: ${{ secrets.AWS_ACCESS_KEY_ID }}
password: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
env:
AWS_REGION: <region>
```

> Replace `<region>` with its respective value (default `us-east-1`).

### OCI Oracle Cloud Infrastructure Registry (OCIR)

To push into OCIR in a specific tenancy, the [username](https://www.oracle.com/webfolder/technetwork/tutorials/obe/oci/registry/index.html#LogintoOracleCloudInfrastructureRegistryfromtheDockerCLI)
must be placed in the format `<tenancy>/<username>` (in case of federated tenancy use the format
`<tenancy-namespace>/oracleidentitycloudservice/<username>`).

For the password, [create an auth token](https://www.oracle.com/webfolder/technetwork/tutorials/obe/oci/registry/index.html#GetanAuthToken).
Save the username and token [as secrets](https://docs.github.com/en/actions/configuring-and-managing-workflows/creating-and-storing-encrypted-secrets#creating-encrypted-secrets-for-a-repository)
in your GitHub repo.

```yaml
name: ci

on:
push:
branches: main

jobs:
login:
runs-on: ubuntu-latest
steps:
-
name: Login to OCIR
uses: docker/login-action@v1
with:
registry: <region>.ocir.io
username: ${{ secrets.OCI_USERNAME }}
password: ${{ secrets.OCI_TOKEN }}
```

> Replace `<region>` with its respective value from the [availability regions](https://docs.cloud.oracle.com/iaas/Content/Registry/Concepts/registryprerequisites.htm#Availab)
### Quay.io

Use a [Robot account](https://docs.quay.io/glossary/robot-accounts.html) with the ability to push to a public/private Quay.io repository.

```yaml
name: ci

on:
push:
branches: main

jobs:
login:
runs-on: ubuntu-latest
steps:
-
name: Login to Quay.io
uses: docker/login-action@v1
with:
registry: quay.io
username: ${{ secrets.QUAY_USERNAME }}
password: ${{ secrets.QUAY_ROBOT_TOKEN }}
```

## Customizing

### inputs

@@ -114,21 +459,21 @@ Following inputs can be used as `step.with` keys
| `registry` | String | | Server address of the Docker registry. If not set, defaults to Docker Hub |
| `username` | String | | Username used to log in against the Docker registry |
| `password` | String | | Password or personal access token used to log in against the Docker registry |
| `ecr` | String | `auto` | Specifies whether the given registry is ECR (`auto`, `true` or `false`) |
| `logout` | Bool | `true` | Log out from the Docker registry at the end of a job |
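A sketch showing how the optional inputs combine for a registry that is not ECR; the registry address and secret names here are placeholders, not values defined by this action:

```yaml
name: ci

on:
  push:
    branches: main

jobs:
  login:
    runs-on: ubuntu-latest
    steps:
      -
        name: Login to private registry
        uses: docker/login-action@v1
        with:
          # Placeholder registry address and secrets.
          registry: registry.example.com
          username: ${{ secrets.REGISTRY_USERNAME }}
          password: ${{ secrets.REGISTRY_PASSWORD }}
          # Skip ECR auto-detection for a non-AWS registry.
          ecr: false
          # Keep the credentials in the runner's Docker config instead of
          # logging out in the post step at the end of the job.
          logout: false
```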
## Limitation
## Keep up-to-date with GitHub Dependabot

This action is only available for Linux [virtual environments](https://help.github.com/en/articles/virtual-environments-for-github-actions#supported-virtual-environments-and-hardware-resources).
Since [Dependabot](https://docs.github.com/en/github/administering-a-repository/keeping-your-actions-up-to-date-with-github-dependabot)
has [native GitHub Actions support](https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#package-ecosystem),
to enable it on your GitHub repo all you need to do is add the `.github/dependabot.yml` file:

## How can I help?

All kinds of contributions are welcome :raised_hands:! The most basic way to show your support is to star :star2:
the project, or to raise issues :speech_balloon: You can also support this project by
[**becoming a sponsor on GitHub**](https://github.com/sponsors/crazy-max) :clap: or by making a
[Paypal donation](https://www.paypal.me/crazyws) to ensure this journey continues indefinitely! :rocket:

Thanks again for your support, it is much appreciated! :pray:

## License

MIT. See `LICENSE` for more details.

```yaml
version: 2
updates:
# Maintain dependencies for GitHub Actions
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"
```
__tests__/aws.test.ts (new file, 155 lines)
@@ -0,0 +1,155 @@
import {AuthorizationData} from '@aws-sdk/client-ecr';
import * as aws from '../src/aws';

describe('isECR', () => {
  test.each([
    ['registry.gitlab.com', false],
    ['gcr.io', false],
    ['012345678901.dkr.ecr.eu-west-3.amazonaws.com', true],
    ['876820548815.dkr.ecr.cn-north-1.amazonaws.com.cn', true],
    ['390948362332.dkr.ecr.cn-northwest-1.amazonaws.com.cn', true],
    ['public.ecr.aws', true]
  ])('given registry %p', async (registry, expected) => {
    expect(aws.isECR(registry)).toEqual(expected);
  });
});

describe('isPubECR', () => {
  test.each([
    ['registry.gitlab.com', false],
    ['gcr.io', false],
    ['012345678901.dkr.ecr.eu-west-3.amazonaws.com', false],
    ['876820548815.dkr.ecr.cn-north-1.amazonaws.com.cn', false],
    ['390948362332.dkr.ecr.cn-northwest-1.amazonaws.com.cn', false],
    ['public.ecr.aws', true]
  ])('given registry %p', async (registry, expected) => {
    expect(aws.isPubECR(registry)).toEqual(expected);
  });
});

describe('getRegion', () => {
  test.each([
    ['012345678901.dkr.ecr.eu-west-3.amazonaws.com', 'eu-west-3'],
    ['876820548815.dkr.ecr.cn-north-1.amazonaws.com.cn', 'cn-north-1'],
    ['390948362332.dkr.ecr.cn-northwest-1.amazonaws.com.cn', 'cn-northwest-1'],
    ['public.ecr.aws', 'us-east-1']
  ])('given registry %p', async (registry, expected) => {
    expect(aws.getRegion(registry)).toEqual(expected);
  });
});

describe('getAccountIDs', () => {
  test.each([
    ['012345678901.dkr.ecr.eu-west-3.amazonaws.com', undefined, ['012345678901']],
    ['012345678901.dkr.ecr.eu-west-3.amazonaws.com', '012345678910,023456789012', ['012345678901', '012345678910', '023456789012']],
    ['012345678901.dkr.ecr.eu-west-3.amazonaws.com', '012345678901,012345678910,023456789012', ['012345678901', '012345678910', '023456789012']],
    ['390948362332.dkr.ecr.cn-northwest-1.amazonaws.com.cn', '012345678910,023456789012', ['390948362332', '012345678910', '023456789012']],
    ['public.ecr.aws', undefined, []]
  ])('given registry %p', async (registry, accountIDsEnv, expected) => {
    if (accountIDsEnv) {
      process.env.AWS_ACCOUNT_IDS = accountIDsEnv;
    }
    expect(aws.getAccountIDs(registry)).toEqual(expected);
  });
});

const mockEcrGetAuthToken = jest.fn();
const mockEcrPublicGetAuthToken = jest.fn();
jest.mock('@aws-sdk/client-ecr', () => {
  return {
    ECR: jest.fn(() => ({
      getAuthorizationToken: mockEcrGetAuthToken
    }))
  };
});
jest.mock('@aws-sdk/client-ecr-public', () => {
  return {
    ECRPUBLIC: jest.fn(() => ({
      getAuthorizationToken: mockEcrPublicGetAuthToken
    }))
  };
});

describe('getRegistriesData', () => {
  beforeEach(() => {
    jest.clearAllMocks();
    delete process.env.AWS_ACCOUNT_IDS;
  });
  // prettier-ignore
  test.each([
    [
      '012345678901.dkr.ecr.aws-region-1.amazonaws.com',
      'dkr.ecr.aws-region-1.amazonaws.com', undefined,
      [
        {
          registry: '012345678901.dkr.ecr.aws-region-1.amazonaws.com',
          username: '012345678901',
          password: 'world'
        }
      ]
    ],
    [
      '012345678901.dkr.ecr.eu-west-3.amazonaws.com',
      'dkr.ecr.eu-west-3.amazonaws.com',
      '012345678910,023456789012',
      [
        {
          registry: '012345678901.dkr.ecr.eu-west-3.amazonaws.com',
          username: '012345678901',
          password: 'world'
        },
        {
          registry: '012345678910.dkr.ecr.eu-west-3.amazonaws.com',
          username: '012345678910',
          password: 'world'
        },
        {
          registry: '023456789012.dkr.ecr.eu-west-3.amazonaws.com',
          username: '023456789012',
          password: 'world'
        }
      ]
    ],
    [
      'public.ecr.aws',
      undefined,
      undefined,
      [
        {
          registry: 'public.ecr.aws',
          username: 'AWS',
          password: 'world'
        }
      ]
    ]
  ])('given registry %p', async (registry, fqdn, accountIDsEnv, expected: aws.RegistryData[]) => {
    if (accountIDsEnv) {
      process.env.AWS_ACCOUNT_IDS = accountIDsEnv;
    }
    const accountIDs = aws.getAccountIDs(registry);
    const authData: AuthorizationData[] = [];
    if (accountIDs.length == 0) {
      mockEcrPublicGetAuthToken.mockImplementation(() => {
        return Promise.resolve({
          authorizationData: {
            authorizationToken: Buffer.from(`AWS:world`).toString('base64'),
          }
        });
      });
    } else {
      aws.getAccountIDs(registry).forEach(accountID => {
        authData.push({
          authorizationToken: Buffer.from(`${accountID}:world`).toString('base64'),
          proxyEndpoint: `${accountID}.${fqdn}`
        });
      });
      mockEcrGetAuthToken.mockImplementation(() => {
        return Promise.resolve({
          authorizationData: authData
        });
      });
    }
    const regData = await aws.getRegistriesData(registry);
    expect(regData).toEqual(expected);
  });
});
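For orientation, here is a minimal sketch of the pure helpers these tests exercise, inferred only from the expectations above. The real `src/aws.ts` (including its `getRegistriesData` and `RegistryData` exports) may be organized differently, and `ecrRegistryRegex` is a name introduced here purely for illustration.

```ts
// Hypothetical sketch of src/aws.ts helpers, inferred from __tests__/aws.test.ts above.
// Matches private ECR hosts like 012345678901.dkr.ecr.eu-west-3.amazonaws.com(.cn).
const ecrRegistryRegex = /^([0-9]{12})\.dkr\.ecr\.(.+)\.amazonaws\.com(\.cn)?$/;

export const isPubECR = (registry: string): boolean => registry === 'public.ecr.aws';

export const isECR = (registry: string): boolean =>
  ecrRegistryRegex.test(registry) || isPubECR(registry);

export const getRegion = (registry: string): string => {
  if (isPubECR(registry)) {
    return 'us-east-1'; // public ECR is always served out of us-east-1
  }
  const matches = registry.match(ecrRegistryRegex);
  return matches ? matches[2] : '';
};

export const getAccountIDs = (registry: string): string[] => {
  if (isPubECR(registry)) {
    return []; // public ECR has no per-account registries
  }
  const matches = registry.match(ecrRegistryRegex);
  if (!matches) {
    return [];
  }
  const accountIDs: string[] = [matches[1]];
  if (process.env.AWS_ACCOUNT_IDS) {
    accountIDs.push(...process.env.AWS_ACCOUNT_IDS.split(','));
  }
  // De-duplicate while keeping the registry's own account first.
  return Array.from(new Set(accountIDs));
};
```

Under these assumptions the `getAccountIDs` cases above fall out directly: the account parsed from the registry host is always first, and any IDs supplied via `AWS_ACCOUNT_IDS` are appended and de-duplicated.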
__tests__/context.test.ts (new file, 10 lines)
@@ -0,0 +1,10 @@
import {getInputs} from '../src/context';

test('with password and username getInputs does not throw error', async () => {
  process.env['INPUT_USERNAME'] = 'dbowie';
  process.env['INPUT_PASSWORD'] = 'groundcontrol';
  process.env['INPUT_LOGOUT'] = 'true';
  expect(() => {
    getInputs();
  }).not.toThrowError();
});
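The body of `getInputs()` is not shown in this compare, so the following is a hypothetical sketch that is merely consistent with the environment variables set in this test and with the inputs declared in `action.yml` further down; the `Inputs` interface name is introduced here for illustration.

```ts
// Hypothetical sketch of src/context.ts (not the actual implementation).
import * as core from '@actions/core';

export interface Inputs {
  registry: string;
  username: string;
  password: string;
  ecr: string;
  logout: boolean;
}

export function getInputs(): Inputs {
  // Each core.getInput() call reads the corresponding INPUT_* environment variable.
  return {
    registry: core.getInput('registry'),
    username: core.getInput('username'),
    password: core.getInput('password'),
    ecr: core.getInput('ecr'),
    logout: core.getBooleanInput('logout')
  };
}
```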
__tests__/docker.test.ts (new file, 49 lines)
@@ -0,0 +1,49 @@
import {loginStandard, logout} from '../src/docker';

import * as path from 'path';

import * as exec from '@actions/exec';

process.env['RUNNER_TEMP'] = path.join(__dirname, 'runner');

test('loginStandard calls exec', async () => {
  const execSpy: jest.SpyInstance = jest.spyOn(exec, 'getExecOutput');
  execSpy.mockImplementation(() =>
    Promise.resolve({
      exitCode: expect.any(Number),
      stdout: expect.any(Function),
      stderr: expect.any(Function)
    })
  );

  const username: string = 'dbowie';
  const password: string = 'groundcontrol';
  const registry: string = 'https://ghcr.io';

  await loginStandard(registry, username, password);

  expect(execSpy).toHaveBeenCalledWith(`docker`, ['login', '--password-stdin', '--username', username, registry], {
    input: Buffer.from(password),
    silent: true,
    ignoreReturnCode: true
  });
});

test('logout calls exec', async () => {
  const execSpy: jest.SpyInstance = jest.spyOn(exec, 'getExecOutput');
  execSpy.mockImplementation(() =>
    Promise.resolve({
      exitCode: expect.any(Number),
      stdout: expect.any(Function),
      stderr: expect.any(Function)
    })
  );

  const registry: string = 'https://ghcr.io';

  await logout(registry);

  expect(execSpy).toHaveBeenCalledWith(`docker`, ['logout', registry], {
    ignoreReturnCode: true
  });
});
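As a reading aid, here is a minimal sketch of the two functions exercised here, shaped purely to match the `exec.getExecOutput` calls the assertions describe. The real `src/docker.ts` also dispatches to the ECR path and does richer error reporting, so treat this as an illustrative sketch rather than the action's implementation.

```ts
// Hypothetical sketch, assuming only what the test assertions above require.
import * as core from '@actions/core';
import * as exec from '@actions/exec';

export async function loginStandard(registry: string, username: string, password: string): Promise<void> {
  // The password is piped via stdin so it never appears in the process arguments.
  const loginArgs = ['login', '--password-stdin'];
  if (username) {
    loginArgs.push('--username', username);
  }
  loginArgs.push(registry);

  const res = await exec.getExecOutput('docker', loginArgs, {
    ignoreReturnCode: true,
    silent: true,
    input: Buffer.from(password)
  });
  if (res.stderr.length > 0 && res.exitCode !== 0) {
    throw new Error(res.stderr.trim());
  }
  core.info('Login Succeeded!');
}

export async function logout(registry: string): Promise<void> {
  const res = await exec.getExecOutput('docker', ['logout', registry], {
    ignoreReturnCode: true
  });
  if (res.stderr.length > 0 && res.exitCode !== 0) {
    core.warning(res.stderr.trim());
  }
}
```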
__tests__/main.test.ts (new file, 79 lines)
@@ -0,0 +1,79 @@
import osm = require('os');

import {run} from '../src/main';
import * as docker from '../src/docker';
import * as stateHelper from '../src/state-helper';

import * as core from '@actions/core';

test('errors without username and password', async () => {
  const platSpy = jest.spyOn(osm, 'platform');
  platSpy.mockImplementation(() => 'linux');

  process.env['INPUT_LOGOUT'] = 'true'; // default value

  const coreSpy: jest.SpyInstance = jest.spyOn(core, 'setFailed');

  await run();

  expect(coreSpy).toHaveBeenCalledWith('Username and password required');
});

test('successful with username and password', async () => {
  const platSpy = jest.spyOn(osm, 'platform');
  platSpy.mockImplementation(() => 'linux');

  const setRegistrySpy: jest.SpyInstance = jest.spyOn(stateHelper, 'setRegistry');
  const setLogoutSpy: jest.SpyInstance = jest.spyOn(stateHelper, 'setLogout');
  const dockerSpy: jest.SpyInstance = jest.spyOn(docker, 'login');
  dockerSpy.mockImplementation(() => {});

  const username: string = 'dbowie';
  process.env[`INPUT_USERNAME`] = username;

  const password: string = 'groundcontrol';
  process.env[`INPUT_PASSWORD`] = password;

  const ecr: string = 'auto';
  process.env['INPUT_ECR'] = ecr;

  const logout: boolean = false;
  process.env['INPUT_LOGOUT'] = String(logout);

  await run();

  expect(setRegistrySpy).toHaveBeenCalledWith('');
  expect(setLogoutSpy).toHaveBeenCalledWith(logout);
  expect(dockerSpy).toHaveBeenCalledWith('', username, password, ecr);
});

test('calls docker login', async () => {
  const platSpy = jest.spyOn(osm, 'platform');
  platSpy.mockImplementation(() => 'linux');

  const setRegistrySpy: jest.SpyInstance = jest.spyOn(stateHelper, 'setRegistry');
  const setLogoutSpy: jest.SpyInstance = jest.spyOn(stateHelper, 'setLogout');
  const dockerSpy: jest.SpyInstance = jest.spyOn(docker, 'login');
  dockerSpy.mockImplementation(() => {});

  const username: string = 'dbowie';
  process.env[`INPUT_USERNAME`] = username;

  const password: string = 'groundcontrol';
  process.env[`INPUT_PASSWORD`] = password;

  const registry: string = 'ghcr.io';
  process.env[`INPUT_REGISTRY`] = registry;

  const ecr: string = 'auto';
  process.env['INPUT_ECR'] = ecr;

  const logout: boolean = true;
  process.env['INPUT_LOGOUT'] = String(logout);

  await run();

  expect(setRegistrySpy).toHaveBeenCalledWith(registry);
  expect(setLogoutSpy).toHaveBeenCalledWith(logout);
  expect(dockerSpy).toHaveBeenCalledWith(registry, username, password, ecr);
});
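These tests only pin down the observable calls, so the following is a speculative outline of what `run()` in `src/main.ts` could look like under that assumption: inputs are read, state is recorded via `state-helper`, the work is delegated to `docker.login()`, and any thrown error (such as the missing-credentials case) surfaces through `core.setFailed()`. The real file may differ in detail.

```ts
// Speculative outline of src/main.ts, consistent with the spies asserted above.
import * as core from '@actions/core';
import * as docker from './docker';
import {getInputs} from './context';
import * as stateHelper from './state-helper';

export async function run(): Promise<void> {
  try {
    const {registry, username, password, ecr, logout} = getInputs();
    // Save state so the post step knows what to log out from.
    stateHelper.setRegistry(registry);
    stateHelper.setLogout(logout);
    await docker.login(registry, username, password, ecr);
  } catch (error) {
    // A missing username/password is expected to bubble up here as an Error.
    core.setFailed(error instanceof Error ? error.message : String(error));
  }
}
```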
action.yml (10 lines changed)
@@ -1,9 +1,9 @@
 # https://help.github.com/en/articles/metadata-syntax-for-github-actions
 name: 'Docker Login'
 description: 'GitHub Action to login against a Docker registry'
-author: 'crazy-max'
+author: 'docker'
 branding:
-  icon: 'log-in'
+  icon: 'anchor'
   color: 'blue'
 
 inputs:
@@ -15,7 +15,11 @@ inputs:
     required: false
   password:
     description: 'Password or personal access token used to log against the Docker registry'
-    required: true
+    required: false
+  ecr:
+    description: 'Specifies whether the given registry is ECR (auto, true or false)'
+    default: 'auto'
+    required: false
   logout:
     description: 'Log out from the Docker registry at the end of a job'
     default: 'true'
codecov.yml (new file, 3 lines)
@@ -0,0 +1,3 @@
comment: false
github_checks:
  annotations: false
dist/bridge.js (generated, vendored, new file, 984 lines)
@@ -0,0 +1,984 @@
|
||||
/******/ (() => { // webpackBootstrap
|
||||
/******/ "use strict";
|
||||
/******/ var __webpack_modules__ = ({
|
||||
|
||||
/***/ 989:
|
||||
/***/ ((__unused_webpack_module, exports) => {
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* __ ___ ____ _ _ ___ _ _ ____
|
||||
* \ \ / / \ | _ \| \ | |_ _| \ | |/ ___|
|
||||
* \ \ /\ / / _ \ | |_) | \| || || \| | | _
|
||||
* \ V V / ___ \| _ <| |\ || || |\ | |_| |
|
||||
* \_/\_/_/ \_\_| \_\_| \_|___|_| \_|\____|
|
||||
*
|
||||
* This file is critical for vm2. It implements the bridge between the host and the sandbox.
|
||||
* If you do not know exactly what you are doing, you should NOT edit this file.
|
||||
*
|
||||
* The file is loaded in the host and sandbox to handle objects in both directions.
|
||||
* This is done to ensure that RangeErrors are from the correct context.
|
||||
* The boundary between the sandbox and host might throw RangeErrors from both contexts.
|
||||
* Therefore, thisFromOther and friends can handle objects from both domains.
|
||||
*
|
||||
* Method parameters have comments to tell from which context they came.
|
||||
*
|
||||
*/
|
||||
|
||||
const globalsList = [
|
||||
'Number',
|
||||
'String',
|
||||
'Boolean',
|
||||
'Date',
|
||||
'RegExp',
|
||||
'Map',
|
||||
'WeakMap',
|
||||
'Set',
|
||||
'WeakSet',
|
||||
'Promise',
|
||||
'Function'
|
||||
];
|
||||
|
||||
const errorsList = [
|
||||
'RangeError',
|
||||
'ReferenceError',
|
||||
'SyntaxError',
|
||||
'TypeError',
|
||||
'EvalError',
|
||||
'URIError',
|
||||
'Error'
|
||||
];
|
||||
|
||||
const OPNA = 'Operation not allowed on contextified object.';
|
||||
|
||||
const thisGlobalPrototypes = {
|
||||
__proto__: null,
|
||||
Object: Object.prototype,
|
||||
Array: Array.prototype
|
||||
};
|
||||
|
||||
for (let i = 0; i < globalsList.length; i++) {
|
||||
const key = globalsList[i];
|
||||
const g = global[key];
|
||||
if (g) thisGlobalPrototypes[key] = g.prototype;
|
||||
}
|
||||
|
||||
for (let i = 0; i < errorsList.length; i++) {
|
||||
const key = errorsList[i];
|
||||
const g = global[key];
|
||||
if (g) thisGlobalPrototypes[key] = g.prototype;
|
||||
}
|
||||
|
||||
const {
|
||||
getPrototypeOf: thisReflectGetPrototypeOf,
|
||||
setPrototypeOf: thisReflectSetPrototypeOf,
|
||||
defineProperty: thisReflectDefineProperty,
|
||||
deleteProperty: thisReflectDeleteProperty,
|
||||
getOwnPropertyDescriptor: thisReflectGetOwnPropertyDescriptor,
|
||||
isExtensible: thisReflectIsExtensible,
|
||||
preventExtensions: thisReflectPreventExtensions,
|
||||
apply: thisReflectApply,
|
||||
construct: thisReflectConstruct,
|
||||
set: thisReflectSet,
|
||||
get: thisReflectGet,
|
||||
has: thisReflectHas,
|
||||
ownKeys: thisReflectOwnKeys,
|
||||
enumerate: thisReflectEnumerate,
|
||||
} = Reflect;
|
||||
|
||||
const thisObject = Object;
|
||||
const {
|
||||
freeze: thisObjectFreeze,
|
||||
prototype: thisObjectPrototype
|
||||
} = thisObject;
|
||||
const thisObjectHasOwnProperty = thisObjectPrototype.hasOwnProperty;
|
||||
const ThisProxy = Proxy;
|
||||
const ThisWeakMap = WeakMap;
|
||||
const {
|
||||
get: thisWeakMapGet,
|
||||
set: thisWeakMapSet
|
||||
} = ThisWeakMap.prototype;
|
||||
const ThisMap = Map;
|
||||
const thisMapGet = ThisMap.prototype.get;
|
||||
const thisMapSet = ThisMap.prototype.set;
|
||||
const thisFunction = Function;
|
||||
const thisFunctionBind = thisFunction.prototype.bind;
|
||||
const thisArrayIsArray = Array.isArray;
|
||||
const thisErrorCaptureStackTrace = Error.captureStackTrace;
|
||||
|
||||
const thisSymbolToString = Symbol.prototype.toString;
|
||||
const thisSymbolToStringTag = Symbol.toStringTag;
|
||||
|
||||
/**
|
||||
* VMError.
|
||||
*
|
||||
* @public
|
||||
* @extends {Error}
|
||||
*/
|
||||
class VMError extends Error {
|
||||
|
||||
/**
|
||||
* Create VMError instance.
|
||||
*
|
||||
* @public
|
||||
* @param {string} message - Error message.
|
||||
* @param {string} code - Error code.
|
||||
*/
|
||||
constructor(message, code) {
|
||||
super(message);
|
||||
|
||||
this.name = 'VMError';
|
||||
this.code = code;
|
||||
|
||||
thisErrorCaptureStackTrace(this, this.constructor);
|
||||
}
|
||||
}
|
||||
|
||||
thisGlobalPrototypes['VMError'] = VMError.prototype;
|
||||
|
||||
function thisUnexpected() {
|
||||
return new VMError('Unexpected');
|
||||
}
|
||||
|
||||
if (!thisReflectSetPrototypeOf(exports, null)) throw thisUnexpected();
|
||||
|
||||
function thisSafeGetOwnPropertyDescriptor(obj, key) {
|
||||
const desc = thisReflectGetOwnPropertyDescriptor(obj, key);
|
||||
if (!desc) return desc;
|
||||
if (!thisReflectSetPrototypeOf(desc, null)) throw thisUnexpected();
|
||||
return desc;
|
||||
}
|
||||
|
||||
function thisThrowCallerCalleeArgumentsAccess(key) {
|
||||
'use strict';
|
||||
thisThrowCallerCalleeArgumentsAccess[key];
|
||||
return thisUnexpected();
|
||||
}
|
||||
|
||||
function thisIdMapping(factory, other) {
|
||||
return other;
|
||||
}
|
||||
|
||||
const thisThrowOnKeyAccessHandler = thisObjectFreeze({
|
||||
__proto__: null,
|
||||
get(target, key, receiver) {
|
||||
if (typeof key === 'symbol') {
|
||||
key = thisReflectApply(thisSymbolToString, key, []);
|
||||
}
|
||||
throw new VMError(`Unexpected access to key '${key}'`);
|
||||
}
|
||||
});
|
||||
|
||||
const emptyForzenObject = thisObjectFreeze({
|
||||
__proto__: null
|
||||
});
|
||||
|
||||
const thisThrowOnKeyAccess = new ThisProxy(emptyForzenObject, thisThrowOnKeyAccessHandler);
|
||||
|
||||
function SafeBase() {}
|
||||
|
||||
if (!thisReflectDefineProperty(SafeBase, 'prototype', {
|
||||
__proto__: null,
|
||||
value: thisThrowOnKeyAccess
|
||||
})) throw thisUnexpected();
|
||||
|
||||
function SHARED_FUNCTION() {}
|
||||
|
||||
const TEST_PROXY_HANDLER = thisObjectFreeze({
|
||||
__proto__: thisThrowOnKeyAccess,
|
||||
construct() {
|
||||
return this;
|
||||
}
|
||||
});
|
||||
|
||||
function thisIsConstructor(obj) {
|
||||
// Note: obj@any(unsafe)
|
||||
const Func = new ThisProxy(obj, TEST_PROXY_HANDLER);
|
||||
try {
|
||||
// eslint-disable-next-line no-new
|
||||
new Func();
|
||||
return true;
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function thisCreateTargetObject(obj, proto) {
|
||||
// Note: obj@any(unsafe) proto@any(unsafe) returns@this(unsafe) throws@this(unsafe)
|
||||
let base;
|
||||
if (typeof obj === 'function') {
|
||||
if (thisIsConstructor(obj)) {
|
||||
// Bind the function since bound functions do not have a prototype property.
|
||||
base = thisReflectApply(thisFunctionBind, SHARED_FUNCTION, [null]);
|
||||
} else {
|
||||
base = () => {};
|
||||
}
|
||||
} else if (thisArrayIsArray(obj)) {
|
||||
base = [];
|
||||
} else {
|
||||
return {__proto__: proto};
|
||||
}
|
||||
if (!thisReflectSetPrototypeOf(base, proto)) throw thisUnexpected();
|
||||
return base;
|
||||
}
|
||||
|
||||
function createBridge(otherInit, registerProxy) {
|
||||
|
||||
const mappingOtherToThis = new ThisWeakMap();
|
||||
const protoMappings = new ThisMap();
|
||||
const protoName = new ThisMap();
|
||||
|
||||
function thisAddProtoMapping(proto, other, name) {
|
||||
// Note: proto@this(unsafe) other@other(unsafe) name@this(unsafe) throws@this(unsafe)
|
||||
thisReflectApply(thisMapSet, protoMappings, [proto, thisIdMapping]);
|
||||
thisReflectApply(thisMapSet, protoMappings, [other,
|
||||
(factory, object) => thisProxyOther(factory, object, proto)]);
|
||||
if (name) thisReflectApply(thisMapSet, protoName, [proto, name]);
|
||||
}
|
||||
|
||||
function thisAddProtoMappingFactory(protoFactory, other, name) {
|
||||
// Note: protoFactory@this(unsafe) other@other(unsafe) name@this(unsafe) throws@this(unsafe)
|
||||
let proto;
|
||||
thisReflectApply(thisMapSet, protoMappings, [other,
|
||||
(factory, object) => {
|
||||
if (!proto) {
|
||||
proto = protoFactory();
|
||||
thisReflectApply(thisMapSet, protoMappings, [proto, thisIdMapping]);
|
||||
if (name) thisReflectApply(thisMapSet, protoName, [proto, name]);
|
||||
}
|
||||
return thisProxyOther(factory, object, proto);
|
||||
}]);
|
||||
}
|
||||
|
||||
const result = {
|
||||
__proto__: null,
|
||||
globalPrototypes: thisGlobalPrototypes,
|
||||
safeGetOwnPropertyDescriptor: thisSafeGetOwnPropertyDescriptor,
|
||||
fromArguments: thisFromOtherArguments,
|
||||
from: thisFromOther,
|
||||
fromWithFactory: thisFromOtherWithFactory,
|
||||
ensureThis: thisEnsureThis,
|
||||
mapping: mappingOtherToThis,
|
||||
connect: thisConnect,
|
||||
reflectSet: thisReflectSet,
|
||||
reflectGet: thisReflectGet,
|
||||
reflectDefineProperty: thisReflectDefineProperty,
|
||||
reflectDeleteProperty: thisReflectDeleteProperty,
|
||||
reflectApply: thisReflectApply,
|
||||
reflectConstruct: thisReflectConstruct,
|
||||
reflectHas: thisReflectHas,
|
||||
reflectOwnKeys: thisReflectOwnKeys,
|
||||
reflectEnumerate: thisReflectEnumerate,
|
||||
reflectGetPrototypeOf: thisReflectGetPrototypeOf,
|
||||
reflectIsExtensible: thisReflectIsExtensible,
|
||||
reflectPreventExtensions: thisReflectPreventExtensions,
|
||||
objectHasOwnProperty: thisObjectHasOwnProperty,
|
||||
weakMapSet: thisWeakMapSet,
|
||||
addProtoMapping: thisAddProtoMapping,
|
||||
addProtoMappingFactory: thisAddProtoMappingFactory,
|
||||
defaultFactory,
|
||||
protectedFactory,
|
||||
readonlyFactory,
|
||||
VMError
|
||||
};
|
||||
|
||||
const isHost = typeof otherInit !== 'object';
|
||||
|
||||
if (isHost) {
|
||||
otherInit = otherInit(result, registerProxy);
|
||||
}
|
||||
|
||||
result.other = otherInit;
|
||||
|
||||
const {
|
||||
globalPrototypes: otherGlobalPrototypes,
|
||||
safeGetOwnPropertyDescriptor: otherSafeGetOwnPropertyDescriptor,
|
||||
fromArguments: otherFromThisArguments,
|
||||
from: otherFromThis,
|
||||
mapping: mappingThisToOther,
|
||||
reflectSet: otherReflectSet,
|
||||
reflectGet: otherReflectGet,
|
||||
reflectDefineProperty: otherReflectDefineProperty,
|
||||
reflectDeleteProperty: otherReflectDeleteProperty,
|
||||
reflectApply: otherReflectApply,
|
||||
reflectConstruct: otherReflectConstruct,
|
||||
reflectHas: otherReflectHas,
|
||||
reflectOwnKeys: otherReflectOwnKeys,
|
||||
reflectEnumerate: otherReflectEnumerate,
|
||||
reflectGetPrototypeOf: otherReflectGetPrototypeOf,
|
||||
reflectIsExtensible: otherReflectIsExtensible,
|
||||
reflectPreventExtensions: otherReflectPreventExtensions,
|
||||
objectHasOwnProperty: otherObjectHasOwnProperty,
|
||||
weakMapSet: otherWeakMapSet
|
||||
} = otherInit;
|
||||
|
||||
function thisOtherHasOwnProperty(object, key) {
|
||||
// Note: object@other(safe) key@prim throws@this(unsafe)
|
||||
try {
|
||||
return otherReflectApply(otherObjectHasOwnProperty, object, [key]) === true;
|
||||
} catch (e) { // @other(unsafe)
|
||||
throw thisFromOther(e);
|
||||
}
|
||||
}
|
||||
|
||||
function thisDefaultGet(handler, object, key, desc) {
|
||||
// Note: object@other(unsafe) key@prim desc@other(safe)
|
||||
let ret; // @other(unsafe)
|
||||
if (desc.get || desc.set) {
|
||||
const getter = desc.get;
|
||||
if (!getter) return undefined;
|
||||
try {
|
||||
ret = otherReflectApply(getter, object, [key]);
|
||||
} catch (e) {
|
||||
throw thisFromOther(e);
|
||||
}
|
||||
} else {
|
||||
ret = desc.value;
|
||||
}
|
||||
return handler.fromOtherWithContext(ret);
|
||||
}
|
||||
|
||||
function otherFromThisIfAvailable(to, from, key) {
|
||||
// Note: to@other(safe) from@this(safe) key@prim throws@this(unsafe)
|
||||
if (!thisReflectApply(thisObjectHasOwnProperty, from, [key])) return false;
|
||||
try {
|
||||
to[key] = otherFromThis(from[key]);
|
||||
} catch (e) { // @other(unsafe)
|
||||
throw thisFromOther(e);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
class BaseHandler extends SafeBase {
|
||||
|
||||
constructor(object) {
|
||||
// Note: object@other(unsafe) throws@this(unsafe)
|
||||
super();
|
||||
this.object = object;
|
||||
}
|
||||
|
||||
getFactory() {
|
||||
return defaultFactory;
|
||||
}
|
||||
|
||||
fromOtherWithContext(other) {
|
||||
// Note: other@other(unsafe) throws@this(unsafe)
|
||||
return thisFromOtherWithFactory(this.getFactory(), other);
|
||||
}
|
||||
|
||||
doPreventExtensions(target, object, factory) {
|
||||
// Note: target@this(unsafe) object@other(unsafe) throws@this(unsafe)
|
||||
let keys; // @other(safe-array-of-prim)
|
||||
try {
|
||||
keys = otherReflectOwnKeys(object);
|
||||
} catch (e) { // @other(unsafe)
|
||||
throw thisFromOther(e);
|
||||
}
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
const key = keys[i]; // @prim
|
||||
let desc;
|
||||
try {
|
||||
desc = otherSafeGetOwnPropertyDescriptor(object, key);
|
||||
} catch (e) { // @other(unsafe)
|
||||
throw thisFromOther(e);
|
||||
}
|
||||
if (!desc) continue;
|
||||
if (!desc.configurable) {
|
||||
const current = thisSafeGetOwnPropertyDescriptor(target, key);
|
||||
if (current && !current.configurable) continue;
|
||||
if (desc.get || desc.set) {
|
||||
desc.get = this.fromOtherWithContext(desc.get);
|
||||
desc.set = this.fromOtherWithContext(desc.set);
|
||||
} else if (typeof object === 'function' && (key === 'caller' || key === 'callee' || key === 'arguments')) {
|
||||
desc.value = null;
|
||||
} else {
|
||||
desc.value = this.fromOtherWithContext(desc.value);
|
||||
}
|
||||
} else {
|
||||
if (desc.get || desc.set) {
|
||||
desc = {
|
||||
__proto__: null,
|
||||
configurable: true,
|
||||
enumerable: desc.enumerable,
|
||||
writable: true,
|
||||
value: null
|
||||
};
|
||||
} else {
|
||||
desc.value = null;
|
||||
}
|
||||
}
|
||||
if (!thisReflectDefineProperty(target, key, desc)) throw thisUnexpected();
|
||||
}
|
||||
if (!thisReflectPreventExtensions(target)) throw thisUnexpected();
|
||||
}
|
||||
|
||||
get(target, key, receiver) {
|
||||
// Note: target@this(unsafe) key@prim receiver@this(unsafe) throws@this(unsafe)
|
||||
const object = this.object; // @other(unsafe)
|
||||
switch (key) {
|
||||
case 'constructor': {
|
||||
const desc = otherSafeGetOwnPropertyDescriptor(object, key);
|
||||
if (desc) return thisDefaultGet(this, object, key, desc);
|
||||
const proto = thisReflectGetPrototypeOf(target);
|
||||
return proto === null ? undefined : proto.constructor;
|
||||
}
|
||||
case '__proto__': {
|
||||
const desc = otherSafeGetOwnPropertyDescriptor(object, key);
|
||||
if (desc) return thisDefaultGet(this, object, key, desc);
|
||||
return thisReflectGetPrototypeOf(target);
|
||||
}
|
||||
case thisSymbolToStringTag:
|
||||
if (!thisOtherHasOwnProperty(object, thisSymbolToStringTag)) {
|
||||
const proto = thisReflectGetPrototypeOf(target);
|
||||
const name = thisReflectApply(thisMapGet, protoName, [proto]);
|
||||
if (name) return name;
|
||||
}
|
||||
break;
|
||||
case 'arguments':
|
||||
case 'caller':
|
||||
case 'callee':
|
||||
if (thisOtherHasOwnProperty(object, key)) throw thisThrowCallerCalleeArgumentsAccess(key);
|
||||
break;
|
||||
}
|
||||
let ret; // @other(unsafe)
|
||||
try {
|
||||
ret = otherReflectGet(object, key);
|
||||
} catch (e) { // @other(unsafe)
|
||||
throw thisFromOther(e);
|
||||
}
|
||||
return this.fromOtherWithContext(ret);
|
||||
}
|
||||
|
||||
set(target, key, value, receiver) {
|
||||
// Note: target@this(unsafe) key@prim value@this(unsafe) receiver@this(unsafe) throws@this(unsafe)
|
||||
const object = this.object; // @other(unsafe)
|
||||
if (key === '__proto__' && !thisOtherHasOwnProperty(object, key)) {
|
||||
return this.setPrototypeOf(target, value);
|
||||
}
|
||||
try {
|
||||
value = otherFromThis(value);
|
||||
return otherReflectSet(object, key, value) === true;
|
||||
} catch (e) { // @other(unsafe)
|
||||
throw thisFromOther(e);
|
||||
}
|
||||
}
|
||||
|
||||
getPrototypeOf(target) {
|
||||
// Note: target@this(unsafe)
|
||||
return thisReflectGetPrototypeOf(target);
|
||||
}
|
||||
|
||||
setPrototypeOf(target, value) {
|
||||
// Note: target@this(unsafe) throws@this(unsafe)
|
||||
throw new VMError(OPNA);
|
||||
}
|
||||
|
||||
apply(target, context, args) {
|
||||
// Note: target@this(unsafe) context@this(unsafe) args@this(safe-array) throws@this(unsafe)
|
||||
const object = this.object; // @other(unsafe)
|
||||
let ret; // @other(unsafe)
|
||||
try {
|
||||
context = otherFromThis(context);
|
||||
args = otherFromThisArguments(args);
|
||||
ret = otherReflectApply(object, context, args);
|
||||
} catch (e) { // @other(unsafe)
|
||||
throw thisFromOther(e);
|
||||
}
|
||||
return thisFromOther(ret);
|
||||
}
|
||||
|
||||
construct(target, args, newTarget) {
|
||||
// Note: target@this(unsafe) args@this(safe-array) newTarget@this(unsafe) throws@this(unsafe)
|
||||
const object = this.object; // @other(unsafe)
|
||||
let ret; // @other(unsafe)
|
||||
try {
|
||||
args = otherFromThisArguments(args);
|
||||
ret = otherReflectConstruct(object, args);
|
||||
} catch (e) { // @other(unsafe)
|
||||
throw thisFromOther(e);
|
||||
}
|
||||
return thisFromOtherWithFactory(this.getFactory(), ret, thisFromOther(object));
|
||||
}
|
||||
|
||||
getOwnPropertyDescriptorDesc(target, prop, desc) {
|
||||
// Note: target@this(unsafe) prop@prim desc@other{safe} throws@this(unsafe)
|
||||
const object = this.object; // @other(unsafe)
|
||||
if (desc && typeof object === 'function' && (prop === 'arguments' || prop === 'caller' || prop === 'callee')) desc.value = null;
|
||||
return desc;
|
||||
}
|
||||
|
||||
getOwnPropertyDescriptor(target, prop) {
|
||||
// Note: target@this(unsafe) prop@prim throws@this(unsafe)
|
||||
const object = this.object; // @other(unsafe)
|
||||
let desc; // @other(safe)
|
||||
try {
|
||||
desc = otherSafeGetOwnPropertyDescriptor(object, prop);
|
||||
} catch (e) { // @other(unsafe)
|
||||
throw thisFromOther(e);
|
||||
}
|
||||
|
||||
desc = this.getOwnPropertyDescriptorDesc(target, prop, desc);
|
||||
|
||||
if (!desc) return undefined;
|
||||
|
||||
let thisDesc;
|
||||
if (desc.get || desc.set) {
|
||||
thisDesc = {
|
||||
__proto__: null,
|
||||
get: this.fromOtherWithContext(desc.get),
|
||||
set: this.fromOtherWithContext(desc.set),
|
||||
enumerable: desc.enumerable === true,
|
||||
configurable: desc.configurable === true
|
||||
};
|
||||
} else {
|
||||
thisDesc = {
|
||||
__proto__: null,
|
||||
value: this.fromOtherWithContext(desc.value),
|
||||
writable: desc.writable === true,
|
||||
enumerable: desc.enumerable === true,
|
||||
configurable: desc.configurable === true
|
||||
};
|
||||
}
|
||||
if (!thisDesc.configurable) {
|
||||
const oldDesc = thisSafeGetOwnPropertyDescriptor(target, prop);
|
||||
if (!oldDesc || oldDesc.configurable || oldDesc.writable !== thisDesc.writable) {
|
||||
if (!thisReflectDefineProperty(target, prop, thisDesc)) throw thisUnexpected();
|
||||
}
|
||||
}
|
||||
return thisDesc;
|
||||
}
|
||||
|
||||
definePropertyDesc(target, prop, desc) {
|
||||
// Note: target@this(unsafe) prop@prim desc@this(safe) throws@this(unsafe)
|
||||
return desc;
|
||||
}
|
||||
|
||||
defineProperty(target, prop, desc) {
|
||||
// Note: target@this(unsafe) prop@prim desc@this(unsafe) throws@this(unsafe)
|
||||
const object = this.object; // @other(unsafe)
|
||||
if (!thisReflectSetPrototypeOf(desc, null)) throw thisUnexpected();
|
||||
|
||||
desc = this.definePropertyDesc(target, prop, desc);
|
||||
|
||||
if (!desc) return false;
|
||||
|
||||
let otherDesc = {__proto__: null};
|
||||
let hasFunc = true;
|
||||
let hasValue = true;
|
||||
let hasBasic = true;
|
||||
hasFunc &= otherFromThisIfAvailable(otherDesc, desc, 'get');
|
||||
hasFunc &= otherFromThisIfAvailable(otherDesc, desc, 'set');
|
||||
hasValue &= otherFromThisIfAvailable(otherDesc, desc, 'value');
|
||||
hasValue &= otherFromThisIfAvailable(otherDesc, desc, 'writable');
|
||||
hasBasic &= otherFromThisIfAvailable(otherDesc, desc, 'enumerable');
|
||||
hasBasic &= otherFromThisIfAvailable(otherDesc, desc, 'configurable');
|
||||
|
||||
try {
|
||||
if (!otherReflectDefineProperty(object, prop, otherDesc)) return false;
|
||||
if (otherDesc.configurable !== true && (!hasBasic || !(hasFunc || hasValue))) {
|
||||
otherDesc = otherSafeGetOwnPropertyDescriptor(object, prop);
|
||||
}
|
||||
} catch (e) { // @other(unsafe)
|
||||
throw thisFromOther(e);
|
||||
}
|
||||
|
||||
if (!otherDesc.configurable) {
|
||||
let thisDesc;
|
||||
if (otherDesc.get || otherDesc.set) {
|
||||
thisDesc = {
|
||||
__proto__: null,
|
||||
get: this.fromOtherWithContext(otherDesc.get),
|
||||
set: this.fromOtherWithContext(otherDesc.set),
|
||||
enumerable: otherDesc.enumerable,
|
||||
configurable: otherDesc.configurable
|
||||
};
|
||||
} else {
|
||||
thisDesc = {
|
||||
__proto__: null,
|
||||
value: this.fromOtherWithContext(otherDesc.value),
|
||||
writable: otherDesc.writable,
|
||||
enumerable: otherDesc.enumerable,
|
||||
configurable: otherDesc.configurable
|
||||
};
|
||||
}
|
||||
if (!thisReflectDefineProperty(target, prop, thisDesc)) throw thisUnexpected();
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
deleteProperty(target, prop) {
|
||||
// Note: target@this(unsafe) prop@prim throws@this(unsafe)
|
||||
const object = this.object; // @other(unsafe)
|
||||
try {
|
||||
return otherReflectDeleteProperty(object, prop) === true;
|
||||
} catch (e) { // @other(unsafe)
|
||||
throw thisFromOther(e);
|
||||
}
|
||||
}
|
||||
|
||||
has(target, key) {
|
||||
// Note: target@this(unsafe) key@prim throws@this(unsafe)
|
||||
const object = this.object; // @other(unsafe)
|
||||
try {
|
||||
return otherReflectHas(object, key) === true;
|
||||
} catch (e) { // @other(unsafe)
|
||||
throw thisFromOther(e);
|
||||
}
|
||||
}
|
||||
|
||||
isExtensible(target) {
|
||||
// Note: target@this(unsafe) throws@this(unsafe)
|
||||
const object = this.object; // @other(unsafe)
|
||||
try {
|
||||
if (otherReflectIsExtensible(object)) return true;
|
||||
} catch (e) { // @other(unsafe)
|
||||
throw thisFromOther(e);
|
||||
}
|
||||
if (thisReflectIsExtensible(target)) {
|
||||
this.doPreventExtensions(target, object, this);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
ownKeys(target) {
|
||||
// Note: target@this(unsafe) throws@this(unsafe)
|
||||
const object = this.object; // @other(unsafe)
|
||||
let res; // @other(unsafe)
|
||||
try {
|
||||
res = otherReflectOwnKeys(object);
|
||||
} catch (e) { // @other(unsafe)
|
||||
throw thisFromOther(e);
|
||||
}
|
||||
return thisFromOther(res);
|
||||
}
|
||||
|
||||
preventExtensions(target) {
|
||||
// Note: target@this(unsafe) throws@this(unsafe)
|
||||
const object = this.object; // @other(unsafe)
|
||||
try {
|
||||
if (!otherReflectPreventExtensions(object)) return false;
|
||||
} catch (e) { // @other(unsafe)
|
||||
throw thisFromOther(e);
|
||||
}
|
||||
if (thisReflectIsExtensible(target)) {
|
||||
this.doPreventExtensions(target, object, this);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
enumerate(target) {
|
||||
// Note: target@this(unsafe) throws@this(unsafe)
|
||||
const object = this.object; // @other(unsafe)
|
||||
let res; // @other(unsafe)
|
||||
try {
|
||||
res = otherReflectEnumerate(object);
|
||||
} catch (e) { // @other(unsafe)
|
||||
throw thisFromOther(e);
|
||||
}
|
||||
return this.fromOtherWithContext(res);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
function defaultFactory(object) {
|
||||
// Note: other@other(unsafe) returns@this(unsafe) throws@this(unsafe)
|
||||
return new BaseHandler(object);
|
||||
}
|
||||
|
||||
class ProtectedHandler extends BaseHandler {
|
||||
|
||||
getFactory() {
|
||||
return protectedFactory;
|
||||
}
|
||||
|
||||
set(target, key, value, receiver) {
|
||||
// Note: target@this(unsafe) key@prim value@this(unsafe) receiver@this(unsafe) throws@this(unsafe)
|
||||
if (typeof value === 'function') {
|
||||
return thisReflectDefineProperty(receiver, key, {
|
||||
__proto__: null,
|
||||
value: value,
|
||||
writable: true,
|
||||
enumerable: true,
|
||||
configurable: true
|
||||
}) === true;
|
||||
}
|
||||
return super.set(target, key, value, receiver);
|
||||
}
|
||||
|
||||
definePropertyDesc(target, prop, desc) {
|
||||
// Note: target@this(unsafe) prop@prim desc@this(safe) throws@this(unsafe)
|
||||
if (desc && (desc.set || desc.get || typeof desc.value === 'function')) return undefined;
|
||||
return desc;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
function protectedFactory(object) {
|
||||
// Note: other@other(unsafe) returns@this(unsafe) throws@this(unsafe)
|
||||
return new ProtectedHandler(object);
|
||||
}
|
||||
|
||||
class ReadOnlyHandler extends BaseHandler {
|
||||
|
||||
getFactory() {
|
||||
return readonlyFactory;
|
||||
}
|
||||
|
||||
set(target, key, value, receiver) {
|
||||
// Note: target@this(unsafe) key@prim value@this(unsafe) receiver@this(unsafe) throws@this(unsafe)
|
||||
return thisReflectDefineProperty(receiver, key, {
|
||||
__proto__: null,
|
||||
value: value,
|
||||
writable: true,
|
||||
enumerable: true,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
|
||||
setPrototypeOf(target, value) {
|
||||
// Note: target@this(unsafe) throws@this(unsafe)
|
||||
return false;
|
||||
}
|
||||
|
||||
defineProperty(target, prop, desc) {
|
||||
// Note: target@this(unsafe) prop@prim desc@this(unsafe) throws@this(unsafe)
|
||||
return false;
|
||||
}
|
||||
|
||||
deleteProperty(target, prop) {
|
||||
// Note: target@this(unsafe) prop@prim throws@this(unsafe)
|
||||
return false;
|
||||
}
|
||||
|
||||
isExtensible(target) {
|
||||
// Note: target@this(unsafe) throws@this(unsafe)
|
||||
return false;
|
||||
}
|
||||
|
||||
preventExtensions(target) {
|
||||
// Note: target@this(unsafe) throws@this(unsafe)
|
||||
return false;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
function readonlyFactory(object) {
|
||||
// Note: other@other(unsafe) returns@this(unsafe) throws@this(unsafe)
|
||||
return new ReadOnlyHandler(object);
|
||||
}
|
||||
|
||||
class ReadOnlyMockHandler extends ReadOnlyHandler {
|
||||
|
||||
constructor(object, mock) {
|
||||
// Note: object@other(unsafe) mock:this(unsafe) throws@this(unsafe)
|
||||
super(object);
|
||||
this.mock = mock;
|
||||
}
|
||||
|
||||
get(target, key, receiver) {
|
||||
// Note: target@this(unsafe) key@prim receiver@this(unsafe) throws@this(unsafe)
|
||||
const object = this.object; // @other(unsafe)
|
||||
const mock = this.mock;
|
||||
if (thisReflectApply(thisObjectHasOwnProperty, mock, key) && !thisOtherHasOwnProperty(object, key)) {
|
||||
return mock[key];
|
||||
}
|
||||
return super.get(target, key, receiver);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
function thisFromOther(other) {
|
||||
// Note: other@other(unsafe) returns@this(unsafe) throws@this(unsafe)
|
||||
return thisFromOtherWithFactory(defaultFactory, other);
|
||||
}
|
||||
|
||||
function thisProxyOther(factory, other, proto) {
|
||||
const target = thisCreateTargetObject(other, proto);
|
||||
const handler = factory(other);
|
||||
const proxy = new ThisProxy(target, handler);
|
||||
try {
|
||||
otherReflectApply(otherWeakMapSet, mappingThisToOther, [proxy, other]);
|
||||
registerProxy(proxy, handler);
|
||||
} catch (e) {
|
||||
throw new VMError('Unexpected error');
|
||||
}
|
||||
if (!isHost) {
|
||||
thisReflectApply(thisWeakMapSet, mappingOtherToThis, [other, proxy]);
|
||||
return proxy;
|
||||
}
|
||||
const proxy2 = new ThisProxy(proxy, emptyForzenObject);
|
||||
try {
|
||||
otherReflectApply(otherWeakMapSet, mappingThisToOther, [proxy2, other]);
|
||||
registerProxy(proxy2, handler);
|
||||
} catch (e) {
|
||||
throw new VMError('Unexpected error');
|
||||
}
|
||||
thisReflectApply(thisWeakMapSet, mappingOtherToThis, [other, proxy2]);
|
||||
return proxy2;
|
||||
}
|
||||
|
||||
function thisEnsureThis(other) {
|
||||
const type = typeof other;
|
||||
switch (type) {
|
||||
case 'object':
|
||||
case 'function':
|
||||
if (other === null) {
|
||||
return null;
|
||||
} else {
|
||||
let proto = thisReflectGetPrototypeOf(other);
|
||||
if (!proto) {
|
||||
return other;
|
||||
}
|
||||
while (proto) {
|
||||
const mapping = thisReflectApply(thisMapGet, protoMappings, [proto]);
|
||||
if (mapping) {
|
||||
const mapped = thisReflectApply(thisWeakMapGet, mappingOtherToThis, [other]);
|
||||
if (mapped) return mapped;
|
||||
return mapping(defaultFactory, other);
|
||||
}
|
||||
proto = thisReflectGetPrototypeOf(proto);
|
||||
}
|
||||
return other;
|
||||
}
|
||||
|
||||
case 'undefined':
|
||||
case 'string':
|
||||
case 'number':
|
||||
case 'boolean':
|
||||
case 'symbol':
|
||||
case 'bigint':
|
||||
return other;
|
||||
|
||||
default: // new, unknown types can be dangerous
|
||||
throw new VMError(`Unknown type '${type}'`);
|
||||
}
|
||||
}
|
||||
|
||||
function thisFromOtherWithFactory(factory, other, proto) {
|
||||
for (let loop = 0; loop < 10; loop++) {
|
||||
const type = typeof other;
|
||||
switch (type) {
|
||||
case 'object':
|
||||
case 'function':
|
||||
if (other === null) {
|
||||
return null;
|
||||
} else {
|
||||
const mapped = thisReflectApply(thisWeakMapGet, mappingOtherToThis, [other]);
|
||||
if (mapped) return mapped;
|
||||
if (proto) {
|
||||
return thisProxyOther(factory, other, proto);
|
||||
}
|
||||
try {
|
||||
proto = otherReflectGetPrototypeOf(other);
|
||||
} catch (e) { // @other(unsafe)
|
||||
other = e;
|
||||
break;
|
||||
}
|
||||
if (!proto) {
|
||||
return thisProxyOther(factory, other, null);
|
||||
}
|
||||
while (proto) {
|
||||
const mapping = thisReflectApply(thisMapGet, protoMappings, [proto]);
|
||||
if (mapping) return mapping(factory, other);
|
||||
try {
|
||||
proto = otherReflectGetPrototypeOf(proto);
|
||||
} catch (e) { // @other(unsafe)
|
||||
other = e;
|
||||
break;
|
||||
}
|
||||
}
|
||||
return thisProxyOther(factory, other, thisObjectPrototype);
|
||||
}
|
||||
|
||||
case 'undefined':
|
||||
case 'string':
|
||||
case 'number':
|
||||
case 'boolean':
|
||||
case 'symbol':
|
||||
case 'bigint':
|
||||
return other;
|
||||
|
||||
default: // new, unknown types can be dangerous
|
||||
throw new VMError(`Unknown type '${type}'`);
|
||||
}
|
||||
factory = defaultFactory;
|
||||
proto = undefined;
|
||||
}
|
||||
throw new VMError('Exception recursion depth');
|
||||
}
|
||||
|
||||
function thisFromOtherArguments(args) {
|
||||
// Note: args@other(safe-array) returns@this(safe-array) throws@this(unsafe)
|
||||
const arr = [];
|
||||
for (let i = 0; i < args.length; i++) {
|
||||
const value = thisFromOther(args[i]);
|
||||
thisReflectDefineProperty(arr, i, {
|
||||
__proto__: null,
|
||||
value: value,
|
||||
writable: true,
|
||||
enumerable: true,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
return arr;
|
||||
}
|
||||
|
||||
function thisConnect(obj, other) {
|
||||
// Note: obj@this(unsafe) other@other(unsafe) throws@this(unsafe)
|
||||
try {
|
||||
otherReflectApply(otherWeakMapSet, mappingThisToOther, [obj, other]);
|
||||
} catch (e) {
|
||||
throw new VMError('Unexpected error');
|
||||
}
|
||||
thisReflectApply(thisWeakMapSet, mappingOtherToThis, [other, obj]);
|
||||
}
|
||||
|
||||
thisAddProtoMapping(thisGlobalPrototypes.Object, otherGlobalPrototypes.Object);
|
||||
thisAddProtoMapping(thisGlobalPrototypes.Array, otherGlobalPrototypes.Array);
|
||||
|
||||
for (let i = 0; i < globalsList.length; i++) {
|
||||
const key = globalsList[i];
|
||||
const tp = thisGlobalPrototypes[key];
|
||||
const op = otherGlobalPrototypes[key];
|
||||
if (tp && op) thisAddProtoMapping(tp, op, key);
|
||||
}
|
||||
|
||||
for (let i = 0; i < errorsList.length; i++) {
|
||||
const key = errorsList[i];
|
||||
const tp = thisGlobalPrototypes[key];
|
||||
const op = otherGlobalPrototypes[key];
|
||||
if (tp && op) thisAddProtoMapping(tp, op, 'Error');
|
||||
}
|
||||
|
||||
thisAddProtoMapping(thisGlobalPrototypes.VMError, otherGlobalPrototypes.VMError, 'Error');
|
||||
|
||||
result.BaseHandler = BaseHandler;
|
||||
result.ProtectedHandler = ProtectedHandler;
|
||||
result.ReadOnlyHandler = ReadOnlyHandler;
|
||||
result.ReadOnlyMockHandler = ReadOnlyMockHandler;
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
exports.createBridge = createBridge;
|
||||
exports.VMError = VMError;
|
||||
|
||||
|
||||
/***/ })
|
||||
|
||||
/******/ });
|
||||
/************************************************************************/
|
||||
/******/ /* webpack/runtime/compat */
|
||||
/******/
|
||||
/******/ if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = __dirname + "/";/************************************************************************/
|
||||
/******/
|
||||
/******/ // startup
|
||||
/******/ // Load entry module and return exports
|
||||
/******/ // This entry module is referenced by other modules so it can't be inlined
|
||||
/******/ var __webpack_exports__ = {};
|
||||
/******/ __webpack_modules__[989](0, __webpack_exports__);
|
||||
/******/ module.exports = __webpack_exports__;
|
||||
/******/
|
||||
/******/ })()
|
||||
;
|
dist/events.js (generated, vendored, new file, 1033 lines)
File diff suppressed because it is too large.

dist/index.js (generated, vendored, 83031 lines)
File diff suppressed because one or more lines are too long.
dist/setup-node-sandbox.js (generated, vendored, new file, 473 lines)
@@ -0,0 +1,473 @@
|
||||
/******/ (() => { // webpackBootstrap
|
||||
/******/ "use strict";
|
||||
/******/ /* webpack/runtime/compat */
|
||||
/******/
|
||||
/******/ if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = __dirname + "/";/************************************************************************/
|
||||
var __webpack_exports__ = {};
|
||||
/* global host, data, VMError */
|
||||
|
||||
|
||||
|
||||
const LocalError = Error;
|
||||
const LocalTypeError = TypeError;
|
||||
const LocalWeakMap = WeakMap;
|
||||
|
||||
const {
|
||||
apply: localReflectApply,
|
||||
defineProperty: localReflectDefineProperty
|
||||
} = Reflect;
|
||||
|
||||
const {
|
||||
set: localWeakMapSet,
|
||||
get: localWeakMapGet
|
||||
} = LocalWeakMap.prototype;
|
||||
|
||||
const {
|
||||
isArray: localArrayIsArray
|
||||
} = Array;
|
||||
|
||||
function uncurryThis(func) {
|
||||
return (thiz, ...args) => localReflectApply(func, thiz, args);
|
||||
}
|
||||
|
||||
const localArrayPrototypeSlice = uncurryThis(Array.prototype.slice);
|
||||
const localArrayPrototypeIncludes = uncurryThis(Array.prototype.includes);
|
||||
const localArrayPrototypePush = uncurryThis(Array.prototype.push);
|
||||
const localArrayPrototypeIndexOf = uncurryThis(Array.prototype.indexOf);
|
||||
const localArrayPrototypeSplice = uncurryThis(Array.prototype.splice);
|
||||
const localStringPrototypeStartsWith = uncurryThis(String.prototype.startsWith);
|
||||
const localStringPrototypeSlice = uncurryThis(String.prototype.slice);
|
||||
const localStringPrototypeIndexOf = uncurryThis(String.prototype.indexOf);
|
||||
|
||||
const {
|
||||
argv: optionArgv,
|
||||
env: optionEnv,
|
||||
console: optionConsole,
|
||||
vm,
|
||||
resolver,
|
||||
extensions
|
||||
} = data;
|
||||
|
||||
function ensureSandboxArray(a) {
|
||||
return localArrayPrototypeSlice(a);
|
||||
}
|
||||
|
||||
const globalPaths = ensureSandboxArray(resolver.globalPaths);
|
||||
|
||||
class Module {
|
||||
|
||||
constructor(id, path, parent) {
|
||||
this.id = id;
|
||||
this.filename = id;
|
||||
this.path = path;
|
||||
this.parent = parent;
|
||||
this.loaded = false;
|
||||
this.paths = path ? ensureSandboxArray(resolver.genLookupPaths(path)) : [];
|
||||
this.children = [];
|
||||
this.exports = {};
|
||||
}
|
||||
|
||||
_updateChildren(child, isNew) {
|
||||
const children = this.children;
|
||||
if (children && (isNew || !localArrayPrototypeIncludes(children, child))) {
|
||||
localArrayPrototypePush(children, child);
|
||||
}
|
||||
}
|
||||
|
||||
require(id) {
|
||||
return requireImpl(this, id, false);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
const originalRequire = Module.prototype.require;
|
||||
const cacheBuiltins = {__proto__: null};
|
||||
|
||||
function requireImpl(mod, id, direct) {
|
||||
if (direct && mod.require !== originalRequire) {
|
||||
return mod.require(id);
|
||||
}
|
||||
const filename = resolver.resolve(mod, id, undefined, Module._extensions, direct);
|
||||
if (localStringPrototypeStartsWith(filename, 'node:')) {
|
||||
id = localStringPrototypeSlice(filename, 5);
|
||||
let nmod = cacheBuiltins[id];
|
||||
if (!nmod) {
|
||||
nmod = resolver.loadBuiltinModule(vm, id);
|
||||
if (!nmod) throw new VMError(`Cannot find module '${filename}'`, 'ENOTFOUND');
|
||||
cacheBuiltins[id] = nmod;
|
||||
}
|
||||
return nmod;
|
||||
}
|
||||
|
||||
const cachedModule = Module._cache[filename];
|
||||
if (cachedModule !== undefined) {
|
||||
mod._updateChildren(cachedModule, false);
|
||||
return cachedModule.exports;
|
||||
}
|
||||
|
||||
let nmod = cacheBuiltins[id];
|
||||
if (nmod) return nmod;
|
||||
nmod = resolver.loadBuiltinModule(vm, id);
|
||||
if (nmod) {
|
||||
cacheBuiltins[id] = nmod;
|
||||
return nmod;
|
||||
}
|
||||
|
||||
const path = resolver.pathDirname(filename);
|
||||
const module = new Module(filename, path, mod);
|
||||
resolver.registerModule(module, filename, path, mod, direct);
|
||||
mod._updateChildren(module, true);
|
||||
try {
|
||||
Module._cache[filename] = module;
|
||||
const handler = findBestExtensionHandler(filename);
|
||||
handler(module, filename);
|
||||
module.loaded = true;
|
||||
} catch (e) {
|
||||
delete Module._cache[filename];
|
||||
const children = mod.children;
|
||||
if (localArrayIsArray(children)) {
|
||||
const index = localArrayPrototypeIndexOf(children, module);
|
||||
if (index !== -1) {
|
||||
localArrayPrototypeSplice(children, index, 1);
|
||||
}
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
|
||||
return module.exports;
|
||||
}
|
||||
|
||||
Module.builtinModules = ensureSandboxArray(resolver.getBuiltinModulesList());
|
||||
Module.globalPaths = globalPaths;
|
||||
Module._extensions = {__proto__: null};
|
||||
Module._cache = {__proto__: null};
|
||||
|
||||
{
|
||||
const keys = Object.getOwnPropertyNames(extensions);
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
const key = keys[i];
|
||||
const handler = extensions[key];
|
||||
Module._extensions[key] = (mod, filename) => handler(mod, filename);
|
||||
}
|
||||
}
|
||||
|
||||
function findBestExtensionHandler(filename) {
|
||||
const name = resolver.pathBasename(filename);
|
||||
for (let i = 0; (i = localStringPrototypeIndexOf(name, '.', i + 1)) !== -1;) {
|
||||
const ext = localStringPrototypeSlice(name, i + 1);
|
||||
const handler = Module._extensions[ext];
|
||||
if (handler) return handler;
|
||||
}
|
||||
const js = Module._extensions['.js'];
|
||||
if (js) return js;
|
||||
const keys = Object.getOwnPropertyNames(Module._extensions);
|
||||
if (keys.length === 0) throw new VMError(`Failed to load '${filename}': Unknown type.`, 'ELOADFAIL');
|
||||
return Module._extensions[keys[0]];
|
||||
}
|
||||
|
||||
function createRequireForModule(mod) {
|
||||
// eslint-disable-next-line no-shadow
|
||||
function require(id) {
|
||||
return requireImpl(mod, id, true);
|
||||
}
|
||||
function resolve(id, options) {
|
||||
return resolver.resolve(mod, id, options, Module._extensions, true);
|
||||
}
|
||||
require.resolve = resolve;
|
||||
function paths(id) {
|
||||
return ensureSandboxArray(resolver.lookupPaths(mod, id));
|
||||
}
|
||||
resolve.paths = paths;
|
||||
|
||||
require.extensions = Module._extensions;
|
||||
|
||||
require.cache = Module._cache;
|
||||
|
||||
return require;
|
||||
}
|
||||
|
||||
/**
|
||||
* Prepare sandbox.
|
||||
*/
|
||||
|
||||
const TIMERS = new LocalWeakMap();
|
||||
|
||||
class Timeout {
|
||||
}
|
||||
|
||||
class Interval {
|
||||
}
|
||||
|
||||
class Immediate {
|
||||
}
|
||||
|
||||
function clearTimer(timer) {
|
||||
const obj = localReflectApply(localWeakMapGet, TIMERS, [timer]);
|
||||
if (obj) {
|
||||
obj.clear(obj.value);
|
||||
}
|
||||
}
|
||||
|
||||
// This is a function and not an arrow function, since the original is also a function
|
||||
// eslint-disable-next-line no-shadow
|
||||
global.setTimeout = function setTimeout(callback, delay, ...args) {
|
||||
if (typeof callback !== 'function') throw new LocalTypeError('"callback" argument must be a function');
|
||||
const obj = new Timeout(callback, args);
|
||||
const cb = () => {
|
||||
localReflectApply(callback, null, args);
|
||||
};
|
||||
const tmr = host.setTimeout(cb, delay);
|
||||
|
||||
const ref = {
|
||||
__proto__: null,
|
||||
clear: host.clearTimeout,
|
||||
value: tmr
|
||||
};
|
||||
|
||||
localReflectApply(localWeakMapSet, TIMERS, [obj, ref]);
|
||||
return obj;
|
||||
};
|
||||
|
||||
// eslint-disable-next-line no-shadow
|
||||
global.setInterval = function setInterval(callback, interval, ...args) {
|
||||
if (typeof callback !== 'function') throw new LocalTypeError('"callback" argument must be a function');
|
||||
const obj = new Interval();
|
||||
const cb = () => {
|
||||
localReflectApply(callback, null, args);
|
||||
};
|
||||
const tmr = host.setInterval(cb, interval);
|
||||
|
||||
const ref = {
|
||||
__proto__: null,
|
||||
clear: host.clearInterval,
|
||||
value: tmr
|
||||
};
|
||||
|
||||
localReflectApply(localWeakMapSet, TIMERS, [obj, ref]);
|
||||
return obj;
|
||||
};
|
||||
|
||||
// eslint-disable-next-line no-shadow
|
||||
global.setImmediate = function setImmediate(callback, ...args) {
|
||||
if (typeof callback !== 'function') throw new LocalTypeError('"callback" argument must be a function');
|
||||
const obj = new Immediate();
|
||||
const cb = () => {
|
||||
localReflectApply(callback, null, args);
|
||||
};
|
||||
const tmr = host.setImmediate(cb);
|
||||
|
||||
const ref = {
|
||||
__proto__: null,
|
||||
clear: host.clearImmediate,
|
||||
value: tmr
|
||||
};
|
||||
|
||||
localReflectApply(localWeakMapSet, TIMERS, [obj, ref]);
|
||||
return obj;
|
||||
};
|
||||
|
||||
// eslint-disable-next-line no-shadow
|
||||
global.clearTimeout = function clearTimeout(timeout) {
|
||||
clearTimer(timeout);
|
||||
};
|
||||
|
||||
// eslint-disable-next-line no-shadow
|
||||
global.clearInterval = function clearInterval(interval) {
|
||||
clearTimer(interval);
|
||||
};
|
||||
|
||||
// eslint-disable-next-line no-shadow
|
||||
global.clearImmediate = function clearImmediate(immediate) {
|
||||
clearTimer(immediate);
|
||||
};
|
||||
|
||||
const localProcess = host.process;
|
||||
|
||||
function vmEmitArgs(event, args) {
|
||||
const allargs = [event];
|
||||
for (let i = 0; i < args.length; i++) {
|
||||
if (!localReflectDefineProperty(allargs, i + 1, {
|
||||
__proto__: null,
|
||||
value: args[i],
|
||||
writable: true,
|
||||
enumerable: true,
|
||||
configurable: true
|
||||
})) throw new LocalError('Unexpected');
|
||||
}
|
||||
return localReflectApply(vm.emit, vm, allargs);
|
||||
}
|
||||
|
||||
const LISTENERS = new LocalWeakMap();
|
||||
const LISTENER_HANDLER = new LocalWeakMap();
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {*} name
|
||||
* @param {*} handler
|
||||
* @this process
|
||||
* @return {this}
|
||||
*/
|
||||
function addListener(name, handler) {
|
||||
if (name !== 'beforeExit' && name !== 'exit') {
|
||||
throw new LocalError(`Access denied to listen for '${name}' event.`);
|
||||
}
|
||||
|
||||
let cb = localReflectApply(localWeakMapGet, LISTENERS, [handler]);
|
||||
if (!cb) {
|
||||
cb = () => {
|
||||
handler();
|
||||
};
|
||||
localReflectApply(localWeakMapSet, LISTENER_HANDLER, [cb, handler]);
|
||||
localReflectApply(localWeakMapSet, LISTENERS, [handler, cb]);
|
||||
}
|
||||
|
||||
localProcess.on(name, cb);
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @this process
|
||||
* @return {this}
|
||||
*/
|
||||
// eslint-disable-next-line no-shadow
|
||||
function process() {
|
||||
return this;
|
||||
}
|
||||
|
||||
// FIXME wrong class structure
|
||||
global.process = {
|
||||
__proto__: process.prototype,
|
||||
argv: optionArgv !== undefined ? optionArgv : [],
|
||||
title: localProcess.title,
|
||||
version: localProcess.version,
|
||||
versions: localProcess.versions,
|
||||
arch: localProcess.arch,
|
||||
platform: localProcess.platform,
|
||||
env: optionEnv !== undefined ? optionEnv : {},
|
||||
pid: localProcess.pid,
|
||||
features: localProcess.features,
|
||||
nextTick: function nextTick(callback, ...args) {
|
||||
if (typeof callback !== 'function') {
|
||||
throw new LocalError('Callback must be a function.');
|
||||
}
|
||||
|
||||
localProcess.nextTick(()=>{
|
||||
localReflectApply(callback, null, args);
|
||||
});
|
||||
},
|
||||
hrtime: function hrtime(time) {
|
||||
return localProcess.hrtime(time);
|
||||
},
|
||||
cwd: function cwd() {
|
||||
return localProcess.cwd();
|
||||
},
|
||||
addListener,
|
||||
on: addListener,
|
||||
|
||||
once: function once(name, handler) {
|
||||
if (name !== 'beforeExit' && name !== 'exit') {
|
||||
throw new LocalError(`Access denied to listen for '${name}' event.`);
|
||||
}
|
||||
|
||||
let triggered = false;
|
||||
const cb = () => {
|
||||
if (triggered) return;
|
||||
triggered = true;
|
||||
localProcess.removeListener(name, cb);
|
||||
handler();
|
||||
};
|
||||
localReflectApply(localWeakMapSet, LISTENER_HANDLER, [cb, handler]);
|
||||
|
||||
localProcess.on(name, cb);
|
||||
|
||||
return this;
|
||||
},
|
||||
|
||||
listeners: function listeners(name) {
|
||||
if (name !== 'beforeExit' && name !== 'exit') {
|
||||
// Maybe add ({__proto__:null})[name] to throw when name fails in https://tc39.es/ecma262/#sec-topropertykey.
|
||||
return [];
|
||||
}
|
||||
|
||||
// Filter out listeners, which were not created in this sandbox
|
||||
const all = localProcess.listeners(name);
|
||||
const filtered = [];
|
||||
let j = 0;
|
||||
for (let i = 0; i < all.length; i++) {
|
||||
const h = localReflectApply(localWeakMapGet, LISTENER_HANDLER, [all[i]]);
|
||||
if (h) {
|
||||
if (!localReflectDefineProperty(filtered, j, {
|
||||
__proto__: null,
|
||||
value: h,
|
||||
writable: true,
|
||||
enumerable: true,
|
||||
configurable: true
|
||||
})) throw new LocalError('Unexpected');
|
||||
j++;
|
||||
}
|
||||
}
|
||||
return filtered;
|
||||
},
|
||||
|
||||
removeListener: function removeListener(name, handler) {
|
||||
if (name !== 'beforeExit' && name !== 'exit') {
|
||||
return this;
|
||||
}
|
||||
|
||||
const cb = localReflectApply(localWeakMapGet, LISTENERS, [handler]);
|
||||
if (cb) localProcess.removeListener(name, cb);
|
||||
|
||||
return this;
|
||||
},
|
||||
|
||||
umask: function umask() {
|
||||
if (arguments.length) {
|
||||
throw new LocalError('Access denied to set umask.');
|
||||
}
|
||||
|
||||
return localProcess.umask();
|
||||
}
|
||||
};
|
||||
|
||||
if (optionConsole === 'inherit') {
|
||||
global.console = host.console;
|
||||
} else if (optionConsole === 'redirect') {
|
||||
global.console = {
|
||||
debug(...args) {
|
||||
vmEmitArgs('console.debug', args);
|
||||
},
|
||||
log(...args) {
|
||||
vmEmitArgs('console.log', args);
|
||||
},
|
||||
info(...args) {
|
||||
vmEmitArgs('console.info', args);
|
||||
},
|
||||
warn(...args) {
|
||||
vmEmitArgs('console.warn', args);
|
||||
},
|
||||
error(...args) {
|
||||
vmEmitArgs('console.error', args);
|
||||
},
|
||||
dir(...args) {
|
||||
vmEmitArgs('console.dir', args);
|
||||
},
|
||||
time() {},
|
||||
timeEnd() {},
|
||||
trace(...args) {
|
||||
vmEmitArgs('console.trace', args);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
__proto__: null,
|
||||
Module,
|
||||
jsonParse: JSON.parse,
|
||||
createRequireForModule
|
||||
};
|
||||
|
||||
module.exports = __webpack_exports__;
|
||||
/******/ })()
|
||||
;
|
462
dist/setup-sandbox.js
generated
vendored
Normal file
462
dist/setup-sandbox.js
generated
vendored
Normal file
@@ -0,0 +1,462 @@
|
||||
/******/ (() => { // webpackBootstrap
|
||||
/******/ "use strict";
|
||||
/******/ /* webpack/runtime/compat */
|
||||
/******/
|
||||
/******/ if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = __dirname + "/";/************************************************************************/
|
||||
var __webpack_exports__ = {};
|
||||
/* global host, bridge, data, context */
|
||||
|
||||
|
||||
|
||||
const {
|
||||
Object: localObject,
|
||||
Array: localArray,
|
||||
Error: LocalError,
|
||||
Reflect: localReflect,
|
||||
Proxy: LocalProxy,
|
||||
WeakMap: LocalWeakMap,
|
||||
Function: localFunction,
|
||||
Promise: localPromise,
|
||||
eval: localEval
|
||||
} = global;
|
||||
|
||||
const {
|
||||
freeze: localObjectFreeze
|
||||
} = localObject;
|
||||
|
||||
const {
|
||||
getPrototypeOf: localReflectGetPrototypeOf,
|
||||
apply: localReflectApply,
|
||||
deleteProperty: localReflectDeleteProperty,
|
||||
has: localReflectHas,
|
||||
defineProperty: localReflectDefineProperty,
|
||||
setPrototypeOf: localReflectSetPrototypeOf,
|
||||
getOwnPropertyDescriptor: localReflectGetOwnPropertyDescriptor
|
||||
} = localReflect;
|
||||
|
||||
const {
|
||||
isArray: localArrayIsArray
|
||||
} = localArray;
|
||||
|
||||
const {
|
||||
ensureThis,
|
||||
ReadOnlyHandler,
|
||||
from,
|
||||
fromWithFactory,
|
||||
readonlyFactory,
|
||||
connect,
|
||||
addProtoMapping,
|
||||
VMError,
|
||||
ReadOnlyMockHandler
|
||||
} = bridge;
|
||||
|
||||
const {
|
||||
allowAsync,
|
||||
GeneratorFunction,
|
||||
AsyncFunction,
|
||||
AsyncGeneratorFunction
|
||||
} = data;
|
||||
|
||||
const localWeakMapGet = LocalWeakMap.prototype.get;
|
||||
|
||||
function localUnexpected() {
|
||||
return new VMError('Should not happen');
|
||||
}
|
||||
|
||||
// global is originally prototype of host.Object so it can be used to climb up from the sandbox.
|
||||
if (!localReflectSetPrototypeOf(context, localObject.prototype)) throw localUnexpected();
|
||||
|
||||
Object.defineProperties(global, {
|
||||
global: {value: global, writable: true, configurable: true, enumerable: true},
|
||||
globalThis: {value: global, writable: true, configurable: true},
|
||||
GLOBAL: {value: global, writable: true, configurable: true},
|
||||
root: {value: global, writable: true, configurable: true}
|
||||
});
|
||||
|
||||
if (!localReflectDefineProperty(global, 'VMError', {
|
||||
__proto__: null,
|
||||
value: VMError,
|
||||
writable: true,
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
})) throw localUnexpected();
|
||||
|
||||
// Fixes buffer unsafe allocation
|
||||
/* eslint-disable no-use-before-define */
|
||||
class BufferHandler extends ReadOnlyHandler {
|
||||
|
||||
apply(target, thiz, args) {
|
||||
if (args.length > 0 && typeof args[0] === 'number') {
|
||||
return LocalBuffer.alloc(args[0]);
|
||||
}
|
||||
return localReflectApply(LocalBuffer.from, LocalBuffer, args);
|
||||
}
|
||||
|
||||
construct(target, args, newTarget) {
|
||||
if (args.length > 0 && typeof args[0] === 'number') {
|
||||
return LocalBuffer.alloc(args[0]);
|
||||
}
|
||||
return localReflectApply(LocalBuffer.from, LocalBuffer, args);
|
||||
}
|
||||
|
||||
}
|
||||
/* eslint-enable no-use-before-define */
|
||||
|
||||
const LocalBuffer = fromWithFactory(obj => new BufferHandler(obj), host.Buffer);
|
||||
|
||||
|
||||
if (!localReflectDefineProperty(global, 'Buffer', {
|
||||
__proto__: null,
|
||||
value: LocalBuffer,
|
||||
writable: true,
|
||||
enumerable: false,
|
||||
configurable: true
|
||||
})) throw localUnexpected();
|
||||
|
||||
addProtoMapping(LocalBuffer.prototype, host.Buffer.prototype, 'Uint8Array');
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {*} size Size of new buffer
|
||||
* @this LocalBuffer
|
||||
* @return {LocalBuffer}
|
||||
*/
|
||||
function allocUnsafe(size) {
|
||||
return LocalBuffer.alloc(size);
|
||||
}
|
||||
|
||||
connect(allocUnsafe, host.Buffer.allocUnsafe);
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {*} size Size of new buffer
|
||||
* @this LocalBuffer
|
||||
* @return {LocalBuffer}
|
||||
*/
|
||||
function allocUnsafeSlow(size) {
|
||||
return LocalBuffer.alloc(size);
|
||||
}
|
||||
|
||||
connect(allocUnsafeSlow, host.Buffer.allocUnsafeSlow);
|
||||
|
||||
/**
|
||||
* Replacement for Buffer inspect
|
||||
*
|
||||
* @param {*} recurseTimes
|
||||
* @param {*} ctx
|
||||
* @this LocalBuffer
|
||||
* @return {string}
|
||||
*/
|
||||
function inspect(recurseTimes, ctx) {
|
||||
// Mimic old behavior, could throw but didn't pass a test.
|
||||
const max = host.INSPECT_MAX_BYTES;
|
||||
const actualMax = Math.min(max, this.length);
|
||||
const remaining = this.length - max;
|
||||
let str = this.hexSlice(0, actualMax).replace(/(.{2})/g, '$1 ').trim();
|
||||
if (remaining > 0) str += ` ... ${remaining} more byte${remaining > 1 ? 's' : ''}`;
|
||||
return `<${this.constructor.name} ${str}>`;
|
||||
}
|
||||
|
||||
connect(inspect, host.Buffer.prototype.inspect);
|
||||
|
||||
connect(localFunction.prototype.bind, host.Function.prototype.bind);
|
||||
|
||||
connect(localObject.prototype.__defineGetter__, host.Object.prototype.__defineGetter__);
|
||||
connect(localObject.prototype.__defineSetter__, host.Object.prototype.__defineSetter__);
|
||||
connect(localObject.prototype.__lookupGetter__, host.Object.prototype.__lookupGetter__);
|
||||
connect(localObject.prototype.__lookupSetter__, host.Object.prototype.__lookupSetter__);
|
||||
|
||||
/*
|
||||
* PrepareStackTrace sanitization
|
||||
*/
|
||||
|
||||
const oldPrepareStackTraceDesc = localReflectGetOwnPropertyDescriptor(LocalError, 'prepareStackTrace');
|
||||
|
||||
let currentPrepareStackTrace = LocalError.prepareStackTrace;
|
||||
const wrappedPrepareStackTrace = new LocalWeakMap();
|
||||
if (typeof currentPrepareStackTrace === 'function') {
|
||||
wrappedPrepareStackTrace.set(currentPrepareStackTrace, currentPrepareStackTrace);
|
||||
}
|
||||
|
||||
let OriginalCallSite;
|
||||
LocalError.prepareStackTrace = (e, sst) => {
|
||||
OriginalCallSite = sst[0].constructor;
|
||||
};
|
||||
new LocalError().stack;
|
||||
if (typeof OriginalCallSite === 'function') {
|
||||
LocalError.prepareStackTrace = undefined;
|
||||
|
||||
function makeCallSiteGetters(list) {
|
||||
const callSiteGetters = [];
|
||||
for (let i=0; i<list.length; i++) {
|
||||
const name = list[i];
|
||||
const func = OriginalCallSite.prototype[name];
|
||||
callSiteGetters[i] = {__proto__: null,
|
||||
name,
|
||||
propName: '_' + name,
|
||||
func: (thiz) => {
|
||||
return localReflectApply(func, thiz, []);
|
||||
}
|
||||
};
|
||||
}
|
||||
return callSiteGetters;
|
||||
}
|
||||
|
||||
function applyCallSiteGetters(thiz, callSite, getters) {
|
||||
for (let i=0; i<getters.length; i++) {
|
||||
const getter = getters[i];
|
||||
localReflectDefineProperty(thiz, getter.propName, {
|
||||
__proto__: null,
|
||||
value: getter.func(callSite)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const callSiteGetters = makeCallSiteGetters([
|
||||
'getTypeName',
|
||||
'getFunctionName',
|
||||
'getMethodName',
|
||||
'getFileName',
|
||||
'getLineNumber',
|
||||
'getColumnNumber',
|
||||
'getEvalOrigin',
|
||||
'isToplevel',
|
||||
'isEval',
|
||||
'isNative',
|
||||
'isConstructor',
|
||||
'isAsync',
|
||||
'isPromiseAll',
|
||||
'getPromiseIndex'
|
||||
]);
|
||||
|
||||
class CallSite {
|
||||
constructor(callSite) {
|
||||
applyCallSiteGetters(this, callSite, callSiteGetters);
|
||||
}
|
||||
getThis() {
|
||||
return undefined;
|
||||
}
|
||||
getFunction() {
|
||||
return undefined;
|
||||
}
|
||||
toString() {
|
||||
return 'CallSite {}';
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
for (let i=0; i<callSiteGetters.length; i++) {
|
||||
const name = callSiteGetters[i].name;
|
||||
const funcProp = localReflectGetOwnPropertyDescriptor(OriginalCallSite.prototype, name);
|
||||
if (!funcProp) continue;
|
||||
const propertyName = callSiteGetters[i].propName;
|
||||
const func = {func() {
|
||||
return this[propertyName];
|
||||
}}.func;
|
||||
const nameProp = localReflectGetOwnPropertyDescriptor(func, 'name');
|
||||
if (!nameProp) throw localUnexpected();
|
||||
nameProp.value = name;
|
||||
if (!localReflectDefineProperty(func, 'name', nameProp)) throw localUnexpected();
|
||||
funcProp.value = func;
|
||||
if (!localReflectDefineProperty(CallSite.prototype, name, funcProp)) throw localUnexpected();
|
||||
}
|
||||
|
||||
if (!localReflectDefineProperty(LocalError, 'prepareStackTrace', {
|
||||
configurable: false,
|
||||
enumerable: false,
|
||||
get() {
|
||||
return currentPrepareStackTrace;
|
||||
},
|
||||
set(value) {
|
||||
if (typeof(value) !== 'function') {
|
||||
currentPrepareStackTrace = value;
|
||||
return;
|
||||
}
|
||||
const wrapped = localReflectApply(localWeakMapGet, wrappedPrepareStackTrace, [value]);
|
||||
if (wrapped) {
|
||||
currentPrepareStackTrace = wrapped;
|
||||
return;
|
||||
}
|
||||
const newWrapped = (error, sst) => {
|
||||
if (localArrayIsArray(sst)) {
|
||||
for (let i=0; i < sst.length; i++) {
|
||||
const cs = sst[i];
|
||||
if (typeof cs === 'object' && localReflectGetPrototypeOf(cs) === OriginalCallSite.prototype) {
|
||||
sst[i] = new CallSite(cs);
|
||||
}
|
||||
}
|
||||
}
|
||||
return value(error, sst);
|
||||
};
|
||||
wrappedPrepareStackTrace.set(value, newWrapped);
|
||||
wrappedPrepareStackTrace.set(newWrapped, newWrapped);
|
||||
currentPrepareStackTrace = newWrapped;
|
||||
}
|
||||
})) throw localUnexpected();
|
||||
} else if (oldPrepareStackTraceDesc) {
|
||||
localReflectDefineProperty(LocalError, 'prepareStackTrace', oldPrepareStackTraceDesc);
|
||||
} else {
|
||||
localReflectDeleteProperty(LocalError, 'prepareStackTrace');
|
||||
}
|
||||
|
||||
/*
|
||||
* Exception sanitization
|
||||
*/
|
||||
|
||||
const withProxy = localObjectFreeze({
|
||||
__proto__: null,
|
||||
has(target, key) {
|
||||
if (key === host.INTERNAL_STATE_NAME) return false;
|
||||
return localReflectHas(target, key);
|
||||
}
|
||||
});
|
||||
|
||||
const interanState = localObjectFreeze({
|
||||
__proto__: null,
|
||||
wrapWith(x) {
|
||||
return new LocalProxy(x, withProxy);
|
||||
},
|
||||
handleException: ensureThis,
|
||||
import(what) {
|
||||
throw new VMError('Dynamic Import not supported');
|
||||
}
|
||||
});
|
||||
|
||||
if (!localReflectDefineProperty(global, host.INTERNAL_STATE_NAME, {
|
||||
__proto__: null,
|
||||
configurable: false,
|
||||
enumerable: false,
|
||||
writable: false,
|
||||
value: interanState
|
||||
})) throw localUnexpected();
|
||||
|
||||
/*
|
||||
* Eval sanitization
|
||||
*/
|
||||
|
||||
function throwAsync() {
|
||||
return new VMError('Async not available');
|
||||
}
|
||||
|
||||
function makeFunction(inputArgs, isAsync, isGenerator) {
|
||||
const lastArgs = inputArgs.length - 1;
|
||||
let code = lastArgs >= 0 ? `${inputArgs[lastArgs]}` : '';
|
||||
let args = lastArgs > 0 ? `${inputArgs[0]}` : '';
|
||||
for (let i = 1; i < lastArgs; i++) {
|
||||
args += `,${inputArgs[i]}`;
|
||||
}
|
||||
try {
|
||||
code = host.transformAndCheck(args, code, isAsync, isGenerator, allowAsync);
|
||||
} catch (e) {
|
||||
throw bridge.from(e);
|
||||
}
|
||||
return localEval(code);
|
||||
}
|
||||
|
||||
const FunctionHandler = {
|
||||
__proto__: null,
|
||||
apply(target, thiz, args) {
|
||||
return makeFunction(args, this.isAsync, this.isGenerator);
|
||||
},
|
||||
construct(target, args, newTarget) {
|
||||
return makeFunction(args, this.isAsync, this.isGenerator);
|
||||
}
|
||||
};
|
||||
|
||||
const EvalHandler = {
|
||||
__proto__: null,
|
||||
apply(target, thiz, args) {
|
||||
if (args.length === 0) return undefined;
|
||||
let code = `${args[0]}`;
|
||||
try {
|
||||
code = host.transformAndCheck(null, code, false, false, allowAsync);
|
||||
} catch (e) {
|
||||
throw bridge.from(e);
|
||||
}
|
||||
return localEval(code);
|
||||
}
|
||||
};
|
||||
|
||||
const AsyncErrorHandler = {
|
||||
__proto__: null,
|
||||
apply(target, thiz, args) {
|
||||
throw throwAsync();
|
||||
},
|
||||
construct(target, args, newTarget) {
|
||||
throw throwAsync();
|
||||
}
|
||||
};
|
||||
|
||||
function makeCheckFunction(isAsync, isGenerator) {
|
||||
if (isAsync && !allowAsync) return AsyncErrorHandler;
|
||||
return {
|
||||
__proto__: FunctionHandler,
|
||||
isAsync,
|
||||
isGenerator
|
||||
};
|
||||
}
|
||||
|
||||
function overrideWithProxy(obj, prop, value, handler) {
|
||||
const proxy = new LocalProxy(value, handler);
|
||||
if (!localReflectDefineProperty(obj, prop, {__proto__: null, value: proxy})) throw localUnexpected();
|
||||
return proxy;
|
||||
}
|
||||
|
||||
const proxiedFunction = overrideWithProxy(localFunction.prototype, 'constructor', localFunction, makeCheckFunction(false, false));
|
||||
if (GeneratorFunction) {
|
||||
if (!localReflectSetPrototypeOf(GeneratorFunction, proxiedFunction)) throw localUnexpected();
|
||||
overrideWithProxy(GeneratorFunction.prototype, 'constructor', GeneratorFunction, makeCheckFunction(false, true));
|
||||
}
|
||||
if (AsyncFunction) {
|
||||
if (!localReflectSetPrototypeOf(AsyncFunction, proxiedFunction)) throw localUnexpected();
|
||||
overrideWithProxy(AsyncFunction.prototype, 'constructor', AsyncFunction, makeCheckFunction(true, false));
|
||||
}
|
||||
if (AsyncGeneratorFunction) {
|
||||
if (!localReflectSetPrototypeOf(AsyncGeneratorFunction, proxiedFunction)) throw localUnexpected();
|
||||
overrideWithProxy(AsyncGeneratorFunction.prototype, 'constructor', AsyncGeneratorFunction, makeCheckFunction(true, true));
|
||||
}
|
||||
|
||||
global.Function = proxiedFunction;
|
||||
global.eval = new LocalProxy(localEval, EvalHandler);
|
||||
|
||||
/*
|
||||
* Promise sanitization
|
||||
*/
|
||||
|
||||
if (localPromise && !allowAsync) {
|
||||
|
||||
const PromisePrototype = localPromise.prototype;
|
||||
|
||||
overrideWithProxy(PromisePrototype, 'then', PromisePrototype.then, AsyncErrorHandler);
|
||||
// This seems not to work, and will produce
|
||||
// UnhandledPromiseRejectionWarning: TypeError: Method Promise.prototype.then called on incompatible receiver [object Object].
|
||||
// This is likely caused since the host.Promise.prototype.then cannot use the VM Proxy object.
|
||||
// Contextify.connect(host.Promise.prototype.then, Promise.prototype.then);
|
||||
|
||||
if (PromisePrototype.finally) {
|
||||
overrideWithProxy(PromisePrototype, 'finally', PromisePrototype.finally, AsyncErrorHandler);
|
||||
// Contextify.connect(host.Promise.prototype.finally, Promise.prototype.finally);
|
||||
}
|
||||
if (Promise.prototype.catch) {
|
||||
overrideWithProxy(PromisePrototype, 'catch', PromisePrototype.catch, AsyncErrorHandler);
|
||||
// Contextify.connect(host.Promise.prototype.catch, Promise.prototype.catch);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
function readonly(other, mock) {
|
||||
// Note: other@other(unsafe) mock@other(unsafe) returns@this(unsafe) throws@this(unsafe)
|
||||
if (!mock) return fromWithFactory(readonlyFactory, other);
|
||||
const tmock = from(mock);
|
||||
return fromWithFactory(obj=>new ReadOnlyMockHandler(obj, tmock), other);
|
||||
}
|
||||
|
||||
return {
|
||||
__proto__: null,
|
||||
readonly,
|
||||
global
|
||||
};
|
||||
|
||||
module.exports = __webpack_exports__;
|
||||
/******/ })()
|
||||
;
|
70
docker-bake.hcl
Normal file
70
docker-bake.hcl
Normal file
@@ -0,0 +1,70 @@
|
||||
variable "NODE_VERSION" {
|
||||
default = "12"
|
||||
}
|
||||
|
||||
target "node-version" {
|
||||
args = {
|
||||
NODE_VERSION = NODE_VERSION
|
||||
}
|
||||
}
|
||||
|
||||
group "default" {
|
||||
targets = ["build"]
|
||||
}
|
||||
|
||||
group "pre-checkin" {
|
||||
targets = ["vendor-update", "format", "build"]
|
||||
}
|
||||
|
||||
group "validate" {
|
||||
targets = ["format-validate", "build-validate", "vendor-validate"]
|
||||
}
|
||||
|
||||
target "build" {
|
||||
inherits = ["node-version"]
|
||||
dockerfile = "./hack/build.Dockerfile"
|
||||
target = "build-update"
|
||||
output = ["."]
|
||||
}
|
||||
|
||||
target "build-validate" {
|
||||
inherits = ["node-version"]
|
||||
dockerfile = "./hack/build.Dockerfile"
|
||||
target = "build-validate"
|
||||
output = ["type=cacheonly"]
|
||||
}
|
||||
|
||||
target "format" {
|
||||
inherits = ["node-version"]
|
||||
dockerfile = "./hack/build.Dockerfile"
|
||||
target = "format-update"
|
||||
output = ["."]
|
||||
}
|
||||
|
||||
target "format-validate" {
|
||||
inherits = ["node-version"]
|
||||
dockerfile = "./hack/build.Dockerfile"
|
||||
target = "format-validate"
|
||||
output = ["type=cacheonly"]
|
||||
}
|
||||
|
||||
target "vendor-update" {
|
||||
inherits = ["node-version"]
|
||||
dockerfile = "./hack/build.Dockerfile"
|
||||
target = "vendor-update"
|
||||
output = ["."]
|
||||
}
|
||||
|
||||
target "vendor-validate" {
|
||||
inherits = ["node-version"]
|
||||
dockerfile = "./hack/build.Dockerfile"
|
||||
target = "vendor-validate"
|
||||
output = ["type=cacheonly"]
|
||||
}
|
||||
|
||||
target "test" {
|
||||
inherits = ["node-version"]
|
||||
dockerfile = "./hack/build.Dockerfile"
|
||||
target = "test-coverage"
|
||||
output = ["./coverage"]
|
||||
}
|
78
hack/build.Dockerfile
Normal file
78
hack/build.Dockerfile
Normal file
@@ -0,0 +1,78 @@
|
||||
# syntax=docker/dockerfile:1.3-labs
|
||||
|
||||
ARG NODE_VERSION
|
||||
ARG DOCKER_VERSION=20.10.10
|
||||
ARG BUILDX_VERSION=0.7.0
|
||||
|
||||
FROM node:${NODE_VERSION}-alpine AS base
|
||||
RUN apk add --no-cache cpio findutils git
|
||||
WORKDIR /src
|
||||
|
||||
FROM base AS deps
|
||||
RUN --mount=type=bind,target=.,rw \
|
||||
--mount=type=cache,target=/src/node_modules \
|
||||
yarn install && mkdir /vendor && cp yarn.lock /vendor
|
||||
|
||||
FROM scratch AS vendor-update
|
||||
COPY --from=deps /vendor /
|
||||
|
||||
FROM deps AS vendor-validate
|
||||
RUN --mount=type=bind,target=.,rw <<EOT
|
||||
set -e
|
||||
git add -A
|
||||
cp -rf /vendor/* .
|
||||
if [ -n "$(git status --porcelain -- yarn.lock)" ]; then
|
||||
echo >&2 'ERROR: Vendor result differs. Please vendor your package with "docker buildx bake vendor-update"'
|
||||
git status --porcelain -- yarn.lock
|
||||
exit 1
|
||||
fi
|
||||
EOT
|
||||
|
||||
FROM deps AS build
|
||||
RUN --mount=type=bind,target=.,rw \
|
||||
--mount=type=cache,target=/src/node_modules \
|
||||
yarn run build && mkdir /out && cp -Rf dist /out/
|
||||
|
||||
FROM scratch AS build-update
|
||||
COPY --from=build /out /
|
||||
|
||||
FROM build AS build-validate
|
||||
RUN --mount=type=bind,target=.,rw <<EOT
|
||||
set -e
|
||||
git add -A
|
||||
cp -rf /out/* .
|
||||
if [ -n "$(git status --porcelain -- dist)" ]; then
|
||||
echo >&2 'ERROR: Build result differs. Please build first with "docker buildx bake build"'
|
||||
git status --porcelain -- dist
|
||||
exit 1
|
||||
fi
|
||||
EOT
|
||||
|
||||
FROM deps AS format
|
||||
RUN --mount=type=bind,target=.,rw \
|
||||
--mount=type=cache,target=/src/node_modules \
|
||||
yarn run format \
|
||||
&& mkdir /out && find . -name '*.ts' -not -path './node_modules/*' | cpio -pdm /out
|
||||
|
||||
FROM scratch AS format-update
|
||||
COPY --from=format /out /
|
||||
|
||||
FROM deps AS format-validate
|
||||
RUN --mount=type=bind,target=.,rw \
|
||||
--mount=type=cache,target=/src/node_modules \
|
||||
yarn run format-check
|
||||
|
||||
FROM docker:${DOCKER_VERSION} as docker
|
||||
FROM docker/buildx-bin:${BUILDX_VERSION} as buildx
|
||||
|
||||
FROM deps AS test
|
||||
ENV RUNNER_TEMP=/tmp/github_runner
|
||||
ENV RUNNER_TOOL_CACHE=/tmp/github_tool_cache
|
||||
RUN --mount=type=bind,target=.,rw \
|
||||
--mount=type=cache,target=/src/node_modules \
|
||||
--mount=type=bind,from=docker,source=/usr/local/bin/docker,target=/usr/bin/docker \
|
||||
--mount=type=bind,from=buildx,source=/buildx,target=/usr/libexec/docker/cli-plugins/docker-buildx \
|
||||
yarn run test --coverageDirectory=/tmp/coverage
|
||||
|
||||
FROM scratch AS test-coverage
|
||||
COPY --from=test /tmp/coverage /
|
12
jest.config.js
Normal file
12
jest.config.js
Normal file
@@ -0,0 +1,12 @@
|
||||
module.exports = {
|
||||
clearMocks: true,
|
||||
moduleFileExtensions: ['js', 'ts'],
|
||||
setupFiles: ["dotenv/config"],
|
||||
testEnvironment: 'node',
|
||||
testMatch: ['**/*.test.ts'],
|
||||
testRunner: 'jest-circus/runner',
|
||||
transform: {
|
||||
'^.+\\.ts$': 'ts-jest'
|
||||
},
|
||||
verbose: false
|
||||
}
|
34
package.json
34
package.json
@@ -6,29 +6,45 @@
|
||||
"build": "tsc && ncc build",
|
||||
"format": "prettier --write **/*.ts",
|
||||
"format-check": "prettier --check **/*.ts",
|
||||
"test": "jest --coverage",
|
||||
"pre-checkin": "yarn run format && yarn run build"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/crazy-max/ghaction-docker-login.git"
|
||||
"url": "git+https://github.com/docker/login-action.git"
|
||||
},
|
||||
"keywords": [
|
||||
"actions",
|
||||
"docker",
|
||||
"login"
|
||||
],
|
||||
"author": "CrazyMax",
|
||||
"author": "Docker",
|
||||
"contributors": [
|
||||
{
|
||||
"name": "CrazyMax",
|
||||
"url": "https://crazymax.dev"
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.2.4",
|
||||
"@actions/exec": "^1.0.4"
|
||||
"@actions/core": "^1.6.0",
|
||||
"@actions/exec": "^1.1.0",
|
||||
"@actions/io": "^1.1.1",
|
||||
"@aws-sdk/client-ecr": "^3.45.0",
|
||||
"@aws-sdk/client-ecr-public": "^3.45.0",
|
||||
"proxy-agent": "^5.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/jest": "^26.0.3",
|
||||
"@types/node": "^14.0.14",
|
||||
"@zeit/ncc": "^0.22.3",
|
||||
"prettier": "^2.0.5",
|
||||
"typescript": "^3.9.5",
|
||||
"@types/jest": "^26.0.23",
|
||||
"@types/node": "^14.17.4",
|
||||
"@vercel/ncc": "^0.28.6",
|
||||
"dotenv": "^8.6.0",
|
||||
"jest": "^26.6.3",
|
||||
"jest-circus": "^26.6.3",
|
||||
"jest-runtime": "^26.6.3",
|
||||
"prettier": "^2.3.2",
|
||||
"ts-jest": "^26.5.6",
|
||||
"typescript": "^3.9.10",
|
||||
"typescript-formatter": "^7.2.2"
|
||||
}
|
||||
}
|
||||
|
132
src/aws.ts
Normal file
132
src/aws.ts
Normal file
@@ -0,0 +1,132 @@
|
||||
import * as core from '@actions/core';
|
||||
import {ECR} from '@aws-sdk/client-ecr';
|
||||
import {ECRPUBLIC} from '@aws-sdk/client-ecr-public';
|
||||
import {NodeHttpHandler} from '@aws-sdk/node-http-handler';
|
||||
import ProxyAgent from 'proxy-agent';
|
||||
|
||||
const ecrRegistryRegex = /^(([0-9]{12})\.dkr\.ecr\.(.+)\.amazonaws\.com(.cn)?)(\/([^:]+)(:.+)?)?$/;
|
||||
|
||||
export const isECR = (registry: string): boolean => {
|
||||
return ecrRegistryRegex.test(registry) || isPubECR(registry);
|
||||
};
|
||||
|
||||
export const isPubECR = (registry: string): boolean => {
|
||||
return registry === 'public.ecr.aws';
|
||||
};
|
||||
|
||||
export const getRegion = (registry: string): string => {
|
||||
if (isPubECR(registry)) {
|
||||
return process.env.AWS_REGION || process.env.AWS_DEFAULT_REGION || 'us-east-1';
|
||||
}
|
||||
const matches = registry.match(ecrRegistryRegex);
|
||||
if (!matches) {
|
||||
return '';
|
||||
}
|
||||
return matches[3];
|
||||
};
|
||||
|
||||
export const getAccountIDs = (registry: string): string[] => {
|
||||
if (isPubECR(registry)) {
|
||||
return [];
|
||||
}
|
||||
const matches = registry.match(ecrRegistryRegex);
|
||||
if (!matches) {
|
||||
return [];
|
||||
}
|
||||
let accountIDs: Array<string> = [matches[2]];
|
||||
if (process.env.AWS_ACCOUNT_IDS) {
|
||||
accountIDs.push(...process.env.AWS_ACCOUNT_IDS.split(','));
|
||||
}
|
||||
return accountIDs.filter((item, index) => accountIDs.indexOf(item) === index);
|
||||
};
|
||||
|
||||
export interface RegistryData {
|
||||
registry: string;
|
||||
username: string;
|
||||
password: string;
|
||||
}
|
||||
|
||||
export const getRegistriesData = async (registry: string, username?: string, password?: string): Promise<RegistryData[]> => {
|
||||
const region = getRegion(registry);
|
||||
const accountIDs = getAccountIDs(registry);
|
||||
|
||||
const authTokenRequest = {};
|
||||
if (accountIDs.length > 0) {
|
||||
core.debug(`Requesting AWS ECR auth token for ${accountIDs.join(', ')}`);
|
||||
authTokenRequest['registryIds'] = accountIDs;
|
||||
}
|
||||
|
||||
let httpProxyAgent: any = null;
|
||||
const httpProxy = process.env.http_proxy || process.env.HTTP_PROXY || '';
|
||||
if (httpProxy) {
|
||||
core.debug(`Using http proxy ${httpProxy}`);
|
||||
httpProxyAgent = new ProxyAgent(httpProxy);
|
||||
}
|
||||
|
||||
let httpsProxyAgent: any = null;
|
||||
const httpsProxy = process.env.https_proxy || process.env.HTTPS_PROXY || '';
|
||||
if (httpsProxy) {
|
||||
core.debug(`Using https proxy ${httpsProxy}`);
|
||||
httpsProxyAgent = new ProxyAgent(httpsProxy);
|
||||
}
|
||||
|
||||
const credentials =
|
||||
username && password
|
||||
? {
|
||||
accessKeyId: username,
|
||||
secretAccessKey: password
|
||||
}
|
||||
: undefined;
|
||||
|
||||
if (isPubECR(registry)) {
|
||||
core.info(`AWS Public ECR detected with ${region} region`);
|
||||
const ecrPublic = new ECRPUBLIC({
|
||||
customUserAgent: 'docker-login-action',
|
||||
credentials,
|
||||
region: region,
|
||||
requestHandler: new NodeHttpHandler({
|
||||
httpAgent: httpProxyAgent,
|
||||
httpsAgent: httpsProxyAgent
|
||||
})
|
||||
});
|
||||
const authTokenResponse = await ecrPublic.getAuthorizationToken(authTokenRequest);
|
||||
if (!authTokenResponse.authorizationData || !authTokenResponse.authorizationData.authorizationToken) {
|
||||
throw new Error('Could not retrieve an authorization token from AWS Public ECR');
|
||||
}
|
||||
const authToken = Buffer.from(authTokenResponse.authorizationData.authorizationToken, 'base64').toString('utf-8');
|
||||
const creds = authToken.split(':', 2);
|
||||
return [
|
||||
{
|
||||
registry: 'public.ecr.aws',
|
||||
username: creds[0],
|
||||
password: creds[1]
|
||||
}
|
||||
];
|
||||
} else {
|
||||
core.info(`AWS ECR detected with ${region} region`);
|
||||
const ecr = new ECR({
|
||||
customUserAgent: 'docker-login-action',
|
||||
credentials,
|
||||
region: region,
|
||||
requestHandler: new NodeHttpHandler({
|
||||
httpAgent: httpProxyAgent,
|
||||
httpsAgent: httpsProxyAgent
|
||||
})
|
||||
});
|
||||
const authTokenResponse = await ecr.getAuthorizationToken(authTokenRequest);
|
||||
if (!Array.isArray(authTokenResponse.authorizationData) || !authTokenResponse.authorizationData.length) {
|
||||
throw new Error('Could not retrieve an authorization token from AWS ECR');
|
||||
}
|
||||
const regDatas: RegistryData[] = [];
|
||||
for (const authData of authTokenResponse.authorizationData) {
|
||||
const authToken = Buffer.from(authData.authorizationToken || '', 'base64').toString('utf-8');
|
||||
const creds = authToken.split(':', 2);
|
||||
regDatas.push({
|
||||
registry: authData.proxyEndpoint || '',
|
||||
username: creds[0],
|
||||
password: creds[1]
|
||||
});
|
||||
}
|
||||
return regDatas;
|
||||
}
|
||||
};
|
19
src/context.ts
Normal file
19
src/context.ts
Normal file
@@ -0,0 +1,19 @@
|
||||
import * as core from '@actions/core';
|
||||
|
||||
export interface Inputs {
|
||||
registry: string;
|
||||
username: string;
|
||||
password: string;
|
||||
ecr: string;
|
||||
logout: boolean;
|
||||
}
|
||||
|
||||
export function getInputs(): Inputs {
|
||||
return {
|
||||
registry: core.getInput('registry'),
|
||||
username: core.getInput('username'),
|
||||
password: core.getInput('password'),
|
||||
ecr: core.getInput('ecr'),
|
||||
logout: core.getBooleanInput('logout')
|
||||
};
|
||||
}
|
71
src/docker.ts
Normal file
71
src/docker.ts
Normal file
@@ -0,0 +1,71 @@
|
||||
import * as aws from './aws';
|
||||
import * as core from '@actions/core';
|
||||
import * as exec from '@actions/exec';
|
||||
|
||||
export async function login(registry: string, username: string, password: string, ecr: string): Promise<void> {
|
||||
if (/true/i.test(ecr) || (ecr == 'auto' && aws.isECR(registry))) {
|
||||
await loginECR(registry, username, password);
|
||||
} else {
|
||||
await loginStandard(registry, username, password);
|
||||
}
|
||||
}
|
||||
|
||||
export async function logout(registry: string): Promise<void> {
|
||||
await exec
|
||||
.getExecOutput('docker', ['logout', registry], {
|
||||
ignoreReturnCode: true
|
||||
})
|
||||
.then(res => {
|
||||
if (res.stderr.length > 0 && res.exitCode != 0) {
|
||||
core.warning(res.stderr.trim());
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export async function loginStandard(registry: string, username: string, password: string): Promise<void> {
|
||||
if (!username || !password) {
|
||||
throw new Error('Username and password required');
|
||||
}
|
||||
|
||||
let loginArgs: Array<string> = ['login', '--password-stdin'];
|
||||
loginArgs.push('--username', username);
|
||||
loginArgs.push(registry);
|
||||
|
||||
if (registry) {
|
||||
core.info(`Logging into ${registry}...`);
|
||||
} else {
|
||||
core.info(`Logging into Docker Hub...`);
|
||||
}
|
||||
await exec
|
||||
.getExecOutput('docker', loginArgs, {
|
||||
ignoreReturnCode: true,
|
||||
silent: true,
|
||||
input: Buffer.from(password)
|
||||
})
|
||||
.then(res => {
|
||||
if (res.stderr.length > 0 && res.exitCode != 0) {
|
||||
throw new Error(res.stderr.trim());
|
||||
}
|
||||
core.info(`Login Succeeded!`);
|
||||
});
|
||||
}
|
||||
|
||||
export async function loginECR(registry: string, username: string, password: string): Promise<void> {
|
||||
core.info(`Retrieving registries data through AWS SDK...`);
|
||||
const regDatas = await aws.getRegistriesData(registry, username, password);
|
||||
for (const regData of regDatas) {
|
||||
core.info(`Logging into ${regData.registry}...`);
|
||||
await exec
|
||||
.getExecOutput('docker', ['login', '--password-stdin', '--username', regData.username, regData.registry], {
|
||||
ignoreReturnCode: true,
|
||||
silent: true,
|
||||
input: Buffer.from(regData.password)
|
||||
})
|
||||
.then(res => {
|
||||
if (res.stderr.length > 0 && res.exitCode != 0) {
|
||||
throw new Error(res.stderr.trim());
|
||||
}
|
||||
core.info('Login Succeeded!');
|
||||
});
|
||||
}
|
||||
}
|
34
src/exec.ts
34
src/exec.ts
@@ -1,34 +0,0 @@
|
||||
import * as actionsExec from '@actions/exec';
|
||||
import {ExecOptions} from '@actions/exec';
|
||||
|
||||
export interface ExecResult {
|
||||
success: boolean;
|
||||
stdout: string;
|
||||
stderr: string;
|
||||
}
|
||||
|
||||
export const exec = async (command: string, args: string[] = [], silent: boolean): Promise<ExecResult> => {
|
||||
let stdout: string = '';
|
||||
let stderr: string = '';
|
||||
|
||||
const options: ExecOptions = {
|
||||
silent: silent,
|
||||
ignoreReturnCode: true
|
||||
};
|
||||
options.listeners = {
|
||||
stdout: (data: Buffer) => {
|
||||
stdout += data.toString();
|
||||
},
|
||||
stderr: (data: Buffer) => {
|
||||
stderr += data.toString();
|
||||
}
|
||||
};
|
||||
|
||||
const returnCode: number = await actionsExec.exec(command, args, options);
|
||||
|
||||
return {
|
||||
success: returnCode === 0,
|
||||
stdout: stdout.trim(),
|
||||
stderr: stderr.trim()
|
||||
};
|
||||
};
|
40
src/main.ts
40
src/main.ts
@@ -1,34 +1,14 @@
|
||||
import * as os from 'os';
|
||||
import * as core from '@actions/core';
|
||||
import * as exec from './exec';
|
||||
import * as context from './context';
|
||||
import * as docker from './docker';
|
||||
import * as stateHelper from './state-helper';
|
||||
|
||||
async function run(): Promise<void> {
|
||||
export async function run(): Promise<void> {
|
||||
try {
|
||||
if (os.platform() !== 'linux') {
|
||||
core.setFailed('Only supported on linux platform');
|
||||
return;
|
||||
}
|
||||
|
||||
const registry: string = core.getInput('registry');
|
||||
stateHelper.setRegistry(registry);
|
||||
stateHelper.setLogout(core.getInput('logout'));
|
||||
|
||||
const username: string = core.getInput('username');
|
||||
const password: string = core.getInput('password', {required: true});
|
||||
|
||||
let loginArgs: Array<string> = ['login', '--password', password];
|
||||
if (username) {
|
||||
loginArgs.push('--username', username);
|
||||
}
|
||||
loginArgs.push(registry);
|
||||
|
||||
await exec.exec('docker', loginArgs, true).then(res => {
|
||||
if (res.stderr != '' && !res.success) {
|
||||
throw new Error(res.stderr);
|
||||
}
|
||||
core.info('🎉 Login Succeeded!');
|
||||
});
|
||||
const input: context.Inputs = context.getInputs();
|
||||
stateHelper.setRegistry(input.registry);
|
||||
stateHelper.setLogout(input.logout);
|
||||
await docker.login(input.registry, input.username, input.password, input.ecr);
|
||||
} catch (error) {
|
||||
core.setFailed(error.message);
|
||||
}
|
||||
@@ -38,11 +18,7 @@ async function logout(): Promise<void> {
|
||||
if (!stateHelper.logout) {
|
||||
return;
|
||||
}
|
||||
await exec.exec('docker', ['logout', stateHelper.registry], false).then(res => {
|
||||
if (res.stderr != '' && !res.success) {
|
||||
core.warning(res.stderr);
|
||||
}
|
||||
});
|
||||
await docker.logout(stateHelper.registry);
|
||||
}
|
||||
|
||||
if (!stateHelper.IsPost) {
|
||||
|
@@ -8,7 +8,7 @@ export function setRegistry(registry: string) {
|
||||
core.saveState('registry', registry);
|
||||
}
|
||||
|
||||
export function setLogout(logout: string) {
|
||||
export function setLogout(logout: boolean) {
|
||||
core.saveState('logout', logout);
|
||||
}
|
||||
|
||||
|
Reference in New Issue
Block a user