Compare commits

..

No commits in common. "master" and "v1.0.3" have entirely different histories.

261 changed files with 36969 additions and 63610 deletions

4
.github/FUNDING.yml vendored
View File

@ -1,4 +0,0 @@
# These are supported funding model platforms
github: ['bcomnes']
custom: ['https://bret.io', 'https://neocities.org/donate']

View File

@ -1,22 +0,0 @@
# Basic dependabot.yml file with
# minimum configuration for two package managers
version: 2
updates:
# Enable version updates for npm
- package-ecosystem: "npm"
# Look for `package.json` and `lock` files in the `root` directory
directory: "/"
# Check the npm registry for updates every day (weekdays)
schedule:
interval: "daily"
groups:
typescript:
patterns:
- "typescript"
- "@voxpelli/tsconfig"
# Enable updates to github actions
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"

View File

@ -1,40 +0,0 @@
name: Deploy to neocities
# only run on changes to master
on:
push:
branches:
- master
env:
FORCE_COLOR: 1
node_version: lts/*
concurrency: # prevent concurrent deploys doing strange things
group: deploy-to-neocities
cancel-in-progress: true
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
# Set up any tools and build steps here
# This example uses a Node.js toolchain to build a site
- name: Use Node.js
uses: actions/setup-node@v6
with:
node-version: ${{ env.node_version }}
- run: npm i
- run: npm run build
# When the dist_dir is ready, deploy it to neocities
- name: Deploy to neocities
uses: bcomnes/deploy-to-neocities@master # don't use master in production
with:
api_key: ${{ secrets.NEOCITIES_API_TOKEN }}
cleanup: true
dist_dir: public
protected_files: 'dropbox/*'
neocities_supporter: true # set this to true if you have a supporter account and want to bypass the unsupported files filter.
preview_before_deploy: true

View File

@ -1,39 +0,0 @@
name: npm bump
on:
workflow_dispatch:
inputs:
newversion:
description: 'npm version (major minor patch)'
required: true
env:
node_version: lts/*
FORCE_COLOR: 1
concurrency: # prevent concurrent releases
group: npm-bump
cancel-in-progress: true
jobs:
version_and_release:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
with:
# fetch full history so things like auto-changelog work properly
fetch-depth: 0
- uses: actions/setup-node@v6
with:
node-version: ${{ env.node_version }}
# setting a registry enables the NODE_AUTH_TOKEN env variable where we can set an npm token. REQUIRED
registry-url: 'https://registry.npmjs.org'
- run: npm i
- run: npm test
- uses: bcomnes/npm-bump@v2.2.1
with:
git_email: bcomnes@gmail.com
git_username: ${{ github.actor }}
newversion: ${{ github.event.inputs.newversion }}
github_token: ${{ secrets.GITHUB_TOKEN }} # built-in actions token. Passed to gh-release if in use.
publish_cmd: npm run release

View File

@ -1,9 +1,6 @@
name: tests
on: [push, pull_request]
env:
FORCE_COLOR: 1
on: [push]
jobs:
test:
@ -12,25 +9,17 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest]
node: [lts/*]
node: [12]
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v1
- name: Use Node.js ${{ matrix.node }}
uses: actions/setup-node@v6
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node }}
- run: npm i
- run: npm test
automerge:
needs: test
runs-on: ubuntu-latest
permissions:
pull-requests: write
contents: write
steps:
- uses: fastify/github-action-merge-dependabot@v3
if: ${{ github.actor == 'dependabot[bot]' && github.event_name == 'pull_request' && contains(github.head_ref, 'dependabot/github_actions') }}
with:
github-token: ${{secrets.github_token}}
- name: npm install && npm test
run: |
npm i
npm test
env:
CI: true

2
.gitignore vendored
View File

@ -4,5 +4,3 @@ config.json
public
node_modules
tmp_modules
package-lock.json
coverage

17
.vscode/launch.json vendored Normal file
View File

@ -0,0 +1,17 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "node",
"request": "launch",
"name": "Launch Program",
"skipFiles": [
"<node_internals>/**"
],
"program": "${workspaceFolder}/test.js"
}
]
}

View File

@ -7,554 +7,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
Generated by [`auto-changelog`](https://github.com/CookPete/auto-changelog).
## [v3.0.4](https://github.com/bcomnes/deploy-to-neocities/compare/v3.0.3...v3.0.4)
### Merged
- chore(deps): bump minimatch from 10.0.1 to 10.0.3 [`#224`](https://github.com/bcomnes/deploy-to-neocities/pull/224)
### Commits
- Merge pull request #225 from bcomnes/dependabot/npm_and_yarn/async-neocities-4.1.2 [`692b031`](https://github.com/bcomnes/deploy-to-neocities/commit/692b031118b324f3c28f2204ff18882edb9c53af)
- chore(deps): bump async-neocities from 4.1.1 to 4.1.2 [`a1ae452`](https://github.com/bcomnes/deploy-to-neocities/commit/a1ae452c40f056b6cead68ed5d914245860ab45e)
## [v3.0.3](https://github.com/bcomnes/deploy-to-neocities/compare/v3.0.2...v3.0.3) - 2025-06-01
### Merged
- Add my own website in README.md [`#222`](https://github.com/bcomnes/deploy-to-neocities/pull/222)
- chore(deps-dev): bump typescript from 5.7.3 to 5.8.2 [`#217`](https://github.com/bcomnes/deploy-to-neocities/pull/217)
- chore(deps-dev): bump esbuild from 0.24.2 to 0.25.0 [`#213`](https://github.com/bcomnes/deploy-to-neocities/pull/213)
- chore(deps-dev): bump neostandard from 0.11.9 to 0.12.0 [`#211`](https://github.com/bcomnes/deploy-to-neocities/pull/211)
### Commits
- Update async-neocities [`51ec937`](https://github.com/bcomnes/deploy-to-neocities/commit/51ec937eb84fa6beae055593f8e286e7d49d1d1e)
- Merge pull request #221 from bcomnes/dependabot/npm_and_yarn/npm-run-all2-8.0.1 [`7c11b66`](https://github.com/bcomnes/deploy-to-neocities/commit/7c11b664c6b9ed5b7d72def8047f539b2f2205a9)
- More example comments [`5761b44`](https://github.com/bcomnes/deploy-to-neocities/commit/5761b44f4df0408697ca3c9438cf8f7ae8df9628)
## [v3.0.2](https://github.com/bcomnes/deploy-to-neocities/compare/v3.0.1...v3.0.2) - 2024-12-09
### Merged
- chore(deps): bump async-neocities from 4.0.4 to 4.1.0 [`#210`](https://github.com/bcomnes/deploy-to-neocities/pull/210)
## [v3.0.1](https://github.com/bcomnes/deploy-to-neocities/compare/v3.0.0...v3.0.1) - 2024-11-28
### Merged
- chore(deps): bump async-neocities from 4.0.3 to 4.0.4 [`#209`](https://github.com/bcomnes/deploy-to-neocities/pull/209)
- fixed a typo in README.md [`#208`](https://github.com/bcomnes/deploy-to-neocities/pull/208)
- chore(deps-dev): bump typescript from 5.6.3 to 5.7.2 [`#207`](https://github.com/bcomnes/deploy-to-neocities/pull/207)
- Add my website https://jefbecker.com/ to the list. [`#206`](https://github.com/bcomnes/deploy-to-neocities/pull/206)
### Commits
- Update README.md [`15eb88d`](https://github.com/bcomnes/deploy-to-neocities/commit/15eb88d4831e8b3170195c1e2b468e8ab3a9703d)
## [v3.0.0](https://github.com/bcomnes/deploy-to-neocities/compare/v2.0.4...v3.0.0) - 2024-11-19
### Merged
- chore(deps-dev): bump npm-run-all2 from 6.2.6 to 7.0.1 [`#205`](https://github.com/bcomnes/deploy-to-neocities/pull/205)
### Commits
- **Breaking change:** Update to the latest async-neocities 4.0 [`7b14798`](https://github.com/bcomnes/deploy-to-neocities/commit/7b14798c892f2494466bc72d5402c85801f75341)
## [v2.0.4](https://github.com/bcomnes/deploy-to-neocities/compare/v2.0.3...v2.0.4) - 2024-10-07
### Merged
- chore(deps-dev): bump top-bun from 9.1.1 to 10.0.0 [`#202`](https://github.com/bcomnes/deploy-to-neocities/pull/202)
- chore(deps): bump @actions/core from 1.11.0 to 1.11.1 [`#203`](https://github.com/bcomnes/deploy-to-neocities/pull/203)
## [v2.0.3](https://github.com/bcomnes/deploy-to-neocities/compare/v2.0.2...v2.0.3) - 2024-10-03
### Merged
- chore(deps): bump @actions/core from 1.10.1 to 1.11.0 [`#201`](https://github.com/bcomnes/deploy-to-neocities/pull/201)
- Add my site to the readme [`#200`](https://github.com/bcomnes/deploy-to-neocities/pull/200)
- chore(deps-dev): bump top-bun from 8.0.2 to 9.0.1 [`#197`](https://github.com/bcomnes/deploy-to-neocities/pull/197)
- chore(deps-dev): bump top-bun from 7.3.4 to 8.0.0 [`#193`](https://github.com/bcomnes/deploy-to-neocities/pull/193)
- chore(deps): bump minimatch from 9.0.5 to 10.0.1 [`#192`](https://github.com/bcomnes/deploy-to-neocities/pull/192)
- Remove my hobby website from the website list [`#188`](https://github.com/bcomnes/deploy-to-neocities/pull/188)
- chore(deps-dev): bump esbuild from 0.20.2 to 0.21.0 [`#184`](https://github.com/bcomnes/deploy-to-neocities/pull/184)
### Commits
- Merge pull request #199 from bcomnes/dependabot/npm_and_yarn/esbuild-0.24.0 [`f2bd635`](https://github.com/bcomnes/deploy-to-neocities/commit/f2bd635f4250c0f0c3dbf2fbced0be3608c96f2d)
- chore(deps-dev): bump esbuild from 0.23.1 to 0.24.0 [`90e8a07`](https://github.com/bcomnes/deploy-to-neocities/commit/90e8a074cd8aef07020ac80fbc420ee0a4189615)
- Merge pull request #191 from bcomnes/dependabot/npm_and_yarn/esbuild-0.23.0 [`8999c71`](https://github.com/bcomnes/deploy-to-neocities/commit/8999c710228c8627186a5360a882117a20c16cc1)
## [v2.0.2](https://github.com/bcomnes/deploy-to-neocities/compare/v2.0.1...v2.0.2) - 2024-04-04
### Commits
- Clarify default branch situation in README [`1fcd576`](https://github.com/bcomnes/deploy-to-neocities/commit/1fcd576de99dc8f39086f7239bf22962f3b8e7ae)
## [v2.0.1](https://github.com/bcomnes/deploy-to-neocities/compare/v2.0.0...v2.0.1) - 2024-04-04
### Commits
- Update docs [`5a66259`](https://github.com/bcomnes/deploy-to-neocities/commit/5a66259a88059e84b1a3981aa88389854700f8a8)
## [v2.0.0](https://github.com/bcomnes/deploy-to-neocities/compare/v1.1.21...v2.0.0) - 2024-04-04
### Merged
- Added https://sacred.neocities.org [`#175`](https://github.com/bcomnes/deploy-to-neocities/pull/175)
- chore(deps-dev): bump esbuild from 0.19.12 to 0.20.0 [`#170`](https://github.com/bcomnes/deploy-to-neocities/pull/170)
- chore(deps): bump actions/setup-node from 3 to 4 [`#165`](https://github.com/bcomnes/deploy-to-neocities/pull/165)
### Fixed
- **Breaking change:** Update action runtime to node20 [`#174`](https://github.com/bcomnes/deploy-to-neocities/issues/174)
### Commits
- Merge pull request #179 from bcomnes/dependabot/npm_and_yarn/minimatch-9.0.4 [`d2622ae`](https://github.com/bcomnes/deploy-to-neocities/commit/d2622ae949a095974f8a3ffb695d70658a9911fa)
- chore(deps): bump minimatch from 9.0.3 to 9.0.4 [`0d910df`](https://github.com/bcomnes/deploy-to-neocities/commit/0d910dfd72d5c6a016940426f91622f84d1dbdca)
- Update README.md [`0ee4b4c`](https://github.com/bcomnes/deploy-to-neocities/commit/0ee4b4c3a5bbab316700252a7e41e758773e2737)
## [v1.1.21](https://github.com/bcomnes/deploy-to-neocities/compare/v1.1.20...v1.1.21) - 2023-12-02
### Merged
- chore(deps-dev): bump @siteup/cli from 4.0.1 to 5.0.0 [`#163`](https://github.com/bcomnes/deploy-to-neocities/pull/163)
- chore(deps): bump actions/setup-node from 3 to 4 [`#162`](https://github.com/bcomnes/deploy-to-neocities/pull/162)
### Commits
- Update node in actions and rebuild [`bad0c3f`](https://github.com/bcomnes/deploy-to-neocities/commit/bad0c3f21924f957bd2d41156f9ae9894e8d01a1)
## [v1.1.20](https://github.com/bcomnes/deploy-to-neocities/compare/v1.1.19...v1.1.20) - 2023-10-22
### Merged
- chore(deps-dev): bump @siteup/cli from 2.2.11 to 3.1.0 [`#161`](https://github.com/bcomnes/deploy-to-neocities/pull/161)
### Commits
- Update async-neocities to 2.1.6 [`f820123`](https://github.com/bcomnes/deploy-to-neocities/commit/f8201237291216a53f9c4dde35c46a65a8ea534d)
## [v1.1.19](https://github.com/bcomnes/deploy-to-neocities/compare/v1.1.18...v1.1.19) - 2023-09-30
### Merged
- chore(deps): bump @actions/core from 1.10.0 to 1.10.1 [`#159`](https://github.com/bcomnes/deploy-to-neocities/pull/159)
- Update neocities.yml [`#158`](https://github.com/bcomnes/deploy-to-neocities/pull/158)
- chore(deps): bump actions/checkout from 3 to 4 [`#157`](https://github.com/bcomnes/deploy-to-neocities/pull/157)
## [v1.1.18](https://github.com/bcomnes/deploy-to-neocities/compare/v1.1.17...v1.1.18) - 2023-08-31
### Merged
- chore(deps-dev): bump esbuild from 0.18.20 to 0.19.0 [`#154`](https://github.com/bcomnes/deploy-to-neocities/pull/154)
- Update README.md [`#153`](https://github.com/bcomnes/deploy-to-neocities/pull/153)
### Commits
- Neocities badge [`0827633`](https://github.com/bcomnes/deploy-to-neocities/commit/0827633ad88c495adaa304fb68bb1ef422973659)
- Merge pull request #152 from bcomnes/dependabot/npm_and_yarn/minimatch-9.0.3 [`6b85bf5`](https://github.com/bcomnes/deploy-to-neocities/commit/6b85bf5162547159b92547d1e79f96e5996d2e96)
- chore(deps): bump minimatch from 9.0.2 to 9.0.3 [`48dc557`](https://github.com/bcomnes/deploy-to-neocities/commit/48dc557fed2d40506be024f40ef9c4045d82781a)
## [v1.1.17](https://github.com/bcomnes/deploy-to-neocities/compare/v1.1.16...v1.1.17) - 2023-06-27
### Commits
- Prune example URLs [`dabda03`](https://github.com/bcomnes/deploy-to-neocities/commit/dabda0324f7965b65951d021c1e99bc10d640903)
## [v1.1.16](https://github.com/bcomnes/deploy-to-neocities/compare/v1.1.15...v1.1.16) - 2023-06-26
### Merged
- chore(deps-dev): bump esbuild from 0.17.19 to 0.18.0 [`#149`](https://github.com/bcomnes/deploy-to-neocities/pull/149)
- chore(deps): bump minimatch from 9.0.0 to 9.0.1 [`#146`](https://github.com/bcomnes/deploy-to-neocities/pull/146)
- chore(deps): bump minimatch from 7.4.2 to 7.4.3 [`#140`](https://github.com/bcomnes/deploy-to-neocities/pull/140)
- chore(deps): bump bcomnes/npm-bump from 2.1.0 to 2.2.1 [`#139`](https://github.com/bcomnes/deploy-to-neocities/pull/139)
- chore(deps): bump minimatch from 7.2.0 to 7.4.2 [`#138`](https://github.com/bcomnes/deploy-to-neocities/pull/138)
- chore(deps): bump minimatch from 7.1.1 to 7.2.0 [`#136`](https://github.com/bcomnes/deploy-to-neocities/pull/136)
- chore(deps): bump minimatch from 7.0.1 to 7.1.0 [`#134`](https://github.com/bcomnes/deploy-to-neocities/pull/134)
- Update README.md [`#131`](https://github.com/bcomnes/deploy-to-neocities/pull/131)
- chore(deps): bump minimatch from 7.0.0 to 7.0.1 [`#132`](https://github.com/bcomnes/deploy-to-neocities/pull/132)
- chore(deps): bump minimatch from 6.2.0 to 7.0.0 [`#130`](https://github.com/bcomnes/deploy-to-neocities/pull/130)
- chore(deps): bump minimatch from 6.1.8 to 6.2.0 [`#129`](https://github.com/bcomnes/deploy-to-neocities/pull/129)
- chore(deps): bump minimatch from 6.1.6 to 6.1.8 [`#128`](https://github.com/bcomnes/deploy-to-neocities/pull/128)
- chore(deps): bump minimatch from 6.1.5 to 6.1.6 [`#123`](https://github.com/bcomnes/deploy-to-neocities/pull/123)
- Add my site [`#122`](https://github.com/bcomnes/deploy-to-neocities/pull/122)
### Commits
- Merge pull request #151 from bcomnes/dependabot/npm_and_yarn/minimatch-9.0.2 [`5f0065b`](https://github.com/bcomnes/deploy-to-neocities/commit/5f0065b0a9cb3627bee2fce1890f6335aeb15bf6)
- Remove packages [`8919ab9`](https://github.com/bcomnes/deploy-to-neocities/commit/8919ab9606c90d47193ddca8159b5540ae38923f)
- chore(deps): bump minimatch from 9.0.1 to 9.0.2 [`7c37474`](https://github.com/bcomnes/deploy-to-neocities/commit/7c37474912a72b54f851a1152ef2f52111927181)
## [v1.1.15](https://github.com/bcomnes/deploy-to-neocities/compare/v1.1.14...v1.1.15) - 2023-01-18
### Merged
- chore(deps): bump minimatch from 6.1.3 to 6.1.5 [`#121`](https://github.com/bcomnes/deploy-to-neocities/pull/121)
## [v1.1.14](https://github.com/bcomnes/deploy-to-neocities/compare/v1.1.13...v1.1.14) - 2023-01-17
### Merged
- chore(deps-dev): bump mkdirp from 1.0.4 to 2.1.1 [`#118`](https://github.com/bcomnes/deploy-to-neocities/pull/118)
- chore(deps): bump minimatch from 5.1.2 to 6.0.4 [`#117`](https://github.com/bcomnes/deploy-to-neocities/pull/117)
### Commits
- test build [`b6e88c0`](https://github.com/bcomnes/deploy-to-neocities/commit/b6e88c0383e2e7a2b6f71afd4ab109681eec3dba)
- Switch to esbuild [`f417d42`](https://github.com/bcomnes/deploy-to-neocities/commit/f417d42694329e4a2863c587f0d2889fd691da1c)
- Revert "chore(deps): bump minimatch from 5.1.2 to 6.0.4" [`2761a4b`](https://github.com/bcomnes/deploy-to-neocities/commit/2761a4b53809f85506d8c9388e2bd7f4ff0ae58b)
## [v1.1.13](https://github.com/bcomnes/deploy-to-neocities/compare/v1.1.12...v1.1.13) - 2022-12-21
### Merged
- chore(deps): bump minimatch from 5.1.1 to 5.1.2 [`#115`](https://github.com/bcomnes/deploy-to-neocities/pull/115)
### Commits
- Update example to use actions v3 [`97a3bdd`](https://github.com/bcomnes/deploy-to-neocities/commit/97a3bdd7ff2cb7dd8a61969d6a3a8029d331bd29)
- Update README.md [`5dda7b9`](https://github.com/bcomnes/deploy-to-neocities/commit/5dda7b9d7afe5f87039e8bff30d1dd6d5eeb793e)
## [v1.1.12](https://github.com/bcomnes/deploy-to-neocities/compare/v1.1.11...v1.1.12) - 2022-12-20
### Merged
- chore(deps-dev): bump @vercel/ncc from 0.34.0 to 0.36.0 [`#112`](https://github.com/bcomnes/deploy-to-neocities/pull/112)
- chore(deps): bump minimatch from 5.1.0 to 5.1.1 [`#111`](https://github.com/bcomnes/deploy-to-neocities/pull/111)
### Commits
- Update action to use node 16 [`b5e74f0`](https://github.com/bcomnes/deploy-to-neocities/commit/b5e74f0efddc239968a312e1bae4501d3e84f9a9)
- Merge pull request #113 from bcomnes/dependabot/npm_and_yarn/gh-release-7.0.0 [`1114060`](https://github.com/bcomnes/deploy-to-neocities/commit/1114060652dade40eaacdea104051d6e13a096a3)
- chore(deps-dev): bump gh-release from 6.0.4 to 7.0.0 [`e00b896`](https://github.com/bcomnes/deploy-to-neocities/commit/e00b89634371d88950fa4f3ec2a8892888bc98e2)
## [v1.1.11](https://github.com/bcomnes/deploy-to-neocities/compare/v1.1.10...v1.1.11) - 2022-11-07
### Merged
- the site is open-source again [`#107`](https://github.com/bcomnes/deploy-to-neocities/pull/107)
- info [`#104`](https://github.com/bcomnes/deploy-to-neocities/pull/104)
- chore(deps): bump bcomnes/npm-bump from 2.0.2 to 2.1.0 [`#105`](https://github.com/bcomnes/deploy-to-neocities/pull/105)
### Commits
- Print error cause no matter what [`8fae4b2`](https://github.com/bcomnes/deploy-to-neocities/commit/8fae4b2c6c1923355b43f7dcfd2efdad7279ffc1)
- Update README.md [`c7b4dca`](https://github.com/bcomnes/deploy-to-neocities/commit/c7b4dca027b41fa09bc9d92efa5489238bc98a5a)
## [v1.1.10](https://github.com/bcomnes/deploy-to-neocities/compare/v1.1.9...v1.1.10) - 2022-10-03
### Merged
- chore(deps): bump @actions/core from 1.9.1 to 1.10.0 [`#102`](https://github.com/bcomnes/deploy-to-neocities/pull/102)
- the site is open-source again [`#103`](https://github.com/bcomnes/deploy-to-neocities/pull/103)
- added notice about my site source code [`#100`](https://github.com/bcomnes/deploy-to-neocities/pull/100)
- Update README.md [`#97`](https://github.com/bcomnes/deploy-to-neocities/pull/97)
- add conorsheehan1.neocities.org [`#95`](https://github.com/bcomnes/deploy-to-neocities/pull/95)
### Commits
- Bump async-neocities to 2.1.3 [`8f9be80`](https://github.com/bcomnes/deploy-to-neocities/commit/8f9be808b5b4f9e1d9a316b391c7c21acfcba3ad)
- Merge pull request #96 from bechnokid/patch-1 [`635243f`](https://github.com/bcomnes/deploy-to-neocities/commit/635243fdea7e5d224675222008031a553bc142c3)
- Add bechnokid.neocities.org to README.md [`9808b83`](https://github.com/bcomnes/deploy-to-neocities/commit/9808b8398bf70ac8e2a41172c74eb9f66e7d0d89)
## [v1.1.9](https://github.com/bcomnes/deploy-to-neocities/compare/v1.1.8...v1.1.9) - 2022-08-10
### Commits
- Update deps to fix hung actions [`46319ec`](https://github.com/bcomnes/deploy-to-neocities/commit/46319ec8034824d8a562f3f449080400f2fc0218)
- Merge pull request #94 from bcomnes/dependabot/npm_and_yarn/actions/core-1.9.1 [`3cfd2ec`](https://github.com/bcomnes/deploy-to-neocities/commit/3cfd2ec3ee0474635567bbfc56763329693b16cf)
- chore(deps): bump @actions/core from 1.9.0 to 1.9.1 [`65cb789`](https://github.com/bcomnes/deploy-to-neocities/commit/65cb7897571e6b168e5ba8b0823307c4fe64bc7d)
## [v1.1.8](https://github.com/bcomnes/deploy-to-neocities/compare/v1.1.7...v1.1.8) - 2022-07-22
### Fixed
- s/zeit/vercel [`#92`](https://github.com/bcomnes/deploy-to-neocities/issues/92)
### Commits
- Merge pull request #93 from bcomnes/docs [`5e51c4d`](https://github.com/bcomnes/deploy-to-neocities/commit/5e51c4db747382faa9f56d0f8de8f37d2a5842e5)
- Merge pull request #91 from fu-sen/patch-1 [`bda6c1e`](https://github.com/bcomnes/deploy-to-neocities/commit/bda6c1eb47143325a36eea712ac9c954f1c70f12)
- Use lts/* [`9b65715`](https://github.com/bcomnes/deploy-to-neocities/commit/9b6571537b238f3446151711b1ea115e4d4e4023)
## [v1.1.7](https://github.com/bcomnes/deploy-to-neocities/compare/v1.1.6...v1.1.7) - 2022-06-17
### Merged
- chore(deps): bump @actions/core from 1.8.2 to 1.9.0 [`#89`](https://github.com/bcomnes/deploy-to-neocities/pull/89)
- Update README.md [`#88`](https://github.com/bcomnes/deploy-to-neocities/pull/88)
- chore(deps-dev): bump npm-run-all2 from 5.0.2 to 6.0.0 [`#87`](https://github.com/bcomnes/deploy-to-neocities/pull/87)
- please add me [`#85`](https://github.com/bcomnes/deploy-to-neocities/pull/85)
## [v1.1.6](https://github.com/bcomnes/deploy-to-neocities/compare/v1.1.5...v1.1.6) - 2022-05-26
### Merged
- chore(deps-dev): bump @vercel/ncc from 0.33.4 to 0.34.0 [`#84`](https://github.com/bcomnes/deploy-to-neocities/pull/84)
- chore(deps): bump @actions/core from 1.8.1 to 1.8.2 [`#83`](https://github.com/bcomnes/deploy-to-neocities/pull/83)
### Commits
- Merge pull request #82 from bcomnes/dependabot/npm_and_yarn/actions/core-1.8.1 [`2abb3f9`](https://github.com/bcomnes/deploy-to-neocities/commit/2abb3f996d5b9af84eb44488cfb74e307ad8574d)
- chore(deps): bump @actions/core from 1.8.0 to 1.8.1 [`f16012d`](https://github.com/bcomnes/deploy-to-neocities/commit/f16012dd28b1a06080fa8fefeb28da6cb44b3c2e)
- Update test.yml [`bfa04d5`](https://github.com/bcomnes/deploy-to-neocities/commit/bfa04d557bd13931d6b441e2455b525dc076ed26)
## [v1.1.5](https://github.com/bcomnes/deploy-to-neocities/compare/v1.1.4...v1.1.5) - 2022-05-06
### Merged
- chore(deps-dev): bump standard from 16.0.4 to 17.0.0 [`#77`](https://github.com/bcomnes/deploy-to-neocities/pull/77)
- chore(deps): bump @actions/core from 1.6.0 to 1.7.0 [`#78`](https://github.com/bcomnes/deploy-to-neocities/pull/78)
- chore(deps): bump actions/setup-node from 3.1.0 to 3.1.1 [`#76`](https://github.com/bcomnes/deploy-to-neocities/pull/76)
- chore(deps): bump fastify/github-action-merge-dependabot from 3.0.2 to 3.1 [`#75`](https://github.com/bcomnes/deploy-to-neocities/pull/75)
- chore(deps): bump actions/setup-node from 3.0.0 to 3.1.0 [`#74`](https://github.com/bcomnes/deploy-to-neocities/pull/74)
- chore(deps): bump actions/checkout from 2 to 3 [`#73`](https://github.com/bcomnes/deploy-to-neocities/pull/73)
- chore(deps): bump actions/setup-node from 2 to 3.0.0 [`#72`](https://github.com/bcomnes/deploy-to-neocities/pull/72)
### Commits
- Merge pull request #81 from bcomnes/dependabot/npm_and_yarn/actions/core-1.8.0 [`f37bb2b`](https://github.com/bcomnes/deploy-to-neocities/commit/f37bb2b7224fae864b2080db62db15e886dbd6c5)
- chore(deps): bump @actions/core from 1.7.0 to 1.8.0 [`cabfb54`](https://github.com/bcomnes/deploy-to-neocities/commit/cabfb5468a6b1e78144a68c32e2a14c4fdcb839a)
- Merge pull request #80 from bcomnes/dependabot/npm_and_yarn/siteup/cli-2.0.0 [`b36d448`](https://github.com/bcomnes/deploy-to-neocities/commit/b36d44888aa3dce0da455cd8610b988d50925e89)
## [v1.1.4](https://github.com/bcomnes/deploy-to-neocities/compare/v1.1.3...v1.1.4) - 2022-02-17
### Merged
- chore(deps): bump minimatch from 4.2.1 to 5.0.0 [`#71`](https://github.com/bcomnes/deploy-to-neocities/pull/71)
- chore(deps): bump minimatch from 3.1.1 to 4.1.1 [`#70`](https://github.com/bcomnes/deploy-to-neocities/pull/70)
### Commits
- Update README.md [`beaded6`](https://github.com/bcomnes/deploy-to-neocities/commit/beaded6a96f0378b2111dc87c379edf035411108)
## [v1.1.3](https://github.com/bcomnes/deploy-to-neocities/compare/v1.1.2...v1.1.3) - 2022-02-08
### Merged
- chore(deps): bump fastify/github-action-merge-dependabot from 2.7.1 to 3.0.2 [`#66`](https://github.com/bcomnes/deploy-to-neocities/pull/66)
### Commits
- Document the use of concurrency [`f160bcd`](https://github.com/bcomnes/deploy-to-neocities/commit/f160bcde052d8794acf67ffb8e7c042e3c721b37)
- Update neocities.yml [`223504c`](https://github.com/bcomnes/deploy-to-neocities/commit/223504cb704aeba9aed8b354f4e53aa15593f8dd)
- Update release.yml [`898a858`](https://github.com/bcomnes/deploy-to-neocities/commit/898a85881d00f10f1ddc66d430f80b89d6ed467d)
## [v1.1.2](https://github.com/bcomnes/deploy-to-neocities/compare/v1.1.1...v1.1.2) - 2022-02-08
### Merged
- add additional website [`#69`](https://github.com/bcomnes/deploy-to-neocities/pull/69)
- Add "Frontier Corps" [`#68`](https://github.com/bcomnes/deploy-to-neocities/pull/68)
- chore(deps): bump actions/setup-node from 2.5.0 to 2.5.1 [`#67`](https://github.com/bcomnes/deploy-to-neocities/pull/67)
- chore(deps): bump fastify/github-action-merge-dependabot [`#65`](https://github.com/bcomnes/deploy-to-neocities/pull/65)
- chore(deps): bump fastify/github-action-merge-dependabot [`#64`](https://github.com/bcomnes/deploy-to-neocities/pull/64)
- chore(deps): bump actions/setup-node from 2.4.1 to 2.5.0 [`#63`](https://github.com/bcomnes/deploy-to-neocities/pull/63)
- chore(deps): bump fastify/github-action-merge-dependabot [`#57`](https://github.com/bcomnes/deploy-to-neocities/pull/57)
- chore(deps): bump actions/checkout from 2.3.5 to 2.4.0 [`#56`](https://github.com/bcomnes/deploy-to-neocities/pull/56)
- chore(deps): bump actions/checkout from 2.3.4 to 2.3.5 [`#55`](https://github.com/bcomnes/deploy-to-neocities/pull/55)
- chore(deps): bump @actions/core from 1.5.0 to 1.6.0 [`#54`](https://github.com/bcomnes/deploy-to-neocities/pull/54)
- chore(deps): bump actions/setup-node from 2.4.0 to 2.4.1 [`#53`](https://github.com/bcomnes/deploy-to-neocities/pull/53)
- chore(deps): bump fastify/github-action-merge-dependabot [`#52`](https://github.com/bcomnes/deploy-to-neocities/pull/52)
- chore(deps-dev): bump cpx2 from 3.0.2 to 4.0.0 [`#51`](https://github.com/bcomnes/deploy-to-neocities/pull/51)
### Commits
- Update site build and ncc [`def67b4`](https://github.com/bcomnes/deploy-to-neocities/commit/def67b48c6e28d4544d8dd2bd92ab53f56856e40)
- Fix static deploy stuff with bandaid [`fea2d47`](https://github.com/bcomnes/deploy-to-neocities/commit/fea2d4780c7a98bcdf5738c1c259d66ff115326a)
## [v1.1.1](https://github.com/bcomnes/deploy-to-neocities/compare/v1.1.0...v1.1.1) - 2021-08-20
### Merged
- chore(deps): bump @actions/core from 1.4.0 to 1.5.0 [`#50`](https://github.com/bcomnes/deploy-to-neocities/pull/50)
- chore(deps): bump fastify/github-action-merge-dependabot [`#49`](https://github.com/bcomnes/deploy-to-neocities/pull/49)
- chore(deps): bump fastify/github-action-merge-dependabot [`#48`](https://github.com/bcomnes/deploy-to-neocities/pull/48)
- chore(deps): bump actions/setup-node from 2.3.2 to 2.4.0 [`#47`](https://github.com/bcomnes/deploy-to-neocities/pull/47)
- chore(deps): bump actions/setup-node from 2.3.1 to 2.3.2 [`#46`](https://github.com/bcomnes/deploy-to-neocities/pull/46)
- chore(deps): bump actions/setup-node from 2.3.0 to 2.3.1 [`#45`](https://github.com/bcomnes/deploy-to-neocities/pull/45)
- chore(deps): bump actions/setup-node from 2.2.0 to 2.3.0 [`#44`](https://github.com/bcomnes/deploy-to-neocities/pull/44)
- chore(deps): bump fastify/github-action-merge-dependabot [`#43`](https://github.com/bcomnes/deploy-to-neocities/pull/43)
- chore(deps): bump actions/setup-node from 2.1.5 to 2.2.0 [`#42`](https://github.com/bcomnes/deploy-to-neocities/pull/42)
- chore(deps): bump @actions/core from 1.3.0 to 1.4.0 [`#41`](https://github.com/bcomnes/deploy-to-neocities/pull/41)
- chore(deps): bump fastify/github-action-merge-dependabot [`#40`](https://github.com/bcomnes/deploy-to-neocities/pull/40)
- chore(deps): bump fastify/github-action-merge-dependabot [`#39`](https://github.com/bcomnes/deploy-to-neocities/pull/39)
- chore(deps): bump @actions/core from 1.2.7 to 1.3.0 [`#38`](https://github.com/bcomnes/deploy-to-neocities/pull/38)
- chore(deps-dev): bump gh-release from 5.0.2 to 6.0.0 [`#37`](https://github.com/bcomnes/deploy-to-neocities/pull/37)
- chore(deps): bump actions/checkout from 2 to 2.3.4 [`#36`](https://github.com/bcomnes/deploy-to-neocities/pull/36)
- chore(deps): bump @actions/core from 1.2.6 to 1.2.7 [`#35`](https://github.com/bcomnes/deploy-to-neocities/pull/35)
- chore(deps): bump fastify/github-action-merge-dependabot from v1.2.1 to v2.0.0 [`#34`](https://github.com/bcomnes/deploy-to-neocities/pull/34)
- chore(deps): bump fastify/github-action-merge-dependabot [`#33`](https://github.com/bcomnes/deploy-to-neocities/pull/33)
- chore(deps): bump fastify/github-action-merge-dependabot from v1.1.1 to v1.2.0 [`#32`](https://github.com/bcomnes/deploy-to-neocities/pull/32)
- chore(deps): bump actions/setup-node from v2.1.4 to v2.1.5 [`#31`](https://github.com/bcomnes/deploy-to-neocities/pull/31)
### Commits
- Update README.md [`c20b64e`](https://github.com/bcomnes/deploy-to-neocities/commit/c20b64ec3ad139d92da5bd9d956cd86bd3786850)
## [v1.1.0](https://github.com/bcomnes/deploy-to-neocities/compare/v1.0.17...v1.1.0) - 2021-01-17
### Commits
- Add a protected_files input that accepts a minimatch glob [`cadbbff`](https://github.com/bcomnes/deploy-to-neocities/commit/cadbbff70037001f49c979fa34e897efac8ab455)
- fix site static glob [`7c7e908`](https://github.com/bcomnes/deploy-to-neocities/commit/7c7e9080c03ebfe9f1517c579e38245260ce4f0b)
- clean up [`b2793d2`](https://github.com/bcomnes/deploy-to-neocities/commit/b2793d2827cbd21b3b2f38ac7a2da6ffafb7fcc8)
## [v1.0.17](https://github.com/bcomnes/deploy-to-neocities/compare/v1.0.16...v1.0.17) - 2021-01-17
### Merged
- Fix problems with large deploys by updating to async-neocities 2.0.0 [`#30`](https://github.com/bcomnes/deploy-to-neocities/pull/30)
- Adding Neofeed! [`#29`](https://github.com/bcomnes/deploy-to-neocities/pull/29)
### Commits
- Use standard breaking change identifier [`64c177b`](https://github.com/bcomnes/deploy-to-neocities/commit/64c177bb7cb6c7cc33e53c6198218e02be928a61)
## [v1.0.16](https://github.com/bcomnes/deploy-to-neocities/compare/v1.0.15...v1.0.16) - 2021-01-04
### Merged
- chore(deps): bump ms from 2.1.2 to 2.1.3 [`#27`](https://github.com/bcomnes/deploy-to-neocities/pull/27)
### Commits
- Update README.md [`362544e`](https://github.com/bcomnes/deploy-to-neocities/commit/362544ea6507553a7155f083e222c502f1863663)
## [v1.0.15](https://github.com/bcomnes/deploy-to-neocities/compare/v1.0.14...v1.0.15) - 2021-01-03
### Merged
- Clarify usage instructions [`#26`](https://github.com/bcomnes/deploy-to-neocities/pull/26)
- chore(deps): bump actions/setup-node from v2.1.3 to v2.1.4 [`#25`](https://github.com/bcomnes/deploy-to-neocities/pull/25)
- chore(deps): bump actions/setup-node from v2.1.2 to v2.1.3 [`#23`](https://github.com/bcomnes/deploy-to-neocities/pull/23)
- chore(deps-dev): bump sitedown from 4.0.0 to 5.0.0 [`#24`](https://github.com/bcomnes/deploy-to-neocities/pull/24)
- chore(deps-dev): bump standard from 15.0.1 to 16.0.0 [`#22`](https://github.com/bcomnes/deploy-to-neocities/pull/22)
- chore(deps-dev): bump standard from 14.3.4 to 15.0.0 [`#21`](https://github.com/bcomnes/deploy-to-neocities/pull/21)
- chore(deps): bump bcomnes/npm-bump from v2.0.1 to v2.0.2 [`#20`](https://github.com/bcomnes/deploy-to-neocities/pull/20)
- chore(deps): bump actions/setup-node from v2.1.1 to v2.1.2 [`#19`](https://github.com/bcomnes/deploy-to-neocities/pull/19)
- chore(deps): bump @actions/core from 1.2.5 to 1.2.6 [`#18`](https://github.com/bcomnes/deploy-to-neocities/pull/18)
- chore(deps-dev): bump cpx2 from 2.0.0 to 3.0.0 [`#17`](https://github.com/bcomnes/deploy-to-neocities/pull/17)
- chore(deps): bump bcomnes/npm-bump from v1.0.5 to v2.0.0 [`#16`](https://github.com/bcomnes/deploy-to-neocities/pull/16)
- chore(deps): bump bcomnes/npm-bump from v1.0.4 to v1.0.5 [`#14`](https://github.com/bcomnes/deploy-to-neocities/pull/14)
### Commits
- Print file stats when things error. [`06656fc`](https://github.com/bcomnes/deploy-to-neocities/commit/06656fc11f192e070e32acf845e0631d792ee424)
- Update test.yml [`e95b2b3`](https://github.com/bcomnes/deploy-to-neocities/commit/e95b2b3a311c11aa7a114d96f773013eef154f52)
- Update release.yml [`4434482`](https://github.com/bcomnes/deploy-to-neocities/commit/44344826ee59da570fa6d5ca8285cd67693cc6aa)
## [v1.0.14](https://github.com/bcomnes/deploy-to-neocities/compare/v1.0.13...v1.0.14) - 2020-09-14
### Merged
- chore(deps-dev): bump gh-release from 3.5.0 to 4.0.0 [`#15`](https://github.com/bcomnes/deploy-to-neocities/pull/15)
## [v1.0.13](https://github.com/bcomnes/deploy-to-neocities/compare/v1.0.12...v1.0.13) - 2020-09-11
### Commits
- Delete package-lock.json [`88a58f8`](https://github.com/bcomnes/deploy-to-neocities/commit/88a58f8af50d3ac41ce4e523597434adfa6fa304)
- Update .gitignore [`9e5dafc`](https://github.com/bcomnes/deploy-to-neocities/commit/9e5dafc00e3910658e768ad1de0a08512399bd51)
## [v1.0.12](https://github.com/bcomnes/deploy-to-neocities/compare/v1.0.11...v1.0.12) - 2020-09-11
### Merged
- chore(deps): bump actions/setup-node from v1 to v2.1.1 [`#12`](https://github.com/bcomnes/deploy-to-neocities/pull/12)
- chore(deps): bump @actions/core from 1.2.4 to 1.2.5 [`#13`](https://github.com/bcomnes/deploy-to-neocities/pull/13)
### Commits
- Create release.yml [`9a79230`](https://github.com/bcomnes/deploy-to-neocities/commit/9a792306ca7b0d9800de1c6e1cb995070caf67df)
- Create dependabot.yml [`6975b43`](https://github.com/bcomnes/deploy-to-neocities/commit/6975b431197241de88beb858e612dfc5c37f414f)
- Update release.yml [`0460e9f`](https://github.com/bcomnes/deploy-to-neocities/commit/0460e9fd782d26030ab102877961cfe0f0aa7b2e)
## [v1.0.11](https://github.com/bcomnes/deploy-to-neocities/compare/v1.0.10...v1.0.11) - 2020-08-05
### Merged
- action.yml: remove invalid syntax [`#11`](https://github.com/bcomnes/deploy-to-neocities/pull/11)
- bug: fix import [`#9`](https://github.com/bcomnes/deploy-to-neocities/pull/9)
### Fixed
- action.yml: remove invalid syntax [`#10`](https://github.com/bcomnes/deploy-to-neocities/issues/10)
### Commits
- Update README.md [`bfabadd`](https://github.com/bcomnes/deploy-to-neocities/commit/bfabadd110556a72b29314df1349f4efb3e1566c)
- Update README.md [`63006b6`](https://github.com/bcomnes/deploy-to-neocities/commit/63006b6b6bbb3276fd1130b23398c468196dc34b)
- Update README.md [`7dce130`](https://github.com/bcomnes/deploy-to-neocities/commit/7dce130590457fee0823c708884cb309fedc140c)
## [v1.0.10](https://github.com/bcomnes/deploy-to-neocities/compare/v1.0.9...v1.0.10) - 2020-04-30
### Fixed
- bug: fix import [`#8`](https://github.com/bcomnes/deploy-to-neocities/issues/8)
## [v1.0.9](https://github.com/bcomnes/deploy-to-neocities/compare/v1.0.8...v1.0.9) - 2020-04-30
### Merged
- Update auto-changelog to the latest version 🚀 [`#6`](https://github.com/bcomnes/deploy-to-neocities/pull/6)
- Update @actions/core to the latest version 🚀 [`#7`](https://github.com/bcomnes/deploy-to-neocities/pull/7)
- Update @zeit/ncc to the latest version 🚀 [`#5`](https://github.com/bcomnes/deploy-to-neocities/pull/5)
- Update @actions/core to the latest version 🚀 [`#4`](https://github.com/bcomnes/deploy-to-neocities/pull/4)
### Commits
- Fix semver ranges for runtime deps [`ac44455`](https://github.com/bcomnes/deploy-to-neocities/commit/ac444553521612a3cab840b54287356c2e585971)
- fix(package): update @actions/core to version 1.2.4 [`bc980f6`](https://github.com/bcomnes/deploy-to-neocities/commit/bc980f6378b06e225a7854b65ef9de27838eba18)
- chore(package): update auto-changelog to version 2.0.0 [`7059bca`](https://github.com/bcomnes/deploy-to-neocities/commit/7059bcae5305d0068f02b2268f76e626ff0a940a)
## [v1.0.8](https://github.com/bcomnes/deploy-to-neocities/compare/v1.0.7...v1.0.8) - 2020-02-29
### Merged
- Switch to usign ncc for building [`#3`](https://github.com/bcomnes/deploy-to-neocities/pull/3)
### Fixed
- docs: typo [`#2`](https://github.com/bcomnes/deploy-to-neocities/issues/2)
### Commits
- feat: use ncc for building [`eb2da96`](https://github.com/bcomnes/deploy-to-neocities/commit/eb2da9674524db0456b21bfabd3fc05d843f061d)
- chore: fixpack [`9662b25`](https://github.com/bcomnes/deploy-to-neocities/commit/9662b257f1ba81d5b9215bba451d951886298515)
- chore: update actions [`4fd7e5d`](https://github.com/bcomnes/deploy-to-neocities/commit/4fd7e5d4a94e6a7b682d45edc98ece52c9a8cd87)
## [v1.0.7](https://github.com/bcomnes/deploy-to-neocities/compare/v1.0.6...v1.0.7) - 2020-02-18
### Commits
- docs: improve description [`fc6bef9`](https://github.com/bcomnes/deploy-to-neocities/commit/fc6bef9a9b0942102f64e1ef8d5dae0f1290ac95)
## [v1.0.6](https://github.com/bcomnes/deploy-to-neocities/compare/v1.0.5...v1.0.6) - 2020-02-18
### Commits
- docs: fix static build [`106936a`](https://github.com/bcomnes/deploy-to-neocities/commit/106936af614ae58e7acdf2f8b5df1478860e46b6)
## [v1.0.5](https://github.com/bcomnes/deploy-to-neocities/compare/v1.0.4...v1.0.5) - 2020-02-18
### Commits
- bug: fix internal stat check bug [`24cfb5b`](https://github.com/bcomnes/deploy-to-neocities/commit/24cfb5bc53d4ab90387a5209282f0a30a06a6134)
## [v1.0.4](https://github.com/bcomnes/deploy-to-neocities/compare/v1.0.3...v1.0.4) - 2020-02-18
### Commits
- feat: add website [`648ade5`](https://github.com/bcomnes/deploy-to-neocities/commit/648ade5fef90e498638f7c584e7ee504641db809)
- chore: remove debug statements [`06e20d6`](https://github.com/bcomnes/deploy-to-neocities/commit/06e20d6efaabb4b5fad11bffa2d2dd1fc7c94245)
- chore: debugging statements [`5ed114b`](https://github.com/bcomnes/deploy-to-neocities/commit/5ed114bebecb242e06371f8b3d844aaa01cd17dd)
## [v1.0.3](https://github.com/bcomnes/deploy-to-neocities/compare/v1.0.2...v1.0.3) - 2020-02-18
### Commits
@ -563,19 +15,19 @@ Generated by [`auto-changelog`](https://github.com/CookPete/auto-changelog).
- refactor: clean up pacakge.json a bit [`7bb0bde`](https://github.com/bcomnes/deploy-to-neocities/commit/7bb0bdec75f6ec26707d036479c84b40f20f6ba8)
- docs: improve docs [`cc11705`](https://github.com/bcomnes/deploy-to-neocities/commit/cc11705059c58406f070b67a4f3456a040599ae5)
## [v1.0.2](https://github.com/bcomnes/deploy-to-neocities/compare/v1.0.1...v1.0.2) - 2020-02-17
## [v1.0.2](https://github.com/bcomnes/deploy-to-neocities/compare/v1.0.1...v1.0.2) - 2020-02-18
### Commits
- bug: fix input name [`14f0fea`](https://github.com/bcomnes/deploy-to-neocities/commit/14f0feaed2b52c65b7a9fac2a8c5437f75a3b033)
## [v1.0.1](https://github.com/bcomnes/deploy-to-neocities/compare/v1.0.0...v1.0.1) - 2020-02-17
## [v1.0.1](https://github.com/bcomnes/deploy-to-neocities/compare/v1.0.0...v1.0.1) - 2020-02-18
### Commits
- docs: typos [`91c25ab`](https://github.com/bcomnes/deploy-to-neocities/commit/91c25ab7221a139f318ed7ef4a6518d5a64debe8)
## [v1.0.0](https://github.com/bcomnes/deploy-to-neocities/compare/v0.0.11...v1.0.0) - 2020-02-17
## [v1.0.0](https://github.com/bcomnes/deploy-to-neocities/compare/v0.0.11...v1.0.0) - 2020-02-18
### Merged
@ -635,14 +87,14 @@ Generated by [`auto-changelog`](https://github.com/CookPete/auto-changelog).
- refactor: Dramatically simplify logging [`bc86874`](https://github.com/bcomnes/deploy-to-neocities/commit/bc86874ede188f9c33f0b6dfd2e54b25328b1285)
## [v0.0.3](https://github.com/bcomnes/deploy-to-neocities/compare/v0.0.2...v0.0.3) - 2020-02-12
## [v0.0.3](https://github.com/bcomnes/deploy-to-neocities/compare/v0.0.2...v0.0.3) - 2020-02-13
### Commits
- Fix boolean parsing [`19246fa`](https://github.com/bcomnes/deploy-to-neocities/commit/19246fac798151a3ab80666412f72394c0615c32)
- bug: Fix reference bug in logging [`2c52eea`](https://github.com/bcomnes/deploy-to-neocities/commit/2c52eeaa2badbb1bcb3c2520e358fcc088bc3879)
## [v0.0.2](https://github.com/bcomnes/deploy-to-neocities/compare/v0.0.1...v0.0.2) - 2020-02-12
## [v0.0.2](https://github.com/bcomnes/deploy-to-neocities/compare/v0.0.1...v0.0.2) - 2020-02-13
### Commits

View File

@ -1,6 +1,6 @@
MIT License
Copyright (c) 2020 Bret Comnes
Copyright (c) 2019 Bret Comnes
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

106
README.md
View File

@ -1,90 +1,66 @@
# deploy-to-neocities
[![GitHub tag (latest SemVer)](https://img.shields.io/github/v/tag/bcomnes/deploy-to-neocities)](https://github.com/bcomnes/deploy-to-neocities)
[![Actions Status](https://github.com/bcomnes/deploy-to-neocities/workflows/tests/badge.svg)](https://github.com/bcomnes/deploy-to-neocities/actions)
![Deploy to neocities](https://github.com/bcomnes/deploy-to-neocities/workflows/Deploy%20to%20neocities/badge.svg)
[![Marketplace link](https://img.shields.io/badge/github%20marketplace-deploy--to--neocities-brightgreen)](https://github.com/marketplace/actions/deploy-to-neocities)
[![Neocities][neocities-img]](https://deploy-to-neocities.neocities.org)
<center><img src="static/logo.png"></center>
<center><img src="logo.png"></center>
Efficiently deploy a website to [Neocities][nc] using [Github actions](https://github.com/features/actions). Uses content aware diffing to only update files that changed.
Alternatively, you can use the bin helper in [async-neocities](https://github.com/bcomnes/async-neocities) to deploy to neocities locally from your own machine as well as in CI.
Efficiently deploy a website to [Neocities][nc].
## Usage
```yaml
name: Deploy to neocities
# only run on changes to main. Use main or master depending on whatever your default branch is called.
# only run on changes to master
on:
push:
branches:
- main
concurrency: # prevent concurrent deploys doing strange things
group: deploy-to-neocities
cancel-in-progress: true
- master
jobs:
deploy:
runs-on: ubuntu-latest
steps:
# The checkout step copies your repo into the action runner. Important!
- uses: actions/checkout@v4
- uses: actions/checkout@v1
# Set up any tools and build steps here
# This example uses a Node.js toolchain to build a site
# If you don't need Node.js to build your site, you can omit this.
- name: Use Node.js
uses: actions/setup-node@v4
uses: actions/setup-node@v1
with:
node-version: lts/*
# If you have a different build process, replace this with your own build steps
node-version: 12
- name: Install deps and build
run: |
npm i
npm run build
# When the dist_dir is ready, deploy it to neocities
# Here we deploy the folder named `public`
- name: Deploy to neocities
uses: bcomnes/deploy-to-neocities@v3
uses: bcomnes/deploy-to-neocities@v1
with:
api_key: ${{ secrets.NEOCITIES_API_TOKEN }}
api_token: ${{ secrets.NEOCITIES_API_TOKEN }}
cleanup: false
neocities_supporter: false # set this to true if you have a supporter account and want to bypass unsuported files filter.
preview_before_deploy: true # print a deployment plan prior to waiting for files to upload.
dist_dir: public
```
- 💻 [Example YML](.github/workflows/neocities.yml)
- 🌎 [Example Deploy](https://deploy-to-neocities.neocities.org)
Create a workflow `.yml` file in your repository's `.github/workflows` directory. An [example workflow](#example-workflow) is available below. For more information, reference the GitHub Help Documentation for [Creating a workflow file](https://help.github.com/en/articles/configuring-a-workflow#creating-a-workflow-file).
Create a workflow `.yml` file in your repository's `.github/workflows` directory. An [example workflow](#example-workflow) is available above. For more information, reference the GitHub Help Documentation for [Creating a workflow file](https://help.github.com/en/articles/configuring-a-workflow#creating-a-workflow-file).
You'll need the API token for your site. Go to:
Get your sites API token and set a [secret][sec] called `NEOCITIES_API_TOKEN`. Set the `api_token` input on your `deploy-to-neocities` action to `NEOCITIES_API_TOKEN`.
```
https://neocities.org/settings/{{your-sitename}}#api_key
https://neocities.org/settings/{{sitename}}#api_key
```
Retrieve your site's API token from Neocities. In your GitHub repository, set a [secret][sec] called `NEOCITIES_API_TOKEN`. Set the `api_token` input on your `deploy-to-neocities` action to `${{ secrets.NEOCITIES_API_TOKEN }}` as in the example above. The setting (as of writing) lives in Repository > Settings > Security > Secrets and Variables > Actions > Repository secrets.
During your workflow, generate the files you want to deploy to [Neocities][nc] into a directory. Set this as the `dist_dir` directory in your workflow (the default is `public`). You can use any tools to generate your site that can be installed or brought into the Github actions environment.
During your workflow, generate the files you want to deploy to [Neocities][nc] into a `dist_dir` directory. You can use any tools that can be installed or brought into the Github actions environment.
Once the build is complete, `deploy-to-neocities` will efficiently upload all new and all changed files to Neocities. Any files on Neocities that don't exist in the `dist_dir` are considered 'orphaned' files. To destructively remove these 'orphaned' files, set the `cleanup` input to `true`.
You most likely only want to run this on the `master` branch so that only changes committed to `master` result in website updates. Running a test build that does not deploy on all branches and PRs can help catch changes that will break the build.
You most likely only want to run this on the `master` branch so that only changes committed to `master` result in website updates.
### Inputs
- `api_key` (**REQUIRED**): The API token for your [Neocities][nc] website to deploy to.
- `dist_dir`: The directory to deploy to [Neocities][nc]. Default: `public`. Don't deploy your root repo directory (e.g. `./`). It contains `.git`, `.github` and other files that won't deploy properly to neocities. Keep it clean by keeping or building your site into a subdir and deploying that.
- `neocities_supporter`: Set this to `true` if you have a paid neocities account and want to bypass the [unsupported files filter](https://neocities.org/site_files/allowed_types).
- `api_token` (**REQUIRED**): The API token for your [Neocities][nc] website to deploy to.
- `dist_dir`: The directory to deploy to [Neocities][nc]. Default: `public`.
- `cleanup`: Boolean string (`true` or `false`). If `true`, `deploy-to-neocities` will destructively delete files found on [Neocities][nc] not found in your `dist_dir`. Default: `false`.
- `preview_before_deploy`: Boolean string (`true` or `false`). If `true`, `deploy-to-neocities` will print a preview of the files that will be uploaded and deleted. Default: `true`.
- `protected_files`: An optional glob string used to mark files as protected. Protected files are never cleaned up. Test this option out with `cleanup` set to false before relying on it. Protected files are printed when `cleanup` is set to true or false. Glob strings are processed by [minimatch](https://github.com/isaacs/minimatch) against remote neocities file paths. Protected files can still be updated.
### Outputs
@ -97,7 +73,7 @@ None.
[Neocities][nc] offers a bunch of nice properties:
- Neocities CDN uses a pure [anycast](https://en.wikipedia.org/wiki/Anycast) network providing efficient content serving no matter where your visitors are located around the world.
- Anycast doesn't require special DNS records to achieve geolocation routing characteristics. Simple `A` and `AAAA` records are all you need. Bare names and all!
- Anycast doesn't require special DNS records to achieve geolocation routing characteristics. Simple `A` and `AAAA` records are all you need.
- Neocities owns its own [ARIN](https://en.wikipedia.org/wiki/American_Registry_for_Internet_Numbers) IP block and has its own [BGP](https://en.wikipedia.org/wiki/Border_Gateway_Protocol) peering agreements, eliminating entire layers of bureaucracy between your content and the rest of the Internet typical of all major cloud providers.
- Far faster cold cache access than other popular static hosting services. Perfect for personal websites, projects and other infrequently accessed documents.
- Simple and understandable feature set. You can hand upload and edit files along side built/deployed assets.
@ -108,12 +84,11 @@ None.
- Offers simple, Google-free site analytics.
- Makes accepting tips a breeze.
- Bring your own CI environment, or don't.
- Free https via [Let's Encrypt](https://blog.neocities.org/blog/2016/11/10/switching-to-default-ssl.html).
- Cute cat logo.
- [Support the distributed web](https://neocities.org/distributed-web). Built in IPFS support.
- [Beginner friendly docs](https://neocities.org/tutorials) for learning how to make your own websites.
### What are some of the drawbacks compared to Netlify/Vercel?
### What are some of the drawbacks compared to Netlify/Zeit?
- Not appropriate for hyper traffic commercial sites most likely.
- No concept of a Deploy or atomicity when changing files.
@ -122,60 +97,15 @@ None.
- No deploy previews.
- No Github Deploys API support (yet).
## Sites using deploy-to-neocities
- https://github.com/bcomnes/bret.io ([bret.io](https://bret.io))
- https://github.com/ecomnes/elenacomnes.com ([elenacomnes.com](https://elenacomnes.com))
- https://github.com/gumcast/gumcast-client ([gumcast.com](https://gumcast.com))
- https://github.com/bcomnes/deploy-to-neocities/blob/master/.github/workflows/neocities.yml ([deploy-to-neocities.neocities.org](https://deploy-to-neocities.neocities.org))
- [Zambonifofex/stories](https://github.com/Zambonifofex/stories) ([zamstories.neocities.org](https://zamstories.neocities.org))
- [Your Neofeed](https://github.com/victoriadrake/neocities-neofeed), (っ◔◡◔)っ a personal timeline for Neocities and GitHub Pages.
- https://speakscribe.com
- https://geno7.neocities.org
- https://github.com/M1ssM0ss/deploy-to-neocities-template
- https://nelson.neocities.org
- https://flamedfury.com
- https://keb.neocities.org
- https://missmoss.neocities.org
- https://rarebit.neocities.org
- https://cavacado.neocities.org
- https://wanderinginn.neocities.org
- https://andri.dk/blog/2019/2021/deploy-static-websites-anywhere/
- https://github.com/PersonMeetup/frontiercorps ([frontiercorps.neocities.org](https://frontiercorps.neocities.org/))
- https://github.com/riastrad/cyberbspace ([cyberb.space](https://cyberb.space))
- https://github.com/rogerahuntley/neocities-site ([stealdog.neocities.org](https://stealdog.neocities.org))
- https://github.com/ConorSheehan1/conorsheehan1.neocities.org ([conorsheehan1.neocities.org](https://conorsheehan1.neocities.org))
- https://github.com/bechnokid/neocities ([bechnokid.neocities.org](https://bechnokid.neocities.org))
- https://github.com/lime360/website ([lime360.neocities.org](https://lime360.neocities.org))
- https://obspogon.neocities.org/
- https://profsugoi.neocities.org/
- https://github.com/tencurse/neocities ([10kph.neocities.org](https://10kph.neocities.org/))
- https://github.com/alephdfilms/neocities/ ([alephd.neocities.org](https://alephd.neocities.org/))
- https://sacred.neocities.org/ (https://github.com/M-Valentino/sacredOS)
- https://lenp.net/ (https://github.com/Len42/web-site)
- <https://punkfairie.net> (<https://github.com/punkfairie/punkfairie-site>)
- https://github.com/jefbecker/jefbecker.com ([jefbecker.com](https://jefbecker.com))
- https://github.com/MymeType/personal-website ([mymetype.neocities.org](https://mymetype.neocities.org/))
- https://github.com/oceanthunder/oceanthunder.github.io ([oceanthunder.dev](https://www.oceanthunder.dev))
- [See more!](https://github.com/bcomnes/deploy-to-neocities/network/dependents)
- ...PR your site when you set it up!
## See also
- [async-neocities](https://ghub.io/async-neocities): diffing engine used for action.
- [Neocities API Docs](https://neocities.org/api)
- [neocities/neocities-node](https://github.com/neocities/neocities-node): Official Node API
- [jonchang/deploy-neocities](https://github.com/jonchang/deploy-neocities): An alternative docker + official ruby client based action similar to this one.
- [M1ssM0ss/deploy-to-neocities-template](https://github.com/M1ssM0ss/deploy-to-neocities-template): a template repo ready for cloning using deploy-to-neocities.
- [professorsugoi/Deploy-Astro-Neocities](https://github.com/professorsugoi/Deploy-Astro-Neocities): a template repo for projects built with Astro. uses deploy-to-neocities.
## CHANGELOG
See [changelog.md](CHANGELOG.md)
[qs]: https://ghub.io/qs
[nf]: https://ghub.io/node-fetch
[fd]: https://ghub.io/form-data
[nc]: https://neocities.org
[sec]: https://help.github.com/en/actions/configuring-and-managing-workflows/creating-and-storing-encrypted-secrets
[neocities-img]: https://img.shields.io/website/https/siteup.neocities.org?label=neocities&logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAAXNSR0IArs4c6QAAAGhlWElmTU0AKgAAAAgABAEGAAMAAAABAAIAAAESAAMAAAABAAEAAAEoAAMAAAABAAIAAIdpAAQAAAABAAAAPgAAAAAAA6ABAAMAAAABAAEAAKACAAQAAAABAAAAIKADAAQAAAABAAAAIAAAAAAueefIAAACC2lUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNS40LjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyI+CiAgICAgICAgIDx0aWZmOk9yaWVudGF0aW9uPjE8L3RpZmY6T3JpZW50YXRpb24+CiAgICAgICAgIDx0aWZmOlBob3RvbWV0cmljSW50ZXJwcmV0YXRpb24+MjwvdGlmZjpQaG90b21ldHJpY0ludGVycHJldGF0aW9uPgogICAgICAgICA8dGlmZjpSZXNvbHV0aW9uVW5pdD4yPC90aWZmOlJlc29sdXRpb25Vbml0PgogICAgICAgICA8dGlmZjpDb21wcmVzc2lvbj4xPC90aWZmOkNvbXByZXNzaW9uPgogICAgICA8L3JkZjpEZXNjcmlwdGlvbj4KICAgPC9yZGY6UkRGPgo8L3g6eG1wbWV0YT4Kpl32MAAABzBJREFUWAnFVwtwnFUV/v5//31ks5tsE9I8moS0iWETSNKUVpBKDKFQxtrCUIpacHQEGYk16FQHaZ3ajjqjOGWqOKUyMCl2xFoKhQJDBQftpOnAmDZoOyRNjCS1SdO8H5vXPv7rd/7NZvIipQjjmfn23Me555x77rnnv6sppTT8H0n/tG1rmlZIVBG+eW1JBD4t0GA8cYZQcS7ncXL7bFuYPfBJ9mlwtxg3bJoSTvx0tn7LAU48IJNE3GyBj9unrlJC2XRt4vGvLFGGrkXYDxEl03WyDyfRRoiHrxOfiBPU85bovPezi5pHnlmhHq5IsaLAXHhltgPXi+A0VE8X+Dht6lov+uw2rf/8nmIlDjQ+fp1yO/SYnaKYXoOC5QSu8trgddnND7rHv0EvOymwTcbnI867OZ5PLCOKiUIijQgS54nPE3hsfXog2WNY2Z+V5MDXVifjd3/ths/jquL0QyIj9EdC3V6UoLr25KurU73D0ieOEIniKbkc063EduLPRDcR2828/DOpzrbBp0ut3UsEBMe3X2PJuhw2sWHplgjkEViyyBGM93gcf3kkxVP2hNZ1sWfoLg7/jbttJC8jMgiLHHYj4EuIb81I9gQLM92O0iyH+9pUlZSdGDHCJjA0biI/zZ3NxIstsfjKpfFYmROHutYxDwduIo6JAxI6LIq3cSmtpCSg9jF3UsXuix2tHb3L7YZevHRx/FBZvrNzTaEnLTfFQHaSna6CSrghjbVMJzRbtC1KFqC1xT5xAFdnZdxPMcsBS1wpDLHhEoWpiXbj3R8mZ1zoT0Caz677PE4fdDunJYIzd2UtvoKfWwq9+PnRiwgMDd5RX/PGVRIBixLjbNNKpQaP1wO/NzYb47ON0yEzAhUJQjOYJhKFy9DybDcyk+y40DeSdOz5J+5h7CBAxDQdl1k7d5rGHWW74Cz/GdM0gQGSWr
MwxTl0VBRSlnSmoblMjIel0zkgN+gKSDFl7G7YMm+C4d8Ix4pvQ4XGPpKC8snQ/vPfvYXiwPuy6tylK3RAFokTpuU/NF8u08dAzbkA/nCylyVeBOanJawJQpcGxjMkB04QdzS0j5ujQVNntZK5BSkwYaIvEEZmQgjm4AeweTOguRah4ZKJdbubeZwKaYl23HptNNQxZeMhE0fqBrDthXZraHTCtKydlF73cFhv67l8FGRnm55sQcGjZ/GTI50IN75kKdMTsywnzMmtj4XmhuDRP13Ag8+2YnA0GrVgWDFmwFld10dN03TXNg2jIMNlKfywn//0BXGyKWBNv904isj5GqjhdmjeJSjMzUDttmUYChpYnS+1ZiY9+IUUrCvxIS/Nic/tbAiOBBkBltoeGn9PRA+c6Jm5Yp5edrIDlWsWw09Ht23IgBrvQ+i9Zy1JcaKE1+zmZTp0c240i7LiwJIPXdPACMnmw9ZriOV2Czu/ES3v7izAdZlx0rw8SQLy/jtu/AEmstfhTP3fcUPRUkS6ziB0eh/M/hZovCkx6ugP4ccvtuO1+gGMMI9IfbGM289j6JSRY/8YEIbmSxM4enoA+2t60MuEm0NyA2xOuL5UDaPgXjQ0NODmW27DgVeOw5a3Dq6Nh2DLWcMnyOjU0v6RME63jloJOjnYZ0VAOozCb8kq4506fG4bOgZCU1fphe/m4osliZNrokwFA3Cs/A7sq6qsgU0bN+LwS9GE9Pv9cLvd8Ofn4Zl7wlC9zXRWSnmUnqvpDVY+1yZ38WgsAjKzX34kNF1DYeQtduLOFT4ceSRvjnFEQrClFMK2/FsIBALYu3evZfw2mxe/Yj1obGzExY4OfPmr98Hu38QCOSGqp+j3tT3RLAZek0SwiMlYxyjIFu6WgX3fzMGNufKonYd49kNGOspLrkdTUxMikQhS4r34tZGDZObEHkccdu3chQ0bNiDc/OoMBQdqe/HOv0aSONhBHJ5yYFLqR+QVoYjyPcT7+mJVLsZ5n988O4gTvHrfX5uKMimjzOJEewhbt25FZ2cnWlpaUF1djdcTR1A6NoH24BiC/E4IKSaiyMuX9OVT/Xh4f5tkn0R+Czc9MOdZzokHLGmuiLPr8qqViqKchqYObcmNvnCeLlajz9+uzGCAOpTiNVabN2+25ETWMAxVV1enzPEBS254X5GqWpsmHwqRkfP4OpdF8y/WmM4psJ3HIVuYMr7n/qwZz6uRp/xq4uQvuSxK4sTBgwfVjh07VH19veInWnW9+j11uDJdlebEj0zqaiC/gSum/gxN3QJOzCA6sIIDv2D0KlhdrWS9Jt2F9aU+FKQ7eeYKi3kaSaur4C29j98lE4P9XWg59z5OnXgDb7/1pvlOY7c5EbYKjug+RFTSeJ90pmi6N/O1KbiKeIqOtJFPhXl6m87OGae8hPoU8SSxaj7dMvahEeCiGUQjcm/LiHLCT8hbUsaGCKk2wqWWNxHykD1LA13kC9JHdmBBLf/D5H8By9d+IkwR5NMAAAAASUVORK5CYII=

View File

@ -1,35 +1,21 @@
name: 'Deploy to Neocities'
description: 'Efficiently deploy a folder to Neocities.org'
description: 'Efficiently deploy a folder to Neocities.org'
branding:
icon: aperture
color: orange
inputs:
api_key: # api token for site to deploy to
description: 'Neocities API key for site to deploy to'
api_token: # api token for site to deploy to
description: 'Neocities API token for site to deploy to'
required: true
dist_dir:
description: 'Local folder to deploy to neocities'
default: 'public'
required: true
neocities_supporter:
description: 'Set to true if you are a Neocities supporter to bypass file type upload restrictions'
default: 'false'
required: true
cleanup:
description: Delete orphaned files on neocities that don't exist in distDir
default: 'false'
default: false
required: true
preview_before_deploy:
description: 'Set to true if you want to print deploy preview stats prior to deploying.'
default: 'true'
required: false
protected_files:
description: A glob string that prevents matched files from ever being deleted.
required: false
api_token: # api token for site to deploy to
description: 'Neocities API key for site to deploy to'
required: false
deprecationMessage: 'api_token is deprecated, use api_key instead'
outputs: # none
runs:
using: 'node20'
main: 'dist/index.cjs'
using: 'node12'
main: 'index.js'

53632
dist/index.cjs vendored

File diff suppressed because one or more lines are too long

7
dist/index.cjs.map vendored

File diff suppressed because one or more lines are too long

9039
dist/meta.json vendored

File diff suppressed because it is too large Load Diff

View File

@ -1,8 +0,0 @@
import neostandard, { resolveIgnoresFromGitignore } from 'neostandard'
export default neostandard({
ignores: [
...resolveIgnoresFromGitignore(),
'dist'
],
})

31
example.yml Normal file
View File

@ -0,0 +1,31 @@
name: Example usage
on: [push]
jobs:
example:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest]
node: [12]
steps:
- uses: actions/checkout@v1
- name: Use Node.js ${{ matrix.node }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node }}
- name: npm install and build
run: |
npm i
npm run build
env:
CI: true
- name: Deploy to neocities
uses: bcomnes/deploy-to-neocities@master
with:
api-token: ${{ secrets.NEOCITIES_API_TOKEN }}
distDir: public
cleanup: true

View File

@ -1,84 +1,34 @@
import core from '@actions/core'
import {
NeocitiesAPIClient,
printDeployText,
printPreviewText,
printResultsErrorDump,
SimpleTimer
} from 'async-neocities'
import path from 'node:path'
import assert from 'node:assert'
import fs from 'node:fs/promises'
import { minimatch } from 'minimatch'
const core = require('@actions/core')
// const github = require('@actions/github')
const Neocities = require('async-neocities')
const path = require('path')
const ms = require('ms')
const assert = require('nanoassert')
const fsp = require('fs').promises
async function run () {
const key = core.getInput('api_key') || core.getInput('api_token')
async function doDeploy () {
const token = core.getInput('api_token')
const distDir = path.join(process.cwd(), core.getInput('dist_dir'))
const cleanup = JSON.parse(core.getInput('cleanup'))
const neocitiesSupporter = JSON.parse(core.getInput('neocities_supporter'))
const previewDeploy = JSON.parse(core.getInput('preview_before_deploy'))
const protectedFilesGlob = core.getInput('protected_files')
const cleanup = core.getInput('cleanup')
assert(typeof cleanup === 'boolean', '`cleanup` input must be a boolean "true" or "false"')
assert(typeof neocitiesSupporter === 'boolean', '`neocities_supporter` input must be a boolean "true" or "false"')
assert(typeof previewDeploy === 'boolean', '`preview_before_deploy` input must be a boolean "true" or "false"')
assert(typeof cleanup === 'boolean', 'Cleanup input must be a boolean "true" or "false"')
const stat = await fsp.stat()
assert(stat.isDirectory(), 'dist_dir must be a directory that exists')
const stat = await fs.stat(distDir)
const client = new Neocities(token)
assert(stat.isDirectory(), '`dist_dir` input must be a path to a directory that exists')
const client = new NeocitiesAPIClient(key)
if (previewDeploy) {
const previewTimer = new SimpleTimer()
console.log('Running deploy preview prior to deployment...\n\n')
const diff = await client.previewDeploy({
directory: distDir,
includeUnsupportedFiles: neocitiesSupporter,
protectedFileFilter: protectedFilesGlob ? minimatch.filter(protectedFilesGlob) : undefined
})
previewTimer.stop()
printPreviewText({
diff,
timer: previewTimer,
cleanup,
includeUnsupportedFiles: neocitiesSupporter
})
}
const deployTimer = new SimpleTimer()
console.log('Deploying to Neocities...')
const results = await client.deploy({
directory: distDir,
const stats = await client.deploy(distDir, {
cleanup,
includeUnsupportedFiles: neocitiesSupporter,
protectedFileFilter: protectedFilesGlob ? minimatch.filter(protectedFilesGlob) : undefined
statsCb: Neocities.statsHandler()
})
deployTimer.stop()
if (results.errors.length > 0) {
printResultsErrorDump({
results,
timer: deployTimer
})
core.setFailed('The deploy completed with errors.')
} else {
printDeployText({
results,
timer: deployTimer,
cleanup,
includeUnsupportedFiles: neocitiesSupporter
})
}
console.log(`Deployed to Neocities in ${ms(stats.time)}:`)
console.log(` Uploaded ${stats.filesToUpload.length} files`)
console.log(` ${cleanup ? 'Deleted' : 'Orphaned'} ${stats.filesToDelete.length} files`)
console.log(` Skipped ${stats.filesSkipped.length} files`)
}
run().catch(err => {
console.log('Unexpected error/throw during deployment:\n\n')
console.dir(err, { colors: true, depth: 999 })
core.setFailed(err instanceof Error ? err.message : `An unexpected error occurred during deployment: ${err}`)
doDeploy().catch(err => {
console.error(err)
core.setFailed(err.message)
})

View File

Before

Width:  |  Height:  |  Size: 110 KiB

After

Width:  |  Height:  |  Size: 110 KiB

140
node_modules/@actions/core/README.md generated vendored Normal file
View File

@ -0,0 +1,140 @@
# `@actions/core`
> Core functions for setting results, logging, registering secrets and exporting variables across actions
## Usage
### Import the package
```js
// javascript
const core = require('@actions/core');
// typescript
import * as core from '@actions/core';
```
#### Inputs/Outputs
Action inputs can be read with `getInput`. Outputs can be set with `setOutput` which makes them available to be mapped into inputs of other actions to ensure they are decoupled.
```js
const myInput = core.getInput('inputName', { required: true });
core.setOutput('outputKey', 'outputVal');
```
#### Exporting variables
Since each step runs in a separate process, you can use `exportVariable` to add it to this step and future steps environment blocks.
```js
core.exportVariable('envVar', 'Val');
```
#### Setting a secret
Setting a secret registers the secret with the runner to ensure it is masked in logs.
```js
core.setSecret('myPassword');
```
#### PATH Manipulation
To make a tool's path available in the path for the remainder of the job (without altering the machine or containers state), use `addPath`. The runner will prepend the path given to the jobs PATH.
```js
core.addPath('/path/to/mytool');
```
#### Exit codes
You should use this library to set the failing exit code for your action. If status is not set and the script runs to completion, that will lead to a success.
```js
const core = require('@actions/core');
try {
// Do stuff
}
catch (err) {
// setFailed logs the message and sets a failing exit code
core.setFailed(`Action failed with error ${err}`);
}
```
Note that `setNeutral` is not yet implemented in actions V2 but equivalent functionality is being planned.
#### Logging
Finally, this library provides some utilities for logging. Note that debug logging is hidden from the logs by default. This behavior can be toggled by enabling the [Step Debug Logs](../../docs/action-debugging.md#step-debug-logs).
```js
const core = require('@actions/core');
const myInput = core.getInput('input');
try {
core.debug('Inside try block');
if (!myInput) {
core.warning('myInput was not set');
}
// Do stuff
}
catch (err) {
core.error(`Error ${err}, action may still succeed though`);
}
```
This library can also wrap chunks of output in foldable groups.
```js
const core = require('@actions/core')
// Manually wrap output
core.startGroup('Do some function')
doSomeFunction()
core.endGroup()
// Wrap an asynchronous function call
const result = await core.group('Do something async', async () => {
const response = await doSomeHTTPRequest()
return response
})
```
#### Action state
You can use this library to save state and get state for sharing information between a given wrapper action:
**action.yml**
```yaml
name: 'Wrapper action sample'
inputs:
name:
default: 'GitHub'
runs:
using: 'node12'
main: 'main.js'
post: 'cleanup.js'
```
In action's `main.js`:
```js
const core = require('@actions/core');
core.saveState("pidToKill", 12345);
```
In action's `cleanup.js`:
```js
const core = require('@actions/core');
var pid = core.getState("pidToKill");
process.kill(pid);
```

16
node_modules/@actions/core/lib/command.d.ts generated vendored Normal file
View File

@ -0,0 +1,16 @@
interface CommandProperties {
[key: string]: string;
}
/**
* Commands
*
* Command Format:
* ::name key=value,key=value::message
*
* Examples:
* ::warning::This is the message
* ::set-env name=MY_VAR::some value
*/
export declare function issueCommand(command: string, properties: CommandProperties, message: string): void;
export declare function issue(name: string, message?: string): void;
export {};

78
node_modules/@actions/core/lib/command.js generated vendored Normal file
View File

@ -0,0 +1,78 @@
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const os = __importStar(require("os"));
/**
* Commands
*
* Command Format:
* ::name key=value,key=value::message
*
* Examples:
* ::warning::This is the message
* ::set-env name=MY_VAR::some value
*/
function issueCommand(command, properties, message) {
const cmd = new Command(command, properties, message);
process.stdout.write(cmd.toString() + os.EOL);
}
exports.issueCommand = issueCommand;
function issue(name, message = '') {
issueCommand(name, {}, message);
}
exports.issue = issue;
const CMD_STRING = '::';
class Command {
constructor(command, properties, message) {
if (!command) {
command = 'missing.command';
}
this.command = command;
this.properties = properties;
this.message = message;
}
toString() {
let cmdStr = CMD_STRING + this.command;
if (this.properties && Object.keys(this.properties).length > 0) {
cmdStr += ' ';
let first = true;
for (const key in this.properties) {
if (this.properties.hasOwnProperty(key)) {
const val = this.properties[key];
if (val) {
if (first) {
first = false;
}
else {
cmdStr += ',';
}
cmdStr += `${key}=${escapeProperty(val)}`;
}
}
}
}
cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
return cmdStr;
}
}
function escapeData(s) {
return (s || '')
.replace(/%/g, '%25')
.replace(/\r/g, '%0D')
.replace(/\n/g, '%0A');
}
function escapeProperty(s) {
return (s || '')
.replace(/%/g, '%25')
.replace(/\r/g, '%0D')
.replace(/\n/g, '%0A')
.replace(/:/g, '%3A')
.replace(/,/g, '%2C');
}
//# sourceMappingURL=command.js.map

1
node_modules/@actions/core/lib/command.js.map generated vendored Normal file
View File

@ -0,0 +1 @@
{"version":3,"file":"command.js","sourceRoot":"","sources":["../src/command.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAwB;AAQxB;;;;;;;;;GASG;AACH,SAAgB,YAAY,CAC1B,OAAe,EACf,UAA6B,EAC7B,OAAe;IAEf,MAAM,GAAG,GAAG,IAAI,OAAO,CAAC,OAAO,EAAE,UAAU,EAAE,OAAO,CAAC,CAAA;IACrD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,CAAA;AAC/C,CAAC;AAPD,oCAOC;AAED,SAAgB,KAAK,CAAC,IAAY,EAAE,UAAkB,EAAE;IACtD,YAAY,CAAC,IAAI,EAAE,EAAE,EAAE,OAAO,CAAC,CAAA;AACjC,CAAC;AAFD,sBAEC;AAED,MAAM,UAAU,GAAG,IAAI,CAAA;AAEvB,MAAM,OAAO;IAKX,YAAY,OAAe,EAAE,UAA6B,EAAE,OAAe;QACzE,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,iBAAiB,CAAA;SAC5B;QAED,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QACtB,IAAI,CAAC,UAAU,GAAG,UAAU,CAAA;QAC5B,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;IACxB,CAAC;IAED,QAAQ;QACN,IAAI,MAAM,GAAG,UAAU,GAAG,IAAI,CAAC,OAAO,CAAA;QAEtC,IAAI,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;YAC9D,MAAM,IAAI,GAAG,CAAA;YACb,IAAI,KAAK,GAAG,IAAI,CAAA;YAChB,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,UAAU,EAAE;gBACjC,IAAI,IAAI,CAAC,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,EAAE;oBACvC,MAAM,GAAG,GAAG,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;oBAChC,IAAI,GAAG,EAAE;wBACP,IAAI,KAAK,EAAE;4BACT,KAAK,GAAG,KAAK,CAAA;yBACd;6BAAM;4BACL,MAAM,IAAI,GAAG,CAAA;yBACd;wBAED,MAAM,IAAI,GAAG,GAAG,IAAI,cAAc,CAAC,GAAG,CAAC,EAAE,CAAA;qBAC1C;iBACF;aACF;SACF;QAED,MAAM,IAAI,GAAG,UAAU,GAAG,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAA;QACpD,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAED,SAAS,UAAU,CAAC,CAAS;IAC3B,OAAO,CAAC,CAAC,IAAI,EAAE,CAAC;SACb,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAA;AAC1B,CAAC;AAED,SAAS,cAAc,CAAC,CAAS;IAC/B,OAAO,CAAC,CAAC,IAAI,EAAE,CAAC;SACb,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;AACzB,CAAC"}

112
node_modules/@actions/core/lib/core.d.ts generated vendored Normal file
View File

@ -0,0 +1,112 @@
/**
* Interface for getInput options
*/
export interface InputOptions {
/** Optional. Whether the input is required. If required and not present, will throw. Defaults to false */
required?: boolean;
}
/**
* The code to exit an action
*/
export declare enum ExitCode {
/**
* A code indicating that the action was successful
*/
Success = 0,
/**
* A code indicating that the action was a failure
*/
Failure = 1
}
/**
* Sets env variable for this action and future actions in the job
* @param name the name of the variable to set
* @param val the value of the variable
*/
export declare function exportVariable(name: string, val: string): void;
/**
* Registers a secret which will get masked from logs
* @param secret value of the secret
*/
export declare function setSecret(secret: string): void;
/**
* Prepends inputPath to the PATH (for this action and future actions)
* @param inputPath
*/
export declare function addPath(inputPath: string): void;
/**
* Gets the value of an input. The value is also trimmed.
*
* @param name name of the input to get
* @param options optional. See InputOptions.
* @returns string
*/
export declare function getInput(name: string, options?: InputOptions): string;
/**
* Sets the value of an output.
*
* @param name name of the output to set
* @param value value to store
*/
export declare function setOutput(name: string, value: string): void;
/**
* Sets the action status to failed.
* When the action exits it will be with an exit code of 1
* @param message add error issue message
*/
export declare function setFailed(message: string): void;
/**
* Writes debug message to user log
* @param message debug message
*/
export declare function debug(message: string): void;
/**
* Adds an error issue
* @param message error issue message
*/
export declare function error(message: string): void;
/**
* Adds an warning issue
* @param message warning issue message
*/
export declare function warning(message: string): void;
/**
* Writes info to log with console.log.
* @param message info message
*/
export declare function info(message: string): void;
/**
* Begin an output group.
*
* Output until the next `groupEnd` will be foldable in this group
*
* @param name The name of the output group
*/
export declare function startGroup(name: string): void;
/**
* End an output group.
*/
export declare function endGroup(): void;
/**
* Wrap an asynchronous function call in a group.
*
* Returns the same type as the function itself.
*
* @param name The name of the group
* @param fn The function to wrap in the group
*/
export declare function group<T>(name: string, fn: () => Promise<T>): Promise<T>;
/**
* Saves state for current action, the state can only be retrieved by this action's post job execution.
*
* @param name name of the state to store
* @param value value to store
*/
export declare function saveState(name: string, value: string): void;
/**
* Gets the value of an state set by this action's main execution.
*
* @param name name of the state to get
* @returns string
*/
export declare function getState(name: string): string;

202
node_modules/@actions/core/lib/core.js generated vendored Normal file
View File

@ -0,0 +1,202 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const command_1 = require("./command");
const os = __importStar(require("os"));
const path = __importStar(require("path"));
/**
* The code to exit an action
*/
var ExitCode;
(function (ExitCode) {
/**
* A code indicating that the action was successful
*/
ExitCode[ExitCode["Success"] = 0] = "Success";
/**
* A code indicating that the action was a failure
*/
ExitCode[ExitCode["Failure"] = 1] = "Failure";
})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
//-----------------------------------------------------------------------
// Variables
//-----------------------------------------------------------------------
/**
* Sets env variable for this action and future actions in the job
* @param name the name of the variable to set
* @param val the value of the variable
*/
function exportVariable(name, val) {
process.env[name] = val;
command_1.issueCommand('set-env', { name }, val);
}
exports.exportVariable = exportVariable;
/**
* Registers a secret which will get masked from logs
* @param secret value of the secret
*/
function setSecret(secret) {
command_1.issueCommand('add-mask', {}, secret);
}
exports.setSecret = setSecret;
/**
* Prepends inputPath to the PATH (for this action and future actions)
* @param inputPath
*/
function addPath(inputPath) {
command_1.issueCommand('add-path', {}, inputPath);
process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
}
exports.addPath = addPath;
/**
* Gets the value of an input. The value is also trimmed.
*
* @param name name of the input to get
* @param options optional. See InputOptions.
* @returns string
*/
function getInput(name, options) {
const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';
if (options && options.required && !val) {
throw new Error(`Input required and not supplied: ${name}`);
}
return val.trim();
}
exports.getInput = getInput;
/**
* Sets the value of an output.
*
* @param name name of the output to set
* @param value value to store
*/
function setOutput(name, value) {
command_1.issueCommand('set-output', { name }, value);
}
exports.setOutput = setOutput;
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
/**
* Sets the action status to failed.
* When the action exits it will be with an exit code of 1
* @param message add error issue message
*/
function setFailed(message) {
process.exitCode = ExitCode.Failure;
error(message);
}
exports.setFailed = setFailed;
//-----------------------------------------------------------------------
// Logging Commands
//-----------------------------------------------------------------------
/**
* Writes debug message to user log
* @param message debug message
*/
function debug(message) {
command_1.issueCommand('debug', {}, message);
}
exports.debug = debug;
/**
* Adds an error issue
* @param message error issue message
*/
function error(message) {
command_1.issue('error', message);
}
exports.error = error;
/**
* Adds an warning issue
* @param message warning issue message
*/
function warning(message) {
command_1.issue('warning', message);
}
exports.warning = warning;
/**
* Writes info to log with console.log.
* @param message info message
*/
function info(message) {
process.stdout.write(message + os.EOL);
}
exports.info = info;
/**
* Begin an output group.
*
* Output until the next `groupEnd` will be foldable in this group
*
* @param name The name of the output group
*/
function startGroup(name) {
command_1.issue('group', name);
}
exports.startGroup = startGroup;
/**
* End an output group.
*/
function endGroup() {
command_1.issue('endgroup');
}
exports.endGroup = endGroup;
/**
* Wrap an asynchronous function call in a group.
*
* Returns the same type as the function itself.
*
* @param name The name of the group
* @param fn The function to wrap in the group
*/
function group(name, fn) {
return __awaiter(this, void 0, void 0, function* () {
startGroup(name);
let result;
try {
result = yield fn();
}
finally {
endGroup();
}
return result;
});
}
exports.group = group;
//-----------------------------------------------------------------------
// Wrapper action state
//-----------------------------------------------------------------------
/**
* Saves state for current action, the state can only be retrieved by this action's post job execution.
*
* @param name name of the state to store
* @param value value to store
*/
function saveState(name, value) {
command_1.issueCommand('save-state', { name }, value);
}
exports.saveState = saveState;
/**
* Gets the value of an state set by this action's main execution.
*
* @param name name of the state to get
* @returns string
*/
function getState(name) {
return process.env[`STATE_${name}`] || '';
}
exports.getState = getState;
//# sourceMappingURL=core.js.map

1
node_modules/@actions/core/lib/core.js.map generated vendored Normal file
View File

@ -0,0 +1 @@
{"version":3,"file":"core.js","sourceRoot":"","sources":["../src/core.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;AAAA,uCAA6C;AAE7C,uCAAwB;AACxB,2CAA4B;AAU5B;;GAEG;AACH,IAAY,QAUX;AAVD,WAAY,QAAQ;IAClB;;OAEG;IACH,6CAAW,CAAA;IAEX;;OAEG;IACH,6CAAW,CAAA;AACb,CAAC,EAVW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QAUnB;AAED,yEAAyE;AACzE,YAAY;AACZ,yEAAyE;AAEzE;;;;GAIG;AACH,SAAgB,cAAc,CAAC,IAAY,EAAE,GAAW;IACtD,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,GAAG,CAAA;IACvB,sBAAY,CAAC,SAAS,EAAE,EAAC,IAAI,EAAC,EAAE,GAAG,CAAC,CAAA;AACtC,CAAC;AAHD,wCAGC;AAED;;;GAGG;AACH,SAAgB,SAAS,CAAC,MAAc;IACtC,sBAAY,CAAC,UAAU,EAAE,EAAE,EAAE,MAAM,CAAC,CAAA;AACtC,CAAC;AAFD,8BAEC;AAED;;;GAGG;AACH,SAAgB,OAAO,CAAC,SAAiB;IACvC,sBAAY,CAAC,UAAU,EAAE,EAAE,EAAE,SAAS,CAAC,CAAA;IACvC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,GAAG,SAAS,GAAG,IAAI,CAAC,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAA;AAC7E,CAAC;AAHD,0BAGC;AAED;;;;;;GAMG;AACH,SAAgB,QAAQ,CAAC,IAAY,EAAE,OAAsB;IAC3D,MAAM,GAAG,GACP,OAAO,CAAC,GAAG,CAAC,SAAS,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,WAAW,EAAE,EAAE,CAAC,IAAI,EAAE,CAAA;IACrE,IAAI,OAAO,IAAI,OAAO,CAAC,QAAQ,IAAI,CAAC,GAAG,EAAE;QACvC,MAAM,IAAI,KAAK,CAAC,oCAAoC,IAAI,EAAE,CAAC,CAAA;KAC5D;IAED,OAAO,GAAG,CAAC,IAAI,EAAE,CAAA;AACnB,CAAC;AARD,4BAQC;AAED;;;;;GAKG;AACH,SAAgB,SAAS,CAAC,IAAY,EAAE,KAAa;IACnD,sBAAY,CAAC,YAAY,EAAE,EAAC,IAAI,EAAC,EAAE,KAAK,CAAC,CAAA;AAC3C,CAAC;AAFD,8BAEC;AAED,yEAAyE;AACzE,UAAU;AACV,yEAAyE;AAEzE;;;;GAIG;AACH,SAAgB,SAAS,CAAC,OAAe;IACvC,OAAO,CAAC,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAA;IACnC,KAAK,CAAC,OAAO,CAAC,CAAA;AAChB,CAAC;AAHD,8BAGC;AAED,yEAAyE;AACzE,mBAAmB;AACnB,yEAAyE;AAEzE;;;GAGG;AACH,SAAgB,KAAK,CAAC,OAAe;IACnC,sBAAY,CAAC,OAAO,EAAE,EAAE,EAAE,OAAO,CAAC,CAAA;AACpC,CAAC;AAFD,sBAEC;AAED;;;GAGG;AACH,SAAgB,KAAK,CAAC,OAAe;IACnC,eAAK,CAAC,OAAO,EAAE,OAAO,CAAC,CAAA;AACzB,CAAC;AAFD,sBAEC;AAED;;;GAGG;AACH,SAAgB,OAAO,CAAC,OAAe;IACrC,eAAK,CAAC,SAAS,EAAE,OAAO,CAAC,CAAA;AAC3B,CAAC;AAFD,0BAEC;AAED;;;GAGG;AACH,SAAgB,IAAI,CAAC,OAAe;IAClC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,OAAO,GAAG,EAAE,CAAC,GAAG,CAAC,CAAA;AACxC,CAAC;AAFD,oBAE
C;AAED;;;;;;GAMG;AACH,SAAgB,UAAU,CAAC,IAAY;IACrC,eAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAA;AACtB,CAAC;AAFD,gCAEC;AAED;;GAEG;AACH,SAAgB,QAAQ;IACtB,eAAK,CAAC,UAAU,CAAC,CAAA;AACnB,CAAC;AAFD,4BAEC;AAED;;;;;;;GAOG;AACH,SAAsB,KAAK,CAAI,IAAY,EAAE,EAAoB;;QAC/D,UAAU,CAAC,IAAI,CAAC,CAAA;QAEhB,IAAI,MAAS,CAAA;QAEb,IAAI;YACF,MAAM,GAAG,MAAM,EAAE,EAAE,CAAA;SACpB;gBAAS;YACR,QAAQ,EAAE,CAAA;SACX;QAED,OAAO,MAAM,CAAA;IACf,CAAC;CAAA;AAZD,sBAYC;AAED,yEAAyE;AACzE,uBAAuB;AACvB,yEAAyE;AAEzE;;;;;GAKG;AACH,SAAgB,SAAS,CAAC,IAAY,EAAE,KAAa;IACnD,sBAAY,CAAC,YAAY,EAAE,EAAC,IAAI,EAAC,EAAE,KAAK,CAAC,CAAA;AAC3C,CAAC;AAFD,8BAEC;AAED;;;;;GAKG;AACH,SAAgB,QAAQ,CAAC,IAAY;IACnC,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,IAAI,EAAE,CAAC,IAAI,EAAE,CAAA;AAC3C,CAAC;AAFD,4BAEC"}

66
node_modules/@actions/core/package.json generated vendored Normal file
View File

@ -0,0 +1,66 @@
{
"_from": "@actions/core@1.2.2",
"_id": "@actions/core@1.2.2",
"_inBundle": false,
"_integrity": "sha512-IbCx7oefq+Gi6FWbSs2Fnw8VkEI6Y4gvjrYprY3RV//ksq/KPMlClOerJ4jRosyal6zkUIc8R9fS/cpRMlGClg==",
"_location": "/@actions/core",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "@actions/core@1.2.2",
"name": "@actions/core",
"escapedName": "@actions%2fcore",
"scope": "@actions",
"rawSpec": "1.2.2",
"saveSpec": null,
"fetchSpec": "1.2.2"
},
"_requiredBy": [
"/"
],
"_resolved": "https://registry.npmjs.org/@actions/core/-/core-1.2.2.tgz",
"_shasum": "3c4848d50378f9e3bcb67bcf97813382ec7369ee",
"_spec": "@actions/core@1.2.2",
"_where": "/Users/bret/repos/deploy-to-neocities",
"bugs": {
"url": "https://github.com/actions/toolkit/issues"
},
"bundleDependencies": false,
"deprecated": false,
"description": "Actions core lib",
"devDependencies": {
"@types/node": "^12.0.2"
},
"directories": {
"lib": "lib",
"test": "__tests__"
},
"files": [
"lib"
],
"homepage": "https://github.com/actions/toolkit/tree/master/packages/core",
"keywords": [
"github",
"actions",
"core"
],
"license": "MIT",
"main": "lib/core.js",
"name": "@actions/core",
"publishConfig": {
"access": "public"
},
"repository": {
"type": "git",
"url": "git+https://github.com/actions/toolkit.git",
"directory": "packages/core"
},
"scripts": {
"audit-moderate": "npm install && npm audit --audit-level=moderate",
"test": "echo \"Error: run tests from root\" && exit 1",
"tsc": "tsc"
},
"types": "lib/core.d.ts",
"version": "1.2.2"
}

View File

@ -0,0 +1,25 @@
name: tests
on: [push]
jobs:
test:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
node: [12]
steps:
- uses: actions/checkout@v1
- name: Use Node.js ${{ matrix.node }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node }}
- name: npm install && npm test
run: |
npm i
npm test
env:
CI: true

22
node_modules/async-folder-walker/.vscode/launch.json generated vendored Normal file
View File

@ -0,0 +1,22 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "node",
"request": "launch",
"name": "Launch via NPM",
"runtimeExecutable": "npm",
"runtimeArgs": [
"run-script",
"debug"
],
"port": 9229,
"skipFiles": [
"<node_internals>/**"
]
}
]
}

19
node_modules/async-folder-walker/CHANGELOG.md generated vendored Normal file
View File

@ -0,0 +1,19 @@
# async-folder-walker Change Log
All notable changes to this project will be documented in this file.
This project adheres to [Semantic Versioning](http://semver.org/).
## Unreleased
## 2.0.1 - 2019-12-27
* Internal tweaks
* Docs updates
## 2.0.0 - 2019-12-26
* Remove ESM support. Fuggit. It simply offers zero benefits in a node env.
## 1.0.0 - 2019-11-11
* Initial release.

4
node_modules/async-folder-walker/CODE_OF_CONDUCT.md generated vendored Normal file
View File

@ -0,0 +1,4 @@
# Code of conduct
- This repo is governed as a dictatorship starting with the originator of the project.
- No malevolence tolerated whatsoever.

11
node_modules/async-folder-walker/CONTRIBUTING.md generated vendored Normal file
View File

@ -0,0 +1,11 @@
# Contributing
- Contributors reserve the right to walk away from this project at any moment with or without notice.
- Questions are welcome, however unless there is a official support contract established between the maintainers and the requester, support is not guaranteed.
- Patches, ideas and changes welcome.
- Fixes almost always welcome.
- Features sometimes welcome. Please open an issue to discuss the issue prior to spending lots of time on the problem. It may be rejected. If you don't want to wait around for the discussion to commence, and you really want to jump into the implementation work, be prepared for fork if the idea is respectfully declined.
- Try to stay within the style of the existing code.
- All tests must pass.
- Additional features or code paths must be tested.
- Aim for 100% coverage.

21
node_modules/async-folder-walker/LICENSE generated vendored Normal file
View File

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2019 Bret Comnes
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

170
node_modules/async-folder-walker/README.md generated vendored Normal file
View File

@ -0,0 +1,170 @@
# async-folder-walker
[![Actions Status](https://github.com/bcomnes/async-folder-walker/workflows/tests/badge.svg)](https://github.com/bcomnes/async-folder-walker/actions)
A recursive async iterator of the files and directories in a given folder. Can take multiple folders, limit walk depth and filter based on path names and stat results.
![](https://repository-images.githubusercontent.com/223294839/43cf9600-0d3f-11ea-858e-81b08a14509f)
```
npm install async-folder-walker
```
## Usage
``` js
const { asyncFolderWalker, allFiles } = require('async-folder-walker')
async function iterateFiles () {
const walker = asyncFolderWalker(['.git', 'node_modules'])
for await (const file of walker) {
console.log(file) // logs the file path!
}
}
async function getAllFiles () {
const allFilepaths = await allFiles(['.git', 'node_modules'])
console.log(allFilepaths)
}
iterateFiles().then(() => getAllFiles())
```
## API
### `const { asyncFolderWalker, allFiles } = require('async-folder-walker')`
Import `asyncFolderWalker` or `allFiles`.
### `async-gen = asyncFolderWalker(paths, [opts])`
Return an async generator that will iterate over all of files inside of a directory. `paths` can be a string path or an Array of string paths.
You can iterate over each file and directory individually using a `for-await...of` loop. Note, you must be inside an [async function statement](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function).
```js
const { asyncFolderWalker } = require('async-folder-walker');
async function iterateFiles () {
const walker = asyncFolderWalker(['.git', 'node_modules']);
for await (const file of walker) {
console.log(file); // logs the file path!
}
}
iterateFiles();
```
Opts include:
```js
{
fs: require('fs'),
pathFilter: filepath => true,
  statFilter: st => true,
maxDepth: Infinity,
shaper: ({ root, filepath, stat, relname, basename }) => filepath
}
```
The `pathFilter` function allows you to filter files from additional async stat operations. Return false to filter the file.
```js
{ // exclude node_modules
  pathFilter: filepath => !filepath.includes('node_modules')
}
```
The `statFilter` function allows you to filter files based on the internal stat operation. Return false to filter the file.
```js
{ // exclude all directories:
statFilter: st => !st.isDirectory()
}
```
The `shaper` function lets you change the shape of the returned value based on data accumulated during the iteration. To return the same shape as [okdistribute/folder-walker](https://github.com/okdistribute/folder-walker) use the following function:
```js
{ // Return the same shape as folder-walker
shaper: fwData => fwData
}
```
Example of a fwData object for a directory:
```js
{
root: '/Users/bret/repos/async-folder-walker/fixtures',
filepath: '/Users/bret/repos/async-folder-walker/fixtures/sub-folder/sub-sub-folder',
stat: Stats {
dev: 16777220,
mode: 16877,
nlink: 3,
uid: 501,
gid: 20,
rdev: 0,
blksize: 4096,
ino: 30244023,
size: 96,
blocks: 0,
atimeMs: 1574381262779.8396,
mtimeMs: 1574380914743.5474,
ctimeMs: 1574380914743.5474,
birthtimeMs: 1574380905232.5996,
atime: 2019-11-22T00:07:42.780Z,
mtime: 2019-11-22T00:01:54.744Z,
ctime: 2019-11-22T00:01:54.744Z,
birthtime: 2019-11-22T00:01:45.233Z
},
relname: 'sub-folder/sub-sub-folder',
basename: 'sub-sub-folder'
}
```
and another example for a file on windows:
```js
{
root: 'D:\\a\\async-folder-walker\\async-folder-walker\\fixtures',
filepath: 'D:\\a\\async-folder-walker\\async-folder-walker\\fixtures\\sub-folder\\sub-sub-folder\\sub-sub-folder-file.json',
stat: Stats {
dev: 1321874112,
mode: 33206,
nlink: 1,
uid: 0,
gid: 0,
rdev: 0,
blksize: 4096,
ino: 562949953421580,
size: 37,
blocks: 0,
atimeMs: 1577476819530.035,
mtimeMs: 1577476819530.035,
ctimeMs: 1577476819530.035,
birthtimeMs: 1577476819530.035,
atime: 2019-12-27T20:00:19.530Z,
mtime: 2019-12-27T20:00:19.530Z,
ctime: 2019-12-27T20:00:19.530Z,
birthtime: 2019-12-27T20:00:19.530Z
},
relname: 'sub-folder\\sub-sub-folder\\sub-sub-folder-file.json',
basename: 'sub-sub-folder-file.json'
}
```
The `stat` property is an instance of [fs.Stats](https://nodejs.org/api/fs.html#fs_class_fs_stats) so it has extra methods not listed here.
### `files = await allFiles(paths, [opts])`
Get an Array of all files inside of a directory. `paths` can be a single string path or an array of string paths.
`opts` Is the same as `asyncFolderWalker`.
## See also
This module is effectively a rewrite of [okdistribute/folder-walker](https://github.com/okdistribute/folder-walker) using async generators instead of Node streams, and a few tweaks to the underlying options to make the results a bit more flexible.
- [for-await...of](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/for-await...of)
## License
MIT

View File

@ -0,0 +1 @@
{}

View File

@ -0,0 +1,3 @@
{
"sub-folder": "content"
}

View File

@ -0,0 +1,3 @@
{
"sub-sub-folder": "content"
}

3
node_modules/async-folder-walker/fixtures/test.json generated vendored Normal file
View File

@ -0,0 +1,3 @@
{
"hello": "world"
}

179
node_modules/async-folder-walker/index.js generated vendored Normal file
View File

@ -0,0 +1,179 @@
const fs = require('fs')
const path = require('path')
const { readdir, lstat } = fs.promises
/**
* pathFilter lets you filter files based on a resolved `filepath`.
* @callback pathFilter
* @param {String} filepath - The resolved `filepath` of the file to test for filtering.
*
* @return {Boolean} Return false to filter the given `filepath` and true to include it.
*/
const pathFilter = filepath => true
/**
* statFilter lets you filter files based on a lstat object.
* @callback statFilter
* @param {Object} st - A fs.Stats instance.
*
* @return {Boolean} Return false to filter the given `filepath` and true to include it.
*/
const statFilter = filepath => true
/**
* FWStats is the object that the okdistribute/folder-walker module returns by default.
*
* @typedef FWStats
* @property {String} root - The filepath of the directory where the walk started.
* @property {String} filepath - The resolved filepath.
* @property {Object} stat - A fs.Stats instance.
* @property {String} relname - The relative path to `root`.
* @property {String} basename - The resolved filepath of the files containing directory.
*/
/**
* shaper lets you change the shape of the returned file data from walk-time stats.
* @callback shaper
* @param {FWStats} fwStats - The same status object returned from folder-walker.
*
* @return {*} - Whatever you want returned from the directory walk.
*/
const shaper = ({ root, filepath, stat, relname, basename }) => filepath
/**
* Options object
*
* @typedef Opts
* @property {pathFilter} [pathFilter] - A pathFilter cb.
* @property {statFilter} [statFilter] - A statFilter cb.
* @property {Number} [maxDepth=Infinity] - The maximum number of folders to walk down into.
* @property {shaper} [shaper] - A shaper cb.
*/
/**
* Create an async generator that iterates over all folders and directories inside of `dirs`.
*
* @async
* @generator
* @function
* @public
* @param {String|String[]} dirs - The path of the directory to walk, or an array of directory paths.
* @param {?(Opts)} opts - Options used for the directory walk.
*
* @yields {Promise<String|any>} - An async iterator that returns anything.
*/
async function * asyncFolderWalker (dirs, opts) {
opts = Object.assign({
fs,
pathFilter,
statFilter,
maxDepth: Infinity,
shaper
}, opts)
const roots = [dirs].flat().filter(opts.pathFilter)
const pending = []
while (roots.length) {
const root = roots.shift()
pending.push(root)
while (pending.length) {
const current = pending.shift()
if (typeof current === 'undefined') continue
const st = await lstat(current)
if ((!st.isDirectory() || depthLimiter(current, root, opts.maxDepth)) && opts.statFilter(st)) {
yield opts.shaper(fwShape(root, current, st))
continue
}
const files = await readdir(current)
files.sort()
for (const file of files) {
var next = path.join(current, file)
if (opts.pathFilter(next)) pending.unshift(next)
}
if (current === root || !opts.statFilter(st)) continue
else yield opts.shaper(fwShape(root, current, st))
}
}
}
/**
 * Generates the same shape as the folder-walker module.
 *
 * @function
 * @private
 * @param {String} root - Root filepath.
 * @param {String} name - Target filepath.
 * @param {Object} st - fs.Stat object.
 *
 * @return {FWStats} - Folder walker object.
 */
function fwShape (root, name, st) {
  const basename = path.basename(name)
  return {
    root,
    filepath: name,
    stat: st,
    // A walk root passed directly gets its basename as the relative name.
    relname: name === root ? basename : path.relative(root, name),
    basename
  }
}
/**
 * Test if we are at maximum directory depth.
 *
 * @private
 * @function
 * @param {String} filePath - The resolved path of the target file.
 * @param {String} relativeTo - The root directory of the current walk.
 * @param {Number} maxDepth - The maximum number of folders to descend into.
 *
 * @returns {Boolean} - Return true to signal stop descending.
 */
function depthLimiter (filePath, relativeTo, maxDepth) {
  if (maxDepth === Infinity) return false
  // Depth is the number of path separators gained relative to the walk root.
  const segmentsBelowRoot = filePath.split(path.sep).length - relativeTo.split(path.sep).length
  return segmentsBelowRoot > maxDepth
}
/**
 * Async iterable collector: drains any (async) iterable into an array.
 *
 * @async
 * @function
 * @private
 * @param {AsyncIterable} iterator - The iterable to drain.
 * @returns {Promise<Array>} Every yielded value, in yield order.
 */
async function all (iterator) {
  const results = []
  for await (const item of iterator) {
    results.push(item)
  }
  return results
}
/**
 * allFiles gives you all files from the directory walk as an array.
 *
 * @async
 * @function
 * @public
 * @param {String|String[]} dirs - The path of the directory to walk, or an array of directory paths.
 * @param {?(Opts)} opts - Options used for the directory walk.
 *
 * @returns {Promise<String[]|any>} - Every walked entry collected into an array.
 */
async function allFiles (...args) {
  const walker = asyncFolderWalker(...args)
  return all(walker)
}
// Public API: the generator-based walker plus the array-collecting conveniences.
module.exports = {
  asyncFolderWalker,
  allFiles,
  all
}

66
node_modules/async-folder-walker/package.json generated vendored Normal file
View File

@ -0,0 +1,66 @@
{
"_from": "async-folder-walker@^2.0.1",
"_id": "async-folder-walker@2.0.1",
"_inBundle": false,
"_integrity": "sha512-n0PW9w3HAZW7ems0XrgIYeX+3l2vX6HhZQyXMtkeKW3uEjHT5EOlKD8NgIeZK6fREnpw50F+Cb6ig3nWsuaTPA==",
"_location": "/async-folder-walker",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "async-folder-walker@^2.0.1",
"name": "async-folder-walker",
"escapedName": "async-folder-walker",
"rawSpec": "^2.0.1",
"saveSpec": null,
"fetchSpec": "^2.0.1"
},
"_requiredBy": [
"/async-neocities"
],
"_resolved": "https://registry.npmjs.org/async-folder-walker/-/async-folder-walker-2.0.1.tgz",
"_shasum": "cfc3007945a4bb109c5c548a0f07cd8a9cc15671",
"_spec": "async-folder-walker@^2.0.1",
"_where": "/Users/bret/repos/deploy-to-neocities/node_modules/async-neocities",
"author": {
"name": "Bret Comnes",
"email": "bcomnes@gmail.com",
"url": "https://bret.io"
},
"bugs": {
"url": "https://github.com/bcomnes/async-folder-walker/issues"
},
"bundleDependencies": false,
"dependencies": {},
"deprecated": false,
"description": "WIP - nothing to see here",
"devDependencies": {
"dependency-check": "^4.1.0",
"npm-run-all": "^4.1.5",
"p-temporary-directory": "^1.1.1",
"standard": "^14.3.1",
"tap": "^14.10.5"
},
"homepage": "https://github.com/bcomnes/async-folder-walker",
"keywords": [],
"license": "MIT",
"main": "index.js",
"name": "async-folder-walker",
"repository": {
"type": "git",
"url": "git+https://github.com/bcomnes/async-folder-walker.git"
},
"scripts": {
"debug": "node --nolazy --inspect-brk=9229 -r esm test.js",
"test": "run-s test:*",
"test:deps": "dependency-check . --no-dev --no-peer",
"test:standard": "standard",
"test:tap": "tap"
},
"standard": {
"ignore": [
"dist"
]
},
"version": "2.0.1"
}

102
node_modules/async-folder-walker/test.js generated vendored Normal file
View File

@ -0,0 +1,102 @@
const tap = require('tap')
const { asyncFolderWalker, allFiles } = require('.')
const path = require('path')
const tmp = require('p-temporary-directory')
const fixtures = path.join(__dirname, 'fixtures')
// Walker can be consumed directly with for await...of across multiple roots.
tap.test('for of multiple folders', async t => {
  for await (const file of asyncFolderWalker([
    path.join(fixtures, 'sub-folder'),
    path.join(fixtures, 'another-folder')
  ])) {
    t.ok(file, file)
  }
})
// allFiles collects the walk into a plain array.
tap.test('Array from async iterator', async t => {
  const files = await allFiles([
    path.join(fixtures, 'sub-folder'),
    path.join(fixtures, 'another-folder')
  ])
  t.equal(files.length, 4, 'expected number of files are found')
})
// Skipped by default: prints the full FWStats shape for manual inspection.
tap.skip('Shape example', async t => {
  await allFiles([fixtures], {
    shaper: fwData => {
      console.log(fwData)
      return fwData
    }
  })
  t.pass('shape printed')
})
// Calling the walker with no arguments yields nothing rather than throwing.
tap.test('No args', async t => {
  for await (const file of asyncFolderWalker()) {
    t.fail(file, 'no files should be found!')
  }
  t.pass('for of executed')
})
// An empty directory yields nothing (roots are never yielded).
tap.test('No folders', async t => {
  const [dir, cleanup] = await tmp()
  try {
    for await (const file of asyncFolderWalker(dir)) {
      t.fail(file, 'no files should be found!')
    }
    t.pass('for of executed')
  } finally {
    await cleanup()
  }
})
// A file passed as a root is yielded as-is.
tap.test('When you just pass a file', async t => {
  const [dir, cleanup] = await tmp()
  try {
    const theFile = path.join(fixtures, 'test.json')
    const files = await allFiles([theFile, dir])
    t.equal(files.length, 1, 'only one file is found')
    t.equal(theFile, files[0], 'only one file is found')
  } finally {
    await cleanup()
  }
})
// pathFilter prunes both roots and children before they are stat'ed.
// NOTE(review): `filterStrig` is a typo for `filterString`, but it is used
// consistently so the test behaves correctly.
tap.test('pathFilter works', async t => {
  const filterStrig = 'sub-folder'
  const files = await allFiles(fixtures, {
    pathFilter: p => !p.includes(filterStrig)
  })
  t.false(files.some(f => f.includes(filterStrig)), 'No paths include the excluded string')
})
tap.test('statFilter works', async t => {
  const stats = await allFiles(fixtures, {
    statFilter: st => !st.isDirectory(), // Exclude directories
    shaper: ({ root, filepath, stat, relname, basename }) => stat // Lets get the stats instead of paths
  })
  for (const st of stats) {
    t.false(st.isDirectory(), 'none of the files are directories')
  }
})
// The walk root itself must never appear in the results.
tap.test('dont include root directory in response', async (t) => {
  const root = process.cwd()
  for await (const file of asyncFolderWalker(root)) {
    if (file === root) t.fail('root directory should not be in results')
  }
  t.pass('The root was not included in results.')
})
// maxDepth bounds descent, measured in path segments below each root.
tap.test('dont walk past the maxDepth', async t => {
  const maxDepth = 3
  const walker = asyncFolderWalker(['.git', 'node_modules'], { maxDepth })
  for await (const file of walker) {
    const correctLength = file.split(path.sep).length - process.cwd().split(path.sep).length <= maxDepth
    if (!correctLength) t.fail('walker walked past the depth it was supposed to')
  }
  t.pass('Walker was depth limited')
})

128
node_modules/async-neocities/CHANGELOG.md generated vendored Normal file
View File

@ -0,0 +1,128 @@
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
Generated by [`auto-changelog`](https://github.com/CookPete/auto-changelog).
## [v1.1.6](https://github.com/bcomnes/async-neocities/compare/v1.1.5...v1.1.6) - 2020-02-13
### Commits
- refactor: limit decimal places in statHandler [`745b5c0`](https://github.com/bcomnes/async-neocities/commit/745b5c08b446723b837cf787e5911045cd0a01ed)
## [v1.1.5](https://github.com/bcomnes/async-neocities/compare/v1.1.4...v1.1.5) - 2020-02-13
### Commits
- bug: Log last stats before clear [`c4c83f8`](https://github.com/bcomnes/async-neocities/commit/c4c83f8e329303404dfec6a2943d8b01783040ad)
## [v1.1.4](https://github.com/bcomnes/async-neocities/compare/v1.1.3...v1.1.4) - 2020-02-13
### Commits
- bug: fix logging handler logic [`f6c5921`](https://github.com/bcomnes/async-neocities/commit/f6c5921d6fd0a420c5895ed6ba2fe2f766e726fd)
## [v1.1.3](https://github.com/bcomnes/async-neocities/compare/v1.1.2...v1.1.3) - 2020-02-13
### Commits
- refactor: Stop logging progress when the stream has been read. [`3ce0fc4`](https://github.com/bcomnes/async-neocities/commit/3ce0fc452acbbbd28c39b53a3a9a4318a8019c09)
## [v1.1.2](https://github.com/bcomnes/async-neocities/compare/v1.1.1...v1.1.2) - 2020-02-13
### Commits
- bug: Fix export name of statsHandler [`8958e5e`](https://github.com/bcomnes/async-neocities/commit/8958e5eeb947376690a1ce0cefe7cce3d59be5b8)
## [v1.1.1](https://github.com/bcomnes/async-neocities/compare/v1.1.0...v1.1.1) - 2020-02-13
### Commits
- bug: Fix busted export of stats handler. [`510ae29`](https://github.com/bcomnes/async-neocities/commit/510ae293263955e0e34d3ab48df253fb6e093053)
## [v1.1.0](https://github.com/bcomnes/async-neocities/compare/v1.0.2...v1.1.0) - 2020-02-13
### Commits
- feat: Finish statsCb API and add a stats-handler.js function. [`c8e6483`](https://github.com/bcomnes/async-neocities/commit/c8e64835e594e68715ef71590b08baac374052bd)
## [v1.0.2](https://github.com/bcomnes/async-neocities/compare/v1.0.1...v1.0.2) - 2020-02-13
### Commits
- chore: remove total time from stats [`3c375ec`](https://github.com/bcomnes/async-neocities/commit/3c375ecf64ae8536a8e3ccce0a69cd93c8c6a306)
## [v1.0.1](https://github.com/bcomnes/async-neocities/compare/v1.0.0...v1.0.1) - 2020-02-13
### Commits
- bug: Fix a number of logging bugs [`96fbea2`](https://github.com/bcomnes/async-neocities/commit/96fbea2bbd27ba1ac5105fce37e624d804dcbdb6)
## [v1.0.0](https://github.com/bcomnes/async-neocities/compare/v0.0.10...v1.0.0) - 2020-02-12
### Commits
- feat: progress API sketch [`be8b9ec`](https://github.com/bcomnes/async-neocities/commit/be8b9ec062b5ea23157a6a841c9d66d03a85a8ca)
- docs: update README [`ec4f5f1`](https://github.com/bcomnes/async-neocities/commit/ec4f5f154b690dba0814ec0955fee674e8e94692)
- CHANGELOG [`c9b64ed`](https://github.com/bcomnes/async-neocities/commit/c9b64edd4d3db025adc737982477ce0d760f3254)
## [v0.0.10](https://github.com/bcomnes/async-neocities/compare/v0.0.9...v0.0.10) - 2020-02-10
### Commits
- dont do work unless there is work [`616a306`](https://github.com/bcomnes/async-neocities/commit/616a306ba3ca091da11c9c85bae2b07cb0b2768e)
## [v0.0.9](https://github.com/bcomnes/async-neocities/compare/v0.0.8...v0.0.9) - 2020-02-10
### Commits
- Use stream ctor [`e8201a0`](https://github.com/bcomnes/async-neocities/commit/e8201a053950848962a220b83ffa1a97ebab6e70)
## [v0.0.8](https://github.com/bcomnes/async-neocities/compare/v0.0.7...v0.0.8) - 2020-02-10
### Commits
- Fix more bugs [`95da7b7`](https://github.com/bcomnes/async-neocities/commit/95da7b7218082ab51c1463851f87428dc0c501ac)
## [v0.0.7](https://github.com/bcomnes/async-neocities/compare/v0.0.6...v0.0.7) - 2020-02-10
### Commits
- bugs [`71ead78`](https://github.com/bcomnes/async-neocities/commit/71ead78e0f48f619816b3ae3ea8154e8301c77ac)
## [v0.0.6](https://github.com/bcomnes/async-neocities/compare/v0.0.5...v0.0.6) - 2020-02-10
### Commits
- bugs [`c1d9973`](https://github.com/bcomnes/async-neocities/commit/c1d9973afef3abd7d6edfc5a6ae1c9d37f6cb34d)
## [v0.0.5](https://github.com/bcomnes/async-neocities/compare/v0.0.4...v0.0.5) - 2020-02-10
### Commits
- bugs [`e542111`](https://github.com/bcomnes/async-neocities/commit/e542111f3404ab923be3490e62ba16b4f6b66a70)
## [v0.0.4](https://github.com/bcomnes/async-neocities/compare/v0.0.3...v0.0.4) - 2020-02-10
### Commits
- bump version [`a3da5f7`](https://github.com/bcomnes/async-neocities/commit/a3da5f77cda15fb3e9ec5861b588f616d8b0055c)
## [v0.0.3](https://github.com/bcomnes/async-neocities/compare/v0.0.2...v0.0.3) - 2020-02-10
### Commits
- tmp releases [`16a6db4`](https://github.com/bcomnes/async-neocities/commit/16a6db49a06bebef89007b94e03dd34e6d17b298)
## [v0.0.2](https://github.com/bcomnes/async-neocities/compare/v0.0.1...v0.0.2) - 2020-02-10
## v0.0.1 - 2020-02-10
### Commits
- Init [`bb055ae`](https://github.com/bcomnes/async-neocities/commit/bb055ae8e76b0344acc929e8ffd3974d19144001)
- fix tests [`c294b52`](https://github.com/bcomnes/async-neocities/commit/c294b528a64a50638c4374a8782b177fe3634eb2)
- Init [`9ec8fb5`](https://github.com/bcomnes/async-neocities/commit/9ec8fb557ebf8578c9eb07dedffcb1b7eedbd3e6)

4
node_modules/async-neocities/CODE_OF_CONDUCT.md generated vendored Normal file
View File

@ -0,0 +1,4 @@
# Code of conduct
- This repo is governed as a dictatorship starting with the originator of the project.
- This is a malevolence free zone.

26
node_modules/async-neocities/CONTRIBUTING.md generated vendored Normal file
View File

@ -0,0 +1,26 @@
# Contributing
## Releasing
Changelog generation and releasing are automated with npm scripts. To create a release:
- Ensure a clean working git workspace.
- Run `npm version {patch,minor,major}`.
- This will update the version number and generate the changelog.
- Run `npm publish`.
- This will push your local git branch and tags to the default remote, perform a [gh-release](https://ghub.io/gh-release), and create an npm publication.
## Guidelines
- Patches, ideas and changes welcome.
- Fixes almost always welcome.
- Features sometimes welcome.
- Please open an issue to discuss the issue prior to spending lots of time on the problem.
- It may be rejected.
- If you don't want to wait around for the discussion to commence, and you really want to jump into the implementation work, be prepared to fork if the idea is respectfully declined.
- Try to stay within the style of the existing code.
- All tests must pass.
- Additional features or code paths must be tested.
- Aim for 100% coverage.
- Questions are welcome, however unless there is a official support contract established between the maintainers and the requester, support is not guaranteed.
- Contributors reserve the right to walk away from this project at any moment with or without notice.

21
node_modules/async-neocities/LICENSE generated vendored Normal file
View File

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2019 Bret Comnes
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

245
node_modules/async-neocities/README.md generated vendored Normal file
View File

@ -0,0 +1,245 @@
# async-neocities
[![Actions Status](https://github.com/bcomnes/async-neocities/workflows/tests/badge.svg)](https://github.com/bcomnes/async-neocities/actions)
An api client for [neocities][nc] with an async/promise API and an efficient deploy algorithm.
<center><img src="logo.jpg"></center>
```console
npm install async-neocities
```
## Usage
``` js
const path = require('path')
const Neocities = require('async-neocities')
async function deploySite () {
const token = await Neocities.getKey('sitename', 'password')
const client = new Neocities(token)
console.log(await client.list()) // site files
console.log(await client.info()) // site info
return client.deploy(path.join(__dirname, './site-contents'))
}
deploySite().then(info => { console.log('done deploying site!') })
.catch(e => { throw e })
```
## API
### `Neocities = require('async-neocities')`
Import the Neocities API client.
### `apiKey = await Neocities.getKey(sitename, password, [opts])`
Static class method that will get an API Key from a sitename and password.
`opts` include:
```js
{
url: 'https://neocities.org' // Base URL to use for requests
}
```
### `client = new Neocities(apiKey, [opts])`
Create a new API client for a given API key.
`opts` include:
```js
{
url: 'https://neocities.org' // Base URL to use for requests
}
```
### `response = await client.upload(files)`
Pass an array of objects with the `{ name, path }` pair to upload these files to neocities, where `name` is desired remote unix path on neocities and `path` is the local path on disk in whichever format the local operating system desires.
A successful `response`:
```js
{
result: 'success',
message: 'your file(s) have been successfully uploaded'
}
```
### `response = await client.delete(filenames)`
Pass an array of path strings to delete on neocities. The path strings should be the unix style path of the file you want to delete.
A successful `response`:
```js
{ result: 'success', message: 'file(s) have been deleted' }
```
### `response = await client.list([queries])`
Get a list of files for your site. The optional `queries` object is passed through [qs][qs] and added to the request.
Available queries:
```js
{
path // list the contents of a subdirectory on neocities
}
```
Example `responses`:
```json
{
"result": "success",
"files": [
{
"path": "index.html",
"is_directory": false,
"size": 1023,
"updated_at": "Sat, 13 Feb 2016 03:04:00 -0000",
"sha1_hash": "c8aac06f343c962a24a7eb111aad739ff48b7fb1"
},
{
"path": "not_found.html",
"is_directory": false,
"size": 271,
"updated_at": "Sat, 13 Feb 2016 03:04:00 -0000",
"sha1_hash": "cfdf0bda2557c322be78302da23c32fec72ffc0b"
},
{
"path": "images",
"is_directory": true,
"updated_at": "Sat, 13 Feb 2016 03:04:00 -0000"
},
{
"path": "images/cat.png",
"is_directory": false,
"size": 16793,
"updated_at": "Sat, 13 Feb 2016 03:04:00 -0000",
"sha1_hash": "41fe08fc0dd44e79f799d03ece903e62be25dc7d"
}
]
}
```
With the `path` query:
```json
{
"result": "success",
"files": [
{
"path": "images/cat.png",
"is_directory": false,
"size": 16793,
"updated_at": "Sat, 13 Feb 2016 03:04:00 -0000",
"sha1_hash": "41fe08fc0dd44e79f799d03ece903e62be25dc7d"
}
]
}
```
### `response = await client.info([queries])`
Get info about your or other sites. The optional `queries` object is passed through [qs][qs] and added to the request.
Available queries:
```js
{
sitename // get info on a given sitename
}
```
Example `responses`:
```json
{
"result": "success",
"info": {
"sitename": "youpi",
"hits": 5072,
"created_at": "Sat, 29 Jun 2013 10:11:38 +0000",
"last_updated": "Tue, 23 Jul 2013 20:04:03 +0000",
"domain": null,
"tags": []
}
}
```
### `stats = await client.deploy(directory, [opts])`
Deploy a path to a `directory`, efficiently only uploading missing and changed files. Files are determined to be different by size, and sha1 hash, if the size is the same.
`opts` include:
```js
{
cleanup: false // delete orphaned files on neocities that are not in the `directory`
statsCb: (stats) => {}
}
```
For an example of a stats handler, see [lib/stats-handler.js]('./lib/stats-handler.js').
### `client.get(endpoint, [quieries], [opts])`
Low level GET request to a given `endpoint`.
**NOTE**: The `/api/` prefix is automatically added: `/api/${endpoint}` so that must be omitted from `endpoint`.
The optional `queries` object is stringified to a querystring using [`qs`][qs] and added to the request.
`opts` includes:
```js
{
method: 'GET',
headers: { ...client.defaultHeaders, ...opts.headers },
}
```
Note, that `opts` is passed internally to [`node-fetch`][nf] and you can include any options that work for that client here.
### `client.post(endpoint, formEntries, [opts])`
Low level POST request to a given `endpoint`.
**NOTE**: The `/api/` prefix is automatically added: `/api/${endpoint}` so that must be omitted from `endpoint`.
Pass a `formEntries` array or iterator containing objects with `{name, value}` pairs to be sent with the POST request as [FormData](https://developer.mozilla.org/en-US/docs/Web/API/FormData). The [form-data][fd] module is used internally.
`opts` include:
```js
{
method: 'POST',
body: new FormData(), // Don't override this.
  headers: { ...client.defaultHeaders, ...formHeaders, ...opts.headers }
}
```
Note, that `opts` is passed internally to [`node-fetch`][nf] and you can include any options that work for that client here.
## See also
- [Neocities API docs](https://neocities.org/api)
- [Official Node.js API client](https://github.com/neocities/neocities-node)
## License
MIT
[qs]: https://ghub.io/qs
[nf]: https://ghub.io/node-fetch
[fd]: https://ghub.io/form-data
[nc]: https://neocities.org

303
node_modules/async-neocities/index.js generated vendored Normal file
View File

@ -0,0 +1,303 @@
const { handleResponse } = require('fetch-errors')
const { createReadStream } = require('fs')
const afw = require('async-folder-walker')
const FormData = require('form-data')
const assert = require('nanoassert')
const fetch = require('node-fetch')
const { URL } = require('url')
const qs = require('qs')
const os = require('os')
const { neocitiesLocalDiff } = require('./lib/folder-diff')
const pkg = require('./package.json')
const SimpleTimer = require('./lib/timer')
const { getStreamLength, meterStream } = require('./lib/stream-meter')
const statsHandler = require('./lib/stats-handler')
const defaultURL = 'https://neocities.org'
// Progress API constants: `status` values passed to statsCb.
const START = 'start'
const PROGRESS = 'progress' // progress updates
const STOP = 'stop'
const SKIP = 'skip' // stage skipped entirely (nothing to do)
// Progress stages: `stage` values passed to statsCb.
const INSPECTING = 'inspecting' // gathering local + remote file listings
const DIFFING = 'diffing' // computing upload/delete/skip sets
const APPLYING = 'applying' // uploading and deleting files
/**
 * NeocitiesAPIClient class representing a neocities api client.
 */
class NeocitiesAPIClient {
  /**
   * getKey requests an apiKey for a sitename and password.
   *
   * NOTE(review): this resolves to the raw node-fetch Response for the
   * `/api/key` endpoint; callers must parse the JSON body themselves.
   *
   * @param {String} sitename username/sitename to log into.
   * @param {String} password password to log in with.
   * @param {Object} [opts] Options object.  Remaining keys are forwarded to fetch.
   * @param {String} [opts.url=https://neocities.org] Base URL to request to.
   * @return {Promise<Response>} The fetch response for the key request.
   */
  static getKey (sitename, password, opts) {
    assert(sitename, 'must pass sitename as first arg')
    assert(typeof sitename === 'string', 'user arg must be a string')
    assert(password, 'must pass a password as the second arg')
    // Fix: was `assert(typeof password, ...)` — `typeof` always returns a
    // truthy string, so the type check never fired.
    assert(typeof password === 'string', 'password arg must be a string')
    opts = Object.assign({
      url: defaultURL
    }, opts)
    const baseURL = opts.url
    delete opts.url // everything left in opts is forwarded to fetch
    // Credentials travel as basic-auth userinfo on the request URL.
    const url = new URL('/api/key', baseURL)
    url.username = sitename
    url.password = password
    return fetch(url, opts)
  }

  /** Re-export of the bundled stats handler helper (lib/stats-handler.js). */
  static statsHandler (...args) { return statsHandler(...args) }

  /**
   * Create an async-neocities api client.
   * @param {string} apiKey An apiKey to make requests with.
   * @param {Object} [opts] Options object.
   * @param {String} [opts.url=https://neocities.org] Base URL to make requests to.
   * @return {Object} An api client instance.
   */
  constructor (apiKey, opts) {
    assert(apiKey, 'must pass apiKey as first argument')
    assert(typeof apiKey === 'string', 'apiKey must be a string')
    // Fix: `opts` was previously dropped (Object.assign was called without
    // it), so a caller-supplied base URL was silently ignored.
    opts = Object.assign({
      url: defaultURL
    }, opts)
    this.url = opts.url
    this.apiKey = apiKey
  }

  /**
   * Headers sent with every request: bearer auth, JSON accept, and a
   * client-identifying User-Agent.
   */
  get defaultHeaders () {
    return {
      Authorization: `Bearer ${this.apiKey}`,
      Accept: 'application/json',
      'User-Agent': `async-neocities/${pkg.version} (${os.type()})`
    }
  }

  /**
   * Generic GET request to neocities.
   * @param {String} endpoint An endpoint path to GET request (without the `/api/` prefix).
   * @param {Object} [queries] Object serialized into a query string with `qs`.
   * @param {Object} [opts] Options object, forwarded to node-fetch.
   * @param {String} [opts.method=GET] The http method to use.
   * @param {Object} [opts.headers] Headers to include in the request.
   * @return {Promise<Response>} The node-fetch response promise.
   */
  get (endpoint, queries, opts) {
    assert(endpoint, 'must pass endpoint as first argument')
    opts = Object.assign({
      method: 'GET'
    }, opts)
    opts.headers = Object.assign({}, this.defaultHeaders, opts.headers)
    let path = `/api/${endpoint}`
    if (queries) path += `?${qs.stringify(queries)}`
    const url = new URL(path, this.url)
    return fetch(url, opts)
  }

  /**
   * Low level POST request to neocities with FormData.
   *
   * The form is constructed twice: once to measure its total byte length up
   * front, then again as the metered request body so `opts.statsCb` can
   * report upload progress.
   *
   * @param {String} endpoint The endpoint to make the request to (without the `/api/` prefix).
   * @param {Array.<{name: String, value: String}>} formEntries Array of form entries.
   * @param {Object} [opts] Options object, forwarded to node-fetch.
   * @param {String} [opts.method=POST] HTTP Method.
   * @param {Object} [opts.headers] Additional headers to send.
   * @param {Function} [opts.statsCb] Progress callback receiving `{ totalBytes, bytesWritten }`.
   * @return {Promise<Response>} The node-fetch response promise.
   */
  async post (endpoint, formEntries, opts) {
    assert(endpoint, 'must pass endpoint as first argument')
    assert(formEntries, 'must pass formEntries as second argument')
    function createForm () {
      const form = new FormData()
      for (const { name, value } of formEntries) {
        form.append(name, value)
      }
      return form
    }
    opts = Object.assign({
      method: 'POST',
      statsCb: () => {}
    }, opts)
    const statsCb = opts.statsCb
    delete opts.statsCb // fetch must not see this key
    const stats = {
      totalBytes: await getStreamLength(createForm()),
      bytesWritten: 0
    }
    statsCb(stats)
    const form = createForm()
    opts.body = meterStream(form, bytesRead => {
      stats.bytesWritten = bytesRead
      statsCb(stats)
    })
    opts.headers = Object.assign(
      {},
      this.defaultHeaders,
      form.getHeaders(),
      opts.headers)
    const url = new URL(`/api/${endpoint}`, this.url)
    return fetch(url, opts)
  }

  /**
   * Upload files to neocities.
   * @param {Array.<{name: String, path: String}>} files Entries pairing the remote unix path (`name`) with the local file path (`path`).
   * @param {Object} [opts] Options object.
   * @param {Function} [opts.statsCb] Progress callback forwarded to `post`.
   * @return {Promise<Object>} The parsed JSON response.
   */
  upload (files, opts = {}) {
    opts = {
      statsCb: () => {},
      ...opts
    }
    const formEntries = files.map(({ name, path }) => {
      // form-data accepts a lazy stream factory; `path` lets it infer metadata.
      const streamCtor = (next) => next(createReadStream(path))
      streamCtor.path = path
      return {
        name,
        value: streamCtor
      }
    })
    return this.post('upload', formEntries, { statsCb: opts.statsCb }).then(handleResponse)
  }

  /**
   * Delete files from your website.
   * @param {String[]} filenames Remote unix-style file paths to delete.
   * @param {Object} [opts] Options object.
   * @param {Function} [opts.statsCb] Progress callback forwarded to `post`.
   * @return {Promise<Object>} The parsed JSON response.
   */
  delete (filenames, opts = {}) {
    assert(filenames, 'filenames is a required first argument')
    assert(Array.isArray(filenames), 'filenames argument must be an array of file paths in your website')
    opts = {
      statsCb: () => {},
      ...opts
    }
    const formEntries = filenames.map(file => ({
      name: 'filenames[]',
      value: file
    }))
    return this.post('delete', formEntries, { statsCb: opts.statsCb }).then(handleResponse)
  }

  /**
   * list returns the file listing of your site.
   * @param {Object} [queries] Querystring arguments (e.g. `path` for a subdirectory).
   * @return {Promise<Object>} The parsed JSON response.
   */
  list (queries) {
    // queries.path: Path to list
    return this.get('list', queries).then(handleResponse)
  }

  /**
   * info returns info on your site, or optionally on another sitename.
   * @param {Object} [queries] Querystring arguments to include (e.g. sitename).
   * @return {Promise<Object>} The parsed JSON response.
   */
  info (queries) {
    // queries.sitename: sitename to get info on
    return this.get('info', queries).then(handleResponse)
  }

  /**
   * Deploy a directory to neocities, skipping already uploaded files and optionally cleaning orphaned files.
   * @param {String} directory The path of the directory to deploy.
   * @param {Object} [opts] Options object.
   * @param {Boolean} [opts.cleanup=false] Delete orphaned remote files not present in `directory`.
   * @param {Function} [opts.statsCb] Receives progress events for each stage of the deploy.
   * @return {Promise<Object>} Stats about the deploy: elapsed time plus upload/delete/skip listings.
   */
  async deploy (directory, opts) {
    opts = {
      cleanup: false, // delete remote orphaned files
      statsCb: () => {},
      ...opts
    }
    const statsCb = opts.statsCb
    const totalTime = new SimpleTimer(Date.now())
    // INSPECTION STAGE: gather local and remote listings concurrently.
    statsCb({ stage: INSPECTING, status: START })
    const [localFiles, remoteFiles] = await Promise.all([
      afw.allFiles(directory, { shaper: f => f }),
      this.list().then(res => res.files)
    ])
    statsCb({ stage: INSPECTING, status: STOP })
    // DIFFING STAGE: work out what to upload, delete and skip.
    statsCb({ stage: DIFFING, status: START })
    const { filesToUpload, filesToDelete, filesSkipped } = await neocitiesLocalDiff(remoteFiles, localFiles)
    statsCb({ stage: DIFFING, status: STOP })
    // APPLYING STAGE: if there is no work, report a skip and bail early.
    if (filesToUpload.length === 0 && (!opts.cleanup || filesToDelete.length === 0)) {
      statsCb({ stage: APPLYING, status: SKIP })
      return stats()
    }
    statsCb({ stage: APPLYING, status: START })
    const work = []
    if (filesToUpload.length > 0) {
      const uploadJob = this.upload(filesToUpload, {
        statsCb ({ totalBytes, bytesWritten }) {
          statsCb({
            stage: APPLYING,
            status: PROGRESS,
            complete: false,
            totalBytes,
            bytesWritten,
            get progress () {
              return (this.bytesWritten / this.totalBytes) || 0
            }
          })
        }
      }).then((_) => {
        statsCb({
          stage: APPLYING,
          status: PROGRESS,
          complete: true,
          progress: 1.0
        })
      })
      work.push(uploadJob)
    }
    if (opts.cleanup && filesToDelete.length > 0) {
      work.push(this.delete(filesToDelete))
    }
    await Promise.all(work)
    statsCb({ stage: APPLYING, status: STOP })
    return stats()
    function stats () {
      totalTime.stop()
      return {
        time: totalTime.elapsed,
        filesToUpload,
        filesToDelete,
        filesSkipped
      }
    }
  }
}
module.exports = NeocitiesAPIClient

179
node_modules/async-neocities/lib/folder-diff.js generated vendored Normal file
View File

@ -0,0 +1,179 @@
const crypto = require('crypto')
const fs = require('fs')
const path = require('path')
const util = require('util')
const ppump = util.promisify(require('pump'))
/**
 * neocitiesLocalDiff returns the files to upload/delete plus the skipped set.
 *
 * Paths are compared in unix form: remote paths come back unix-style from the
 * neocities API, and local relnames are normalized via forceUnixRelname.
 * A file present on both sides is re-uploaded only when its size differs, or
 * when the sizes match but the local sha1 differs from the remote hash.
 *
 * @param {Array} neocitiesFiles Array of files returned from the neocities list api.
 * @param {Array} localListing Array of files returned by a full data async-folder-walker run.
 * @return {Promise<Object>} Object of filesToUpload, filesToDelete and filesSkipped.
 */
async function neocitiesLocalDiff (neocitiesFiles, localListing) {
  const localIndex = {}
  const ncIndex = {}
  // Directories are ignored on both sides; only files are diffed.
  const neoCitiesFiltered = neocitiesFiles.filter(f => !f.is_directory)
  neoCitiesFiltered.forEach(f => { ncIndex[f.path] = f }) // index
  const ncFiles = new Set(neoCitiesFiltered.map(f => f.path)) // shape
  const localListingFiltered = localListing.filter(f => !f.stat.isDirectory()) // files only
  localListingFiltered.forEach(f => { localIndex[forceUnixRelname(f.relname)] = f }) // index
  const localFiles = new Set(localListingFiltered.map(f => forceUnixRelname(f.relname))) // shape
  const filesToAdd = difference(localFiles, ncFiles) // local-only: upload
  const filesToDelete = difference(ncFiles, localFiles) // remote-only: orphaned
  const maybeUpdate = intersection(localFiles, ncFiles) // on both sides: compare contents
  const skipped = new Set()
  for (const p of maybeUpdate) {
    const local = localIndex[p]
    const remote = ncIndex[p]
    // Cheap size check first; only hash when sizes agree.
    if (local.stat.size !== remote.size) { filesToAdd.add(p); continue }
    const localSha1 = await sha1FromPath(local.filepath)
    if (localSha1 !== remote.sha1_hash) { filesToAdd.add(p); continue }
    skipped.add(p)
  }
  return {
    // filesToUpload entries match the { name, path } shape client.upload expects.
    filesToUpload: Array.from(filesToAdd).map(p => ({
      name: forceUnixRelname(localIndex[p].relname),
      path: localIndex[p].filepath
    })),
    filesToDelete: Array.from(filesToDelete).map(p => ncIndex[p].path),
    filesSkipped: Array.from(skipped).map(p => localIndex[p])
  }
}
// Only the differ is public; the set/hash helpers below are internal.
module.exports = {
  neocitiesLocalDiff
}
/**
 * sha1FromPath returns a sha1 hex digest of a file's contents.
 *
 * Rewritten to consume the read stream with `for await...of` instead of the
 * third-party `pump` module: readable streams are async iterables in all
 * supported Node versions, and iteration surfaces stream errors as
 * rejections just as the pump-based pipeline did.
 *
 * @param {String} p string of the path of the file to hash
 * @return {Promise<String>} the hex representation of the sha1
 */
async function sha1FromPath (p) {
  const hash = crypto.createHash('sha1')
  for await (const chunk of fs.createReadStream(p)) {
    hash.update(chunk)
  }
  return hash.digest('hex')
}
// From https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Set#Implementing_basic_set_operations
/**
 * difference returns the difference between setA and setB (setA \ setB).
 * @param {Set} setA LHS set
 * @param {Set} setB RHS set
 * @return {Set} The difference Set
 */
function difference (setA, setB) {
  return new Set([...setA].filter(member => !setB.has(member)))
}
/**
 * intersection returns the intersection between setA and setB.
 * @param {Set} setA setA LHS set
 * @param {Set} setB setB RHS set
 * @return {Set} The intersection set between setA and setB.
 */
function intersection (setA, setB) {
  return new Set([...setB].filter(member => setA.has(member)))
}
/**
 * forceUnixRelname forces an OS dependent path to a unix style path.
 *
 * Fix: this previously split on `relname.sep`, which is always `undefined`
 * on a string (`sep` lives on the `path` module, not on String), so
 * `split(undefined)` returned the whole string and Windows-style paths were
 * passed through unconverted. Split on `path.sep` instead.
 *
 * @param {String} relname String path to convert to unix style.
 * @return {String} The unix variant of the path
 */
function forceUnixRelname (relname) {
  return relname.split(path.sep).join('/')
}
/**
* Example of neocitiesFiles
*/
// [
// {
// path: 'img',
// is_directory: true,
// updated_at: 'Thu, 21 Nov 2019 04:06:17 -0000'
// },
// {
// path: 'index.html',
// is_directory: false,
// size: 1094,
// updated_at: 'Mon, 11 Nov 2019 22:23:16 -0000',
// sha1_hash: '7f15617e87d83218223662340f4052d9bb9d096d'
// },
// {
// path: 'neocities.png',
// is_directory: false,
// size: 13232,
// updated_at: 'Mon, 11 Nov 2019 22:23:16 -0000',
// sha1_hash: 'fd2ee41b1922a39a716cacb88c323d613b0955e4'
// },
// {
// path: 'not_found.html',
// is_directory: false,
// size: 347,
// updated_at: 'Mon, 11 Nov 2019 22:23:16 -0000',
// sha1_hash: 'd7f004e9d3b2eaaa8827f741356f1122dc9eb030'
// },
// {
// path: 'style.css',
// is_directory: false,
// size: 298,
// updated_at: 'Mon, 11 Nov 2019 22:23:16 -0000',
// sha1_hash: 'e516457acdb0d00710ab62cc257109ef67209ce8'
// }
// ]
/**
* Example of localListing
*/
// [{
// root: '/Users/bret/repos/async-folder-walker/fixtures',
// filepath: '/Users/bret/repos/async-folder-walker/fixtures/sub-folder/sub-sub-folder',
// stat: Stats {
// dev: 16777220,
// mode: 16877,
// nlink: 3,
// uid: 501,
// gid: 20,
// rdev: 0,
// blksize: 4096,
// ino: 30244023,
// size: 96,
// blocks: 0,
// atimeMs: 1574381262779.8396,
// mtimeMs: 1574380914743.5474,
// ctimeMs: 1574380914743.5474,
// birthtimeMs: 1574380905232.5996,
// atime: 2019-11-22T00:07:42.780Z,
// mtime: 2019-11-22T00:01:54.744Z,
// ctime: 2019-11-22T00:01:54.744Z,
// birthtime: 2019-11-22T00:01:45.233Z
// },
// relname: 'sub-folder/sub-sub-folder',
// basename: 'sub-sub-folder'
// }]

65
node_modules/async-neocities/lib/folder-diff.test.js generated vendored Normal file
View File

@ -0,0 +1,65 @@
// Unit test for neocitiesLocalDiff: diffs a fixed remote file listing against
// the local `fixtures` folder and checks the three result buckets.
const afw = require('async-folder-walker')
const path = require('path')
const tap = require('tap')
const { neocitiesLocalDiff } = require('./folder-diff')

// Snapshot of a neocities `list` API response (paths, sizes, sha1 hashes).
const remoteFiles = [
  {
    path: 'img',
    is_directory: true,
    updated_at: 'Thu, 21 Nov 2019 04:06:17 -0000'
  },
  {
    path: 'index.html',
    is_directory: false,
    size: 1094,
    updated_at: 'Mon, 11 Nov 2019 22:23:16 -0000',
    sha1_hash: '7f15617e87d83218223662340f4052d9bb9d096d'
  },
  {
    path: 'neocities.png',
    is_directory: false,
    size: 13232,
    updated_at: 'Mon, 11 Nov 2019 22:23:16 -0000',
    sha1_hash: 'fd2ee41b1922a39a716cacb88c323d613b0955e4'
  },
  {
    path: 'not_found.html',
    is_directory: false,
    size: 347,
    updated_at: 'Mon, 11 Nov 2019 22:23:16 -0000',
    sha1_hash: 'd7f004e9d3b2eaaa8827f741356f1122dc9eb030'
  },
  {
    path: 'style.css',
    is_directory: false,
    size: 298,
    updated_at: 'Mon, 11 Nov 2019 22:23:16 -0000',
    sha1_hash: 'e516457acdb0d00710ab62cc257109ef67209ce8'
  }
]

tap.test('test differ', async t => {
  const localFiles = await afw.allFiles(path.join(__dirname, '../fixtures'), {
    shaper: f => f
  })
  const { filesToUpload, filesToDelete, filesSkipped } = await neocitiesLocalDiff(remoteFiles, localFiles)
  // Every local file missing from (or changed vs.) the remote list must be queued for upload.
  t.true(['tootzzz.png', 'toot.gif', 'cat.png'].every(path => {
    const found = filesToUpload.find(ftu => ftu.name === path)
    t.ok(found.path && found.name, 'each file to upload has a name and path')
    return found
  }), 'every file to upload is included')
  // Remote-only files must be queued for deletion.
  t.deepEqual(filesToDelete, [
    'not_found.html',
    'style.css'
  ], 'filesToDelete returned correctly')
  // Files whose sha1 matches the remote listing are skipped.
  t.true(['neocities.png'].every(path => {
    const found = filesSkipped.find(fs => fs.relname === path)
    return found
  }), 'every file skipped is included')
})

60
node_modules/async-neocities/lib/stats-handler.js generated vendored Normal file
View File

@ -0,0 +1,60 @@
const prettyBytes = require('pretty-bytes')
// Progress API constants
const START = 'start'
const PROGRESS = 'progress' // progress updates
const STOP = 'stop'
const SKIP = 'skip'

/**
 * statsHandler returns a stats-event callback that logs deploy lifecycle
 * events to the console. While a stage reports progress < 1, a 500ms
 * interval logs the latest accumulated stats; the interval is cleared when
 * the stage completes.
 * @param {Object} [opts] currently unused; reserved for future options.
 * @return {Function} handler to pass as `statsCb` to the deploy API.
 */
function statsHandler (opts = {}) {
  let progressInterval = null
  const lastStats = {} // most recent progress payload, shared with the interval logger

  return (stats) => {
    switch (stats.status) {
      case START: {
        console.log(`Starting ${stats.stage} stage...`)
        break
      }
      case STOP: {
        console.log(`Finished ${stats.stage} stage.`)
        break
      }
      case SKIP: {
        console.log(`Skipping ${stats.stage} stage.`)
        break
      }
      case PROGRESS: {
        progressHandler(stats)
        break
      }
      default: {
        // Unknown status: ignore silently.
      }
    }
  }

  function progressHandler (stats) {
    Object.assign(lastStats, stats)
    if (!stats.complete && stats.progress < 1) {
      if (!progressInterval) {
        progressInterval = setInterval(logProgress, 500, lastStats)
        logProgress(lastStats)
      }
    } else {
      if (progressInterval) {
        clearInterval(progressInterval)
        // Bug fix: reset the handle so a subsequent stage's progress events
        // can start a fresh logging interval (previously the stale, cleared
        // handle kept the `!progressInterval` guard permanently false).
        progressInterval = null
        logProgress(lastStats)
      }
    }
  }

  function logProgress (stats) {
    let logLine = `Stage ${stats.stage}: ${(stats.progress * 100).toFixed(2)}%`
    if (stats.bytesWritten != null && stats.totalBytes != null) {
      logLine = logLine + ` (${prettyBytes(stats.bytesWritten)} / ${prettyBytes(stats.totalBytes)})`
    }
    console.log(logLine)
  }
}
module.exports = statsHandler

38
node_modules/async-neocities/lib/stream-meter.js generated vendored Normal file
View File

@ -0,0 +1,38 @@
const { Writable, Transform } = require('streamx')
const pump = require('pump')
const pumpify = require('pumpify')
/**
 * Drain a readable stream into a counting sink and resolve with the total
 * number of bytes it produced.
 * @param {stream.Readable} readable stream to measure (fully consumed).
 * @return {Promise<Number>} total byte length of the stream.
 */
function getStreamLength (readable) {
  return new Promise((resolve, reject) => {
    let total = 0
    const countingSink = new Writable({
      write (chunk, cb) {
        total += chunk.length
        cb(null)
      }
    })
    pump(readable, countingSink, (err) => {
      if (err) {
        reject(err)
      } else {
        resolve(total)
      }
    })
  })
}
/**
 * Wrap a readable stream with a pass-through meter that reports the running
 * byte count to `statsCb` on every chunk.
 * @param {stream.Readable} readable source stream.
 * @param {Function} statsCb invoked with cumulative bytes read so far.
 * @return {stream.Readable} the metered stream (pumpify of source + meter).
 */
function meterStream (readable, statsCb) {
  let seen = 0
  const meter = new Transform({
    transform (chunk, cb) {
      seen += chunk.length
      statsCb(seen)
      cb(null, chunk)
    }
  })
  return pumpify(readable, meter)
}
module.exports = {
getStreamLength,
meterStream
}

34
node_modules/async-neocities/lib/timer.js generated vendored Normal file
View File

@ -0,0 +1,34 @@
/**
 * Simple timer lets you record start and stop times, with an elapsed time getter.
 */
class SimpleTimer {
  /**
   * @param {Number} [startTime] epoch ms to start from; defaults to now.
   */
  constructor (startTime) {
    this.start = startTime || Date.now()
    this.end = null
    this.stopped = false
  }

  /**
   * Milliseconds between start and stop, or start and now while running.
   * @return {Number}
   */
  get elapsed () {
    return this.stopped ? this.end - this.start : Date.now() - this.start
  }

  /** Freeze the timer. Repeat calls are no-ops. */
  stop () {
    if (this.stopped) return
    this.end = Date.now()
    this.stopped = true
  }

  toString () {
    return this.elapsed
  }

  toJSON () {
    return this.elapsed
  }
}
module.exports = SimpleTimer

84
node_modules/async-neocities/package.json generated vendored Normal file
View File

@ -0,0 +1,84 @@
{
"_from": "async-neocities@1.1.6",
"_id": "async-neocities@1.1.6",
"_inBundle": false,
"_integrity": "sha512-q5fTVttBaN9znGxqxxDAh/ks+bZngIJPu6zPS7nlbJLC9NnOhrcP5Mu0VntxgEBtAuaExyI6uH/C+CxKSW0LeQ==",
"_location": "/async-neocities",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "async-neocities@1.1.6",
"name": "async-neocities",
"escapedName": "async-neocities",
"rawSpec": "1.1.6",
"saveSpec": null,
"fetchSpec": "1.1.6"
},
"_requiredBy": [
"/"
],
"_resolved": "https://registry.npmjs.org/async-neocities/-/async-neocities-1.1.6.tgz",
"_shasum": "405b45565ccbe9c4ea56e65552ae9c48c20a0309",
"_spec": "async-neocities@1.1.6",
"_where": "/Users/bret/repos/deploy-to-neocities",
"author": {
"name": "Bret Comnes",
"email": "bcomnes@gmail.com",
"url": "https://bret.io"
},
"bugs": {
"url": "https://github.com/bcomnes/async-neocities/issues"
},
"bundleDependencies": false,
"dependencies": {
"async-folder-walker": "^2.0.1",
"fetch-errors": "^2.0.1",
"form-data": "^3.0.0",
"nanoassert": "^2.0.0",
"node-fetch": "^2.6.0",
"pretty-bytes": "^5.3.0",
"pump": "^3.0.0",
"pumpify": "^2.0.1",
"qs": "^6.9.1",
"streamx": "^2.6.0"
},
"deprecated": false,
"description": "WIP - nothing to see here",
"devDependencies": {
"auto-changelog": "^1.16.2",
"dependency-check": "^4.1.0",
"gh-release": "^3.5.0",
"npm-run-all": "^4.1.5",
"standard": "^13.1.0",
"tap": "^14.10.2"
},
"homepage": "https://github.com/bcomnes/async-neocities",
"keywords": [
"neocities",
"async",
"api client",
"static hosting"
],
"license": "MIT",
"main": "index.js",
"name": "async-neocities",
"repository": {
"type": "git",
"url": "git+https://github.com/bcomnes/async-neocities.git"
},
"scripts": {
"prepublishOnly": "git push --follow-tags && gh-release",
"test": "run-s test:*",
"test:deps": "dependency-check . --no-dev --no-peer",
"test:standard": "standard",
"test:tape": "tap",
"version": "auto-changelog -p --template keepachangelog auto-changelog --breaking-pattern 'BREAKING:' && git add CHANGELOG.md"
},
"standard": {
"ignore": [
"dist"
]
},
"version": "1.1.6"
}

127
node_modules/async-neocities/test.js generated vendored Normal file
View File

@ -0,0 +1,127 @@
// Integration tests for NeocitiesAPIClient. Live (network) tests run only
// when a real API token is available via NEOCITIES_API_TOKEN or config.json.
const tap = require('tap')
const { readFileSync } = require('fs')
const { resolve } = require('path')
const NeocitiesAPIClient = require('.')
const statsHanlder = require('./lib/stats-handler') // NOTE(review): local name misspells "statsHandler"; harmless, used consistently below

let token = process.env.NEOCITIES_API_TOKEN
let fakeToken = false

if (!token) {
  try {
    const config = JSON.parse(readFileSync(resolve(__dirname, 'config.json')))
    token = config.token
    tap.test('token loaded', async t => {
      t.ok(token)
    })
  } catch (e) {
    // No token anywhere: fall back to a dummy token and disable live tests.
    console.warn('error loading config.json')
    console.warn('using fake token, live tests disabled')
    fakeToken = true
    token = '123456'
  }
}

// Shape-only smoke test: works even with the fake token.
tap.test('basic client api', async t => {
  const client = new NeocitiesAPIClient(token)
  t.ok(client.info, 'info method available')
  t.ok(client.list, 'list method available')
  t.ok(client.get, 'get method available')
  t.ok(client.post, 'post method available')
})

if (!fakeToken) {
  tap.test('can get info about site', async t => {
    const client = new NeocitiesAPIClient(token)
    const info = await client.info()
    // console.log(info)
    t.equal(info.result, 'success', 'info requesst successfull')
    const list = await client.list()
    // console.log(list)
    t.equal(list.result, 'success', 'list result successfull')
  })
  // test('form data works the way I think', t => {
  // const form = new FormData();
  // const p = resolve(__dirname, 'package.json');
  // form.append('package.json', next => next(createReadStream(p)));
  //
  // const concatStream = concat((data) => {
  // console.log(data);
  // t.end();
  // });
  //
  // form.on('error', (err) => {
  // t.error(err);
  // });
  // form.pipe(concatStream);
  // });
  tap.test('can upload and delete files', async t => {
    const client = new NeocitiesAPIClient(token)
    const uploadResults = await client.upload([
      {
        name: 'toot.gif',
        path: resolve(__dirname, 'fixtures/toot.gif')
      },
      {
        name: 'img/tootzzz.png',
        path: resolve(__dirname, 'fixtures/tootzzz.png')
      }
    ])
    // console.log(uploadResults)
    t.equal(uploadResults.result, 'success', 'list result successfull')
    const deleteResults = await client.delete([
      'toot.gif',
      'img/tootzzz.png'
    ])
    // console.log(deleteResults)
    t.equal(deleteResults.result, 'success', 'list result successfull')
  })
  // Deploy the fixtures folder twice (second run should mostly skip),
  // then deploy an empty folder with cleanup to delete everything.
  tap.test('can deploy folders', async t => {
    const client = new NeocitiesAPIClient(token)
    const deployStats = await client.deploy(
      resolve(__dirname, 'fixtures'),
      {
        statsCb: statsHanlder(),
        cleanup: false
      }
    )
    t.ok(deployStats)
    // console.dir(deployStats, { depth: 99, colors: true })
    const redeployStats = await client.deploy(
      resolve(__dirname, 'fixtures'),
      {
        statsCb: statsHanlder(),
        cleanup: false
      }
    )
    t.ok(redeployStats)
    // console.dir(redeployStats, { depth: 99, colors: true })
    const cleanupStats = await client.deploy(
      resolve(__dirname, 'fixtures/empty'),
      {
        statsCb: statsHanlder(),
        cleanup: true
      }
    )
    t.ok(cleanupStats)
    // console.dir(cleanupStats, { depth: 99, colors: true })
  })
}

21
node_modules/asynckit/LICENSE generated vendored Normal file
View File

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2016 Alex Indigo
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

233
node_modules/asynckit/README.md generated vendored Normal file
View File

@ -0,0 +1,233 @@
# asynckit [![NPM Module](https://img.shields.io/npm/v/asynckit.svg?style=flat)](https://www.npmjs.com/package/asynckit)
Minimal async jobs utility library, with streams support.
[![PhantomJS Build](https://img.shields.io/travis/alexindigo/asynckit/v0.4.0.svg?label=browser&style=flat)](https://travis-ci.org/alexindigo/asynckit)
[![Linux Build](https://img.shields.io/travis/alexindigo/asynckit/v0.4.0.svg?label=linux:0.12-6.x&style=flat)](https://travis-ci.org/alexindigo/asynckit)
[![Windows Build](https://img.shields.io/appveyor/ci/alexindigo/asynckit/v0.4.0.svg?label=windows:0.12-6.x&style=flat)](https://ci.appveyor.com/project/alexindigo/asynckit)
[![Coverage Status](https://img.shields.io/coveralls/alexindigo/asynckit/v0.4.0.svg?label=code+coverage&style=flat)](https://coveralls.io/github/alexindigo/asynckit?branch=master)
[![Dependency Status](https://img.shields.io/david/alexindigo/asynckit/v0.4.0.svg?style=flat)](https://david-dm.org/alexindigo/asynckit)
[![bitHound Overall Score](https://www.bithound.io/github/alexindigo/asynckit/badges/score.svg)](https://www.bithound.io/github/alexindigo/asynckit)
<!-- [![Readme](https://img.shields.io/badge/readme-tested-brightgreen.svg?style=flat)](https://www.npmjs.com/package/reamde) -->
AsyncKit provides harness for `parallel` and `serial` iterators over list of items represented by arrays or objects.
Optionally it accepts an abort function (which should be synchronously returned by the iterator for each item) and terminates leftover jobs upon an error event. For a specific iteration order, built-in (`ascending` and `descending`) and custom sort helpers are also supported, via the `asynckit.serialOrdered` method.
It makes all operations asynchronous, keeping behavior stable and preventing `Maximum call stack size exceeded` errors from synchronous iterators.
| compression | size |
| :----------------- | -------: |
| asynckit.js | 12.34 kB |
| asynckit.min.js | 4.11 kB |
| asynckit.min.js.gz | 1.47 kB |
## Install
```sh
$ npm install --save asynckit
```
## Examples
### Parallel Jobs
Runs iterator over provided array in parallel. Stores output in the `result` array,
on the matching positions. In unlikely event of an error from one of the jobs,
will terminate rest of the active jobs (if abort function is provided)
and return error along with salvaged data to the main callback function.
#### Input Array
```javascript
var parallel = require('asynckit').parallel
, assert = require('assert')
;
var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
, expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ]
, expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ]
, target = []
;
parallel(source, asyncJob, function(err, result)
{
assert.deepEqual(result, expectedResult);
assert.deepEqual(target, expectedTarget);
});
// async job accepts one element from the array
// and a callback function
function asyncJob(item, cb)
{
// different delays (in ms) per item
var delay = item * 25;
// pretend different jobs take different time to finish
// and not in consequential order
var timeoutId = setTimeout(function() {
target.push(item);
cb(null, item * 2);
}, delay);
// allow to cancel "leftover" jobs upon error
// return function, invoking of which will abort this job
return clearTimeout.bind(null, timeoutId);
}
```
More examples could be found in [test/test-parallel-array.js](test/test-parallel-array.js).
#### Input Object
Also it supports named jobs, listed via object.
```javascript
var parallel = require('asynckit/parallel')
, assert = require('assert')
;
var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 }
, expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 }
, expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ]
, expectedKeys = [ 'first', 'one', 'two', 'four', 'eight', 'sixteen', 'thirtyTwo', 'sixtyFour' ]
, target = []
, keys = []
;
parallel(source, asyncJob, function(err, result)
{
assert.deepEqual(result, expectedResult);
assert.deepEqual(target, expectedTarget);
assert.deepEqual(keys, expectedKeys);
});
// supports full value, key, callback (shortcut) interface
function asyncJob(item, key, cb)
{
// different delays (in ms) per item
var delay = item * 25;
// pretend different jobs take different time to finish
// and not in consequential order
var timeoutId = setTimeout(function() {
keys.push(key);
target.push(item);
cb(null, item * 2);
}, delay);
// allow to cancel "leftover" jobs upon error
// return function, invoking of which will abort this job
return clearTimeout.bind(null, timeoutId);
}
```
More examples could be found in [test/test-parallel-object.js](test/test-parallel-object.js).
### Serial Jobs
Runs iterator over provided array sequentially. Stores output in the `result` array,
on the matching positions. In unlikely event of an error from one of the jobs,
will not proceed to the rest of the items in the list
and return error along with salvaged data to the main callback function.
#### Input Array
```javascript
var serial = require('asynckit/serial')
, assert = require('assert')
;
var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
, expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ]
, expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ]
, target = []
;
serial(source, asyncJob, function(err, result)
{
assert.deepEqual(result, expectedResult);
assert.deepEqual(target, expectedTarget);
});
// extended interface (item, key, callback)
// also supported for arrays
function asyncJob(item, key, cb)
{
target.push(key);
// it will be automatically made async
// even it iterator "returns" in the same event loop
cb(null, item * 2);
}
```
More examples could be found in [test/test-serial-array.js](test/test-serial-array.js).
#### Input Object
Also it supports named jobs, listed via object.
```javascript
var serial = require('asynckit').serial
, assert = require('assert')
;
var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
, expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ]
, expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ]
, target = []
;
var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 }
, expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 }
, expectedTarget = [ 1, 1, 4, 16, 64, 32, 8, 2 ]
, target = []
;
serial(source, asyncJob, function(err, result)
{
assert.deepEqual(result, expectedResult);
assert.deepEqual(target, expectedTarget);
});
// shortcut interface (item, callback)
// works for object as well as for the arrays
function asyncJob(item, cb)
{
target.push(item);
// it will be automatically made async
// even it iterator "returns" in the same event loop
cb(null, item * 2);
}
```
More examples could be found in [test/test-serial-object.js](test/test-serial-object.js).
_Note: Since _object_ is an _unordered_ collection of properties,
it may produce unexpected results with sequential iterations.
Whenever order of the jobs' execution is important please use `serialOrdered` method._
### Ordered Serial Iterations
TBD
For example [compare-property](compare-property) package.
### Streaming interface
TBD
## Want to Know More?
More examples can be found in [test folder](test/).
Or open an [issue](https://github.com/alexindigo/asynckit/issues) with questions and/or suggestions.
## License
AsyncKit is licensed under the MIT license.

76
node_modules/asynckit/bench.js generated vendored Normal file
View File

@ -0,0 +1,76 @@
/* eslint no-console: "off" */

// Micro-benchmark: compares `asynckit.parallel` with `async.map` over a
// 99-element workload. Each job defers via setImmediate; results and the
// fastest implementation are printed per benchmark cycle.
var asynckit = require('./')
  , async = require('async')
  , assert = require('assert')
  , expected = 0
  ;

var Benchmark = require('benchmark');

var suite = new Benchmark.Suite;

// Build the shared input [1..99] and the expected running-total result.
var source = [];
for (var z = 1; z < 100; z++)
{
  source.push(z);
  expected += z;
}

suite
// add tests
.add('async.map', function(deferred)
{
  var total = 0;

  async.map(source,
    function(i, cb)
    {
      setImmediate(function()
      {
        total += i;
        cb(null, total);
      });
    },
    function(err, result)
    {
      assert.ifError(err);
      assert.equal(result[result.length - 1], expected);
      deferred.resolve();
    });
}, {'defer': true})

.add('asynckit.parallel', function(deferred)
{
  var total = 0;

  asynckit.parallel(source,
    function(i, cb)
    {
      setImmediate(function()
      {
        total += i;
        cb(null, total);
      });
    },
    function(err, result)
    {
      assert.ifError(err);
      assert.equal(result[result.length - 1], expected);
      deferred.resolve();
    });
}, {'defer': true})

// add listeners
.on('cycle', function(ev)
{
  console.log(String(ev.target));
})
.on('complete', function()
{
  console.log('Fastest is ' + this.filter('fastest').map('name'));
})

// run async
.run({ 'async': true });

6
node_modules/asynckit/index.js generated vendored Normal file
View File

@ -0,0 +1,6 @@
// Public API: the three iteration strategies offered by asynckit.
module.exports =
{
  parallel : require('./parallel.js'),
  serial : require('./serial.js'),
  serialOrdered : require('./serialOrdered.js')
};

29
node_modules/asynckit/lib/abort.js generated vendored Normal file
View File

@ -0,0 +1,29 @@
// API
module.exports = abort;
/**
 * Aborts leftover active jobs
 *
 * @param {object} state - current state object
 */
function abort(state)
{
  // invoke the abort handle (when one exists) for every job still in flight
  for (var key of Object.keys(state.jobs))
  {
    clean.call(state, key);
  }

  // reset leftover jobs
  state.jobs = {};
}

/**
 * Cleans up leftover job by invoking abort function for the provided job id
 *
 * @this state
 * @param {string|number} key - job id to abort
 */
function clean(key)
{
  // only function-valued entries are abortable; anything else is ignored
  if (typeof this.jobs[key] === 'function')
  {
    this.jobs[key]();
  }
}

34
node_modules/asynckit/lib/async.js generated vendored Normal file
View File

@ -0,0 +1,34 @@
var defer = require('./defer.js');
// API
module.exports = async;
/**
 * Runs provided callback asynchronously
 * even if callback itself is not
 *
 * @param {function} callback - callback to invoke
 * @returns {function} - augmented callback
 */
function async(callback)
{
  var calledSync = true;

  // after the next tick, any invocation is asynchronous by definition
  defer(function() { calledSync = false; });

  return function async_callback(err, result)
  {
    if (calledSync)
    {
      // push a synchronous invocation onto the next tick
      defer(function nextTick_callback()
      {
        callback(err, result);
      });
    }
    else
    {
      callback(err, result);
    }
  };
}

26
node_modules/asynckit/lib/defer.js generated vendored Normal file
View File

@ -0,0 +1,26 @@
module.exports = defer;
/**
 * Runs provided function on next iteration of the event loop
 *
 * @param {function} fn - function to run
 */
function defer(fn)
{
  var scheduler = null;

  // prefer setImmediate, then process.nextTick, depending on the environment
  if (typeof setImmediate == 'function')
  {
    scheduler = setImmediate;
  }
  else if (typeof process == 'object' && typeof process.nextTick == 'function')
  {
    scheduler = process.nextTick;
  }

  if (scheduler)
  {
    scheduler(fn);
  }
  else
  {
    // last-resort fallback for environments without either scheduler
    setTimeout(fn, 0);
  }
}

75
node_modules/asynckit/lib/iterate.js generated vendored Normal file
View File

@ -0,0 +1,75 @@
var async = require('./async.js')
, abort = require('./abort.js')
;
// API
module.exports = iterate;
/**
 * Iterates over each job object
 *
 * Runs the job at `state.index`, stores its abort handle in `state.jobs`,
 * and reports (possibly partial) results to `callback` when the job settles.
 *
 * @param {array|object} list - array or object (named list) to iterate over
 * @param {function} iterator - iterator to run
 * @param {object} state - current job status
 * @param {function} callback - invoked when all elements processed
 */
function iterate(list, iterator, state, callback)
{
  // store current index
  // named lists resolve the key via keyedList; plain arrays use the index itself
  var key = state['keyedList'] ? state['keyedList'][state.index] : state.index;

  // runJob returns the job's abort function when the iterator provides one
  state.jobs[key] = runJob(iterator, key, list[key], function(error, output)
  {
    // don't repeat yourself
    // skip secondary callbacks
    // (the key is removed below on first completion, making this idempotent)
    if (!(key in state.jobs))
    {
      return;
    }

    // clean up jobs
    delete state.jobs[key];

    if (error)
    {
      // don't process rest of the results
      // stop still active jobs
      // and reset the list
      abort(state);
    }
    else
    {
      state.results[key] = output;
    }

    // return salvaged results
    callback(error, state.results);
  });
}
/**
 * Runs iterator over provided job element
 *
 * @param {function} iterator - iterator to invoke
 * @param {string|number} key - key/index of the element in the list of jobs
 * @param {mixed} item - job description
 * @param {function} callback - invoked after iterator is done with the job
 * @returns {function|mixed} - job abort function or something else
 */
function runJob(iterator, key, item, callback)
{
  // guarantee the callback fires asynchronously
  var done = async(callback);

  // arity 2 means the shortcut (item, cb) signature;
  // anything else gets the full (item, key, cb) form
  return iterator.length == 2
    ? iterator(item, done)
    : iterator(item, key, done);
}

91
node_modules/asynckit/lib/readable_asynckit.js generated vendored Normal file
View File

@ -0,0 +1,91 @@
var streamify = require('./streamify.js')
, defer = require('./defer.js')
;
// API
module.exports = ReadableAsyncKit;
/**
 * Base constructor for all streams
 * used to hold properties/methods
 */
function ReadableAsyncKit()
{
  // NOTE(review): `super_` is presumably attached via util.inherits by the
  // concrete stream constructors — confirm in the files that set up inheritance.
  ReadableAsyncKit.super_.apply(this, arguments);

  // list of active jobs
  this.jobs = {};

  // add stream methods
  this.destroy = destroy;
  this._start = _start;
  this._read = _read;
}
/**
 * Destroys readable stream,
 * by aborting outstanding jobs
 *
 * @returns {void}
 */
function destroy()
{
  // repeated calls are no-ops
  if (this.destroyed) return;

  this.destroyed = true;

  // let the active runner cancel its outstanding jobs
  if (typeof this.terminator === 'function')
  {
    this.terminator();
  }
}
/**
 * Starts provided jobs in async manner
 *
 * Expected call shape: _start(runner, input, iterator, [sortMethod,] callback)
 *
 * @private
 */
function _start()
{
  // first argument runner function
  var runner = arguments[0]
    // take away first argument
    , args = Array.prototype.slice.call(arguments, 1)
    // second argument - input data
    , input = args[0]
    // last argument - result callback
    , endCb = streamify.callback.call(this, args[args.length - 1])
    ;

  args[args.length - 1] = endCb;
  // third argument - iterator
  args[1] = streamify.iterator.call(this, args[1]);

  // allow time for proper setup
  defer(function()
  {
    if (!this.destroyed)
    {
      // keep the runner's abort function so destroy() can cancel in-flight jobs
      this.terminator = runner.apply(null, args);
    }
    else
    {
      // destroyed before start: finish immediately with an empty result
      endCb(null, Array.isArray(input) ? [] : {});
    }
  }.bind(this));
}
/**
 * Implement _read to comply with Readable streams
 * Doesn't really make sense for flowing object mode
 *
 * @private
 */
function _read()
{
  // intentionally empty: data is pushed from the iterator callbacks instead
}

25
node_modules/asynckit/lib/readable_parallel.js generated vendored Normal file
View File

@ -0,0 +1,25 @@
var parallel = require('../parallel.js');
// API
module.exports = ReadableParallel;
/**
 * Streaming wrapper to `asynckit.parallel`
 *
 * @param {array|object} list - array or object (named list) to iterate over
 * @param {function} iterator - iterator to run
 * @param {function} callback - invoked when all elements processed
 * @returns {stream.Readable#}
 */
function ReadableParallel(list, iterator, callback)
{
  // support invocation without `new`
  if (!(this instanceof ReadableParallel))
  {
    return new ReadableParallel(list, iterator, callback);
  }

  // turn on object mode
  ReadableParallel.super_.call(this, {objectMode: true});

  this._start(parallel, list, iterator, callback);
}

25
node_modules/asynckit/lib/readable_serial.js generated vendored Normal file
View File

@ -0,0 +1,25 @@
var serial = require('../serial.js');
// API
module.exports = ReadableSerial;
/**
 * Streaming wrapper to `asynckit.serial`
 *
 * @param {array|object} list - array or object (named list) to iterate over
 * @param {function} iterator - iterator to run
 * @param {function} callback - invoked when all elements processed
 * @returns {stream.Readable#}
 */
function ReadableSerial(list, iterator, callback)
{
  // support invocation without `new`
  if (!(this instanceof ReadableSerial))
  {
    return new ReadableSerial(list, iterator, callback);
  }

  // turn on object mode
  ReadableSerial.super_.call(this, {objectMode: true});

  this._start(serial, list, iterator, callback);
}

29
node_modules/asynckit/lib/readable_serial_ordered.js generated vendored Normal file
View File

@ -0,0 +1,29 @@
var serialOrdered = require('../serialOrdered.js');
// API
module.exports = ReadableSerialOrdered;
// expose sort helpers
module.exports.ascending = serialOrdered.ascending;
module.exports.descending = serialOrdered.descending;
/**
 * Streaming wrapper to `asynckit.serialOrdered`
 *
 * @param {array|object} list - array or object (named list) to iterate over
 * @param {function} iterator - iterator to run
 * @param {function} sortMethod - custom sort function
 * @param {function} callback - invoked when all elements processed
 * @returns {stream.Readable#}
 */
function ReadableSerialOrdered(list, iterator, sortMethod, callback)
{
  // support invocation without `new`
  if (!(this instanceof ReadableSerialOrdered))
  {
    return new ReadableSerialOrdered(list, iterator, sortMethod, callback);
  }

  // turn on object mode
  ReadableSerialOrdered.super_.call(this, {objectMode: true});

  this._start(serialOrdered, list, iterator, sortMethod, callback);
}

37
node_modules/asynckit/lib/state.js generated vendored Normal file
View File

@ -0,0 +1,37 @@
// API
module.exports = state;
/**
 * Creates initial state object
 * for iteration over list
 *
 * @param {array|object} list - list to iterate over
 * @param {function|null} sortMethod - function to use for keys sort,
 * or `null` to keep them as is
 * @returns {object} - initial state object
 */
function state(list, sortMethod)
{
  var isNamedList = !Array.isArray(list);
  // keys are only materialized when needed (named list or custom ordering)
  var keys = (isNamedList || sortMethod) ? Object.keys(list) : null;

  var initState = {
    index: 0,
    keyedList: keys,
    jobs: {},
    results: isNamedList ? {} : [],
    size: isNamedList ? keys.length : list.length
  };

  if (sortMethod)
  {
    // named lists sort keys on their own merit;
    // array keys are sorted by comparing the values they point at
    var comparator = isNamedList
      ? sortMethod
      : function(a, b) { return sortMethod(list[a], list[b]); };
    initState.keyedList.sort(comparator);
  }

  return initState;
}

141
node_modules/asynckit/lib/streamify.js generated vendored Normal file
View File

@ -0,0 +1,141 @@
var async = require('./async.js');
// API
module.exports = {
iterator: wrapIterator,
callback: wrapCallback
};
/**
 * Wraps iterators with long signature
 *
 * @this ReadableAsyncKit#
 * @param {function} iterator - function to wrap
 * @returns {function} - wrapped function
 */
function wrapIterator(iterator)
{
  var stream = this;

  return function(item, key, cb)
  {
    var aborter
      // run the per-item callback async and route it through the stream
      , wrappedCb = async(wrapIteratorCallback.call(stream, cb, key))
      ;

    // track the wrapped callback so duplicate invocations can be detected
    stream.jobs[key] = wrappedCb;

    // it's either shortcut (item, cb)
    if (iterator.length == 2)
    {
      aborter = iterator(item, wrappedCb);
    }
    // or long format (item, key, cb)
    else
    {
      aborter = iterator(item, key, wrappedCb);
    }

    return aborter;
  };
}
/**
 * Wraps provided callback function
 * allowing to execute snitch function before
 * real callback
 *
 * @this ReadableAsyncKit#
 * @param {function} callback - function to wrap
 * @returns {function} - wrapped function
 */
function wrapCallback(callback)
{
  var stream = this;

  // route the final result through `finisher` so the stream can end itself
  var wrapped = function(error, result)
  {
    return finisher.call(stream, error, result, callback);
  };

  return wrapped;
}
/**
 * Wraps provided iterator callback function
 * makes sure snitch only called once,
 * but passes secondary calls to the original callback
 *
 * @this ReadableAsyncKit#
 * @param {function} callback - callback to wrap
 * @param {number|string} key - iteration key
 * @returns {function} wrapped callback
 */
function wrapIteratorCallback(callback, key)
{
  var stream = this;

  return function(error, output)
  {
    // don't repeat yourself
    // a missing key means this job already reported; forward without streaming
    if (!(key in stream.jobs))
    {
      callback(error, output);
      return;
    }

    // clean up jobs
    delete stream.jobs[key];

    return streamer.call(stream, error, {key: key, value: output}, callback);
  };
}
/**
 * Stream wrapper for iterator callback
 *
 * @this ReadableAsyncKit#
 * @param {mixed} error - error response
 * @param {mixed} output - iterator output
 * @param {function} callback - callback that expects iterator results
 */
function streamer(error, output, callback)
{
  // only the first error pauses the stream and emits 'error'
  if (error && !this.error)
  {
    this.error = error;
    this.pause();
    this.emit('error', error);
    // send back value only, as expected
    callback(error, output && output.value);
    return;
  }

  // stream stuff
  this.push(output);

  // back to original track
  // send back value only, as expected
  callback(error, output && output.value);
}
/**
 * Stream wrapper for finishing callback
 *
 * @this ReadableAsyncKit#
 * @param {mixed} error - error response
 * @param {mixed} output - iterator output
 * @param {function} callback - callback that expects final results
 */
function finisher(error, output, callback)
{
  // signal end of the stream
  // only for successfully finished streams
  if (!error)
  {
    this.push(null);
  }

  // back to original track
  callback(error, output);
}

29
node_modules/asynckit/lib/terminator.js generated vendored Normal file
View File

@ -0,0 +1,29 @@
var abort = require('./abort.js')
, async = require('./async.js')
;
// API
module.exports = terminator;
/**
 * Terminates jobs in the attached state context
 *
 * @this AsyncKitState#
 * @param {function} callback - final callback to invoke after termination
 */
function terminator(callback)
{
  // nothing in flight, nothing to terminate
  if (!Object.keys(this.jobs).length) return;

  // fast forward iteration index
  this.index = this.size;

  // abort jobs
  abort(this);

  // send back results we have so far
  async(callback)(null, this.results);
}

92
node_modules/asynckit/package.json generated vendored Normal file
View File

@ -0,0 +1,92 @@
{
"_from": "asynckit@^0.4.0",
"_id": "asynckit@0.4.0",
"_inBundle": false,
"_integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=",
"_location": "/asynckit",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "asynckit@^0.4.0",
"name": "asynckit",
"escapedName": "asynckit",
"rawSpec": "^0.4.0",
"saveSpec": null,
"fetchSpec": "^0.4.0"
},
"_requiredBy": [
"/form-data",
"/request/form-data"
],
"_resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"_shasum": "c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79",
"_spec": "asynckit@^0.4.0",
"_where": "/Users/bret/repos/deploy-to-neocities/node_modules/form-data",
"author": {
"name": "Alex Indigo",
"email": "iam@alexindigo.com"
},
"bugs": {
"url": "https://github.com/alexindigo/asynckit/issues"
},
"bundleDependencies": false,
"dependencies": {},
"deprecated": false,
"description": "Minimal async jobs utility library, with streams support",
"devDependencies": {
"browserify": "^13.0.0",
"browserify-istanbul": "^2.0.0",
"coveralls": "^2.11.9",
"eslint": "^2.9.0",
"istanbul": "^0.4.3",
"obake": "^0.1.2",
"phantomjs-prebuilt": "^2.1.7",
"pre-commit": "^1.1.3",
"reamde": "^1.1.0",
"rimraf": "^2.5.2",
"size-table": "^0.2.0",
"tap-spec": "^4.1.1",
"tape": "^4.5.1"
},
"homepage": "https://github.com/alexindigo/asynckit#readme",
"keywords": [
"async",
"jobs",
"parallel",
"serial",
"iterator",
"array",
"object",
"stream",
"destroy",
"terminate",
"abort"
],
"license": "MIT",
"main": "index.js",
"name": "asynckit",
"pre-commit": [
"clean",
"lint",
"test",
"browser",
"report",
"size"
],
"repository": {
"type": "git",
"url": "git+https://github.com/alexindigo/asynckit.git"
},
"scripts": {
"browser": "browserify -t browserify-istanbul test/lib/browserify_adjustment.js test/test-*.js | obake --coverage | tap-spec",
"clean": "rimraf coverage",
"debug": "tape test/test-*.js",
"lint": "eslint *.js lib/*.js test/*.js",
"report": "istanbul report",
"size": "browserify index.js | size-table asynckit",
"test": "istanbul cover --reporter=json tape -- 'test/test-*.js' | tap-spec",
"win-test": "tape test/test-*.js"
},
"version": "0.4.0"
}

43
node_modules/asynckit/parallel.js generated vendored Normal file
View File

@ -0,0 +1,43 @@
var iterate = require('./lib/iterate.js')
  , initState = require('./lib/state.js')
  , terminator = require('./lib/terminator.js')
  ;

// Public API
module.exports = parallel;

/**
 * Runs iterator over provided array elements in parallel
 *
 * @param   {array|object} list - array or object (named list) to iterate over
 * @param   {function} iterator - iterator to run
 * @param   {function} callback - invoked when all elements processed
 * @returns {function} - jobs terminator
 */
function parallel(list, iterator, callback)
{
  var state = initState(list);

  // kick off one job per element right away;
  // each started job registers itself in `state.jobs`
  while (state.index < (state['keyedList'] || list).length)
  {
    iterate(list, iterator, state, function(error, result)
    {
      // first error wins; outstanding jobs were aborted by iterate
      if (error)
      {
        callback(error, result);
        return;
      }

      // looks like it's the last one
      if (Object.keys(state.jobs).length === 0)
      {
        callback(null, state.results);
        return;
      }
    });

    state.index++;
  }

  // allow the caller to abort outstanding jobs early
  return terminator.bind(state, callback);
}

17
node_modules/asynckit/serial.js generated vendored Normal file
View File

@ -0,0 +1,17 @@
var serialOrdered = require('./serialOrdered.js');

// Public API
module.exports = serial;

/**
 * Runs iterator over provided array elements in series
 *
 * @param   {array|object} list - array or object (named list) to iterate over
 * @param   {function} iterator - iterator to run
 * @param   {function} callback - invoked when all elements processed
 * @returns {function} - jobs terminator
 */
function serial(list, iterator, callback)
{
  // series in natural order == serialOrdered without a sort method
  return serialOrdered(list, iterator, null, callback);
}

75
node_modules/asynckit/serialOrdered.js generated vendored Normal file
View File

@ -0,0 +1,75 @@
var iterate = require('./lib/iterate.js')
  , initState = require('./lib/state.js')
  , terminator = require('./lib/terminator.js')
  ;

// Public API
module.exports = serialOrdered;
// sorting helpers
module.exports.ascending = ascending;
module.exports.descending = descending;

/**
 * Runs iterator over provided sorted array elements in series
 *
 * @param   {array|object} list - array or object (named list) to iterate over
 * @param   {function} iterator - iterator to run
 * @param   {function} sortMethod - custom sort function
 * @param   {function} callback - invoked when all elements processed
 * @returns {function} - jobs terminator
 */
function serialOrdered(list, iterator, sortMethod, callback)
{
  var state = initState(list, sortMethod);

  // process one element, then recurse (via iteratorHandler) into the next
  iterate(list, iterator, state, function iteratorHandler(error, result)
  {
    // first error stops the whole run
    if (error)
    {
      callback(error, result);
      return;
    }

    state.index++;

    // are we there yet?
    if (state.index < (state['keyedList'] || list).length)
    {
      iterate(list, iterator, state, iteratorHandler);
      return;
    }

    // done here
    callback(null, state.results);
  });

  // allow the caller to abort outstanding work early
  return terminator.bind(state, callback);
}
/*
* -- Sort methods
*/
/**
 * sort helper to sort array elements in ascending order
 *
 * @param   {mixed} a - an item to compare
 * @param   {mixed} b - an item to compare
 * @returns {number} - comparison result (-1, 0 or 1)
 */
function ascending(a, b)
{
  if (a < b)
  {
    return -1;
  }

  if (a > b)
  {
    return 1;
  }

  return 0;
}
/**
 * sort helper to sort array elements in descending order
 *
 * @param   {mixed} a - an item to compare
 * @param   {mixed} b - an item to compare
 * @returns {number} - comparison result (-1, 0 or 1)
 */
function descending(a, b)
{
  // negate the ascending comparison; `0 - x` keeps zero's sign positive
  return 0 - ascending(a, b);
}

21
node_modules/asynckit/stream.js generated vendored Normal file
View File

@ -0,0 +1,21 @@
var inherits = require('util').inherits
  , Readable = require('stream').Readable
  , ReadableAsyncKit = require('./lib/readable_asynckit.js')
  , ReadableParallel = require('./lib/readable_parallel.js')
  , ReadableSerial = require('./lib/readable_serial.js')
  , ReadableSerialOrdered = require('./lib/readable_serial_ordered.js')
  ;

// API — stream flavors of the parallel / serial / serialOrdered runners
module.exports =
{
  parallel : ReadableParallel,
  serial : ReadableSerial,
  serialOrdered : ReadableSerialOrdered,
};

// every flavor is a Readable stream built on the shared ReadableAsyncKit base
inherits(ReadableAsyncKit, Readable);

inherits(ReadableParallel, ReadableAsyncKit);
inherits(ReadableSerial, ReadableAsyncKit);
inherits(ReadableSerialOrdered, ReadableAsyncKit);

19
node_modules/combined-stream/License generated vendored Normal file
View File

@ -0,0 +1,19 @@
Copyright (c) 2011 Debuggable Limited <felix@debuggable.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

138
node_modules/combined-stream/Readme.md generated vendored Normal file
View File

@ -0,0 +1,138 @@
# combined-stream
A stream that emits multiple other streams one after another.
**NB** Currently `combined-stream` works with streams version 1 only. There is ongoing effort to switch this library to streams version 2. Any help is welcome. :) Meanwhile you can explore other libraries that provide streams2 support with more or less compatibility with `combined-stream`.
- [combined-stream2](https://www.npmjs.com/package/combined-stream2): A drop-in streams2-compatible replacement for the combined-stream module.
- [multistream](https://www.npmjs.com/package/multistream): A stream that emits multiple other streams one after another.
## Installation
``` bash
npm install combined-stream
```
## Usage
Here is a simple example that shows how you can use combined-stream to combine
two files into one:
``` javascript
var CombinedStream = require('combined-stream');
var fs = require('fs');
var combinedStream = CombinedStream.create();
combinedStream.append(fs.createReadStream('file1.txt'));
combinedStream.append(fs.createReadStream('file2.txt'));
combinedStream.pipe(fs.createWriteStream('combined.txt'));
```
While the example above works great, it will pause all source streams until
they are needed. If you don't want that to happen, you can set `pauseStreams`
to `false`:
``` javascript
var CombinedStream = require('combined-stream');
var fs = require('fs');
var combinedStream = CombinedStream.create({pauseStreams: false});
combinedStream.append(fs.createReadStream('file1.txt'));
combinedStream.append(fs.createReadStream('file2.txt'));
combinedStream.pipe(fs.createWriteStream('combined.txt'));
```
However, what if you don't have all the source streams yet, or you don't want
to allocate the resources (file descriptors, memory, etc.) for them right away?
Well, in that case you can simply provide a callback that supplies the stream
by calling a `next()` function:
``` javascript
var CombinedStream = require('combined-stream');
var fs = require('fs');
var combinedStream = CombinedStream.create();
combinedStream.append(function(next) {
next(fs.createReadStream('file1.txt'));
});
combinedStream.append(function(next) {
next(fs.createReadStream('file2.txt'));
});
combinedStream.pipe(fs.createWriteStream('combined.txt'));
```
## API
### CombinedStream.create([options])
Returns a new combined stream object. Available options are:
* `maxDataSize`
* `pauseStreams`
The effect of those options is described below.
### combinedStream.pauseStreams = `true`
Whether to apply back pressure to the underlaying streams. If set to `false`,
the underlaying streams will never be paused. If set to `true`, the
underlaying streams will be paused right after being appended, as well as when
`delayedStream.pipe()` wants to throttle.
### combinedStream.maxDataSize = `2 * 1024 * 1024`
The maximum amount of bytes (or characters) to buffer for all source streams.
If this value is exceeded, `combinedStream` emits an `'error'` event.
### combinedStream.dataSize = `0`
The amount of bytes (or characters) currently buffered by `combinedStream`.
### combinedStream.append(stream)
Appends the given `stream` to the combinedStream object. If `pauseStreams` is
set to `true`, this stream will also be paused right away.
`stream` can also be a function that takes one parameter called `next`. `next`
is a function that must be invoked in order to provide the `next` stream, see
example above.
Regardless of how the `stream` is appended, combined-stream always attaches an
`'error'` listener to it, so you don't have to do that manually.
Special case: `stream` can also be a String or Buffer.
### combinedStream.write(data)
You should not call this, `combinedStream` takes care of piping the appended
streams into itself for you.
### combinedStream.resume()
Causes `combinedStream` to start draining the streams it manages. The function is
idempotent, and also emits a `'resume'` event each time which usually goes to
the stream that is currently being drained.
### combinedStream.pause();
If `combinedStream.pauseStreams` is set to `false`, this does nothing.
Otherwise a `'pause'` event is emitted, this goes to the stream that is
currently being drained, so you can use it to apply back pressure.
### combinedStream.end();
Sets `combinedStream.writable` to false, emits an `'end'` event, and removes
all streams from the queue.
### combinedStream.destroy();
Same as `combinedStream.end()`, except it emits a `'close'` event instead of
`'end'`.
## License
combined-stream is licensed under the MIT license.

208
node_modules/combined-stream/lib/combined_stream.js generated vendored Normal file
View File

@ -0,0 +1,208 @@
var util = require('util');
var Stream = require('stream').Stream;
var DelayedStream = require('delayed-stream');
module.exports = CombinedStream;
/**
 * Streams1-style readable stream that emits a queue of appended
 * sources (streams, strings, buffers or lazy callbacks) one after another.
 */
function CombinedStream() {
  this.writable = false;               // becomes true once resume() releases the stream
  this.readable = true;
  this.dataSize = 0;                   // bytes/chars currently buffered across all sources
  this.maxDataSize = 2 * 1024 * 1024;  // 'error' is emitted past this buffer size
  this.pauseStreams = true;            // pause appended sources until drained

  this._released = false;              // set on first resume()
  this._streams = [];                  // queued sources awaiting emission
  this._currentStream = null;          // source currently being drained
  this._insideLoop = false;            // re-entrancy guard for _getNext()
  this._pendingNext = false;           // deferred _getNext() requested while inside loop
}
util.inherits(CombinedStream, Stream);
/**
 * Factory: builds a CombinedStream and copies every option onto it.
 *
 * @param {Object} [options] - property overrides (e.g. maxDataSize, pauseStreams)
 * @returns {CombinedStream} configured instance
 */
CombinedStream.create = function(options) {
  var instance = new this();
  var opts = options || {};

  for (var key in opts) {
    instance[key] = opts[key];
  }

  return instance;
};
/**
 * Duck-typing check: anything that is not a function, a primitive
 * (string/boolean/number) or a Buffer is treated as a stream.
 *
 * @param {*} stream - candidate value
 * @returns {boolean} true when the value should be piped as a stream
 */
CombinedStream.isStreamLike = function(stream) {
  var type = typeof stream;

  return type !== 'function'
    && type !== 'string'
    && type !== 'boolean'
    && type !== 'number'
    && !Buffer.isBuffer(stream);
};
/**
 * Queues a source for emission. Real streams are wrapped in a
 * DelayedStream (so no data is lost before this stream is drained),
 * get an 'error' listener attached, and are paused when `pauseStreams`
 * is enabled. Strings, Buffers and lazy callbacks are queued as-is.
 *
 * @param {Stream|Buffer|string|function} stream - source to append
 * @returns {CombinedStream} this, for chaining
 */
CombinedStream.prototype.append = function(stream) {
  var isStreamLike = CombinedStream.isStreamLike(stream);

  if (isStreamLike) {
    if (!(stream instanceof DelayedStream)) {
      var newStream = DelayedStream.create(stream, {
        maxDataSize: Infinity,
        pauseStream: this.pauseStreams,
      });
      // track buffered bytes on the raw source, not on the wrapper
      stream.on('data', this._checkDataSize.bind(this));
      stream = newStream;
    }

    this._handleErrors(stream);

    if (this.pauseStreams) {
      stream.pause();
    }
  }

  this._streams.push(stream);
  return this;
};
/**
 * Streams1 pipe: delegates to Stream#pipe, then starts draining.
 *
 * @param {Stream} dest - destination stream
 * @param {Object} [options] - pipe options forwarded to Stream#pipe
 * @returns {Stream} dest, for chaining
 */
CombinedStream.prototype.pipe = function(dest, options) {
  Stream.prototype.pipe.call(this, dest, options);
  this.resume();
  return dest;
};
/**
 * Advances to the next queued source. Guarded against re-entrancy:
 * when a synchronous source finishes during _realGetNext(), the nested
 * call is deferred and replayed by the do/while loop instead of
 * growing the call stack.
 */
CombinedStream.prototype._getNext = function() {
  this._currentStream = null;

  if (this._insideLoop) {
    this._pendingNext = true;
    return; // defer call
  }

  this._insideLoop = true;
  try {
    do {
      this._pendingNext = false;
      this._realGetNext();
    } while (this._pendingNext);
  } finally {
    this._insideLoop = false;
  }
};
/**
 * Dequeues the next source and emits it. A function entry is treated
 * as a lazy provider: it is invoked with a `next(stream)` callback that
 * supplies the actual source. An empty queue ends the combined stream.
 */
CombinedStream.prototype._realGetNext = function() {
  var stream = this._streams.shift();

  // queue drained — we're done
  if (typeof stream == 'undefined') {
    this.end();
    return;
  }

  if (typeof stream !== 'function') {
    this._pipeNext(stream);
    return;
  }

  var getStream = stream;
  getStream(function(stream) {
    var isStreamLike = CombinedStream.isStreamLike(stream);
    if (isStreamLike) {
      // lazily-provided streams get the same bookkeeping as append()
      stream.on('data', this._checkDataSize.bind(this));
      this._handleErrors(stream);
    }

    this._pipeNext(stream);
  }.bind(this));
};
/**
 * Emits one source: streams are piped without forwarding 'end' (so the
 * queue can continue afterwards); plain values are written directly and
 * the next source is fetched immediately.
 *
 * @param {Stream|Buffer|string} stream - source to emit
 */
CombinedStream.prototype._pipeNext = function(stream) {
  this._currentStream = stream;

  var isStreamLike = CombinedStream.isStreamLike(stream);
  if (isStreamLike) {
    // advance the queue when this source finishes
    stream.on('end', this._getNext.bind(this));
    stream.pipe(this, {end: false});
    return;
  }

  var value = stream;
  this.write(value);
  this._getNext();
};
/**
 * Re-emits any 'error' from a source on this combined stream, so callers
 * only need a single error listener.
 *
 * @param {Stream} stream - source to watch
 */
CombinedStream.prototype._handleErrors = function(stream) {
  stream.on('error', this._emitError.bind(this));
};
/**
 * Streams1 write: re-emits incoming chunks. Called internally by the
 * piped sources; not meant to be called by consumers.
 *
 * @param {Buffer|string} data - chunk from the current source
 */
CombinedStream.prototype.write = function(data) {
  this.emit('data', data);
};
/**
 * Applies back pressure: pauses the source currently being drained and
 * emits 'pause'. No-op when `pauseStreams` is disabled.
 */
CombinedStream.prototype.pause = function() {
  if (!this.pauseStreams) {
    return;
  }

  // the early return above guarantees pauseStreams is truthy here, so the
  // original re-check of `this.pauseStreams` was redundant and is dropped
  if (this._currentStream && typeof this._currentStream.pause == 'function') {
    this._currentStream.pause();
  }
  this.emit('pause');
};
/**
 * Starts (or continues) draining. The first call releases the stream:
 * marks it writable and begins pulling from the queue. Every call also
 * resumes the current source (when pausing is enabled) and emits 'resume'.
 */
CombinedStream.prototype.resume = function() {
  if (!this._released) {
    this._released = true;
    this.writable = true;
    this._getNext();
  }

  if(this.pauseStreams && this._currentStream && typeof(this._currentStream.resume) == 'function') this._currentStream.resume();
  this.emit('resume');
};
/**
 * Ends the stream: clears internal state and emits 'end'.
 */
CombinedStream.prototype.end = function() {
  this._reset();
  this.emit('end');
};

/**
 * Same as end(), but emits 'close' instead of 'end'.
 */
CombinedStream.prototype.destroy = function() {
  this._reset();
  this.emit('close');
};

/**
 * Drops all queued sources and marks the stream non-writable.
 */
CombinedStream.prototype._reset = function() {
  this.writable = false;
  this._streams = [];
  this._currentStream = null;
};
/**
 * Recomputes the total buffered size and raises an error once it
 * exceeds `maxDataSize`.
 */
CombinedStream.prototype._checkDataSize = function() {
  this._updateDataSize();
  if (this.dataSize <= this.maxDataSize) {
    return;
  }

  // FIX: the message previously said "DelayedStream#maxDataSize" — the
  // limit being enforced here is CombinedStream's own maxDataSize.
  var message =
    'CombinedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.';
  this._emitError(new Error(message));
};
/**
 * Sums the buffered dataSize of every queued source plus the one
 * currently being drained, storing the total on `this.dataSize`.
 */
CombinedStream.prototype._updateDataSize = function() {
  var total = 0;

  for (var i = 0; i < this._streams.length; i++) {
    var queued = this._streams[i];
    if (queued.dataSize) {
      total += queued.dataSize;
    }
  }

  if (this._currentStream && this._currentStream.dataSize) {
    total += this._currentStream.dataSize;
  }

  this.dataSize = total;
};
/**
 * Clears state and emits 'error'; used both for source errors and for
 * maxDataSize overruns.
 *
 * @param {Error} err - error to surface
 */
CombinedStream.prototype._emitError = function(err) {
  this._reset();
  this.emit('error', err);
};

59
node_modules/combined-stream/package.json generated vendored Normal file
View File

@ -0,0 +1,59 @@
{
"_from": "combined-stream@^1.0.8",
"_id": "combined-stream@1.0.8",
"_inBundle": false,
"_integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
"_location": "/combined-stream",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "combined-stream@^1.0.8",
"name": "combined-stream",
"escapedName": "combined-stream",
"rawSpec": "^1.0.8",
"saveSpec": null,
"fetchSpec": "^1.0.8"
},
"_requiredBy": [
"/form-data",
"/request",
"/request/form-data"
],
"_resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
"_shasum": "c3d45a8b34fd730631a110a8a2520682b31d5a7f",
"_spec": "combined-stream@^1.0.8",
"_where": "/Users/bret/repos/deploy-to-neocities/node_modules/form-data",
"author": {
"name": "Felix Geisendörfer",
"email": "felix@debuggable.com",
"url": "http://debuggable.com/"
},
"bugs": {
"url": "https://github.com/felixge/node-combined-stream/issues"
},
"bundleDependencies": false,
"dependencies": {
"delayed-stream": "~1.0.0"
},
"deprecated": false,
"description": "A stream that emits multiple other streams one after another.",
"devDependencies": {
"far": "~0.0.7"
},
"engines": {
"node": ">= 0.8"
},
"homepage": "https://github.com/felixge/node-combined-stream",
"license": "MIT",
"main": "./lib/combined_stream",
"name": "combined-stream",
"repository": {
"type": "git",
"url": "git://github.com/felixge/node-combined-stream.git"
},
"scripts": {
"test": "node test/run.js"
},
"version": "1.0.8"
}

17
node_modules/combined-stream/yarn.lock generated vendored Normal file
View File

@ -0,0 +1,17 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
delayed-stream@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619"
far@~0.0.7:
version "0.0.7"
resolved "https://registry.yarnpkg.com/far/-/far-0.0.7.tgz#01c1fd362bcd26ce9cf161af3938aa34619f79a7"
dependencies:
oop "0.0.3"
oop@0.0.3:
version "0.0.3"
resolved "https://registry.yarnpkg.com/oop/-/oop-0.0.3.tgz#70fa405a5650891a194fdc82ca68dad6dabf4401"

1
node_modules/delayed-stream/.npmignore generated vendored Normal file
View File

@ -0,0 +1 @@
test

19
node_modules/delayed-stream/License generated vendored Normal file
View File

@ -0,0 +1,19 @@
Copyright (c) 2011 Debuggable Limited <felix@debuggable.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

7
node_modules/delayed-stream/Makefile generated vendored Normal file
View File

@ -0,0 +1,7 @@
SHELL := /bin/bash
test:
@./test/run.js
.PHONY: test

141
node_modules/delayed-stream/Readme.md generated vendored Normal file
View File

@ -0,0 +1,141 @@
# delayed-stream
Buffers events from a stream until you are ready to handle them.
## Installation
``` bash
npm install delayed-stream
```
## Usage
The following example shows how to write a http echo server that delays its
response by 1000 ms.
``` javascript
var DelayedStream = require('delayed-stream');
var http = require('http');
http.createServer(function(req, res) {
var delayed = DelayedStream.create(req);
setTimeout(function() {
res.writeHead(200);
delayed.pipe(res);
}, 1000);
});
```
If you are not using `Stream#pipe`, you can also manually release the buffered
events by calling `delayedStream.resume()`:
``` javascript
var delayed = DelayedStream.create(req);
setTimeout(function() {
// Emit all buffered events and resume underlaying source
delayed.resume();
}, 1000);
```
## Implementation
In order to use this meta stream properly, here are a few things you should
know about the implementation.
### Event Buffering / Proxying
All events of the `source` stream are hijacked by overwriting the `source.emit`
method. Until node implements a catch-all event listener, this is the only way.
However, delayed-stream still continues to emit all events it captures on the
`source`, regardless of whether you have released the delayed stream yet or
not.
Upon creation, delayed-stream captures all `source` events and stores them in
an internal event buffer. Once `delayedStream.release()` is called, all
buffered events are emitted on the `delayedStream`, and the event buffer is
cleared. After that, delayed-stream merely acts as a proxy for the underlaying
source.
### Error handling
Error events on `source` are buffered / proxied just like any other events.
However, `delayedStream.create` attaches a no-op `'error'` listener to the
`source`. This way you only have to handle errors on the `delayedStream`
object, rather than in two places.
### Buffer limits
delayed-stream provides a `maxDataSize` property that can be used to limit
the amount of data being buffered. In order to protect you from bad `source`
streams that don't react to `source.pause()`, this feature is enabled by
default.
## API
### DelayedStream.create(source, [options])
Returns a new `delayedStream`. Available options are:
* `pauseStream`
* `maxDataSize`
The description for those properties can be found below.
### delayedStream.source
The `source` stream managed by this object. This is useful if you are
passing your `delayedStream` around, and you still want to access properties
on the `source` object.
### delayedStream.pauseStream = true
Whether to pause the underlaying `source` when calling
`DelayedStream.create()`. Modifying this property afterwards has no effect.
### delayedStream.maxDataSize = 1024 * 1024
The amount of data to buffer before emitting an `error`.
If the underlaying source is emitting `Buffer` objects, the `maxDataSize`
refers to bytes.
If the underlaying source is emitting JavaScript strings, the size refers to
characters.
If you know what you are doing, you can set this property to `Infinity` to
disable this feature. You can also modify this property during runtime.
### delayedStream.dataSize = 0
The amount of data buffered so far.
### delayedStream.readable
An ECMA5 getter that returns the value of `source.readable`.
### delayedStream.resume()
If the `delayedStream` has not been released so far, `delayedStream.release()`
is called.
In either case, `source.resume()` is called.
### delayedStream.pause()
Calls `source.pause()`.
### delayedStream.pipe(dest)
Calls `delayedStream.resume()` and then proxies the arguments to `source.pipe`.
### delayedStream.release()
Emits and clears all events that have been buffered up so far. This does not
resume the underlaying source, use `delayedStream.resume()` instead.
## License
delayed-stream is licensed under the MIT license.

107
node_modules/delayed-stream/lib/delayed_stream.js generated vendored Normal file
View File

@ -0,0 +1,107 @@
var Stream = require('stream').Stream;
var util = require('util');
module.exports = DelayedStream;
/**
 * Streams1 wrapper that buffers every event of a source stream until
 * release()/resume() is called, then proxies them live.
 */
function DelayedStream() {
  this.source = null;               // wrapped source stream
  this.dataSize = 0;                // bytes/chars buffered so far
  this.maxDataSize = 1024 * 1024;   // 'error' is emitted once buffer exceeds this
  this.pauseStream = true;          // pause source on create()

  this._maxDataSizeExceeded = false; // ensures the overflow error fires once
  this._released = false;            // once true, events are proxied live
  this._bufferedEvents = [];         // captured emit() arguments, pre-release
}
util.inherits(DelayedStream, Stream);
/**
 * Factory: wraps `source`, copies `options` onto the wrapper, hijacks
 * `source.emit` so every event is captured (while still firing on the
 * source), attaches a no-op 'error' listener, and optionally pauses
 * the source.
 *
 * @param {Stream} source - stream to delay
 * @param {Object} [options] - property overrides (pauseStream, maxDataSize)
 * @returns {DelayedStream} wrapper around source
 */
DelayedStream.create = function(source, options) {
  var delayedStream = new this();

  options = options || {};
  for (var option in options) {
    delayedStream[option] = options[option];
  }

  delayedStream.source = source;

  // intercept all source events while still letting them fire on source
  var realEmit = source.emit;
  source.emit = function() {
    delayedStream._handleEmit(arguments);
    return realEmit.apply(source, arguments);
  };

  // swallow raw source errors; they are buffered/proxied to the wrapper
  source.on('error', function() {});
  if (delayedStream.pauseStream) {
    source.pause();
  }

  return delayedStream;
};
// `readable` mirrors the wrapped source's readable flag.
Object.defineProperty(DelayedStream.prototype, 'readable', {
  configurable: true,
  enumerable: true,
  get: function() {
    return this.source.readable;
  }
});
// Delegates encoding straight to the wrapped source stream.
DelayedStream.prototype.setEncoding = function() {
  return this.source.setEncoding.apply(this.source, arguments);
};
/**
 * Releases buffered events (first call only), then resumes the source.
 */
DelayedStream.prototype.resume = function() {
  if (!this._released) {
    this.release();
  }

  this.source.resume();
};
// Back pressure: pauses the wrapped source.
DelayedStream.prototype.pause = function() {
  this.source.pause();
};
/**
 * Flushes the buffer: re-emits every captured event on this stream and
 * switches to live-proxy mode. Does not resume the underlying source.
 */
DelayedStream.prototype.release = function() {
  this._released = true;

  // swap the buffer out first; once _released is set, no new events are
  // buffered, so draining a local copy is equivalent
  var buffered = this._bufferedEvents;
  this._bufferedEvents = [];

  for (var i = 0; i < buffered.length; i++) {
    this.emit.apply(this, buffered[i]);
  }
};
/**
 * Pipes like a regular stream, then resumes so buffered events flow
 * straight into the destination.
 */
DelayedStream.prototype.pipe = function() {
  var r = Stream.prototype.pipe.apply(this, arguments);
  this.resume();
  return r;
};
/**
 * Hook installed over the source's emit(): proxies events live once
 * released, otherwise buffers them (tracking data size for 'data'
 * events so the maxDataSize limit can be enforced).
 *
 * @param {Arguments} args - arguments of the intercepted emit call
 */
DelayedStream.prototype._handleEmit = function(args) {
  if (this._released) {
    this.emit.apply(this, args);
    return;
  }

  if (args[0] === 'data') {
    this.dataSize += args[1].length;
    this._checkIfMaxDataSizeExceeded();
  }

  this._bufferedEvents.push(args);
};
/**
 * Emits a single 'error' once the buffered data exceeds maxDataSize.
 * The `_maxDataSizeExceeded` flag guarantees the error fires only once.
 */
DelayedStream.prototype._checkIfMaxDataSizeExceeded = function() {
  if (this._maxDataSizeExceeded) {
    return;
  }

  if (this.dataSize <= this.maxDataSize) {
    return;
  }

  this._maxDataSizeExceeded = true;
  // FIX: added the statement-terminating semicolon that was missing
  // (the original relied on automatic semicolon insertion)
  var message =
    'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.';
  this.emit('error', new Error(message));
};

62
node_modules/delayed-stream/package.json generated vendored Normal file
View File

@ -0,0 +1,62 @@
{
"_from": "delayed-stream@~1.0.0",
"_id": "delayed-stream@1.0.0",
"_inBundle": false,
"_integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=",
"_location": "/delayed-stream",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "delayed-stream@~1.0.0",
"name": "delayed-stream",
"escapedName": "delayed-stream",
"rawSpec": "~1.0.0",
"saveSpec": null,
"fetchSpec": "~1.0.0"
},
"_requiredBy": [
"/combined-stream"
],
"_resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"_shasum": "df3ae199acadfb7d440aaae0b29e2272b24ec619",
"_spec": "delayed-stream@~1.0.0",
"_where": "/Users/bret/repos/deploy-to-neocities/node_modules/combined-stream",
"author": {
"name": "Felix Geisendörfer",
"email": "felix@debuggable.com",
"url": "http://debuggable.com/"
},
"bugs": {
"url": "https://github.com/felixge/node-delayed-stream/issues"
},
"bundleDependencies": false,
"contributors": [
{
"name": "Mike Atkins",
"email": "apeherder@gmail.com"
}
],
"dependencies": {},
"deprecated": false,
"description": "Buffers events from a stream until you are ready to handle them.",
"devDependencies": {
"fake": "0.2.0",
"far": "0.0.1"
},
"engines": {
"node": ">=0.4.0"
},
"homepage": "https://github.com/felixge/node-delayed-stream",
"license": "MIT",
"main": "./lib/delayed_stream",
"name": "delayed-stream",
"repository": {
"type": "git",
"url": "git://github.com/felixge/node-delayed-stream.git"
},
"scripts": {
"test": "make test"
},
"version": "1.0.0"
}

6
node_modules/duplexify/.travis.yml generated vendored Normal file
View File

@ -0,0 +1,6 @@
language: node_js
node_js:
- "4"
- "6"
- "8"
- "10"

21
node_modules/duplexify/LICENSE generated vendored Normal file
View File

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2014 Mathias Buus
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

97
node_modules/duplexify/README.md generated vendored Normal file
View File

@ -0,0 +1,97 @@
# duplexify
Turn a writeable and readable stream into a single streams2 duplex stream.
Similar to [duplexer2](https://github.com/deoxxa/duplexer2) except it supports both streams2 and streams1 as input
and it allows you to set the readable and writable part asynchronously using `setReadable(stream)` and `setWritable(stream)`
```
npm install duplexify
```
[![build status](http://img.shields.io/travis/mafintosh/duplexify.svg?style=flat)](http://travis-ci.org/mafintosh/duplexify)
## Usage
Use `duplexify(writable, readable, streamOptions)` (or `duplexify.obj(writable, readable)` to create an object stream)
``` js
var duplexify = require('duplexify')
// turn writableStream and readableStream into a single duplex stream
var dup = duplexify(writableStream, readableStream)
dup.write('hello world') // will write to writableStream
dup.on('data', function(data) {
// will read from readableStream
})
```
You can also set the readable and writable parts asynchronously
``` js
var dup = duplexify()
dup.write('hello world') // write will buffer until the writable
// part has been set
// wait a bit ...
dup.setReadable(readableStream)
// maybe wait some more?
dup.setWritable(writableStream)
```
If you call `setReadable` or `setWritable` multiple times it will unregister the previous readable/writable stream.
To disable the readable or writable part call `setReadable` or `setWritable` with `null`.
If the readable or writable streams emits an error or close it will destroy both streams and bubble up the event.
You can also explicitly destroy the streams by calling `dup.destroy()`. The `destroy` method optionally takes an
error object as argument, in which case the error is emitted as part of the `error` event.
``` js
dup.on('error', function(err) {
console.log('readable or writable emitted an error - close will follow')
})
dup.on('close', function() {
console.log('the duplex stream is destroyed')
})
dup.destroy() // calls destroy on the readable and writable part (if present)
```
## HTTP request example
Turn a node core http request into a duplex stream is as easy as
``` js
var duplexify = require('duplexify')
var http = require('http')
var request = function(opts) {
var req = http.request(opts)
var dup = duplexify(req)
req.on('response', function(res) {
dup.setReadable(res)
})
return dup
}
var req = request({
method: 'GET',
host: 'www.google.com',
port: 80
})
req.end()
req.pipe(process.stdout)
```
## License
MIT
## Related
`duplexify` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one.

21
node_modules/duplexify/example.js generated vendored Normal file
View File

@ -0,0 +1,21 @@
// Example: expose an http.ClientRequest as a single duplex stream.
var duplexify = require('duplexify')
var http = require('http')

// The write side is the request itself; the read side is attached
// asynchronously once the 'response' event delivers the response stream.
var request = function(opts) {
  var req = http.request(opts)
  var dup = duplexify()
  dup.setWritable(req)
  req.on('response', function(res) {
    dup.setReadable(res)
  })
  return dup
}

var req = request({
  method: 'GET',
  host: 'www.google.com',
  port: 80
})

req.end()
req.pipe(process.stdout)

238
node_modules/duplexify/index.js generated vendored Normal file
View File

@ -0,0 +1,238 @@
var stream = require('readable-stream')
var eos = require('end-of-stream')
var inherits = require('inherits')
var shift = require('stream-shift')
// Sentinel chunk written by end() to trigger the flush path; _write()
// compares by identity, so it can never collide with user data. The
// feature check keeps compatibility with pre-Buffer.from Node versions.
var SIGNAL_FLUSH = (Buffer.from && Buffer.from !== Uint8Array.from)
  ? Buffer.from([0])
  : new Buffer([0])
// Run `fn` immediately when the stream is uncorked, otherwise defer it
// until the next 'uncork' event.
var onuncork = function(self, fn) {
  if (!self._corked) {
    fn()
    return
  }
  self.once('uncork', fn)
}
// Destroy `self` with `err` unless auto-destroy was disabled via opts.
var autoDestroy = function (self, err) {
  if (!self._autoDestroy) return
  self.destroy(err)
}
// Build the eos() callback for an inner stream: on failure, tear the
// duplex down ('premature close' is reported as a plain close, so the
// error is dropped); on clean completion, end the duplex when `end`
// forwarding is enabled and end() has not already been called.
var destroyer = function(self, end) {
  return function(err) {
    if (err) {
      autoDestroy(self, err.message === 'premature close' ? null : err)
      return
    }
    if (end && !self._ended) self.end()
  }
}
// Gracefully end the writable `ws` (if any), then invoke `fn` once its
// write side has flushed.
var end = function(ws, fn) {
  if (!ws) return fn()
  var state = ws._writableState
  if (state && state.finished) return fn() // already fully flushed
  if (state) return ws.end(fn)             // streams2+: fn fires on finish
  ws.end()                                 // legacy stream: end, then continue
  fn()
}
// Default no-op callback used by destroy().
var noop = function() {}

// Wrap a streams1 (flowing) readable in a streams2 object-mode readable
// so _forward() can pull from it with stream-shift.
var toStreams2 = function(rs) {
  return new (stream.Readable)({objectMode:true, highWaterMark:16}).wrap(rs)
}
// Duplex stream that proxies writes to `writable` and reads from
// `readable`; either half may be null and attached later via
// setWritable()/setReadable(). Recognized opts (all default true):
//   autoDestroy — destroy this stream when an inner stream errors
//   destroy     — forward destroy() to the inner streams
//   end         — end the inner writable when this stream ends
var Duplexify = function(writable, readable, opts) {
  if (!(this instanceof Duplexify)) return new Duplexify(writable, readable, opts)
  stream.Duplex.call(this, opts)

  this._writable = null
  this._readable = null
  this._readable2 = null // streams2 view of _readable (wrapped if legacy)

  this._autoDestroy = !opts || opts.autoDestroy !== false
  this._forwardDestroy = !opts || opts.destroy !== false
  this._forwardEnd = !opts || opts.end !== false
  this._corked = 1 // start corked
  this._ondrain = null      // pending write callback awaiting inner 'drain'
  this._drained = false     // downstream has capacity (via _read)
  this._forwarding = false  // re-entrancy guard for _forward()
  this._unwrite = null      // detaches the current writable's listeners
  this._unread = null       // detaches the current readable's listeners
  this._ended = false

  this.destroyed = false

  if (writable) this.setWritable(writable)
  if (readable) this.setReadable(readable)
}
inherits(Duplexify, stream.Duplex)
// Convenience constructor for an object-mode duplexify with a
// highWaterMark of 16 (matching toStreams2). Note: forces objectMode
// and highWaterMark onto the supplied opts object.
Duplexify.obj = function(writable, readable, opts) {
  var options = opts || {}
  options.objectMode = true
  options.highWaterMark = 16
  return new Duplexify(writable, readable, options)
}
// Increment the cork count; emit 'cork' on the 0 -> 1 transition.
Duplexify.prototype.cork = function() {
  this._corked++
  if (this._corked === 1) this.emit('cork')
}

// Decrement the cork count; emit 'uncork' when it reaches zero.
// A no-op when the stream is not corked.
Duplexify.prototype.uncork = function() {
  if (!this._corked) return
  this._corked--
  if (this._corked === 0) this.emit('uncork')
}
// Attach (or replace) the writable half. Passing null/false ends the
// write side; attaching after destroy() destroys the incoming stream
// instead. Always uncorks, releasing writes buffered while unattached.
Duplexify.prototype.setWritable = function(writable) {
  if (this._unwrite) this._unwrite() // detach the previous writable's listeners

  if (this.destroyed) {
    if (writable && writable.destroy) writable.destroy()
    return
  }

  if (writable === null || writable === false) {
    this.end()
    return
  }

  var self = this
  // Mirror finish/error of the inner writable onto this duplex.
  var unend = eos(writable, {writable:true, readable:false}, destroyer(this, this._forwardEnd))

  // Release a write that was blocked on the inner stream's backpressure.
  var ondrain = function() {
    var ondrain = self._ondrain
    self._ondrain = null
    if (ondrain) ondrain()
  }

  var clear = function() {
    self._writable.removeListener('drain', ondrain)
    unend()
  }

  if (this._unwrite) process.nextTick(ondrain) // force a drain on stream reset to avoid livelocks

  this._writable = writable
  this._writable.on('drain', ondrain)
  this._unwrite = clear

  this.uncork() // always uncork setWritable
}
// Attach (or replace) the readable half. Passing null/false ends the
// read side (push EOF and resume); attaching after destroy() destroys
// the incoming stream instead. Legacy streams are wrapped via toStreams2.
Duplexify.prototype.setReadable = function(readable) {
  if (this._unread) this._unread() // detach the previous readable's listeners

  if (this.destroyed) {
    if (readable && readable.destroy) readable.destroy()
    return
  }

  if (readable === null || readable === false) {
    this.push(null)
    this.resume()
    return
  }

  var self = this
  // Mirror end/error of the inner readable onto this duplex.
  var unend = eos(readable, {writable:false, readable:true}, destroyer(this))

  var onreadable = function() {
    self._forward()
  }

  var onend = function() {
    self.push(null)
  }

  var clear = function() {
    self._readable2.removeListener('readable', onreadable)
    self._readable2.removeListener('end', onend)
    unend()
  }

  this._drained = true
  this._readable = readable
  // Use the stream directly when it is already streams2, else wrap it.
  this._readable2 = readable._readableState ? readable : toStreams2(readable)
  this._readable2.on('readable', onreadable)
  this._readable2.on('end', onend)
  this._unread = clear

  this._forward()
}
// streams2 pull hook: mark downstream as having capacity, then forward
// whatever the inner readable has buffered.
Duplexify.prototype._read = function() {
  this._drained = true
  this._forward()
}
// Move chunks from the inner readable into this stream while downstream
// has capacity. _forwarding guards against re-entrancy (push() can
// synchronously trigger another 'readable').
Duplexify.prototype._forward = function() {
  if (this._forwarding || !this._readable2 || !this._drained) return
  this._forwarding = true

  var data

  while (this._drained && (data = shift(this._readable2)) !== null) {
    if (this.destroyed) continue // drain silently once destroyed
    this._drained = this.push(data)
  }

  this._forwarding = false
}
// Destroy the duplex (idempotent). The actual teardown runs on the next
// tick so 'error'/'close' are always emitted asynchronously.
Duplexify.prototype.destroy = function(err, cb) {
  if (!cb) cb = noop
  if (this.destroyed) return cb(null) // already destroyed: no-op
  this.destroyed = true

  var self = this
  process.nextTick(function() {
    self._destroy(err)
    cb(null)
  })
}
// Internal teardown: surface `err`, optionally destroy the inner
// streams, then emit 'close'.
Duplexify.prototype._destroy = function(err) {
  if (err) {
    var ondrain = this._ondrain
    this._ondrain = null
    // If a write is blocked waiting for drain, fail that write's
    // callback; otherwise emit the error directly.
    if (ondrain) ondrain(err)
    else this.emit('error', err)
  }

  if (this._forwardDestroy) {
    if (this._readable && this._readable.destroy) this._readable.destroy()
    if (this._writable && this._writable.destroy) this._writable.destroy()
  }

  this.emit('close')
}
// streams2 write hook: defer while corked, intercept the flush sentinel
// (compared by identity), and propagate the inner writable's
// backpressure by parking `cb` in _ondrain until 'drain' fires.
Duplexify.prototype._write = function(data, enc, cb) {
  if (this.destroyed) return // swallow writes after destroy
  if (this._corked) return onuncork(this, this._write.bind(this, data, enc, cb))
  if (data === SIGNAL_FLUSH) return this._finish(cb)
  if (!this._writable) return cb() // no writable attached: drop the chunk

  if (this._writable.write(data) === false) this._ondrain = cb
  else if (!this.destroyed) cb()
}
// Flush path triggered by the SIGNAL_FLUSH chunk: end the inner
// writable (when end-forwarding is on) before letting this stream
// finish, honoring any cork in effect.
Duplexify.prototype._finish = function(cb) {
  var self = this
  this.emit('preend')
  onuncork(this, function() {
    end(self._forwardEnd && self._writable, function() {
      // haxx to not emit prefinish twice
      if (self._writableState.prefinished === false) self._writableState.prefinished = true
      self.emit('prefinish')
      onuncork(self, cb)
    })
  })
}
// end([data][, enc][, cb]) — normalize the optional arguments, write the
// flush sentinel so _finish() runs before 'finish', then delegate to
// Writable#end.
Duplexify.prototype.end = function(data, enc, cb) {
  if (typeof data === 'function') return this.end(null, null, data) // end(cb)
  if (typeof enc === 'function') return this.end(data, null, enc)   // end(data, cb)
  this._ended = true
  if (data) this.write(data)
  // Only queue the sentinel once; a second end() skips it.
  if (!this._writableState.ending) this.write(SIGNAL_FLUSH)
  return stream.Writable.prototype.end.call(this, cb)
}

module.exports = Duplexify

66
node_modules/duplexify/package.json generated vendored Normal file
View File

@ -0,0 +1,66 @@
{
"_from": "duplexify@^4.1.1",
"_id": "duplexify@4.1.1",
"_inBundle": false,
"_integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==",
"_location": "/duplexify",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "duplexify@^4.1.1",
"name": "duplexify",
"escapedName": "duplexify",
"rawSpec": "^4.1.1",
"saveSpec": null,
"fetchSpec": "^4.1.1"
},
"_requiredBy": [
"/pumpify"
],
"_resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz",
"_shasum": "7027dc374f157b122a8ae08c2d3ea4d2d953aa61",
"_spec": "duplexify@^4.1.1",
"_where": "/Users/bret/repos/deploy-to-neocities/node_modules/pumpify",
"author": {
"name": "Mathias Buus"
},
"bugs": {
"url": "https://github.com/mafintosh/duplexify/issues"
},
"bundleDependencies": false,
"dependencies": {
"end-of-stream": "^1.4.1",
"inherits": "^2.0.3",
"readable-stream": "^3.1.1",
"stream-shift": "^1.0.0"
},
"deprecated": false,
"description": "Turn a writable and readable stream into a streams2 duplex stream with support for async initialization and streams1/streams2 input",
"devDependencies": {
"concat-stream": "^1.5.2",
"tape": "^4.0.0",
"through2": "^2.0.0"
},
"homepage": "https://github.com/mafintosh/duplexify",
"keywords": [
"duplex",
"streams2",
"streams",
"stream",
"writable",
"readable",
"async"
],
"license": "MIT",
"main": "index.js",
"name": "duplexify",
"repository": {
"type": "git",
"url": "git://github.com/mafintosh/duplexify.git"
},
"scripts": {
"test": "tape test.js"
},
"version": "4.1.1"
}

338
node_modules/duplexify/test.js generated vendored Normal file
View File

@ -0,0 +1,338 @@
var tape = require('tape')
var through = require('through2')
var concat = require('concat-stream')
var stream = require('readable-stream')
var net = require('net')
var duplexify = require('./')
// Fixture compatible with pre-Buffer.from Node versions.
var HELLO_WORLD = (Buffer.from && Buffer.from !== Uint8Array.from)
  ? Buffer.from('hello world')
  : new Buffer('hello world')

// One stream used as both halves should echo writes back out.
tape('passthrough', function(t) {
  t.plan(2)

  var pt = through()
  var dup = duplexify(pt, pt)

  dup.end('hello world')
  dup.on('finish', function() {
    t.ok(true, 'should finish')
  })
  dup.pipe(concat(function(data) {
    t.same(data.toString(), 'hello world', 'same in as out')
  }))
})

// A second end() call must be a safe no-op.
tape('passthrough + double end', function(t) {
  t.plan(2)

  var pt = through()
  var dup = duplexify(pt, pt)

  dup.end('hello world')
  dup.end()

  dup.on('finish', function() {
    t.ok(true, 'should finish')
  })
  dup.pipe(concat(function(data) {
    t.same(data.toString(), 'hello world', 'same in as out')
  }))
})

// finish must wait for a slow async transform to flush.
tape('async passthrough + end', function(t) {
  t.plan(2)

  var pt = through.obj({highWaterMark:1}, function(data, enc, cb) {
    setTimeout(function() {
      cb(null, data)
    }, 100)
  })

  var dup = duplexify(pt, pt)

  dup.write('hello ')
  dup.write('world')
  dup.end()

  dup.on('finish', function() {
    t.ok(true, 'should finish')
  })
  dup.pipe(concat(function(data) {
    t.same(data.toString(), 'hello world', 'same in as out')
  }))
})

// Independent readable and writable halves in object mode.
tape('duplex', function(t) {
  var readExpected = ['read-a', 'read-b', 'read-c']
  var writeExpected = ['write-a', 'write-b', 'write-c']

  t.plan(readExpected.length+writeExpected.length+2)

  var readable = through.obj()
  var writable = through.obj(function(data, enc, cb) {
    t.same(data, writeExpected.shift(), 'onwrite should match')
    cb()
  })

  var dup = duplexify.obj(writable, readable)

  readExpected.slice().forEach(function(data) {
    readable.write(data)
  })
  readable.end()

  writeExpected.slice().forEach(function(data) {
    dup.write(data)
  })
  dup.end()

  dup.on('data', function(data) {
    t.same(data, readExpected.shift(), 'ondata should match')
  })
  dup.on('end', function() {
    t.ok(true, 'should end')
  })
  dup.on('finish', function() {
    t.ok(true, 'should finish')
  })
})

// Writes issued before the halves are attached must be buffered.
tape('async', function(t) {
  var dup = duplexify()
  var pt = through()

  dup.pipe(concat(function(data) {
    t.same(data.toString(), 'i was async', 'same in as out')
    t.end()
  }))

  dup.write('i')
  dup.write(' was ')
  dup.end('async')

  setTimeout(function() {
    dup.setWritable(pt)
    setTimeout(function() {
      dup.setReadable(pt)
    }, 50)
  }, 50)
})

// destroy() forwards to the inner stream and is idempotent.
tape('destroy', function(t) {
  t.plan(2)

  var write = through()
  var read = through()
  var dup = duplexify(write, read)

  write.destroy = function() {
    t.ok(true, 'write destroyed')
  }

  dup.on('close', function() {
    t.ok(true, 'close emitted')
  })

  dup.destroy()
  dup.destroy() // should only work once
})

// destroy() forwards to both halves when each has a destroy method.
tape('destroy both', function(t) {
  t.plan(3)

  var write = through()
  var read = through()
  var dup = duplexify(write, read)

  write.destroy = function() {
    t.ok(true, 'write destroyed')
  }

  read.destroy = function() {
    t.ok(true, 'read destroyed')
  }

  dup.on('close', function() {
    t.ok(true, 'close emitted')
  })

  dup.destroy()
  dup.destroy() // should only work once
})

// Errors from either half surface on the duplex; only the first wins.
tape('bubble read errors', function(t) {
  t.plan(2)

  var write = through()
  var read = through()
  var dup = duplexify(write, read)

  dup.on('error', function(err) {
    t.same(err.message, 'read-error', 'received read error')
  })
  dup.on('close', function() {
    t.ok(true, 'close emitted')
  })

  read.emit('error', new Error('read-error'))
  write.emit('error', new Error('write-error')) // only emit first error
})

tape('bubble write errors', function(t) {
  t.plan(2)

  var write = through()
  var read = through()
  var dup = duplexify(write, read)

  dup.on('error', function(err) {
    t.same(err.message, 'write-error', 'received write error')
  })
  dup.on('close', function() {
    t.ok(true, 'close emitted')
  })

  write.emit('error', new Error('write-error'))
  read.emit('error', new Error('read-error')) // only emit first error
})

// A failing inner _write must emit 'error' before 'close'.
tape('bubble errors from write()', function(t) {
  t.plan(3)

  var errored = false
  var dup = duplexify(new stream.Writable({
    write: function(chunk, enc, next) {
      next(new Error('write-error'))
    }
  }))

  dup.on('error', function(err) {
    errored = true
    t.same(err.message, 'write-error', 'received write error')
  })
  dup.on('close', function() {
    t.pass('close emitted')
    t.ok(errored, 'error was emitted before close')
  })
  dup.end('123')
})

// destroy(err) while a write callback is parked awaiting drain.
tape('destroy while waiting for drain', function(t) {
  t.plan(3)

  var errored = false
  var dup = duplexify(new stream.Writable({
    highWaterMark: 0,
    write: function() {}
  }))

  dup.on('error', function(err) {
    errored = true
    t.same(err.message, 'destroy-error', 'received destroy error')
  })
  dup.on('close', function() {
    t.pass('close emitted')
    t.ok(errored, 'error was emitted before close')
  })
  dup.write('123')
  dup.destroy(new Error('destroy-error'))
})

// Swapping the halves mid-stream keeps buffered writes flowing.
tape('reset writable / readable', function(t) {
  t.plan(3)

  var toUpperCase = function(data, enc, cb) {
    cb(null, data.toString().toUpperCase())
  }

  var passthrough = through()
  var upper = through(toUpperCase)
  var dup = duplexify(passthrough, passthrough)

  dup.once('data', function(data) {
    t.same(data.toString(), 'hello')
    dup.setWritable(upper)
    dup.setReadable(upper)
    dup.once('data', function(data) {
      t.same(data.toString(), 'HELLO')
      dup.once('data', function(data) {
        t.same(data.toString(), 'HI')
        t.end()
      })
    })
    dup.write('hello')
    dup.write('hi')
  })
  dup.write('hello')
})

// cork() during prefinish must delay 'finish' until uncork().
tape('cork', function(t) {
  var passthrough = through()
  var dup = duplexify(passthrough, passthrough)
  var ok = false

  dup.on('prefinish', function() {
    dup.cork()
    setTimeout(function() {
      ok = true
      dup.uncork()
    }, 100)
  })
  dup.on('finish', function() {
    t.ok(ok)
    t.end()
  })
  dup.end()
})

// Regression test for the prefinish-suppression hack in _finish().
tape('prefinish not twice', function(t) {
  var passthrough = through()
  var dup = duplexify(passthrough, passthrough)
  var prefinished = false

  dup.on('prefinish', function() {
    t.ok(!prefinished, 'only prefinish once')
    prefinished = true
  })

  dup.on('finish', function() {
    t.end()
  })

  dup.end()
})

// 'close' on an inner stream is forwarded to the duplex.
tape('close', function(t) {
  var passthrough = through()
  var dup = duplexify(passthrough, passthrough)

  passthrough.emit('close')
  dup.on('close', function() {
    t.ok(true, 'should forward close')
    t.end()
  })
})

// Interop with node core net sockets (ephemeral port via listen(0)).
tape('works with node native streams (net)', function(t) {
  t.plan(1)

  var server = net.createServer(function(socket) {
    var dup = duplexify(socket, socket)

    dup.once('data', function(chunk) {
      t.same(chunk, HELLO_WORLD)
      server.close()
      socket.end()
      t.end()
    })
  })

  server.listen(0, function () {
    var socket = net.connect(server.address().port)
    var dup = duplexify(socket, socket)

    dup.write(HELLO_WORLD)
  })
})

21
node_modules/end-of-stream/LICENSE generated vendored Normal file
View File

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2014 Mathias Buus
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

54
node_modules/end-of-stream/README.md generated vendored Normal file
View File

@ -0,0 +1,54 @@
# end-of-stream
A node module that calls a callback when a readable/writable/duplex stream has completed or failed.
npm install end-of-stream
[![Build status](https://travis-ci.org/mafintosh/end-of-stream.svg?branch=master)](https://travis-ci.org/mafintosh/end-of-stream)
## Usage
Simply pass a stream and a callback to the `eos`.
Legacy streams, streams2, and streams3 are all supported.
``` js
var eos = require('end-of-stream');
eos(readableStream, function(err) {
// this will be set to the stream instance
if (err) return console.log('stream had an error or closed early');
console.log('stream has ended', this === readableStream);
});
eos(writableStream, function(err) {
if (err) return console.log('stream had an error or closed early');
console.log('stream has finished', this === writableStream);
});
eos(duplexStream, function(err) {
if (err) return console.log('stream had an error or closed early');
console.log('stream has ended and finished', this === duplexStream);
});
eos(duplexStream, {readable:false}, function(err) {
if (err) return console.log('stream had an error or closed early');
console.log('stream has finished but might still be readable');
});
eos(duplexStream, {writable:false}, function(err) {
if (err) return console.log('stream had an error or closed early');
console.log('stream has ended but might still be writable');
});
eos(readableStream, {error:false}, function(err) {
// do not treat emit('error', err) as a end-of-stream
});
```
## License
MIT
## Related
`end-of-stream` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one.

94
node_modules/end-of-stream/index.js generated vendored Normal file
View File

@ -0,0 +1,94 @@
var once = require('once');
// Fallback callback when none is supplied.
var noop = function() {};

// Detect an http.ClientRequest: it exposes setHeader plus an abort().
var isRequest = function(stream) {
	var hasHeaders = stream.setHeader;
	return hasHeaders && typeof stream.abort === 'function';
};

// Detect a child process: stdio is a 3-element array (stdin/out/err).
var isChildProcess = function(stream) {
	var stdio = stream.stdio;
	return stdio && Array.isArray(stdio) && stdio.length === 3
};
// Invoke `callback` exactly once when `stream` has fully ended,
// finished, or failed. Returns a function that cancels all listeners.
// opts: readable/writable — which sides to wait for (default: sniffed
//       from the stream); error:false — ignore 'error' events.
var eos = function(stream, opts, callback) {
	if (typeof opts === 'function') return eos(stream, null, opts); // eos(stream, cb)
	if (!opts) opts = {};

	callback = once(callback || noop);

	var ws = stream._writableState;
	var rs = stream._readableState;
	var readable = opts.readable || (opts.readable !== false && stream.readable);
	var writable = opts.writable || (opts.writable !== false && stream.writable);
	var cancelled = false;

	// Legacy writables have no 'finish'; infer it from end/close.
	var onlegacyfinish = function() {
		if (!stream.writable) onfinish();
	};

	var onfinish = function() {
		writable = false;
		if (!readable) callback.call(stream); // both sides done
	};

	var onend = function() {
		readable = false;
		if (!writable) callback.call(stream); // both sides done
	};

	var onexit = function(exitCode) {
		callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null);
	};

	var onerror = function(err) {
		callback.call(stream, err);
	};

	var onclose = function() {
		// Defer so end/finish emitted in the same tick are seen first.
		process.nextTick(onclosenexttick);
	};

	var onclosenexttick = function() {
		if (cancelled) return;
		// 'close' before end/finish means the stream died prematurely.
		if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close'));
		if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close'));
	};

	var onrequest = function() {
		stream.req.on('finish', onfinish);
	};

	if (isRequest(stream)) {
		// http.ClientRequest signals completion via 'complete'/'abort'.
		stream.on('complete', onfinish);
		stream.on('abort', onclose);
		if (stream.req) onrequest();
		else stream.on('request', onrequest);
	} else if (writable && !ws) { // legacy streams
		stream.on('end', onlegacyfinish);
		stream.on('close', onlegacyfinish);
	}

	if (isChildProcess(stream)) stream.on('exit', onexit);

	stream.on('end', onend);
	stream.on('finish', onfinish);
	if (opts.error !== false) stream.on('error', onerror);
	stream.on('close', onclose);

	// Cancel: remove every listener this call may have attached.
	return function() {
		cancelled = true;
		stream.removeListener('complete', onfinish);
		stream.removeListener('abort', onclose);
		stream.removeListener('request', onrequest);
		if (stream.req) stream.req.removeListener('finish', onfinish);
		stream.removeListener('end', onlegacyfinish);
		stream.removeListener('close', onlegacyfinish);
		stream.removeListener('finish', onfinish);
		stream.removeListener('exit', onexit);
		stream.removeListener('end', onend);
		stream.removeListener('error', onerror);
		stream.removeListener('close', onclose);
	};
};

module.exports = eos;

66
node_modules/end-of-stream/package.json generated vendored Normal file
View File

@ -0,0 +1,66 @@
{
"_from": "end-of-stream@^1.1.0",
"_id": "end-of-stream@1.4.4",
"_inBundle": false,
"_integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==",
"_location": "/end-of-stream",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "end-of-stream@^1.1.0",
"name": "end-of-stream",
"escapedName": "end-of-stream",
"rawSpec": "^1.1.0",
"saveSpec": null,
"fetchSpec": "^1.1.0"
},
"_requiredBy": [
"/duplexify",
"/pump"
],
"_resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz",
"_shasum": "5ae64a5f45057baf3626ec14da0ca5e4b2431eb0",
"_spec": "end-of-stream@^1.1.0",
"_where": "/Users/bret/repos/deploy-to-neocities/node_modules/pump",
"author": {
"name": "Mathias Buus",
"email": "mathiasbuus@gmail.com"
},
"bugs": {
"url": "https://github.com/mafintosh/end-of-stream/issues"
},
"bundleDependencies": false,
"dependencies": {
"once": "^1.4.0"
},
"deprecated": false,
"description": "Call a callback when a readable/writable/duplex stream has completed or failed.",
"devDependencies": {
"tape": "^4.11.0"
},
"files": [
"index.js"
],
"homepage": "https://github.com/mafintosh/end-of-stream",
"keywords": [
"stream",
"streams",
"callback",
"finish",
"close",
"end",
"wait"
],
"license": "MIT",
"main": "index.js",
"name": "end-of-stream",
"repository": {
"type": "git",
"url": "git://github.com/mafintosh/end-of-stream.git"
},
"scripts": {
"test": "node test.js"
},
"version": "1.4.4"
}

21
node_modules/fast-fifo/LICENSE generated vendored Normal file
View File

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2019 Mathias Buus
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

66
node_modules/fast-fifo/README.md generated vendored Normal file
View File

@ -0,0 +1,66 @@
# fast-fifo
A fast fifo implementation similar to the one powering nextTick in Node.js core
```
npm install fast-fifo
```
Uses a linked list of growing, fixed-size arrays to implement the FIFO, avoiding
the allocation of a wrapper object for each item.
## Usage
``` js
const FIFO = require('fast-fifo')
const q = new FIFO()
q.push('hello')
q.push('world')
q.shift() // returns hello
q.shift() // returns world
```
## API
#### `q = new FIFO()`
Create a new FIFO.
#### `q.push(value)`
Push a value to the FIFO. `value` can be anything other than undefined.
#### `value = q.shift()`
Return the oldest value from the FIFO.
#### `bool = q.isEmpty()`
Returns `true` if the FIFO is empty and false otherwise.
## Benchmarks
Included in bench.js is a simple benchmark that benchmarks this against a simple
linked list based FIFO.
On my machine the benchmark looks like this:
```
fifo bulk push and shift: 2881.508ms
fifo individual push and shift: 3248.437ms
fast-fifo bulk push and shift: 1606.972ms
fast-fifo individual push and shift: 1328.064ms
fifo bulk push and shift: 3266.902ms
fifo individual push and shift: 3320.944ms
fast-fifo bulk push and shift: 1858.307ms
fast-fifo individual push and shift: 1516.983ms
```
YMMV
## License
MIT

34
node_modules/fast-fifo/bench.js generated vendored Normal file
View File

@ -0,0 +1,34 @@
// Benchmark fast-fifo against the linked-list based `fifo` package.
const FastFIFO = require('./')
const FIFO = require('fifo')

// Run each implementation twice so the second sample is JIT-warmed.
run(new FIFO(), 'fifo')
run(new FastFIFO(), 'fast-fifo')
run(new FIFO(), 'fifo')
run(new FastFIFO(), 'fast-fifo')

function run (q, prefix) {
  const runs = 1024

  // Fill the queue with `runs` items, then drain it, 1e5 times.
  console.time(prefix + ' bulk push and shift')
  for (let j = 0; j < 1e5; j++) {
    for (let i = 0; i < runs; i++) {
      q.push(i)
    }
    for (let i = 0; i < runs; i++) {
      q.shift()
    }
  }
  console.timeEnd(prefix + ' bulk push and shift')

  // Interleave push/shift so the queue stays near-empty throughout.
  console.time(prefix + ' individual push and shift')
  for (let j = 0; j < 1e5; j++) {
    for (let i = 0; i < runs; i++) {
      q.push(i)
      q.shift()
    }
  }
  console.timeEnd(prefix + ' individual push and shift')
}

29
node_modules/fast-fifo/fixed-size.js generated vendored Normal file
View File

@ -0,0 +1,29 @@
module.exports = class FixedFIFO {
constructor (hwm) {
if (!(hwm > 0) || ((hwm - 1) & hwm) !== 0) throw new Error('Max size for a FixedFIFO should be a power of two')
this.buffer = new Array(hwm)
this.mask = hwm - 1
this.top = 0
this.btm = 0
this.next = null
}
push (data) {
if (this.buffer[this.top] !== undefined) return false
this.buffer[this.top] = data
this.top = (this.top + 1) & this.mask
return true
}
shift () {
const last = this.buffer[this.btm]
if (last === undefined) return undefined
this.buffer[this.btm] = undefined
this.btm = (this.btm + 1) & this.mask
return last
}
isEmpty () {
return this.buffer[this.btm] === undefined
}
}

32
node_modules/fast-fifo/index.js generated vendored Normal file
View File

@ -0,0 +1,32 @@
const FixedFIFO = require('./fixed-size')
module.exports = class FastFIFO {
constructor (hwm) {
this.hwm = hwm || 16
this.head = new FixedFIFO(this.hwm)
this.tail = this.head
}
push (val) {
if (!this.head.push(val)) {
const prev = this.head
this.head = prev.next = new FixedFIFO(2 * this.head.buffer.length)
this.head.push(val)
}
}
shift () {
const val = this.tail.shift()
if (val === undefined && this.tail.next) {
const next = this.tail.next
this.tail.next = null
this.tail = next
return this.tail.shift()
}
return val
}
isEmpty () {
return this.head.isEmpty()
}
}

Some files were not shown because too many files have changed in this diff Show More