Questbook Update + Buildscript Improvements (#681)

[EXPAND]
[[messages]]
messageTitle = "QB Update for GT 2.8 (#681)"
messageBody = """
[QB]
[DETAILS]
details = ["Fixes many Quest Book issues", "Updates QB with changes in GT 2.8"]
[DETAILS]
"""

[[messages]]
messageTitle = "Buildscript Refactor (#681)"
messageBody = """
[INTERNAL]
[DETAILS]
details = ["**Important: Buildscript has changed from `npx gulp...` or `gulp...` to `npm run gulp...`**!", "Moves to Node 16 Package Management + Typescript Strict Mode", "New Port QB, Check QB and Fix QB Tasks"]
[DETAILS]
"""
[EXPAND]


Co-authored-by: Integer Limit <103940576+IntegerLimit@users.noreply.github.com>
Co-authored-by: Ghzdude <44148655+ghzdude@users.noreply.github.com>
Co-authored-by: SparkedTheorem <162088357+SparkedTheorem@users.noreply.github.com>
tracer4b 2024-05-14 19:53:08 +08:00 committed by Integer Limit
parent 7ba8aa8cfb
commit b526677126
114 changed files with 20512 additions and 22849 deletions

View File

@ -95,30 +95,39 @@ jobs:
fetch-depth: 0
ref: ${{ inputs.tag }}
- name: Restore Cached Files
- name: Restore NPM Cached Files
uses: actions/cache@v4
id: cache
id: npm-cache
with:
path: |
~/.npm
./.cache
./tools/node_modules
key: ${{ runner.os }}-bunny-${{ hashFiles('**/.cache', '**/package-lock.json', '**/manifest.json') }}
restore-keys: ${{ runner.os }}-bunny-
key: ${{ runner.os }}-npm-${{ hashFiles('./tools/package-lock.json') }}
restore-keys: ${{ runner.os }}-npm-
- name: Setup NodeJS v16
- name: Restore Build Cached Files
uses: actions/cache@v4
id: build-cache
with:
path: |
./.cache
key: ${{ runner.os }}-build-${{ hashFiles('./.cache', './manifest.json') }}
restore-keys: ${{ runner.os }}-build-
- name: Setup NodeJS v20
uses: actions/setup-node@v4
with:
node-version: 20
check-latest: true
- name: Setup NPM Packages
if: steps.npm-cache.outputs.cache-hit != 'true'
working-directory: ./tools
run: npm ci
- name: Check Environmental Variables
working-directory: ./tools
run: npx gulp check
run: npm run gulp check
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CURSEFORGE_PROJECT_ID: ${{ secrets.CURSEFORGE_PROJECT_ID }}
@ -127,7 +136,7 @@ jobs:
- name: Build and Zip Pack
working-directory: ./tools
run: npx gulp
run: npm run gulp
env:
CFCORE_API_TOKEN: ${{ secrets.CFCORE_API_TOKEN }}
CHANGELOG_BRANCH: ${{ inputs.changelog_branch }}
@ -166,30 +175,30 @@ jobs:
fetch-depth: 0
ref: ${{ inputs.tag }}
- name: Restore Cached Files
- name: Restore NPM Cached Files
uses: actions/cache@v4
id: cache
id: npm-cache
with:
path: |
~/.npm
./.cache
./tools/node_modules
key: ${{ runner.os }}-bunny-${{ hashFiles('**/.cache', '**/package-lock.json', '**/manifest.json') }}
restore-keys: ${{ runner.os }}-bunny-
key: ${{ runner.os }}-npm-${{ hashFiles('./tools/package-lock.json') }}
restore-keys: ${{ runner.os }}-npm-
- name: Setup NodeJS v16
- name: Setup NodeJS v20
uses: actions/setup-node@v4
with:
node-version: 20
check-latest: true
- name: Setup NPM Packages
if: steps.npm-cache.outputs.cache-hit != 'true'
working-directory: ./tools
run: npm ci
- name: Check Environmental Variables
working-directory: ./tools
run: npx gulp check
run: npm run gulp check
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CURSEFORGE_PROJECT_ID: ${{ secrets.CURSEFORGE_PROJECT_ID }}
@ -199,7 +208,7 @@ jobs:
- name: Make Artifact Names
id: artifactNames
working-directory: ./tools
run: npx gulp makeArtifactNames
run: npm run gulp makeArtifactNames
buildClient:
name: Build Pack Client (${{ inputs.tag }})
@ -218,30 +227,30 @@ jobs:
fetch-depth: 0
ref: ${{ inputs.tag }}
- name: Restore Cached Files
- name: Restore NPM Cached Files
uses: actions/cache@v4
id: cache
id: npm-cache
with:
path: |
~/.npm
./.cache
./tools/node_modules
key: ${{ runner.os }}-bunny-${{ hashFiles('**/.cache', '**/package-lock.json', '**/manifest.json') }}
restore-keys: ${{ runner.os }}-bunny-
key: ${{ runner.os }}-npm-${{ hashFiles('./tools/package-lock.json') }}
restore-keys: ${{ runner.os }}-npm-
- name: Setup NodeJS v16
- name: Setup NodeJS v20
uses: actions/setup-node@v4
with:
node-version: 20
check-latest: true
- name: Setup NPM Packages
if: steps.npm-cache.outputs.cache-hit != 'true'
working-directory: ./tools
run: npm ci
- name: Check Environmental Variables
working-directory: ./tools
run: npx gulp check
run: npm run gulp check
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CURSEFORGE_PROJECT_ID: ${{ secrets.CURSEFORGE_PROJECT_ID }}
@ -250,7 +259,7 @@ jobs:
- name: Build Client
working-directory: ./tools
run: npx gulp buildClient
run: npm run gulp buildClient
env:
CFCORE_API_TOKEN: ${{ secrets.CFCORE_API_TOKEN }}
CHANGELOG_BRANCH: ${{ inputs.changelog_branch }}
@ -283,30 +292,39 @@ jobs:
fetch-depth: 0
ref: ${{ inputs.tag }}
- name: Restore Cached Files
- name: Restore NPM Cached Files
uses: actions/cache@v4
id: cache
id: npm-cache
with:
path: |
~/.npm
./.cache
./tools/node_modules
key: ${{ runner.os }}-bunny-${{ hashFiles('**/.cache', '**/package-lock.json', '**/manifest.json') }}
restore-keys: ${{ runner.os }}-bunny-
key: ${{ runner.os }}-npm-${{ hashFiles('./tools/package-lock.json') }}
restore-keys: ${{ runner.os }}-npm-
- name: Setup NodeJS v16
- name: Restore Build Cached Files
uses: actions/cache@v4
id: build-cache
with:
path: |
./.cache
key: ${{ runner.os }}-build-${{ hashFiles('./.cache', './manifest.json') }}
restore-keys: ${{ runner.os }}-build-
- name: Setup NodeJS v20
uses: actions/setup-node@v4
with:
node-version: 20
check-latest: true
- name: Setup NPM Packages
if: steps.npm-cache.outputs.cache-hit != 'true'
working-directory: ./tools
run: npm ci
- name: Check Environmental Variables
working-directory: ./tools
run: npx gulp check
run: npm run gulp check
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CURSEFORGE_PROJECT_ID: ${{ secrets.CURSEFORGE_PROJECT_ID }}
@ -315,7 +333,7 @@ jobs:
- name: Build Server
working-directory: ./tools
run: npx gulp buildServer
run: npm run gulp buildServer
env:
CFCORE_API_TOKEN: ${{ secrets.CFCORE_API_TOKEN }}
CHANGELOG_BRANCH: ${{ inputs.changelog_branch }}
@ -348,30 +366,30 @@ jobs:
fetch-depth: 0
ref: ${{ inputs.tag }}
- name: Restore Cached Files
- name: Restore NPM Cached Files
uses: actions/cache@v4
id: cache
id: npm-cache
with:
path: |
~/.npm
./.cache
./tools/node_modules
key: ${{ runner.os }}-bunny-${{ hashFiles('**/.cache', '**/package-lock.json', '**/manifest.json') }}
restore-keys: ${{ runner.os }}-bunny-
key: ${{ runner.os }}-npm-${{ hashFiles('./tools/package-lock.json') }}
restore-keys: ${{ runner.os }}-npm-
- name: Setup NodeJS v16
- name: Setup NodeJS v20
uses: actions/setup-node@v4
with:
node-version: 20
check-latest: true
- name: Setup NPM Packages
if: steps.npm-cache.outputs.cache-hit != 'true'
working-directory: ./tools
run: npm ci
- name: Check Environmental Variables
working-directory: ./tools
run: npx gulp check
run: npm run gulp check
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CURSEFORGE_PROJECT_ID: ${{ secrets.CURSEFORGE_PROJECT_ID }}
@ -380,7 +398,7 @@ jobs:
- name: Build Lang and Changelogs
working-directory: ./tools
run: npx gulp buildLang
run: npm run gulp buildLang
env:
CFCORE_API_TOKEN: ${{ secrets.CFCORE_API_TOKEN }}
CHANGELOG_BRANCH: ${{ inputs.changelog_branch }}

54
.github/workflows/checks.yml vendored Normal file
View File

@ -0,0 +1,54 @@
name: Checks
on:
push:
branches:
- main
- test_buildscript*
- dev/*
paths-ignore:
- "README.md"
pull_request:
branches:
- main
paths-ignore:
- "README.md"
jobs:
checks:
name: Checks
runs-on: ubuntu-latest
steps:
- name: Checkout Ref
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Restore NPM Cached Files
uses: actions/cache@v4
id: npm-cache
with:
path: |
~/.npm
./tools/node_modules
key: ${{ runner.os }}-npm-${{ hashFiles('./tools/package-lock.json') }}
restore-keys: ${{ runner.os }}-npm-
- name: Setup NodeJS v20
uses: actions/setup-node@v4
with:
node-version: 20
check-latest: true
- name: Setup NPM Packages
if: steps.npm-cache.outputs.cache-hit != 'true'
working-directory: ./tools
run: npm ci
- name: Check Buildscripts
working-directory: ./tools
run: npm run check
- name: Check QB
working-directory: ./tools
run: npm run gulp checkQB
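
The new Checks workflow runs `npm run check` (plain `tsc --pretty` under the buildscript's new strict settings) and `npm run gulp checkQB`. A minimal TypeScript sketch of the kind of pattern strict null checks force, mirroring the `??` and `?.` additions visible in the buildscript diffs further down (the names here are illustrative, not from the repository):

// Illustrative only: with strict mode on, possibly-undefined values must be
// handled explicitly, which is why the diffs below add fallbacks such as
// `process.env.GITHUB_TAG ?? "HEAD"` and optional chaining on Map lookups.
const tag: string = process.env.GITHUB_TAG ?? "HEAD";

const sections: Map<string, string[]> = new Map();
// Without strict mode, `sections.get("General").push(tag)` compiles but may
// throw at runtime; with strict mode it no longer compiles, so the refactor
// switches to optional chaining (or an explicit undefined check).
sections.get("General")?.push(tag);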

View File

@ -62,12 +62,11 @@ jobs:
steps:
- name: Get Token
id: token
uses: peter-murray/workflow-application-token-action@v3
uses: actions/create-github-app-token@v1
with:
application_id: ${{ secrets.APP_ID }}
application_private_key: ${{ secrets.APP_KEY }}
organization: Nomi-CEu
revoke_token: true
app-id: ${{ secrets.APP_ID }}
private-key: ${{ secrets.APP_KEY }}
owner: Nomi-CEu
- name: Checkout Repo
uses: actions/checkout@v4
@ -76,30 +75,30 @@ jobs:
ref: ${{ inputs.tag }}
token: ${{ steps.token.outputs.token }}
- name: Restore Cached Files
- name: Restore NPM Cached Files
uses: actions/cache@v4
id: cache
id: npm-cache
with:
path: |
~/.npm
./.cache
./tools/node_modules
key: ${{ runner.os }}-bunny-${{ hashFiles('**/.cache', '**/package-lock.json', '**/manifest.json') }}
restore-keys: ${{ runner.os }}-bunny-
key: ${{ runner.os }}-npm-${{ hashFiles('./tools/package-lock.json') }}
restore-keys: ${{ runner.os }}-npm-
- name: Setup NodeJS v16
- name: Setup NodeJS v20
uses: actions/setup-node@v4
with:
node-version: 20
check-latest: true
- name: Setup NPM Packages
if: steps.npm-cache.outputs.cache-hit != 'true'
working-directory: ./tools
run: npm ci
- name: Create Changelog
working-directory: ./tools
run: npx gulp createChangelog
run: npm run gulp createChangelog
env:
CFCORE_API_TOKEN: ${{ secrets.CFCORE_API_TOKEN }}
COMPARE_TAG: ${{ inputs.compare_tag }}

View File

@ -25,30 +25,30 @@ jobs:
fetch-depth: 0
ref: ${{ inputs.tag }}
- name: Restore Cached Files
- name: Restore NPM Cached Files
uses: actions/cache@v4
id: cache
id: npm-cache
with:
path: |
~/.npm
./.cache
./tools/node_modules
key: ${{ runner.os }}-bunny-${{ hashFiles('**/.cache', '**/package-lock.json', '**/manifest.json') }}
restore-keys: ${{ runner.os }}-bunny-
key: ${{ runner.os }}-npm-${{ hashFiles('./tools/package-lock.json') }}
restore-keys: ${{ runner.os }}-npm-
- name: Setup NodeJS v16
- name: Setup NodeJS v20
uses: actions/setup-node@v4
with:
node-version: 20
check-latest: true
- name: Setup NPM Packages
if: steps.npm-cache.outputs.cache-hit != 'true'
working-directory: ./tools
run: npm ci
- name: Check Environmental Variables
working-directory: ./tools
run: npx gulp check
run: npm run gulp check
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CURSEFORGE_PROJECT_ID: ${{ secrets.CURSEFORGE_PROJECT_ID }}
@ -67,4 +67,4 @@ jobs:
CURSEFORGE_API_TOKEN: ${{ secrets.CURSEFORGE_API_TOKEN }}
RELEASE_TYPE: ${{ inputs.release_type }}
working-directory: ./tools
run: npx gulp deployCurseForge
run: npm run gulp deployCurseForge

View File

@ -21,12 +21,11 @@ jobs:
steps:
- name: Get Token
id: token
uses: peter-murray/workflow-application-token-action@v3
uses: actions/create-github-app-token@v1
with:
application_id: ${{ secrets.APP_ID }}
application_private_key: ${{ secrets.APP_KEY }}
organization: Nomi-CEu
revoke_token: true
app-id: ${{ secrets.APP_ID }}
private-key: ${{ secrets.APP_KEY }}
owner: Nomi-CEu
- name: Checkout Tag
uses: actions/checkout@v4
@ -34,30 +33,30 @@ jobs:
fetch-depth: 0
ref: ${{ inputs.tag }}
- name: Restore Cached Files
- name: Restore NPM Cached Files
uses: actions/cache@v4
id: cache
id: npm-cache
with:
path: |
~/.npm
./.cache
./tools/node_modules
key: ${{ runner.os }}-bunny-${{ hashFiles('**/.cache', '**/package-lock.json', '**/manifest.json') }}
restore-keys: ${{ runner.os }}-bunny-
key: ${{ runner.os }}-npm-${{ hashFiles('./tools/package-lock.json') }}
restore-keys: ${{ runner.os }}-npm-
- name: Setup NodeJS v16
- name: Setup NodeJS v20
uses: actions/setup-node@v4
with:
node-version: 20
check-latest: true
- name: Setup NPM Packages
if: steps.npm-cache.outputs.cache-hit != 'true'
working-directory: ./tools
run: npm ci
- name: Check Environmental Variables
working-directory: ./tools
run: npx gulp check
run: npm run gulp check
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CURSEFORGE_PROJECT_ID: ${{ secrets.CURSEFORGE_PROJECT_ID }}
@ -77,4 +76,4 @@ jobs:
CURSEFORGE_API_TOKEN: ${{ secrets.CURSEFORGE_API_TOKEN }}
RELEASE_TYPE: ${{ inputs.release_type }}
working-directory: ./tools
run: npx gulp deployReleases
run: npm run gulp deployReleases

View File

@ -47,7 +47,7 @@ jobs:
# Don't use cache to prevent cache poisoning
- name: Setup NodeJS v16
- name: Setup NodeJS v20
uses: actions/setup-node@v4
with:
node-version: 20
@ -60,7 +60,7 @@ jobs:
- name: Make Artifact Names
id: artifactNames
working-directory: ./tools
run: npx gulp makeArtifactNames
run: npm run gulp makeArtifactNames
buildClient:
# Only continue if we are in base Nomi-CEu Repo and pull request is from fork
@ -81,7 +81,7 @@ jobs:
# Don't use cache to prevent cache poisoning
- name: Setup NodeJS v16
- name: Setup NodeJS v20
uses: actions/setup-node@v4
with:
node-version: 20
@ -93,7 +93,7 @@ jobs:
- name: Build Client
working-directory: ./tools
run: npx gulp buildClient
run: npm run gulp buildClient
env:
CFCORE_API_TOKEN: ${{ secrets.CFCORE_API_TOKEN }}
@ -124,7 +124,7 @@ jobs:
# Don't use cache to prevent cache poisoning
- name: Setup NodeJS v16
- name: Setup NodeJS v20
uses: actions/setup-node@v4
with:
node-version: 20
@ -136,7 +136,7 @@ jobs:
- name: Build Server
working-directory: ./tools
run: npx gulp buildServer
run: npm run gulp buildServer
env:
CFCORE_API_TOKEN: ${{ secrets.CFCORE_API_TOKEN }}
@ -167,7 +167,7 @@ jobs:
# Don't use cache to prevent cache poisoning
- name: Setup NodeJS v16
- name: Setup NodeJS v20
uses: actions/setup-node@v4
with:
node-version: 20
@ -179,7 +179,7 @@ jobs:
- name: Build Lang and Changelogs
working-directory: ./tools
run: npx gulp buildLang
run: npm run gulp buildLang
env:
CFCORE_API_TOKEN: ${{ secrets.CFCORE_API_TOKEN }}

View File

@ -58,12 +58,11 @@ jobs:
steps:
- name: Get Token
id: token
uses: peter-murray/workflow-application-token-action@v3
uses: actions/create-github-app-token@v1
with:
application_id: ${{ secrets.APP_ID }}
application_private_key: ${{ secrets.APP_KEY }}
organization: Nomi-CEu
revoke_token: true
app-id: ${{ secrets.APP_ID }}
private-key: ${{ secrets.APP_KEY }}
owner: Nomi-CEu
- name: Checkout Repo
uses: actions/checkout@v4
@ -71,30 +70,30 @@ jobs:
fetch-depth: 0
token: ${{ steps.token.outputs.token }}
- name: Restore Cached Files
- name: Restore NPM Cached Files
uses: actions/cache@v4
id: cache
id: npm-cache
with:
path: |
~/.npm
./.cache
./tools/node_modules
key: ${{ runner.os }}-bunny-${{ hashFiles('**/.cache', '**/package-lock.json', '**/manifest.json') }}
restore-keys: ${{ runner.os }}-bunny-
key: ${{ runner.os }}-npm-${{ hashFiles('./tools/package-lock.json') }}
restore-keys: ${{ runner.os }}-npm-
- name: Setup NodeJS v16
- name: Setup NodeJS v20
uses: actions/setup-node@v4
with:
node-version: 20
check-latest: true
- name: Setup NPM Packages
if: steps.npm-cache.outputs.cache-hit != 'true'
working-directory: ./tools
run: npm ci
- name: Check Environmental Variables
working-directory: ./tools
run: npx gulp check
run: npm run gulp check
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CURSEFORGE_PROJECT_ID: ${{ secrets.CURSEFORGE_PROJECT_ID }}
@ -103,7 +102,7 @@ jobs:
- name: Update Files
working-directory: ./tools
run: npx gulp updateFilesAll
run: npm run gulp updateFilesAll
- name: Commit and Push Release Changes
uses: stefanzweifel/git-auto-commit-action@v5

View File

@ -28,16 +28,15 @@ jobs:
# Nomi-CEu-Management has access to all of Nomi-CEu's Repos.
- name: Get Token
id: token
uses: peter-murray/workflow-application-token-action@v3
uses: actions/create-github-app-token@v1
with:
# Shared Org Secret: Contains the Nomi-CEu-Management App ID (773030)
application_id: ${{ secrets.APP_ID }}
app-id: ${{ secrets.APP_ID }}
# Shared Org Secret: Contains the Nomi-CEu-Management App's Private Key.
# run `cat {PEM_FILE_PATH} | base64 -w 0 && echo` to encode the key first if changing the key.
# Paste the output of the command into the secret value.
application_private_key: ${{ secrets.APP_KEY }}
organization: Nomi-CEu
revoke_token: true
private-key: ${{ secrets.APP_KEY }}
owner: Nomi-CEu
- name: Dispatch Workflow
uses: actions/github-script@v5

3
.gitignore vendored
View File

@ -15,6 +15,9 @@ tools/node_modules
.vscode/
tools/.vscode/
# Tools Debugging
tools/.dest/
# OS Specific
# Mac

View File

@ -123,26 +123,26 @@
"displayDuration": 100,
"fadeDuration": 40,
"images": [
"minecraft:textures/gui/title/background/besoiobiy_1.png",
"minecraft:textures/gui/title/background/besoiobiy_2.png",
"minecraft:textures/gui/title/background/cactus_cool.png",
"minecraft:textures/gui/title/background/cobracreeper1.png",
"minecraft:textures/gui/title/background/darkarkangel.png",
"minecraft:textures/gui/title/background/ely_1.png",
"minecraft:textures/gui/title/background/ely_2.png",
"minecraft:textures/gui/title/background/ely_3.png",
"minecraft:textures/gui/title/background/emiuna.png",
"minecraft:textures/gui/title/background/extracoolcat_1.png",
"minecraft:textures/gui/title/background/extracoolcat_2.png",
"minecraft:textures/gui/title/background/extracoolcat_3.png",
"minecraft:textures/gui/title/background/extracoolcat_4.png",
"minecraft:textures/gui/title/background/gaboggamer.png",
"minecraft:textures/gui/title/background/itstheguywhoasked.png",
"minecraft:textures/gui/title/background/lyeo.png",
"minecraft:textures/gui/title/background/pgs_1.png",
"minecraft:textures/gui/title/background/pgs_2.png",
"minecraft:textures/gui/title/background/qr_est.png",
"minecraft:textures/gui/title/background/supasem.png"
"minecraft:textures/gui/title/background/besoiobiy_1.jpg",
"minecraft:textures/gui/title/background/besoiobiy_2.jpg",
"minecraft:textures/gui/title/background/cactus_cool.jpg",
"minecraft:textures/gui/title/background/cobracreeper1.jpg",
"minecraft:textures/gui/title/background/darkarkangel.jpg",
"minecraft:textures/gui/title/background/ely_1.jpg",
"minecraft:textures/gui/title/background/ely_2.jpg",
"minecraft:textures/gui/title/background/ely_3.jpg",
"minecraft:textures/gui/title/background/emiuna.jpg",
"minecraft:textures/gui/title/background/extracoolcat_1.jpg",
"minecraft:textures/gui/title/background/extracoolcat_2.jpg",
"minecraft:textures/gui/title/background/extracoolcat_3.jpg",
"minecraft:textures/gui/title/background/extracoolcat_4.jpg",
"minecraft:textures/gui/title/background/gaboggamer.jpg",
"minecraft:textures/gui/title/background/itstheguywhoasked.jpg",
"minecraft:textures/gui/title/background/lyeo.jpg",
"minecraft:textures/gui/title/background/pgs_1.jpg",
"minecraft:textures/gui/title/background/pgs_2.jpg",
"minecraft:textures/gui/title/background/qr_est.jpg",
"minecraft:textures/gui/title/background/supasem.jpg"
],
"shuffle": true
}

File diff suppressed because it is too large

File diff suppressed because it is too large

(40 binary file diffs not shown: 20 files were removed, roughly 0.75 to 3.6 MiB each, and 20 were added, roughly 250 KiB to 1.2 MiB each; these line up with the main menu backgrounds switching from PNG to JPG above.)

View File

@ -2652,6 +2652,19 @@ recipes.addShapeless(<metaitem:ingotRedAlloy>, [<enderio:item_alloy_ingot:3>]);
<ore:questbookWA>.add(<metaitem:world_accelerator.ev>);
<ore:questbookWA>.add(<metaitem:world_accelerator.iv>);
<ore:questbookLathe>.add(<metaitem:lathe.lv>);
<ore:questbookLathe>.add(<metaitem:lathe.mv>);
<ore:questbookSingleMiner>.add(<metaitem:miner.lv>);
<ore:questbookSingleMiner>.add(<metaitem:miner.mv>);
<ore:questbookSingleMiner>.add(<metaitem:miner.hv>);
<ore:questbookAutoclave>.add(<metaitem:autoclave.lv>);
<ore:questbookAutoclave>.add(<metaitem:autoclave.mv>);
<ore:questbookAutoclave>.add(<metaitem:autoclave.hv>);
<ore:questbookAutoclave>.add(<metaitem:autoclave.ev>);
<ore:questbookAutoclave>.add(<metaitem:autoclave.iv>);
// Add All GT Super and Quantum Tanks (apart from creative)
// Super Tanks
<ore:questbookTanks>.add(<metaitem:super_tank.lv>);

View File

@ -8,7 +8,10 @@ module.exports = {
},
},
plugins: ["@typescript-eslint"],
extends: ["plugin:@typescript-eslint/recommended", "plugin:prettier/recommended"],
extends: [
"plugin:@typescript-eslint/recommended",
"plugin:prettier/recommended",
],
rules: {
quotes: [2, "double", { avoidEscape: true }],
semi: [2, "always"],

7
tools/.prettierrc.cjs Normal file
View File

@ -0,0 +1,7 @@
module.exports = {
semi: true,
trailingComma: "all",
printWidth: 80,
useTabs: true,
alignObjectProperties: true,
};

View File

@ -1,7 +0,0 @@
module.exports = {
semi: true,
trailingComma: "all",
printWidth: 120,
useTabs: true,
alignObjectProperties: true
};

View File

@ -1,8 +1,11 @@
import buildConfig from "./util/buildConfig.default.json";
import buildConfig from "#utils/buildConfig.default.json" assert { type: "json" };
import fs from "fs";
if (fs.existsSync("./config.json")) {
Object.assign(buildConfig, JSON.parse(fs.readFileSync("./config.json").toString()));
Object.assign(
buildConfig,
JSON.parse(fs.readFileSync("./config.json").toString()),
);
}
export default buildConfig;

View File

@ -1,8 +1,5 @@
{
"copyFromSharedServerGlobs": [
"overrides/**/*",
"!overrides/resources/**/*"
],
"copyFromSharedServerGlobs": ["overrides/**/*", "!overrides/resources/**/*"],
"copyFromSharedClientGlobs": [
"overrides/**/*",
"!overrides/resources/minecraft/textures/gui/title/background/*"

View File

@ -1,21 +1,46 @@
import buildConfig from "./buildConfig";
import buildConfig from "#buildConfig";
import upath from "upath";
import manifest from "./../manifest.json";
import { ModpackManifest } from "./types/modpackManifest";
import manifest from "./../manifest.json" assert { type: "json" };
import { ModpackManifest } from "#types/modpackManifest.ts";
export const sharedDestDirectory = upath.join(buildConfig.buildDestinationDirectory, "shared");
export const modDestDirectory = upath.join(buildConfig.buildDestinationDirectory, "mods");
export const clientDestDirectory = upath.join(buildConfig.buildDestinationDirectory, "client");
export const mmcDestDirectory = upath.join(buildConfig.buildDestinationDirectory, "mmc");
export const serverDestDirectory = upath.join(buildConfig.buildDestinationDirectory, "server");
export const langDestDirectory = upath.join(buildConfig.buildDestinationDirectory, "lang");
export const tempDirectory = upath.join(buildConfig.buildDestinationDirectory, "temp");
export const sharedDestDirectory = upath.join(
buildConfig.buildDestinationDirectory,
"shared",
);
export const modDestDirectory = upath.join(
buildConfig.buildDestinationDirectory,
"mods",
);
export const clientDestDirectory = upath.join(
buildConfig.buildDestinationDirectory,
"client",
);
export const mmcDestDirectory = upath.join(
buildConfig.buildDestinationDirectory,
"mmc",
);
export const serverDestDirectory = upath.join(
buildConfig.buildDestinationDirectory,
"server",
);
export const langDestDirectory = upath.join(
buildConfig.buildDestinationDirectory,
"lang",
);
export const tempDirectory = upath.join(
buildConfig.buildDestinationDirectory,
"temp",
);
export const modpackManifest = manifest as ModpackManifest;
export const overridesFolder = modpackManifest.overrides || "overrides";
export const configFolder = upath.join(overridesFolder, "config");
export const configOverridesFolder = upath.join(overridesFolder, "config-overrides");
export const configOverridesFolder = upath.join(
overridesFolder,
"config-overrides",
);
export const rootDirectory = "..";
export const templatesFolder = "templates";
export const storageFolder = "storage";
// The Repository Owner (For Issues & PR Tags Transforms in Changelog)
export const repoOwner = "Nomi-CEu";

View File

@ -1,55 +1,73 @@
// noinspection JSUnusedGlobalSymbols,UnnecessaryLocalVariableJS
import * as gulp from "gulp";
import gulp from "gulp";
import pruneCacheTask from "./tasks/misc/pruneCache";
import pruneCacheTask from "./tasks/misc/pruneCache.ts";
export const pruneCache = pruneCacheTask;
import * as transformFiles from "./tasks/misc/transformFiles";
import * as transformFiles from "./tasks/misc/transformFiles.ts";
export const updateFilesIssue = transformFiles.updateFilesIssue;
export const updateFilesRandomPatches = transformFiles.updateFilesRandomPatches;
export const updateFilesServer = transformFiles.updateFilesServer;
export const updateFilesMainMenu = transformFiles.updateFilesMainMenu;
export const updateFilesAll = transformFiles.updateAll;
import * as changelog from "./tasks/changelog/createChangelog";
import * as changelog from "./tasks/changelog/index.ts";
export const createChangelog = changelog.createRootChangelog;
import sharedTasks from "./tasks/shared";
import clientTasks from "./tasks/client";
import serverTasks from "./tasks/server";
import langTasks from "./tasks/lang";
import mmcTasks from "./tasks/mmc";
import modTasks from "./tasks/misc/downloadMods";
import sharedTasks from "./tasks/shared/index.ts";
import clientTasks from "./tasks/client/index.ts";
import serverTasks from "./tasks/server/index.ts";
import langTasks from "./tasks/lang/index.ts";
import mmcTasks from "./tasks/mmc/index.ts";
import modTasks from "./tasks/misc/downloadMods.ts";
export const buildClient = gulp.series(sharedTasks, clientTasks);
export const buildServer = gulp.series(gulp.parallel(sharedTasks, modTasks), serverTasks);
export const buildLang = gulp.series(sharedTasks, langTasks);
export const buildMMC = gulp.series(gulp.parallel(sharedTasks, modTasks), clientTasks, mmcTasks);
export const buildClient = gulp.series(
sharedTasks,
clientTasks,
pruneCacheTask,
);
export const buildServer = gulp.series(
gulp.parallel(sharedTasks, modTasks),
serverTasks,
pruneCacheTask,
);
export const buildLang = gulp.series(sharedTasks, langTasks, pruneCacheTask);
export const buildMMC = gulp.series(
gulp.parallel(sharedTasks, modTasks),
clientTasks,
mmcTasks,
pruneCacheTask,
);
export const buildAll = gulp.series(
sharedTasks,
gulp.parallel(clientTasks, langTasks, gulp.series(modTasks, serverTasks)),
pruneCacheTask,
);
import checkTasks from "./tasks/checks";
import checkTasks from "./tasks/checks/index.ts";
export const check = gulp.series(checkTasks);
import * as zip from "./tasks/misc/zip";
import * as zip from "./tasks/misc/zip.ts";
export const zipClient = zip.zipClient;
export const zipServer = zip.zipServer;
export const zipLang = zip.zipLang;
export const zipMMC = zip.zipMMC;
export const zipAll = zip.zipAll;
exports.default = gulp.series(buildAll, zipAll);
export default gulp.series(buildAll, zipAll);
import * as gha from "./tasks/misc/gha";
import * as gha from "./tasks/misc/gha.ts";
export const makeArtifactNames = gha.makeArtifactNames;
export { deployCurseForge } from "./tasks/deploy/curseforge";
export { deployCurseForge } from "./tasks/deploy/curseforge.ts";
import deployReleasesTask from "./tasks/deploy/releases";
export const deployReleases = deployReleasesTask;
import * as release from "./tasks/deploy/releases.ts";
export const deployReleases = release.default;
import fireNightlyWebhookTask from "./tasks/misc/webhook";
export const fireNightlyWebhook = fireNightlyWebhookTask;
import * as checkFix from "./tasks/helpers/questChecks/index.ts";
export const checkQB = checkFix.check;
export const fixQB = checkFix.fix;
import * as qbPort from "./tasks/helpers/questPorting/index.ts";
export const portQBChanges = gulp.series(qbPort.default, fixQB);
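
Every `export const` in this gulpfile is a task name for the new wrapper script, so the new Quest Book helpers are run as `npm run gulp checkQB`, `npm run gulp fixQB` and `npm run gulp portQBChanges` from `./tools`. A hedged sketch of how an extra task export would be wired up (the task name and body below are hypothetical):

import gulp from "gulp";

// Hypothetical task body: any async function (or gulp stream task) will do.
const printHello = async (): Promise<void> => {
	console.log("Hello from the Nomi-CEu build tools.");
};

// Exporting the composed series makes it runnable as `npm run gulp exampleTask`,
// mirroring how buildClient/buildServer above append pruneCacheTask.
export const exampleTask = gulp.series(printHello);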

20243
tools/package-lock.json generated

File diff suppressed because it is too large

View File

@ -1,60 +1,87 @@
{
"name": "nomi-ceu-build-tools",
"version": "1.2.2",
"version": "2.0.0",
"description": "Nomifactory CEu Buildscript and Tools.",
"main": "gulpfile.js",
"author": "Nomi-CEu Team",
"license": "LGPL-3.0",
"type": "module",
"private": true,
"scripts": {
"gulp": "node --import ./tsNodeESMRegister.mjs node_modules/gulp/bin/gulp.js",
"check": "tsc --pretty"
},
"imports": {
"#types/*": "./types/*",
"#utils/*": "./utils/*",
"#tasks/*": "./tasks/*",
"#globals": "./globals.ts",
"#buildConfig": "./buildConfig.ts"
},
"devDependencies": {
"@octokit/rest": "^18.3.5",
"@types/bluebird": "^3.5.33",
"@types/fancy-log": "^1.3.1",
"@types/gulp": "^4.0.8",
"@types/gulp-imagemin": "^7.0.2",
"@types/gulp-rename": "^2.0.0",
"@types/gulp-zip": "^4.0.1",
"@types/md5": "^2.3.2",
"@types/merge-stream": "^1.1.2",
"@types/mustache": "^4.1.1",
"@types/requestretry": "^1.12.7",
"@types/sha1": "^1.1.2",
"@types/unzipper": "^0.10.3",
"@typescript-eslint/eslint-plugin": "^4.18.0",
"@typescript-eslint/parser": "^4.18.0",
"bluebird": "^3.7.2",
"del": "^6.0.0",
"@actions/core": "^1.10.1",
"@inquirer/confirm": "^3.1.6",
"@inquirer/prompts": "^5.0.2",
"@octokit/rest": "^20.1.1",
"@types/diff-match-patch": "^1.0.36",
"@types/fake-diff": "^1.0.3",
"@types/fancy-log": "^2.0.2",
"@types/gulp": "^4.0.17",
"@types/gulp-filter": "^3.0.39",
"@types/gulp-rename": "^2.0.6",
"@types/gulp-zip": "^4.0.4",
"@types/inquirer": "^9.0.7",
"@types/lodash": "^4.17.1",
"@types/md5": "^2.3.5",
"@types/merge-stream": "^1.1.5",
"@types/mustache": "^4.2.5",
"@types/picomatch": "^2.3.3",
"@types/sha1": "^1.1.5",
"@types/through2": "^2.0.41",
"@types/unzipper": "^0.10.9",
"@typescript-eslint/eslint-plugin": "^7.8.0",
"@typescript-eslint/parser": "^7.8.0",
"axios": "^1.6.8",
"axios-retry": "^4.1.0",
"case-switcher-js": "^1.1.10",
"colors": "^1.4.0",
"dedent-js": "^1.0.1",
"del": "^7.1.0",
"diff-match-patch": "^1.0.5",
"discord-webhook-node": "^1.1.8",
"eslint": "^7.22.0",
"eslint-config-prettier": "^8.1.0",
"eslint-plugin-prettier": "^3.3.1",
"fancy-log": "^1.3.3",
"eslint": "^8.57.0",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-prettier": "^5.1.3",
"fake-diff": "^1.0.0",
"fancy-log": "^2.0.0",
"filesize": "^10.1.1",
"gray-matter": "^4.0.3",
"gulp": "^4.0.2",
"gulp-imagemin": "^7.1.0",
"gulp": "^5.0.0",
"gulp-filter": "^9.0.1",
"gulp-rename": "^2.0.0",
"gulp-zip": "^5.1.0",
"marked": "^9.0.3",
"gulp-zip": "^6.0.0",
"inquirer": "^9.2.20",
"javascript-stringify": "^2.1.0",
"just-diff": "^6.0.2",
"just-remove": "^3.2.0",
"lodash": "^4.17.21",
"marked": "^12.0.2",
"md5": "^2.3.0",
"merge-stream": "^2.0.0",
"mustache": "^4.1.0",
"png-to-jpeg": "^1.0.1",
"prettier": "^2.2.1",
"request": "^2.88.2",
"requestretry": "^5.0.0",
"mustache": "^4.2.0",
"picomatch": "^4.0.2",
"prettier": "^3.2.5",
"retry-axios": "^3.1.3",
"sanitize-filename": "^1.6.3",
"sha1": "^1.1.1",
"simple-git": "^3.19.1",
"ts-node": "^10.9.1",
"typescript": "^4.8.4",
"unzipper": "^0.10.11",
"simple-git": "^3.24.0",
"sort-keys": "^5.0.0",
"sort-keys-recursive": "^2.1.10",
"through2": "^4.0.2",
"toml-v1": "^1.0.0",
"ts-node": "^10.9.2",
"typescript": "5.4.5",
"unzipper": "^0.11.5",
"upath": "^2.0.1"
},
"dependencies": {
"@actions/core": "^1.10.1",
"@egjs/list-differ": "^1.0.1",
"@types/json-stable-stringify-without-jsonify": "^1.0.1",
"dedent-js": "^1.0.1",
"json-stable-stringify-without-jsonify": "^1.0.1",
"toml-v1": "^1.0.0"
}
}
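
The new `imports` map defines Node subpath aliases (`#utils/*`, `#types/*`, `#tasks/*`, `#globals`, `#buildConfig`), which is what lets the refactored sources below drop the long relative paths. A small sketch of a buildscript module using them; the logging is illustrative, but every imported name appears in the diffs in this commit:

// The `.ts` specifiers resolve because the npm `gulp` script preloads
// ./tsNodeESMRegister.mjs, which presumably registers ts-node's ESM loader.
import { getLastGitTag, isEnvVariableSet } from "#utils/util.ts";
import { modpackManifest, rootDirectory } from "#globals";
import buildConfig from "#buildConfig";

console.log(modpackManifest.overrides ?? "overrides");
console.log(rootDirectory, buildConfig.buildDestinationDirectory);
console.log(isEnvVariableSet("GITHUB_TAG") ? getLastGitTag() : "no tag");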

View File

@ -0,0 +1,310 @@
{
"savedQuestMap": [
{
"normal": 3,
"expert": 3
},
{
"normal": 19,
"expert": 53
},
{
"normal": 21,
"expert": 21
},
{
"normal": 22,
"expert": 22
},
{
"normal": 38,
"expert": 38
},
{
"normal": 52,
"expert": 52
},
{
"normal": 56,
"expert": 56
},
{
"normal": 60,
"expert": 60
},
{
"normal": 65,
"expert": 65
},
{
"normal": 78,
"expert": 78
},
{
"normal": 84,
"expert": 84
},
{
"normal": 88,
"expert": 88
},
{
"normal": 94,
"expert": 94
},
{
"normal": 95,
"expert": 95
},
{
"normal": 102,
"expert": 102
},
{
"normal": 116,
"expert": 116
},
{
"normal": 149,
"expert": 149
},
{
"normal": 150,
"expert": 150
},
{
"normal": 167,
"expert": 167
},
{
"normal": 190,
"expert": 190
},
{
"normal": 197,
"expert": 197
},
{
"normal": 201,
"expert": 201
},
{
"normal": 214,
"expert": 214
},
{
"normal": 216,
"expert": 216
},
{
"normal": 228,
"expert": 228
},
{
"normal": 240,
"expert": 240
},
{
"normal": 251,
"expert": 251
},
{
"normal": 253,
"expert": 253
},
{
"normal": 262,
"expert": 262
},
{
"normal": 263,
"expert": 263
},
{
"normal": 279,
"expert": 279
},
{
"normal": 311,
"expert": 311
},
{
"normal": 328,
"expert": 328
},
{
"normal": 339,
"expert": 339
},
{
"normal": 386,
"expert": 386
},
{
"normal": 389,
"expert": 389
},
{
"normal": 399,
"expert": 399
},
{
"normal": 402,
"expert": 402
},
{
"normal": 490,
"expert": 490
},
{
"normal": 504,
"expert": 504
},
{
"normal": 524,
"expert": 524
},
{
"normal": 531,
"expert": 531
},
{
"normal": 535,
"expert": 535
},
{
"normal": 549,
"expert": 549
},
{
"normal": 672,
"expert": 672
},
{
"normal": 727,
"expert": 727
},
{
"normal": 729,
"expert": 729
},
{
"normal": 750,
"expert": 750
},
{
"normal": 774,
"expert": 774
},
{
"normal": 782,
"expert": 1045
},
{
"normal": 795,
"expert": 795
},
{
"normal": 814,
"expert": 814
},
{
"normal": 827,
"expert": 827
},
{
"normal": 828,
"expert": 828
},
{
"normal": 838,
"expert": 838
},
{
"normal": 844,
"expert": 844
},
{
"normal": 847,
"expert": 847
},
{
"normal": 849,
"expert": 849
},
{
"normal": 856,
"expert": 856
},
{
"normal": 880,
"expert": 880
},
{
"normal": 895,
"expert": 895
},
{
"normal": 897,
"expert": 897
},
{
"normal": 903,
"expert": 903
},
{
"normal": 906,
"expert": 906
},
{
"normal": 1027,
"expert": 935
},
{
"normal": 1031,
"expert": 1031
},
{
"normal": 1033,
"expert": 1033
},
{
"normal": 1035,
"expert": 1035
},
{
"normal": 1037,
"expert": 1037
},
{
"normal": 1042,
"expert": 1042
},
{
"normal": 1046,
"expert": 1046
},
{
"normal": 1047,
"expert": 1047
},
{
"normal": 1048,
"expert": 1048
},
{
"normal": 1049,
"expert": 1049
},
{
"normal": 1050,
"expert": 1050
}
],
"ignoreQuestsNormal": [
959
],
"ignoreQuestsExpert": [],
"alwaysAskQuestsNormal": [],
"alwaysAskQuestsExpert": []
}
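
This new JSON file pairs quest IDs between the Normal and Expert quest lines for the new Port QB task. A hedged sketch of how such a map could be consumed; the file path, helper name and lookup direction are assumptions, and only the JSON shape comes from the file above:

// Assumed path and usage; only the structure ({ savedQuestMap: [...] }) is real.
import portingMap from "./questPortingMap.json" assert { type: "json" };

interface SavedQuestPair {
	normal: number;
	expert: number;
}

// Look up the Expert counterpart of a Normal-mode quest ID.
function findExpertQuest(normalId: number): number | undefined {
	const pair = (portingMap.savedQuestMap as SavedQuestPair[]).find(
		(entry) => entry.normal === normalId,
	);
	return pair?.expert;
}

console.log(findExpertQuest(782)); // 1045; most quests keep the same ID in both modes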

View File

@ -1,5 +1,9 @@
import { categories } from "./definitions";
import { Category, ChangelogMessage, SubCategory } from "../../types/changelogTypes";
import { categories } from "./definitions.ts";
import {
Category,
ChangelogMessage,
SubCategory,
} from "#types/changelogTypes.ts";
export function categoriesSetup(): void {
// Initialize Category Lists

View File

@ -1,5 +1,10 @@
import { Commit, FixUpInfo, InputReleaseType, ParsedModInfo } from "../../types/changelogTypes";
import { getLastGitTag, getTags, isEnvVariableSet } from "../../util/util";
import {
Commit,
FixUpInfo,
InputReleaseType,
ParsedModInfo,
} from "#types/changelogTypes.ts";
import { getLastGitTag, getTags, isEnvVariableSet } from "#utils/util.ts";
export default class ChangelogData {
since: string;
@ -26,30 +31,33 @@ export default class ChangelogData {
modInfoList: Map<number, ParsedModInfo>;
/**
* A normal initialisation.
* Constructor. Non-Async Inits are performed here.
*/
async init(): Promise<void> {
constructor() {
this.since = getLastGitTag();
this.to = "HEAD";
// If this is a tagged build, fetch the tag before last.
if (isEnvVariableSet("GITHUB_TAG")) {
this.since = getLastGitTag(process.env.GITHUB_TAG);
this.to = process.env.GITHUB_TAG;
this.to = process.env.GITHUB_TAG ?? "HEAD";
}
// Get Release Type
this.releaseType = "Release";
if (isEnvVariableSet("RELEASE_TYPE")) this.releaseType = process.env.RELEASE_TYPE as InputReleaseType;
if (isEnvVariableSet("RELEASE_TYPE"))
this.releaseType = process.env.RELEASE_TYPE as InputReleaseType;
// See if current run is test
if (isEnvVariableSet("TEST_CHANGELOG")) {
try {
this.isTest = JSON.parse(process.env.TEST_CHANGELOG.toLowerCase());
this.isTest = JSON.parse(
(process.env.TEST_CHANGELOG ?? "false").toLowerCase(),
);
} catch (err) {
throw new Error("Test Changelog Env Variable set to Invalid Value.");
}
}
} else this.isTest = false;
// Initialise Final Builder
this.builder = [];
@ -61,10 +69,18 @@ export default class ChangelogData {
this.shaList = new Set<string>();
this.combineList = new Map<string, Commit[]>();
this.modInfoList = new Map<number, ParsedModInfo>();
// Init Tag Sets for Now, so we don't have to deal with nullable params
this.tags = new Set<string>();
this.compareTags = new Set<string>();
}
/**
* A normal initialisation. Async Inits are called here.
*/
async init(): Promise<void> {
this.tags = new Set<string>(await getTags(this.to));
this.compareTags = new Set<string>(await getTags(this.since));
this.modInfoList = new Map<number, ParsedModInfo>();
}
shouldIterate(): boolean {
@ -76,7 +92,7 @@ export default class ChangelogData {
* @return tags The Compare Tags
*/
getIterations(): string[] {
const iterations = process.env.COMPARE_TAG;
const iterations = process.env.COMPARE_TAG ?? "";
return iterations.split(",").map((tag) => tag.trim());
}
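
The refactor splits ChangelogData setup into a synchronous constructor (tag names, release type, builder and list initialisation) and an async `init()` that fetches the tag sets. A hedged usage sketch; the real call site is in the changelog tasks, which are not shown in full here:

import ChangelogData from "./changelogData.ts";

// Synchronous setup: since/to, release type, builder, SHA and commit lists.
const data = new ChangelogData();

// Async setup: populates data.tags and data.compareTags via getTags().
await data.init();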

View File

@ -7,10 +7,10 @@ import {
IgnoreLogic,
Parser,
SubCategory,
} from "../../types/changelogTypes";
import { modpackManifest } from "../../globals";
import { parseCommitBody } from "./parser";
import { parseFixUp } from "./specialParser";
} from "#types/changelogTypes.ts";
import { modpackManifest } from "#globals";
import { parseCommitBody } from "./parser.ts";
import { parseFixUp } from "./specialParser.ts";
/* Values */
export const defaultIndentation = "";
@ -48,7 +48,10 @@ const normalMode: SubCategory = { commitKey: "[NM]", keyName: "Normal Mode" };
const hardMode: SubCategory = { commitKey: "[HM]", keyName: "Hard Mode" };
/* Misc Sub Category Keys */
const qolChanges: SubCategory = { commitKey: "[QOL]", keyName: "Quality of Life" };
const qolChanges: SubCategory = {
commitKey: "[QOL]",
keyName: "Quality of Life",
};
/* Set Sub Categories (Sub Categories that do not let any commit in) */
const bothModes: SubCategory = { keyName: "Both Modes" };
@ -127,7 +130,11 @@ export const categories: Category[] = [
];
/* Parsing Util Methods */
const defaultSkipCallback = (_commit: Commit, _commitMessage: string, commitBody: string): boolean => {
const defaultSkipCallback = (
_commit: Commit,
_commitMessage: string,
commitBody?: string,
): boolean => {
if (!commitBody) return false;
return commitBody.includes(skipKey);
};
@ -135,7 +142,7 @@ const defaultParsingCallback = async (
parser: Parser,
commit: Commit,
commitMessage: string,
commitBody: string,
commitBody?: string,
): Promise<boolean | Ignored> => {
if (!commitBody) return false;
return parseCommitBody(commitMessage, commitBody, commit, parser);
@ -146,12 +153,20 @@ const defaultParsingCallback = async (
const fixupParsing: Parser = {
skipCallback: () => false,
// No need to care about message/body, never parse expand/details commits
itemCallback: (_parser, commit, _commitMessage: string, _commitBody?: string, fix?: FixUpInfo) =>
parseFixUp(commit, fix),
itemCallback: (
_parser,
commit,
_commitMessage: string,
_commitBody?: string,
fix?: FixUpInfo,
) => parseFixUp(commit, fix),
addCommitListCallback: () => false,
addSHACallback: () => false,
// Don't apply fixup if it is not meant to apply to fixes
applyFixCalback: (fix) => fix.changeFixes === undefined || fix.changeFixes === null || (fix.changeFixes as boolean),
applyFixCalback: (fix) =>
fix.changeFixes === undefined ||
fix.changeFixes === null ||
(fix.changeFixes as boolean),
};
const overridesParsing: Parser = {
@ -159,11 +174,13 @@ const overridesParsing: Parser = {
skipCallback: defaultSkipCallback,
itemCallback: defaultParsingCallback,
leftOverCallback: (commit, commitMessage, _commitBody, subMessages) => {
generalCategory.changelogSection.get(generalCategory.defaultSubCategory).push({
commitMessage: commitMessage,
commitObject: commit,
subChangelogMessages: subMessages,
});
generalCategory.changelogSection
?.get(generalCategory.defaultSubCategory)
?.push({
commitMessage: commitMessage,
commitObject: commit,
subChangelogMessages: subMessages,
});
},
addCommitListCallback: () => true,
};
@ -188,7 +205,12 @@ const finalParsing: Parser = {
* Note that unless `addSHA` of the category is set to false, a commit parsed in a previous category will not be allowed to be parsed in future categories,
* even if they fit in the dirs.
*/
export const parsers: Parser[] = [fixupParsing, overridesParsing, manifestParsing, finalParsing];
export const changelogParsers: Parser[] = [
fixupParsing,
overridesParsing,
manifestParsing,
finalParsing,
];
/* Parsing Information / Allocations for Mod Changes */
@ -219,7 +241,10 @@ export interface ModChangesAllocation {
}
// These templates must be triple bracketed, because we don't want these to be html safe
export const modChangesAllocations: Record<ModChangesType, ModChangesAllocation> = {
export const modChangesAllocations: Record<
ModChangesType,
ModChangesAllocation
> = {
added: {
category: generalCategory,
subCategory: modAdditions,
@ -242,7 +267,8 @@ export const modChangesAllocations: Record<ModChangesType, ModChangesAllocation>
/* Ignore Checks */
const targetBeforeCheck: IgnoreCheck = (tag, data) => !data.tags.has(tag);
const targetAfterCheck: IgnoreCheck = (tag, data) => data.tags.has(tag);
const compareBeforeCheck: IgnoreCheck = (tag, data) => !data.compareTags.has(tag);
const compareBeforeCheck: IgnoreCheck = (tag, data) =>
!data.compareTags.has(tag);
const compareAfterCheck: IgnoreCheck = (tag, data) => data.compareTags.has(tag);
const compareIsCheck: IgnoreCheck = (tag, data) => data.since === tag;
const compareNotCheck: IgnoreCheck = (tag, data) => data.since !== tag;
@ -262,8 +288,10 @@ export const ignoreChecks: Record<string, IgnoreCheck> = {
};
/* Ignore Logic */
const andLogic: IgnoreLogic = (checkResults) => checkResults.filter((result) => result === false).length === 0;
const orLogic: IgnoreLogic = (checkResults) => checkResults.filter((result) => result === true).length > 0;
const andLogic: IgnoreLogic = (checkResults) =>
checkResults.filter((result) => !result).length === 0;
const orLogic: IgnoreLogic = (checkResults) =>
checkResults.filter((result) => result).length > 0;
const nandLogic: IgnoreLogic = (checkResults) => !andLogic(checkResults);
const norLogic: IgnoreLogic = (checkResults) => !orLogic(checkResults);
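
The ignore-logic combinators now filter on truthiness instead of strict `=== true` / `=== false` comparisons; for the boolean results an IgnoreCheck produces, the behaviour is unchanged. A quick standalone check (the functions are module-private in `definitions.ts`, so they are re-declared here with an assumed signature):

type IgnoreLogic = (checkResults: boolean[]) => boolean;

const andLogic: IgnoreLogic = (checkResults) =>
	checkResults.filter((result) => !result).length === 0;
const orLogic: IgnoreLogic = (checkResults) =>
	checkResults.filter((result) => result).length > 0;

console.log(andLogic([true, true]), andLogic([true, false])); // true false
console.log(orLogic([false, false]), orLogic([false, true])); // false true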

View File

@ -1,19 +1,34 @@
import { cleanupVersion, compareAndExpandManifestDependencies, getChangelog, getFileAtRevision } from "../../util/util";
import { ModpackManifest, ModpackManifestFile } from "../../types/modpackManifest";
import { ChangelogMessage, Commit, ModChangeInfo } from "../../types/changelogTypes";
import ListDiffer, { DiffResult } from "@egjs/list-differ";
import {
ArrayUnique,
cleanupVersion,
compareAndExpandManifestDependencies,
getChangelog,
getFileAtRevision,
getUniqueToArray,
} from "#utils/util.ts";
import {
ModpackManifest,
ModpackManifestFile,
} from "#types/modpackManifest.ts";
import {
ChangelogMessage,
Commit,
ModChangeInfo,
} from "#types/changelogTypes.ts";
import dedent from "dedent-js";
import mustache from "mustache";
import { modChangesAllocations, repoLink } from "./definitions";
import ChangelogData from "./changelogData";
import { SpecialChangelogFormatting } from "../../types/changelogTypes";
import { sortCommitListReverse } from "./pusher";
import { error } from "fancy-log";
import { modChangesAllocations, repoLink } from "./definitions.ts";
import ChangelogData from "./changelogData.ts";
import { SpecialChangelogFormatting } from "#types/changelogTypes.ts";
import { sortCommitListReverse } from "./pusher.ts";
import { logError } from "#utils/log.ts";
/**
* Mod Changes special formatting
*/
const getModChangesFormatting: (commits: Commit[]) => SpecialChangelogFormatting<Commit[]> = (commits) => {
const getModChangesFormatting: (
commits?: Commit[],
) => SpecialChangelogFormatting<Commit[] | undefined> = (commits) => {
return {
formatting: (message, subMessage, indentation, commits) => {
// Sub messages are details, so make them bold & italic
@ -26,8 +41,11 @@ const getModChangesFormatting: (commits: Commit[]) => SpecialChangelogFormatting
const authors: string[] = [];
const formattedCommits: string[] = [];
commits.forEach((commit) => {
if (!authors.includes(commit.author_name)) authors.push(commit.author_name);
formattedCommits.push(`[\`${commit.hash.substring(0, 7)}\`](${repoLink}commit/${commit.hash})`);
if (!authors.includes(commit.author_name))
authors.push(commit.author_name);
formattedCommits.push(
`[\`${commit.hash.substring(0, 7)}\`](${repoLink}commit/${commit.hash})`,
);
});
authors.sort();
return `${indentation}* ${message} - **${authors.join("**, **")}** (${formattedCommits.join(", ")})`;
@ -40,27 +58,37 @@ const getModChangesFormatting: (commits: Commit[]) => SpecialChangelogFormatting
return `${indentation}* ${message} - **${author}** ([\`${shortSHA}\`](${repoLink}commit/${commit.hash}))`;
},
storage: commits,
} as SpecialChangelogFormatting<Commit[]>;
} as SpecialChangelogFormatting<Commit[] | undefined>;
};
/**
* Pushes the mod changes, with their relative commits, to their respective sub categories in the specified category.
*/
export default async function generateModChanges(data: ChangelogData): Promise<void> {
const oldManifest: ModpackManifest = JSON.parse(getFileAtRevision("manifest.json", data.since));
const newManifest: ModpackManifest = JSON.parse(getFileAtRevision("manifest.json", data.to));
const comparisonResult = await compareAndExpandManifestDependencies(oldManifest, newManifest);
export default async function generateModChanges(
data: ChangelogData,
): Promise<void> {
const oldManifest: ModpackManifest = JSON.parse(
await getFileAtRevision("manifest.json", data.since),
);
const newManifest: ModpackManifest = JSON.parse(
await getFileAtRevision("manifest.json", data.to),
);
const comparisonResult = await compareAndExpandManifestDependencies(
oldManifest,
newManifest,
);
const commitList = await getChangelog(data.since, data.to, ["manifest.json"]);
const projectIDsToCommits: Map<number, Commit[]> = new Map();
commitList.forEach((commit) => {
const projectIDs = getChangedProjectIDs(commit.hash);
for (const commit of commitList) {
const projectIDs = await getChangedProjectIDs(commit.hash);
projectIDs.forEach((id) => {
if (projectIDsToCommits.has(id)) projectIDsToCommits.get(id).push(commit);
if (projectIDsToCommits.has(id))
projectIDsToCommits.get(id)?.push(commit);
else projectIDsToCommits.set(id, [commit]);
});
});
}
[
{
@ -86,16 +114,18 @@ export default async function generateModChanges(data: ChangelogData): Promise<v
.map((name) => name);
list.forEach((info) => {
let commits: Commit[] = undefined;
let commits: Commit[] | undefined = undefined;
if (info.projectID && projectIDsToCommits.has(info.projectID)) {
commits = projectIDsToCommits.get(info.projectID);
commits = projectIDsToCommits.get(info.projectID) ?? [];
// Sort array so newest commits appear at end instead of start of commit string
sortCommitListReverse(commits);
}
block.allocation.category.changelogSection
.get(block.allocation.subCategory)
.push(getModChangeMessage(info, block.allocation.template, data, commits));
?.get(block.allocation.subCategory)
?.push(
getModChangeMessage(info, block.allocation.template, data, commits),
);
});
});
}
@ -111,13 +141,13 @@ function getModChangeMessage(
info: ModChangeInfo,
template: string,
data: ChangelogData,
commits: Commit[],
commits?: Commit[],
): ChangelogMessage {
const oldVersion = cleanupVersion(info.oldVersion);
const newVersion = cleanupVersion(info.newVersion);
// If not provided with either version, return just mod name
if (!oldVersion && !newVersion)
if ((!oldVersion && !newVersion) || !info.projectID)
return {
commitMessage: info.modName,
specialFormatting: getModChangesFormatting(commits),
@ -131,12 +161,12 @@ function getModChangeMessage(
});
// Parse Info
let subMessages: ChangelogMessage[] = undefined;
let subMessages: ChangelogMessage[] | undefined = undefined;
if (data.modInfoList.has(info.projectID)) {
const modInfo = data.modInfoList.get(info.projectID);
if (modInfo.info) text = `${text} ***(${modInfo.info})***`;
if (modInfo.details)
subMessages = modInfo.details.map((detail) => {
if (modInfo?.info) text = `${text} ***(${modInfo.info})***`;
if (modInfo?.details)
subMessages = modInfo?.details.map((detail) => {
detail.specialFormatting = getModChangesFormatting(commits);
return detail;
});
@ -151,64 +181,56 @@ function getModChangeMessage(
/**
* Gets what project IDs, in manifest.json, a commit changed.
* @param SHA The sha of the commit
* @param sha The sha of the commit
*/
function getChangedProjectIDs(SHA: string): number[] {
const change = getCommitChange(SHA);
async function getChangedProjectIDs(sha: string): Promise<number[]> {
const change = await getCommitChange(sha);
const projectIDs: number[] = [];
if (!change || !change.diff) {
if (!change) {
return projectIDs;
}
// Add all unique IDs from both diff lists
change.diff.added.forEach((index) => {
const id = change.newManifest.files[index].projectID;
if (!projectIDs.includes(id)) projectIDs.push(id);
change.arr1Unique.forEach((file) => {
if (!projectIDs.includes(file.projectID)) projectIDs.push(file.projectID);
});
change.diff.removed.forEach((index) => {
const id = change.oldManifest.files[index].projectID;
if (!projectIDs.includes(id)) projectIDs.push(id);
change.arr2Unique.forEach((file) => {
if (!projectIDs.includes(file.projectID)) projectIDs.push(file.projectID);
});
return projectIDs;
}
/**
* A storage of what parts of the 'manifest.json' file a commit changed.
*/
interface CommitChange {
diff: DiffResult<ModpackManifestFile>;
oldManifest: ModpackManifest;
newManifest: ModpackManifest;
}
/**
* Gets what parts of the 'manifest.json' file a commit changed.
* @param SHA The sha of the commit
* @param sha The sha of the commit
*/
function getCommitChange(SHA: string): CommitChange {
async function getCommitChange(
sha: string,
): Promise<ArrayUnique<ModpackManifestFile> | undefined> {
let oldManifest: ModpackManifest, newManifest: ModpackManifest;
try {
oldManifest = JSON.parse(getFileAtRevision("manifest.json", `${SHA}^`)) as ModpackManifest;
newManifest = JSON.parse(getFileAtRevision("manifest.json", SHA)) as ModpackManifest;
oldManifest = JSON.parse(
await getFileAtRevision("manifest.json", `${sha}^`),
) as ModpackManifest;
newManifest = JSON.parse(
await getFileAtRevision("manifest.json", sha),
) as ModpackManifest;
} catch (e) {
error(dedent`
Failed to parse the manifest.json file at commit ${SHA} or the commit before!
logError(dedent`
Failed to parse the manifest.json file at commit ${sha} or the commit before!
Skipping...`);
return;
return undefined;
}
let result: DiffResult<ModpackManifestFile>;
if (oldManifest && newManifest) {
const differ = new ListDiffer(oldManifest.files, (e) => e.fileID);
result = differ.update(newManifest.files);
}
let result: ArrayUnique<ModpackManifestFile> | undefined = undefined;
if (oldManifest && newManifest)
result = getUniqueToArray(
oldManifest.files,
newManifest.files,
(e) => e.fileID,
);
return {
diff: result,
oldManifest: oldManifest,
newManifest: newManifest,
};
return result;
}
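
The `@egjs/list-differ` dependency is replaced by a `getUniqueToArray` helper in `#utils/util.ts`. Its real implementation is not shown in this excerpt; the sketch below is inferred purely from the call site above (`getUniqueToArray(oldManifest.files, newManifest.files, (e) => e.fileID)` yielding `arr1Unique` and `arr2Unique`) and may differ from the repository's version:

// Inferred shape: entries unique to each input array, keyed by a selector.
export interface ArrayUnique<T> {
	arr1Unique: T[]; // only in the first array (here: files removed from the manifest)
	arr2Unique: T[]; // only in the second array (here: files added to the manifest)
}

export function getUniqueToArray<T, K>(
	arr1: T[],
	arr2: T[],
	key: (entry: T) => K,
): ArrayUnique<T> {
	const keys1 = new Set(arr1.map(key));
	const keys2 = new Set(arr2.map(key));
	return {
		arr1Unique: arr1.filter((entry) => !keys2.has(key(entry))),
		arr2Unique: arr2.filter((entry) => !keys1.has(key(entry))),
	};
}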

View File

@ -1,16 +1,21 @@
import fs from "fs";
import { rootDirectory } from "../../globals";
import { rootDirectory } from "#globals";
import upath from "upath";
import marked from "marked";
import buildConfig from "../../buildConfig";
import { categoriesSetup } from "./categoryManagement";
import ChangelogData from "./changelogData";
import { parsers } from "./definitions";
import parse from "./parser";
import { specialParserSetup } from "./specialParser";
import generateModChanges from "./generateModChanges";
import pushAll, { pushChangelog, pushSeperator, pushSetup, pushTitle } from "./pusher";
import log from "fancy-log";
import { marked } from "marked";
import buildConfig from "#buildConfig";
import { categoriesSetup } from "./categoryManagement.ts";
import ChangelogData from "./changelogData.ts";
import { changelogParsers } from "./definitions.ts";
import parseParser from "./parser.ts";
import { specialParserSetup } from "./specialParser.ts";
import generateModChanges from "./generateModChanges.ts";
import pushAll, {
pushChangelog,
pushSeperator,
pushSetup,
pushTitle,
} from "./pusher.ts";
import logInfo from "#utils/log.ts";
/**
* Generates a changelog based on environmental variables, and saves it a changelog data class.
@ -27,13 +32,13 @@ async function createChangelog(): Promise<ChangelogData> {
pushTitle(data);
for (const tag of tags) {
const iteration = tags.indexOf(tag);
log(`Iteration ${iteration + 1} of Changelog.`);
logInfo(`Iteration ${iteration + 1} of Changelog.`);
await data.setupIteration(tag);
categoriesSetup();
specialParserSetup(data);
for (const parser of parsers) {
await parse(data, parser);
for (const parser of changelogParsers) {
await parseParser(data, parser);
}
await generateModChanges(data);
@ -47,13 +52,13 @@ async function createChangelog(): Promise<ChangelogData> {
}
return data;
}
log("No Iterations Detected.");
logInfo("No Iterations Detected.");
categoriesSetup();
specialParserSetup(data);
for (const parser of parsers) {
await parse(data, parser);
for (const parser of changelogParsers) {
await parseParser(data, parser);
}
await generateModChanges(data);
@ -71,8 +76,14 @@ export const createRootChangelog = async (): Promise<void> => {
const builder = (await createChangelog()).builder;
// Write files.
await fs.promises.writeFile(upath.join(rootDirectory, "CHANGELOG.md"), builder.join("\n"));
return fs.promises.writeFile(upath.join(rootDirectory, "CHANGELOG_CF.md"), marked.parse(builder.join("\n")));
await fs.promises.writeFile(
upath.join(rootDirectory, "CHANGELOG.md"),
builder.join("\n"),
);
return fs.promises.writeFile(
upath.join(rootDirectory, "CHANGELOG_CF.md"),
await marked(builder.join("\n"), { async: true }),
);
};
/**
@ -83,9 +94,12 @@ export const createBuildChangelog = async (): Promise<void> => {
const builder = (await createChangelog()).builder;
// Write files.
await fs.promises.writeFile(upath.join(buildConfig.buildDestinationDirectory, "CHANGELOG.md"), builder.join("\n"));
await fs.promises.writeFile(
upath.join(buildConfig.buildDestinationDirectory, "CHANGELOG.md"),
builder.join("\n"),
);
return fs.promises.writeFile(
upath.join(buildConfig.buildDestinationDirectory, "CHANGELOG_CF.md"),
marked.parse(builder.join("\n")),
await marked(builder.join("\n"), { async: true }),
);
};

View File

@ -1,4 +1,11 @@
import { Category, Commit, FixUpInfo, Ignored, Parser, SubCategory } from "../../types/changelogTypes";
import {
Category,
Commit,
FixUpInfo,
Ignored,
Parser,
SubCategory,
} from "#types/changelogTypes.ts";
import {
categories,
combineKey,
@ -9,44 +16,67 @@ import {
modInfoKey,
noCategoryKey,
priorityKey,
} from "./definitions";
import { parseCombine, parseDetails, parseExpand, parseIgnore, parseModInfo, parsePriority } from "./specialParser";
import { getChangelog } from "../../util/util";
import ChangelogData from "./changelogData";
} from "./definitions.ts";
import {
parseCombine,
parseDetails,
parseExpand,
parseIgnore,
parseModInfo,
parsePriority,
} from "./specialParser.ts";
import { getChangelog } from "#utils/util.ts";
import ChangelogData from "./changelogData.ts";
export default async function parseParser(data: ChangelogData, parser: Parser): Promise<void> {
export default async function parseParser(
data: ChangelogData,
parser: Parser,
): Promise<void> {
const commits = await getChangelog(data.since, data.to, parser.dirs);
for (const commit of commits) {
if (data.shaList.has(commit.hash)) continue;
let savedFix: FixUpInfo = undefined;
let savedFix: FixUpInfo | undefined = undefined;
if (data.commitFixes.has(commit.hash)) {
const fixUpInfo = data.commitFixes.get(commit.hash);
if (!parser.applyFixCalback || parser.applyFixCalback(fixUpInfo)) {
applyFix(commit, fixUpInfo);
} else {
savedFix = fixUpInfo;
if (fixUpInfo) {
if (!parser.applyFixCalback || parser.applyFixCalback(fixUpInfo)) {
applyFix(commit, fixUpInfo);
} else {
savedFix = fixUpInfo;
}
}
}
if (parser.skipCallback(commit, commit.message, commit.body)) {
if (!parser.addSHACallback || parser.addSHACallback(commit, true)) data.shaList.add(commit.hash);
if (!parser.addSHACallback || parser.addSHACallback(commit, true))
data.shaList.add(commit.hash);
continue;
}
const parsed = await parser.itemCallback(parser, commit, commit.message, commit.body, savedFix);
const parsed = await parser.itemCallback(
parser,
commit,
commit.message,
commit.body,
savedFix,
);
if (parsed instanceof Ignored) {
if (parsed.getCommitList() && parser.addCommitListCallback) {
if (parser.addCommitListCallback(commit, true)) data.commitList.push(commit);
if (parser.addCommitListCallback(commit, true))
data.commitList.push(commit);
}
continue;
}
if (!parsed && parser.leftOverCallback) parser.leftOverCallback(commit, commit.message, commit.body, []);
if (!parser.addSHACallback || parser.addSHACallback(commit, parsed)) data.shaList.add(commit.hash);
if (!parsed && parser.leftOverCallback)
parser.leftOverCallback(commit, commit.message, commit.body, []);
if (!parser.addSHACallback || parser.addSHACallback(commit, parsed))
data.shaList.add(commit.hash);
if (parser.addCommitListCallback(commit, parsed)) data.commitList.push(commit);
if (parser.addCommitListCallback(commit, parsed))
data.commitList.push(commit);
}
}
@ -102,7 +132,8 @@ export async function parseCommitBody(
commitObject.priority = newPriority;
}
if (commitBody.includes(modInfoKey)) await parseModInfo(commitBody, commitObject);
if (commitBody.includes(modInfoKey))
await parseModInfo(commitBody, commitObject);
if (commitBody.includes(detailsKey)) {
await parseDetails(commitMessage, commitBody, commitObject, parser);
return true;
@ -125,13 +156,18 @@ export async function parseCommitBody(
* @param indentation The indentation of the message, if needed. Defaults to "".
* @return added If the commit message was added to a category
*/
function sortCommit(message: string, commitBody: string, commit: Commit, indentation = defaultIndentation): boolean {
function sortCommit(
message: string,
commitBody: string,
commit: Commit,
indentation = defaultIndentation,
): boolean {
const sortedCategories: Category[] = findCategories(commitBody);
if (sortedCategories.length === 0) return false;
sortedCategories.forEach((category) => {
const subCategory = findSubCategory(commitBody, category);
category.changelogSection.get(subCategory).push({
category.changelogSection?.get(subCategory)?.push({
commitMessage: message,
commitObject: commit,
indentation: indentation,
@ -145,7 +181,7 @@ function sortCommit(message: string, commitBody: string, commit: Commit, indenta
* @param commitBody The commit body to sort with
* @return categoryList The categories that the commit belongs in. Returns an empty array if no category is specified via keys.
*/
export function findCategories(commitBody: string): Category[] | undefined {
export function findCategories(commitBody: string): Category[] {
const sortedCategories: Category[] = [];
for (const category of categories) {
if (category.commitKey !== undefined) {
@ -160,7 +196,10 @@ export function findCategories(commitBody: string): Category[] | undefined {
/**
* Finds the correct Sub Category a commit should go in. Must be given the Category first!
*/
export function findSubCategory(commitBody: string, category: Category): SubCategory {
export function findSubCategory(
commitBody: string,
category: Category,
): SubCategory {
for (const subCategory of category.subCategories) {
if (subCategory.commitKey !== undefined) {
if (commitBody.includes(subCategory.commitKey)) {

View File

@ -1,9 +1,9 @@
import ChangelogData from "./changelogData";
import { categories, defaultIndentation } from "./definitions";
import { Category, ChangelogMessage, Commit } from "../../types/changelogTypes";
import { repoLink } from "./definitions";
import ChangelogData from "./changelogData.ts";
import { categories, defaultIndentation } from "./definitions.ts";
import { Category, ChangelogMessage, Commit } from "#types/changelogTypes.ts";
import { repoLink } from "./definitions.ts";
import { Octokit } from "@octokit/rest";
import { getIssueURL, getNewestIssueURLs } from "../../util/util";
import { getIssueURL, getNewestIssueURLs } from "#utils/util.ts";
let data: ChangelogData;
let octokit: Octokit;
@ -39,10 +39,16 @@ export function pushTitle(inputData: ChangelogData): void {
timeZoneName: "short",
});
// noinspection HtmlDeprecatedAttribute
data.builder.push(`<h1 align="center">${data.releaseType} (${date})</h1>`, "");
data.builder.push(
`<h1 align="center">${data.releaseType} (${date})</h1>`,
"",
);
} else {
// noinspection HtmlUnknownAttribute
data.builder.push(`<h1 {{{ CENTER_ALIGN }}}>${data.releaseType} ${data.to}</h1>`, "");
data.builder.push(
`<h1 {{{ CENTER_ALIGN }}}>${data.releaseType} ${data.to}</h1>`,
"",
);
data.builder.push("{{{ CF_REDIRECT }}}", "");
}
}
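The triple-mustache tokens pushed above are left verbatim in the generated Markdown and only substituted at deploy time (see the mustache.render calls in the deploy tasks later in this diff). A minimal sketch of that substitution, with a hypothetical title:

import mustache from "mustache";

// "{{{ ... }}}" is mustache's unescaped interpolation, so the raw attribute is inserted as-is.
const heading = mustache.render('<h1 {{{ CENTER_ALIGN }}}>Release 1.7</h1>', {
	CENTER_ALIGN: 'align="center"',
	CF_REDIRECT: "",
});
// heading === '<h1 align="center">Release 1.7</h1>'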
@ -94,7 +100,7 @@ async function pushCategory(category: Category) {
// Push All Sub Categories
for (const subCategory of category.subCategories) {
// Loop through key list instead of map to produce correct order
const list = category.changelogSection.get(subCategory);
const list = category.changelogSection?.get(subCategory);
if (list && list.length != 0) {
// Push Key Name (only pushes if Key Name is not "")
if (subCategory.keyName) {
@ -137,7 +143,11 @@ async function pushCategory(category: Category) {
* @param transform A function to turn each element of type T into an element of type Commit
* @param backup A backup sort, to call when either element does not have a commit object, or when the commit objects' times are the same. Optional, if not set, will just return 0 (equal) or will compare commit messages.
*/
function sortCommitList<T>(list: T[], transform: (obj: T) => Commit | undefined, backup?: (a: T, b: T) => number) {
function sortCommitList<T>(
list: T[],
transform: (obj: T) => Commit | undefined,
backup?: (a: T, b: T) => number,
) {
list.sort((a, b): number => {
const commitA = transform(a);
const commitB = transform(b);
@ -150,9 +160,11 @@ function sortCommitList<T>(list: T[], transform: (obj: T) => Commit | undefined,
const dateB = new Date(commitB.date);
// This is reversed, so higher priorities go on top
if (commitB.priority !== commitA.priority) return commitB.priority - commitA.priority;
if (commitB.priority !== commitA.priority)
return (commitB.priority ?? 0) - (commitA.priority ?? 0);
// This is reversed, so the newest commits go on top
if (dateB.getTime() - dateA.getTime() !== 0) return dateB.getTime() - dateA.getTime();
if (dateB.getTime() - dateA.getTime() !== 0)
return dateB.getTime() - dateA.getTime();
if (backup) return backup(a, b);
return commitA.message.localeCompare(commitB.message);
});
@ -168,8 +180,9 @@ export function sortCommitListReverse(list: Commit[]): void {
const dateB = new Date(b.date);
// This is reversed, so higher priorities go on top
if (b.priority !== a.priority) return b.priority - a.priority; // Priority is still highest first
if (dateA.getTime() - dateB.getTime() !== 0) return dateA.getTime() - dateB.getTime();
if (b.priority !== a.priority) return (b.priority ?? 0) - (a.priority ?? 0); // Priority is still highest first
if (dateA.getTime() - dateB.getTime() !== 0)
return dateA.getTime() - dateB.getTime();
return a.message.localeCompare(b.message);
});
}
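A hypothetical call to the generic sortCommitList above, mirroring how the pushers presumably use it: ChangelogMessage entries are ordered by their underlying commits, falling back to comparing the messages themselves.

// Hypothetical call site; `messages` stands in for a ChangelogMessage[] built elsewhere.
declare const messages: ChangelogMessage[];
sortCommitList(
	messages,
	(msg) => msg.commitObject,
	(a, b) => a.commitMessage.localeCompare(b.commitMessage),
);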
@ -180,8 +193,14 @@ export function sortCommitListReverse(list: Commit[]): void {
* @param subMessage Whether this message is a subMessage (used in details). Set to true to make it a subMessage (different parsing). Defaults to false.
* @return string Formatted Changelog Message
*/
async function formatChangelogMessage(changelogMessage: ChangelogMessage, subMessage = false): Promise<string> {
const indentation = changelogMessage.indentation == undefined ? defaultIndentation : changelogMessage.indentation;
async function formatChangelogMessage(
changelogMessage: ChangelogMessage,
subMessage = false,
): Promise<string> {
const indentation =
changelogMessage.indentation == undefined
? defaultIndentation
: changelogMessage.indentation;
const message = changelogMessage.commitMessage.trim();
if (changelogMessage.specialFormatting)
@ -194,7 +213,8 @@ async function formatChangelogMessage(changelogMessage: ChangelogMessage, subMes
if (changelogMessage.commitObject && !subMessage) {
if (data.combineList.has(changelogMessage.commitObject.hash)) {
const commits = data.combineList.get(changelogMessage.commitObject.hash);
const commits =
data.combineList.get(changelogMessage.commitObject.hash) ?? [];
commits.push(changelogMessage.commitObject);
// Sort original array so newest commits appear at the end instead of start of commit string
@ -207,11 +227,16 @@ async function formatChangelogMessage(changelogMessage: ChangelogMessage, subMes
commits.forEach((commit) => {
if (processedSHAs.has(commit.hash)) return;
if (!authors.includes(commit.author_name) && !authorEmails.has(commit.author_email)) {
if (
!authors.includes(commit.author_name) &&
!authorEmails.has(commit.author_email)
) {
authors.push(commit.author_name);
authorEmails.add(commit.author_email);
}
formattedCommits.push(`[\`${commit.hash.substring(0, 7)}\`](${repoLink}commit/${commit.hash})`);
formattedCommits.push(
`[\`${commit.hash.substring(0, 7)}\`](${repoLink}commit/${commit.hash})`,
);
processedSHAs.add(commit.hash);
});
@ -232,7 +257,11 @@ async function formatChangelogMessage(changelogMessage: ChangelogMessage, subMes
* Returns a formatted commit
*/
function formatCommit(commit: Commit): string {
const date = new Date(commit.date).toLocaleDateString("en-us", { year: "numeric", month: "short", day: "numeric" });
const date = new Date(commit.date).toLocaleDateString("en-us", {
year: "numeric",
month: "short",
day: "numeric",
});
const formattedCommit = `${commit.message} - **${commit.author_name}** (${date})`;
const shortSHA = commit.hash.substring(0, 7);
@ -249,7 +278,11 @@ async function transformAllIssueURLs(changelog: string[]) {
for (let i = 0; i < changelog.length; i++) {
const categoryFormatted = changelog[i];
// Transform PR and/or Issue tags into a link.
promises.push(transformTags(categoryFormatted).then((categoryTransformed) => (changelog[i] = categoryTransformed)));
promises.push(
transformTags(categoryFormatted).then(
(categoryTransformed) => (changelog[i] = categoryTransformed),
),
);
}
// Apply all Link Changes
await Promise.all(promises);
@ -261,13 +294,19 @@ async function transformAllIssueURLs(changelog: string[]) {
async function transformTags(message: string): Promise<string> {
const promises: Promise<string>[] = [];
if (message.search(/#\d+/) !== -1) {
const matched = message.match(/#\d+/g);
const matched = message.match(/#\d+/g) ?? [];
for (const match of matched) {
// Extract digits
const digits = Number.parseInt(match.match(/\d+/)[0]);
const digitsMatch = match.match(/\d+/);
if (!digitsMatch) continue;
const digits = Number.parseInt(digitsMatch[0]);
// Get PR/Issue Info (PRs are listed in the Issue API Endpoint)
promises.push(getIssueURL(digits, octokit).then((url) => message.replace(match, `[#${digits}](${url})`)));
promises.push(
getIssueURL(digits, octokit).then((url) =>
message.replace(match, `[#${digits}](${url})`),
),
);
}
}
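A self-contained sketch of the same tag transformation, with a hypothetical resolve callback standing in for getIssueURL/octokit:

async function linkifyTags(
	message: string,
	resolve: (issue: number) => Promise<string>,
): Promise<string> {
	for (const match of message.match(/#\d+/g) ?? []) {
		const digits = Number.parseInt(match.substring(1));
		// Replace e.g. "#681" with "[#681](resolved url)" once the URL resolves.
		message = message.replace(match, `[#${digits}](${await resolve(digits)})`);
	}
	return message;
}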

View File

@ -10,7 +10,7 @@ import {
ParsedModInfo,
Parser,
PriorityInfo,
} from "../../types/changelogTypes";
} from "#types/changelogTypes.ts";
import dedent from "dedent-js";
import matter, { GrayMatterFile } from "gray-matter";
import {
@ -32,12 +32,13 @@ import {
modInfoKey,
modInfoList,
priorityKey,
} from "./definitions";
import { findCategories, findSubCategory } from "./parser";
import ChangelogData from "./changelogData";
import { error } from "fancy-log";
import { parse } from "toml-v1";
} from "./definitions.ts";
import { findCategories, findSubCategory } from "./parser.ts";
import ChangelogData from "./changelogData.ts";
import toml from "toml-v1";
import { logError } from "#utils/log.ts";
const { parse } = toml;
let data: ChangelogData;
export function specialParserSetup(inputData: ChangelogData): void {
@ -47,18 +48,26 @@ export function specialParserSetup(inputData: ChangelogData): void {
/**
* Reads a commit's priority.
*/
export async function parsePriority(commitBody: string, commitObject: Commit): Promise<number | undefined> {
export async function parsePriority(
commitBody: string,
commitObject: Commit,
): Promise<number | undefined> {
if (!commitBody.includes(priorityKey)) return undefined;
const info = await parseTOML<PriorityInfo>(commitBody, commitObject, priorityKey);
const info = await parseTOML<PriorityInfo>(
commitBody,
commitObject,
priorityKey,
);
if (!info) return undefined;
if (!info.priority) {
error(dedent`
logError(dedent`
Priority Info in body:
\`\`\`
${commitBody}\`\`\`
of commit object ${commitObject.hash} (${commitObject.message}) is missing priority info (key 'priority').`);
if (data.isTest) throw new Error("Failed to Parse Priority Info. See Above.");
if (data.isTest)
throw new Error("Failed to Parse Priority Info. See Above.");
return undefined;
}
@ -70,13 +79,16 @@ export async function parsePriority(commitBody: string, commitObject: Commit): P
* @param commitBody The Commit Body. Does check whether the ignore key is there.
* @return Returns undefined to continue, or an Ignored object if the commit should be skipped.
*/
export async function parseIgnore(commitBody: string, commitObject: Commit): Promise<Ignored | undefined> {
export async function parseIgnore(
commitBody: string,
commitObject: Commit,
): Promise<Ignored | undefined> {
if (!commitBody.includes(ignoreKey)) return undefined;
const info = await parseTOML<IgnoreInfo>(commitBody, commitObject, ignoreKey);
if (!info) return undefined;
if (!info.checks) {
error(dedent`
logError(dedent`
Ignore Info in body:
\`\`\`
${commitBody}\`\`\`
@ -89,7 +101,7 @@ export async function parseIgnore(commitBody: string, commitObject: Commit): Pro
try {
infoKeys = Object.keys(info.checks);
} catch (err) {
error(dedent`
logError(dedent`
Could not get the keys in Ignore Info of body:
\`\`\`
${commitBody}\`\`\`
@ -102,9 +114,10 @@ export async function parseIgnore(commitBody: string, commitObject: Commit): Pro
const ignoreKeys = new Set<string>(Object.keys(ignoreChecks));
const checkResults: boolean[] = [];
infoKeys.forEach((key) => {
if (ignoreKeys.has(key)) checkResults.push(ignoreChecks[key].call(this, info.checks[key], data));
if (ignoreKeys.has(key))
checkResults.push(ignoreChecks[key](info.checks[key], data));
else {
error(dedent`
logError(dedent`
Ignore Check with key '${key}' in body:
\`\`\`
${commitBody}\`\`\`
@ -112,11 +125,12 @@ export async function parseIgnore(commitBody: string, commitObject: Commit): Pro
Only accepts keys: ${Array.from(ignoreKeys)
.map((key) => `'${key}'`)
.join(", ")}.`);
if (data.isTest) throw new Error("Failed Parsing Ignore Check. See Above.");
if (data.isTest)
throw new Error("Failed Parsing Ignore Check. See Above.");
}
});
if (checkResults.length === 0) {
error(dedent`
logError(dedent`
No Ignore Checks found in body:
\`\`\`
${commitBody}\`\`\`
@ -124,16 +138,18 @@ export async function parseIgnore(commitBody: string, commitObject: Commit): Pro
Only accepts keys: ${Array.from(ignoreKeys)
.map((key) => `'${key}'`)
.join(", ")}.`);
if (data.isTest) throw new Error("Failed Parsing Ignore Checks. See Above.");
if (data.isTest)
throw new Error("Failed Parsing Ignore Checks. See Above.");
return undefined;
}
/* Find Logic */
let logic: IgnoreLogic;
if (info.logic === undefined) logic = defaultIgnoreLogic;
else if (Object.keys(ignoreLogics).includes(info.logic)) logic = ignoreLogics[info.logic];
else if (Object.keys(ignoreLogics).includes(info.logic))
logic = ignoreLogics[info.logic];
else {
error(dedent`
logError(dedent`
Ignore Logic '${info.logic}' in body:
\`\`\`
${commitBody}\`\`\`
@ -145,14 +161,17 @@ export async function parseIgnore(commitBody: string, commitObject: Commit): Pro
logic = defaultIgnoreLogic;
}
if (logic.call(this, checkResults)) return new Ignored(info.addCommitList);
if (logic(checkResults)) return new Ignored(info.addCommitList);
return undefined;
}
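These special parsers all read TOML blocks embedded in commit bodies through gray-matter with a custom delimiter and the toml-v1 engine (see parseTOML further down in this file). A minimal sketch of that mechanism; the "[PRIORITY]" delimiter and key are illustrative only, and it assumes the block sits where gray-matter expects front matter:

import matter from "gray-matter";
import toml from "toml-v1";

// Illustrative only: the real delimiters and keys come from definitions.ts.
function readPriority(commitBody: string): number | undefined {
	const parsed = matter(commitBody, {
		delimiters: "[PRIORITY]",
		language: "toml",
		engines: { toml: toml.parse },
	});
	const priority = parsed.data["priority"];
	return typeof priority === "number" ? priority : undefined;
}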
/**
* Parses a commit with 'Fixup'.
*/
export async function parseFixUp(commit: Commit, fix?: FixUpInfo): Promise<boolean> {
export async function parseFixUp(
commit: Commit,
fix?: FixUpInfo,
): Promise<boolean> {
if (!commit.body || !commit.body.includes(fixUpKey)) return false;
await parseTOMLWithRootToList<FixUpInfo>(
commit.body,
@ -169,7 +188,9 @@ export async function parseFixUp(commit: Commit, fix?: FixUpInfo): Promise<boole
(matter) => {
let title = commit.message;
// Replace "\r\n" (Caused by editing on GitHub) with "\n", as the output matter has this done.
let body = commit.body.replace(/\r\n/g, "\n").replace(matter.matter.trim(), "");
let body = commit.body
.replace(/\r\n/g, "\n")
.replace(matter.matter.trim(), "");
// Apply Ignored Fixes
if (fix) {
@ -201,19 +222,32 @@ export async function parseFixUp(commit: Commit, fix?: FixUpInfo): Promise<boole
/**
* Parses a commit with 'mod info'.
*/
export async function parseModInfo(commitBody: string, commitObject: Commit): Promise<void> {
export async function parseModInfo(
commitBody: string,
commitObject: Commit,
): Promise<void> {
await parseTOMLWithRootToList<ModInfo>(
commitBody,
commitObject,
modInfoKey,
modInfoList,
(item): boolean => {
const invalidProjectID = !item.projectID || typeof item.projectID !== "number" || Number.isNaN(item.projectID);
// noinspection SuspiciousTypeOfGuard
const invalidProjectID =
!item.projectID ||
typeof item.projectID !== "number" ||
Number.isNaN(item.projectID);
const invalidInfo = !item.info;
const invalidRootDetails = !item.detail;
const invalidDetails = !item.details || !Array.isArray(item.details) || !(item.details.length > 0);
const invalidDetails =
!item.details ||
!Array.isArray(item.details) ||
!(item.details.length > 0);
// Invalid if invalid ID, or invalid info and invalid details
return invalidProjectID || (invalidInfo && invalidRootDetails && invalidDetails);
return (
invalidProjectID ||
(invalidInfo && invalidRootDetails && invalidDetails)
);
},
async (item) => {
data.modInfoList.set(item.projectID, await getParsedModInfo(item));
@ -226,7 +260,11 @@ export async function parseModInfo(commitBody: string, commitObject: Commit): Pr
*/
async function getParsedModInfo(modInfo: ModInfo): Promise<ParsedModInfo> {
const subMessages: ChangelogMessage[] = [];
if (modInfo.detail) subMessages.push({ commitMessage: modInfo.detail, indentation: indentationLevel });
if (modInfo.detail)
subMessages.push({
commitMessage: modInfo.detail,
indentation: indentationLevel,
});
if (modInfo.details && modInfo.details.length > 0)
subMessages.push(
...modInfo.details.map((detail) => {
@ -243,7 +281,11 @@ async function getParsedModInfo(modInfo: ModInfo): Promise<ParsedModInfo> {
/**
* Parses a commit with 'expand'.
*/
export async function parseExpand(commitBody: string, commitObject: Commit, parser: Parser): Promise<void> {
export async function parseExpand(
commitBody: string,
commitObject: Commit,
parser: Parser,
): Promise<void> {
await parseTOMLWithRootToList<ExpandedMessage>(
commitBody,
commitObject,
@ -284,13 +326,18 @@ export async function parseDetails(
if (sortedCategories.length === 0) {
if (parser.leftOverCallback) {
parser.leftOverCallback(commitObject, commitMessage, commitBody, subMessages);
parser.leftOverCallback(
commitObject,
commitMessage,
commitBody,
subMessages,
);
}
} else {
sortedCategories.forEach((category) => {
const subCategory = findSubCategory(commitBody, category);
category.changelogSection.get(subCategory).push({
category.changelogSection?.get(subCategory)?.push({
commitMessage: commitMessage,
commitObject: commitObject,
subChangelogMessages: subMessages,
@ -317,7 +364,13 @@ async function expandDetailsLevel(
async (item) => {
// Nested Details
if (Array.isArray(item)) {
await addDetailsLevel(commitBody, commitObject, item as unknown[], `${indentation}${indentationLevel}`, result);
await addDetailsLevel(
commitBody,
commitObject,
item as unknown[],
`${indentation}${indentationLevel}`,
result,
);
return;
}
let string = item as string;
@ -325,9 +378,19 @@ async function expandDetailsLevel(
// Legacy Nested Details
if (string.includes(detailsKey)) {
result.push(...(await expandDetailsLevel(string, commitObject, `${indentation}${indentationLevel}`)));
result.push(
...(await expandDetailsLevel(
string,
commitObject,
`${indentation}${indentationLevel}`,
)),
);
} else {
result.push({ commitMessage: string, commitObject: commitObject, indentation: indentation });
result.push({
commitMessage: string,
commitObject: commitObject,
indentation: indentation,
});
}
},
(root) => root[detailsRoot] as string,
@ -360,38 +423,43 @@ async function addDetailsLevel(
}
// Transform into String
let detailString: string;
if (typeof detail !== "string") {
try {
detailString = detail.toString();
} catch (e) {
error(dedent`
Failed parsing Detail \`${detail}\` of Details Level:
logError(dedent`
Failed parsing Detail \`${detail}\` of Details Level:
\`\`\`
${details}\`\`\`
of commit object ${commitObject.hash} (${commitObject.message}).
The value is not a string.`);
if (commitObject.body && commitBody !== commitObject.body) {
logError(dedent`
Original Body:
\`\`\`
${details}\`\`\`
of commit object ${commitObject.hash} (${commitObject.message}).
The value could not be converted into a string.`);
if (commitObject.body && commitBody !== commitObject.body) {
error(dedent`
Original Body:
\`\`\`
${commitObject.body}\`\`\``);
}
error(`\n${endMessage}\n`);
if (data.isTest) throw e;
continue;
${commitObject.body}\`\`\``);
}
} else detailString = detail as string;
detailString = dedent(detailString).trim();
logError(`\n${endMessage}\n`);
if (data.isTest) throw new Error("Value is not a string.");
continue;
}
const detailString = dedent(detail).trim();
// Legacy Nested Details
if (detailString.includes(detailsKey)) {
builder.push(...(await expandDetailsLevel(detailString, commitObject, `${indentation}${indentationLevel}`)));
builder.push(
...(await expandDetailsLevel(
detailString,
commitObject,
`${indentation}${indentationLevel}`,
)),
);
} else {
builder.push({ commitMessage: detailString, commitObject: commitObject, indentation: indentation });
builder.push({
commitMessage: detailString,
commitObject: commitObject,
indentation: indentation,
});
}
}
}
@ -399,7 +467,10 @@ async function addDetailsLevel(
/**
* Parses a commit with 'combine'.
*/
export async function parseCombine(commitBody: string, commitObject: Commit): Promise<void> {
export async function parseCombine(
commitBody: string,
commitObject: Commit,
): Promise<void> {
await parseTOMLWithRootToList<string>(
commitBody,
commitObject,
@ -408,7 +479,7 @@ export async function parseCombine(commitBody: string, commitObject: Commit): Pr
(item) => !item,
async (item) => {
if (!data.combineList.has(item)) data.combineList.set(item, []);
data.combineList.get(item).push(commitObject);
data.combineList.get(item)?.push(commitObject);
},
(root) => root[combineRoot] as string,
);
@ -456,7 +527,7 @@ async function parseTOML<T>(
if (!itemKey) item = parseResult.data as T;
else item = parseResult.data[itemKey];
} catch (e) {
error(dedent`
logError(dedent`
Failed parsing TOML in body:
\`\`\`
${commitBody}\`\`\`
@ -464,13 +535,13 @@ async function parseTOML<T>(
This could be because of invalid syntax.`);
if (commitObject.body && commitBody !== commitObject.body) {
error(dedent`
logError(dedent`
Original Body:
\`\`\`
${commitObject.body}\`\`\``);
}
error(`\n${endMessage}\n`);
logError(`\n${endMessage}\n`);
if (data.isTest) throw e;
return undefined;
}
@ -505,19 +576,19 @@ async function parseList<T>(
let index = i + 1;
if (entryModifier) index = entryModifier(i);
error(dedent`
logError(dedent`
Missing Requirements for entry ${index} of list with key '${listKey}' in body:
\`\`\`
${commitBody}\`\`\`
of commit object ${commitObject.hash} (${commitObject.message}).`);
if (commitObject.body && commitBody !== commitObject.body) {
error(dedent`
logError(dedent`
Original Body:
\`\`\`
${commitObject.body}\`\`\``);
}
error(`${endMessage}\n`);
logError(`${endMessage}\n`);
if (data.isTest) throw new Error("Bad Entry. See Above.");
continue;
@ -548,19 +619,26 @@ async function parseTOMLWithRootToList<T>(
rootObjTransform?: (root: Record<string, unknown>) => T,
matterCallback?: (matter: GrayMatterFile<string>) => void,
): Promise<void> {
let root: Record<string, unknown>;
let root: Record<string, unknown> | undefined;
const messages: T[] = [];
const endMessage = getEndMessage(delimiter);
// Parse Root TOML
try {
root = await parseTOML<Record<string, unknown>>(commitBody, commitObject, delimiter, null, matterCallback);
root = await parseTOML<Record<string, unknown>>(
commitBody,
commitObject,
delimiter,
undefined,
matterCallback,
);
if (!root) return;
const rootObj = rootObjTransform ? rootObjTransform(root) : (root as T);
// Only push root if it passes empty check
if (rootObj && !emptyCheck(rootObj)) messages.push(rootObj);
} catch (e) {
error(dedent`
logError(dedent`
Failed parsing Root TOML in body:
\`\`\`
${commitBody}\`\`\`
@ -568,13 +646,13 @@ async function parseTOMLWithRootToList<T>(
This could be because of invalid syntax.`);
if (commitObject.body && commitBody !== commitObject.body) {
error(dedent`
logError(dedent`
Original Body:
\`\`\`
${commitObject.body}\`\`\``);
}
error(`\n${endMessage}\n`);
logError(`\n${endMessage}\n`);
if (data.isTest) throw e;
return undefined;
}
@ -586,38 +664,38 @@ async function parseTOMLWithRootToList<T>(
return;
}
// No Valid Entry
error(dedent`
logError(dedent`
Missing Requirements for root entry, & no list with list key '${listKey}' detected in body:
\`\`\`
${commitBody}\`\`\`
of commit object ${commitObject.hash} (${commitObject.message}).`);
if (commitObject.body && commitBody !== commitObject.body) {
error(dedent`
logError(dedent`
Original Body:
\`\`\`
${commitObject.body}\`\`\``);
}
error(`${endMessage}\n`);
logError(`${endMessage}\n`);
if (data.isTest) throw new Error("No Valid Entry. See Above.");
}
// Parse List TOML
if (!root[listKey] || !Array.isArray(root[listKey])) {
error(dedent`
logError(dedent`
List (key: '${listKey}') in body:
\`\`\`
${commitBody}\`\`\`
of commit object ${commitObject.hash} (${commitObject.message}) not a list, or does not exist.`);
if (commitObject.body && commitBody !== commitObject.body) {
error(dedent`
logError(dedent`
Original Body:
\`\`\`
${commitObject.body}\`\`\``);
}
error(`${endMessage}\n`);
logError(`${endMessage}\n`);
if (data.isTest) throw new Error("Failed Parsing List. See Above.");
return;
@ -627,37 +705,37 @@ async function parseTOMLWithRootToList<T>(
try {
list = root[listKey] as unknown as T[];
} catch (e) {
error(dedent`
logError(dedent`
List (key: '${listKey}') in body:
\`\`\`
${commitBody}\`\`\`
of commit object ${commitObject.hash} (${commitObject.message}) could not be turned into correct list type.`);
if (commitObject.body && commitBody !== commitObject.body) {
error(dedent`
logError(dedent`
Original Body:
\`\`\`
${commitObject.body}\`\`\``);
}
error(`${endMessage}\n`);
logError(`${endMessage}\n`);
if (data.isTest) throw new Error("Failed Parsing List. See Above.");
return;
}
if (list.length === 0) {
error(dedent`
logError(dedent`
List (key: '${listKey}') in body:
\`\`\`
${commitBody}\`\`\`
of commit object ${commitObject.hash} (${commitObject.message}) is empty.`);
if (commitObject.body && commitBody !== commitObject.body) {
error(dedent`
logError(dedent`
Original Body:
\`\`\`
${commitObject.body}\`\`\``);
}
error(`${endMessage}\n`);
logError(`${endMessage}\n`);
if (data.isTest) throw new Error("Failed Parsing List. See Above.");
return;
@ -667,10 +745,28 @@ async function parseTOMLWithRootToList<T>(
messages.push(...list);
// Because we've already done empty check on root obj, no need to suppress error msg
// Keep as index (root: 0, obj1: 1, obj2: 2, ...)
await parseList<T>(messages, listKey, commitBody, commitObject, endMessage, emptyCheck, perItemCallback, (i) => i);
await parseList<T>(
messages,
listKey,
commitBody,
commitObject,
endMessage,
emptyCheck,
perItemCallback,
(i) => i,
);
}
// Normal Parsing of List
else await parseList<T>(list, listKey, commitBody, commitObject, endMessage, emptyCheck, perItemCallback);
else
await parseList<T>(
list,
listKey,
commitBody,
commitObject,
endMessage,
emptyCheck,
perItemCallback,
);
}
function getEndMessage(delimiter: string) {

View File

@ -1,5 +1,5 @@
import gulp from "gulp";
import { checkEnvironmentalVariables } from "../../util/util";
import { checkEnvironmentalVariables } from "#utils/util.ts";
const vars = [
"GITHUB_TOKEN",
@ -18,7 +18,7 @@ const vars = [
async function checkEnv() {
checkEnvironmentalVariables(vars);
if (!/.+\/.+/.exec(process.env.GITHUB_REPOSITORY)) {
if (!/.+\/.+/.exec(process.env.GITHUB_REPOSITORY ?? "")) {
throw new Error("Malformed repository slug.");
}
}
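As a quick illustration, the slug check above only requires the usual "owner/repo" shape of GITHUB_REPOSITORY:

/.+\/.+/.test("Nomi-CEu/Nomi-CEu"); // true — accepted
/.+\/.+/.test("not-a-slug"); // false — triggers the error above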

View File

@ -1,19 +1,20 @@
import gulp from "gulp";
import { clientDestDirectory, modpackManifest, overridesFolder, sharedDestDirectory } from "../../globals";
import gulp, { dest, symlink } from "gulp";
import {
clientDestDirectory,
modpackManifest,
sharedDestDirectory,
} from "#globals";
import fs from "fs";
import upath from "upath";
import buildConfig from "../../buildConfig";
import rename from "gulp-rename";
import imagemin from "gulp-imagemin";
import pngToJpeg from "png-to-jpeg";
import { MainMenuConfig } from "../../types/mainMenuConfig";
import del from "del";
import { createModList, ModFileInfo } from "../misc/createModList";
import buildConfig from "#buildConfig";
import { deleteAsync } from "del";
import { createModList, ModFileInfo } from "../misc/createModList.ts";
import dedent from "dedent-js";
import { cleanupVersion } from "../../util/util";
import { cleanupVersion } from "#utils/util.ts";
import filter from "gulp-filter";
async function clientCleanUp() {
return del(upath.join(clientDestDirectory, "*"), { force: true });
return deleteAsync(upath.join(clientDestDirectory, "*"), { force: true });
}
/**
@ -71,7 +72,9 @@ async function copyClientLicense() {
* Copies the update notes file.
*/
function copyClientUpdateNotes() {
return gulp.src("../UPDATENOTES.md", { allowEmpty: true }).pipe(gulp.dest(clientDestDirectory));
return gulp
.src("../UPDATENOTES.md", { allowEmpty: true })
.pipe(gulp.dest(clientDestDirectory));
}
/**
@ -80,16 +83,22 @@ function copyClientUpdateNotes() {
function copyClientChangelog() {
return gulp
.src(upath.join(buildConfig.buildDestinationDirectory, "CHANGELOG.md"))
.pipe(gulp.dest(clientDestDirectory));
.pipe(dest(clientDestDirectory));
}
/**
* Copies modpack overrides.
*/
function copyClientOverrides() {
const f = filter((f) => !f.isDirectory());
return gulp
.src(buildConfig.copyFromSharedClientGlobs, { nodir: true, cwd: sharedDestDirectory, allowEmpty: true })
.pipe(gulp.symlink(upath.join(clientDestDirectory, "overrides")));
.src(buildConfig.copyFromSharedClientGlobs, {
cwd: sharedDestDirectory,
allowEmpty: true,
resolveSymlinks: true,
})
.pipe(f)
.pipe(symlink(upath.join(clientDestDirectory, "overrides")));
}
/**
@ -179,7 +188,10 @@ async function fetchModList() {
</html>
`;
return fs.promises.writeFile(upath.join(clientDestDirectory, "modlist.html"), formattedModList);
return fs.promises.writeFile(
upath.join(clientDestDirectory, "modlist.html"),
formattedModList,
);
}
/**
@ -211,54 +223,6 @@ function getTickCross(bool: boolean): string {
return '<td class="redCross">&#10006;</td>';
}
const bgImageNamespace = "minecraft";
const bgImagePath = "textures/gui/title/background";
const mainMenuConfigPath = "config/CustomMainMenu/mainmenu.json";
/**
* Minifies (converts to jpeg) main menu files so they don't take up 60% of the pack size.
*/
async function compressMainMenuImages() {
const mainMenuImages = [];
const bgImagePathReal = upath.join("resources", bgImageNamespace, bgImagePath);
// Convert each slideshow image to 80% jpg.
await new Promise((resolve) => {
gulp
.src(upath.join(sharedDestDirectory, overridesFolder, bgImagePathReal, "**/*"))
.pipe(imagemin([pngToJpeg({ quality: buildConfig.screenshotsQuality })]))
.pipe(
rename((f) => {
// xd
f.extname = ".jpg";
// Ping back the file name so we don't have to scan the folder again.
mainMenuImages.push(`${f.basename}${f.extname}`);
}),
)
.pipe(gulp.dest(upath.join(clientDestDirectory, overridesFolder, bgImagePathReal)))
.on("end", resolve);
});
if (mainMenuImages.length > 0) {
// Read the CustomMainMenu config and parse it.
const mainMenuConfig: MainMenuConfig = JSON.parse(
(await fs.promises.readFile(upath.join(clientDestDirectory, overridesFolder, mainMenuConfigPath))).toString(),
);
// Fill the config with image paths using the weird "namespace:path" scheme.
mainMenuConfig.other.background.slideshow.images = mainMenuImages.map(
(img) => bgImageNamespace + ":" + upath.join(bgImagePath, img),
);
// Write it back.
return fs.promises.writeFile(
upath.join(clientDestDirectory, overridesFolder, mainMenuConfigPath),
JSON.stringify(mainMenuConfig, null, " "),
);
}
}
export default gulp.series(
clientCleanUp,
createClientDirs,
@ -269,5 +233,4 @@ export default gulp.series(
copyClientChangelog,
copyClientUpdateNotes,
fetchModList,
compressMainMenuImages,
);

View File

@ -1,17 +1,32 @@
import { modpackManifest } from "../../globals";
import { modpackManifest } from "#globals";
import request from "requestretry";
import fs from "fs";
import log from "fancy-log";
import upath from "upath";
import buildConfig from "../../buildConfig";
import { makeArtifactNameBody } from "../../util/util";
import buildConfig from "#buildConfig";
import {
getAxios,
isEnvVariableSet,
makeArtifactNameBody,
} from "#utils/util.ts";
import sanitize from "sanitize-filename";
import mustache from "mustache";
import { DeployReleaseType, inputToDeployReleaseTypes } from "../../types/changelogTypes";
import {
DeployReleaseType,
InputReleaseType,
inputToDeployReleaseTypes,
} from "#types/changelogTypes.ts";
import logInfo from "#utils/log.ts";
import { CurseForgeLegacyMCVersion } from "#types/curseForge.ts";
import * as core from "@actions/core";
import { AxiosRequestConfig } from "axios";
import { filesize } from "filesize";
const CURSEFORGE_LEGACY_ENDPOINT = "https://minecraft.curseforge.com/";
const variablesToCheck = ["CURSEFORGE_API_TOKEN", "CURSEFORGE_PROJECT_ID", "RELEASE_TYPE"];
const variablesToCheck = [
"CURSEFORGE_API_TOKEN",
"CURSEFORGE_PROJECT_ID",
"RELEASE_TYPE",
];
async function upload(files: { name: string; displayName: string }[]) {
files.forEach((file) => {
@ -23,7 +38,9 @@ async function upload(files: { name: string; displayName: string }[]) {
// Since we've built everything beforehand, the changelog must be available in the shared directory.
let changelog = (
await fs.promises.readFile(upath.join(buildConfig.buildDestinationDirectory, "CHANGELOG_CF.md"))
await fs.promises.readFile(
upath.join(buildConfig.buildDestinationDirectory, "CHANGELOG_CF.md"),
)
).toString();
changelog = mustache.render(changelog, {
@ -36,74 +53,104 @@ async function upload(files: { name: string; displayName: string }[]) {
};
// Fetch the list of Minecraft versions from CurseForge.
log("Fetching CurseForge version manifest...");
const versionsManifest =
(await request({
uri: CURSEFORGE_LEGACY_ENDPOINT + "api/game/versions",
logInfo("Fetching CurseForge version manifest...");
const versionsManifest: CurseForgeLegacyMCVersion[] | undefined = (
await getAxios()({
url: CURSEFORGE_LEGACY_ENDPOINT + "api/game/versions",
method: "get",
headers: tokenHeaders,
method: "GET",
json: true,
fullResponse: false,
maxAttempts: 5,
})) || [];
responseType: "json",
})
).data;
if (!versionsManifest) {
throw new Error("Failed to fetch CurseForge version manifest.");
}
const version = versionsManifest.find((m) => m.name == modpackManifest.minecraft.version);
const version = versionsManifest.find(
(m) => m.name == modpackManifest.minecraft.version,
);
if (!version) {
throw new Error(`Version ${modpackManifest.minecraft.version} not found on CurseForge.`);
throw new Error(
`Version ${modpackManifest.minecraft.version} not found on CurseForge.`,
);
}
let clientFileID: number | null;
const uploadedIDs: { filePath: string; displayName: string; id: number }[] =
[];
let parentID: number | undefined = undefined;
const releaseType: DeployReleaseType = inputToDeployReleaseTypes[process.env.RELEASE_TYPE];
const releaseType: DeployReleaseType =
inputToDeployReleaseTypes[
(process.env.RELEASE_TYPE ?? "Release") as InputReleaseType
];
// Upload artifacts.
for (const file of files) {
const options = {
uri: CURSEFORGE_LEGACY_ENDPOINT + `api/projects/${process.env.CURSEFORGE_PROJECT_ID}/upload-file`,
method: "POST",
const path = upath.join(buildConfig.buildDestinationDirectory, file.name);
const options: AxiosRequestConfig<unknown> = {
url:
CURSEFORGE_LEGACY_ENDPOINT +
`api/projects/${process.env.CURSEFORGE_PROJECT_ID}/upload-file`,
method: "post",
headers: {
...tokenHeaders,
"Content-Type": "multipart/form-data",
},
formData: {
data: {
metadata: JSON.stringify({
changelog: changelog,
changelogType: "html",
releaseType: releaseType ? releaseType.cfReleaseType : "release",
parentFileID: clientFileID ? clientFileID : undefined,
gameVersions: clientFileID ? undefined : [version.id],
parentFileID: parentID ? parentID : undefined,
gameVersions: parentID ? undefined : [version.id],
displayName: file.displayName,
}),
file: fs.createReadStream(upath.join(buildConfig.buildDestinationDirectory, file.name)),
file: fs.createReadStream(path),
},
json: true,
fullResponse: false,
responseType: "json",
};
log(`Uploading ${file.name} to CurseForge...` + (clientFileID ? `(child of ${clientFileID})` : ""));
logInfo(
`Uploading ${file.name} to CurseForge...` +
(parentID ? `(child of ${parentID})` : ""),
);
const response = await request(options);
const response: { id: number } = (await getAxios()(options)).data;
if (response && response.id) {
if (!clientFileID) {
clientFileID = response.id;
uploadedIDs.push({ filePath: path, displayName: file.displayName, id: response.id });
if (!parentID) {
parentID = response.id;
}
} else {
throw new Error(`Failed to upload ${file.name}: Invalid Response.`);
}
}
if (isEnvVariableSet("GITHUB_STEP_SUMMARY"))
await core.summary
.addHeading("Nomi-CEu CurseForge Deploy Summary:", 2)
.addTable([
[
{ data: "File Name", header: true },
{ data: "File ID", header: true },
{ data: "File Size", header: true },
],
...uploadedIDs.map((uploaded) => [
uploaded.displayName,
uploaded.id.toString(),
filesize(fs.statSync(uploaded.filePath).size),
]),
])
.write();
}
/**
* Uploads build artifacts to CurseForge.
*/
export async function deployCurseForge(): Promise<void> {
/**
/*
* Obligatory variable check.
*/
["GITHUB_TAG", ...variablesToCheck].forEach((vari) => {
@ -112,15 +159,23 @@ export async function deployCurseForge(): Promise<void> {
}
});
const displayName = process.env.GITHUB_TAG;
const displayName = process.env.GITHUB_TAG ?? "";
const files = [
{
name: sanitize((makeArtifactNameBody(modpackManifest.name) + "-client.zip").toLowerCase()),
name: sanitize(
(
makeArtifactNameBody(modpackManifest.name) + "-client.zip"
).toLowerCase(),
),
displayName: displayName,
},
{
name: sanitize((makeArtifactNameBody(modpackManifest.name) + "-server.zip").toLowerCase()),
name: sanitize(
(
makeArtifactNameBody(modpackManifest.name) + "-server.zip"
).toLowerCase(),
),
displayName: `${displayName}-server`,
},
];

View File

@ -1,16 +1,24 @@
import { modpackManifest } from "../../globals";
import { modpackManifest } from "#globals";
import fs from "fs";
import upath from "upath";
import buildConfig from "../../buildConfig";
import { makeArtifactNameBody } from "../../util/util";
import Bluebird from "bluebird";
import buildConfig from "#buildConfig";
import { makeArtifactNameBody } from "#utils/util.ts";
import { Octokit } from "@octokit/rest";
import sanitize from "sanitize-filename";
import mustache from "mustache";
import { DeployReleaseType, inputToDeployReleaseTypes } from "../../types/changelogTypes";
import {
DeployReleaseType,
InputReleaseType,
inputToDeployReleaseTypes,
} from "#types/changelogTypes.ts";
const variablesToCheck = ["GITHUB_TAG", "GITHUB_TOKEN", "GITHUB_REPOSITORY", "RELEASE_TYPE"];
const variablesToCheck = [
"GITHUB_TAG",
"GITHUB_TOKEN",
"GITHUB_REPOSITORY",
"RELEASE_TYPE",
];
/**
* Uploads build artifacts to GitHub Releases.
@ -26,7 +34,9 @@ async function deployReleases(): Promise<void> {
});
const body = makeArtifactNameBody(modpackManifest.name);
const files = ["client", "server", "lang"].map((file) => sanitize(`${body}-${file}.zip`.toLowerCase()));
const files = ["client", "server", "lang"].map((file) =>
sanitize(`${body}-${file}.zip`.toLowerCase()),
);
/**
* Obligatory file check.
@ -42,7 +52,7 @@ async function deployReleases(): Promise<void> {
auth: process.env.GITHUB_TOKEN,
});
const parsedSlug = /(.+)\/(.+)/.exec(process.env.GITHUB_REPOSITORY);
const parsedSlug = /(.+)\/(.+)/.exec(process.env.GITHUB_REPOSITORY ?? "");
if (!parsedSlug) {
throw new Error("No/malformed GitHub repository slug provided.");
}
@ -53,15 +63,23 @@ async function deployReleases(): Promise<void> {
};
const tag = process.env.GITHUB_TAG;
const releaseType: DeployReleaseType = inputToDeployReleaseTypes[process.env.RELEASE_TYPE];
const releaseType: DeployReleaseType =
inputToDeployReleaseTypes[
(process.env.RELEASE_TYPE ?? "") as InputReleaseType
];
const preRelease = releaseType ? releaseType.isPreRelease : false;
// Since we've grabbed, or built, everything beforehand, the Changelog file should be in the build dir
let changelog = (
await fs.promises.readFile(upath.join(buildConfig.buildDestinationDirectory, "CHANGELOG.md"))
await fs.promises.readFile(
upath.join(buildConfig.buildDestinationDirectory, "CHANGELOG.md"),
)
).toString();
changelog = mustache.render(changelog, { CENTER_ALIGN: 'align="center"', CF_REDIRECT: "" });
changelog = mustache.render(changelog, {
CENTER_ALIGN: 'align="center"',
CF_REDIRECT: "",
});
// Create a release.
const release = await octokit.repos.createRelease({
@ -73,16 +91,20 @@ async function deployReleases(): Promise<void> {
});
// Upload artifacts.
await Bluebird.map(files, async (file) => {
await octokit.repos.uploadReleaseAsset({
name: file,
release_id: release.data.id,
...repo,
await Promise.all(
files.map(async (file) => {
return octokit.repos.uploadReleaseAsset({
name: file,
release_id: release.data.id,
...repo,
// Dumb workaround thanks to broken typings.
data: (await fs.promises.readFile(upath.join(buildConfig.buildDestinationDirectory, file))) as unknown as string,
});
});
// Dumb workaround thanks to broken typings. Data should accept buffers...
data: (await fs.promises.readFile(
upath.join(buildConfig.buildDestinationDirectory, file),
)) as unknown as string,
});
}),
);
await octokit.repos.updateRelease({
release_id: release.data.id,

View File

@ -0,0 +1,728 @@
import { Icon, Quest, QuestBook, QuestVisibility } from "#types/bqQuestBook.ts";
import { diff } from "just-diff";
import {
Changed,
YesIgnoreNo,
QuestChange,
Replacements,
SavedPorter,
SpecialModifierHandler,
} from "#types/actionQBTypes.ts";
import upath from "upath";
import fs from "fs";
import PortQBData from "./questPorting/portQBData.ts";
import { input, select } from "@inquirer/prompts";
import {
configFolder,
configOverridesFolder,
rootDirectory,
storageFolder,
} from "#globals";
import logInfo, { logError, logWarn } from "#utils/log.ts";
import colors from "colors";
import { getUniqueToArray } from "#utils/util.ts";
import sortKeys from "sort-keys";
import lodash from "lodash";
let data: PortQBData;
export const emptyQuestName = "Gap";
export const emptyQuestDescription =
"Unused Gap Quest. Prevents Overriding Of IDs.";
export const emptyQuestVisibility: QuestVisibility = "HIDDEN";
export const emptyQuestIconId = "minecraft:air";
export const emptyQuestTaskId = "bq_standard:checkbox";
export const emptyQuest: Quest = {
"preRequisites:11": [],
"properties:10": {
"betterquesting:10": {
"autoclaim:1": 0,
"desc:8": "Unused Gap Quest. Prevents Overriding Of IDs.",
"globalshare:1": 0,
"icon:10": {
"Count:3": 0,
"Damage:2": 0,
"OreDict:8": "",
"id:8": "minecraft:air",
},
"ignoresview:1": 0,
"ismain:1": 0,
"issilent:1": 1,
"lockedprogress:1": 0,
"name:8": "Gap",
"partysinglereward:1": 0,
"questlogic:8": "AND",
"repeat_relative:1": 1,
"repeattime:3": -1,
"simultaneous:1": 0,
"snd_complete:8": "minecraft:entity.player.levelup",
"snd_update:8": "minecraft:entity.player.levelup",
"tasklogic:8": "AND",
"visibility:8": "HIDDEN",
},
},
"questID:3": 0,
"rewards:9": {},
"tasks:9": {
"0:10": {
"index:3": 0,
"taskID:8": "bq_standard:checkbox",
},
},
};
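For readers unfamiliar with the key format in emptyQuest above: BetterQuesting's exported JSON suffixes each key with an NBT type id. A best-effort mapping (an assumption, not taken from this repository):

// :1 byte, :2 short, :3 int, :4 long, :5 float, :6 double,
// :7 byte array, :8 string, :9 list, :10 compound, :11 int array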
export const defaultPorter = {
savedQuestMap: [],
ignoreQuestsNormal: [],
ignoreQuestsExpert: [],
alwaysAskQuestsNormal: [],
alwaysAskQuestsExpert: [],
} as SavedPorter;
/* Paths */
export const cfgNormalPath = upath.join(
configFolder,
"betterquesting",
"DefaultQuests.json",
);
export const cfgExpertPath = upath.join(
configFolder,
"betterquesting",
"saved_quests",
"ExpertQuests.json",
);
export const cfgOverrideNormalPath = upath.join(
configOverridesFolder,
"normal",
"betterquesting",
"DefaultQuests.json",
);
export const cfgOverrideExpertPath = upath.join(
configOverridesFolder,
"expert",
"betterquesting",
"DefaultQuests.json",
);
export const savedQuestPorter = upath.join(storageFolder, "savedQBPorter.json");
const nomiCoinMatcher = /^nomilabs:nomicoin[0-9]*$/;
export function setupUtils(dataIn: PortQBData): void {
data = dataIn;
}
export function removeFormatting(input: string): string {
if (!input.includes("§")) return input;
const builder: string[] = [];
for (let i = 0; i < input.length; i++) {
const char = input.charAt(i);
if (char === "§") {
i++; // Skip Next Character
continue;
}
builder.push(char);
}
return builder.join("");
}
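A quick worked example of the stripping above ("§" is the Minecraft section-sign formatting prefix; the format code after it is dropped as well):

removeFormatting("§aNomi§lfactory"); // -> "Nomifactory"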
export function stripRewards(quest: Quest, shouldCheck = false, log = false) {
for (const rewardKey of Object.keys(quest["rewards:9"])) {
const reward = quest["rewards:9"][rewardKey];
if (
!reward ||
reward["rewardID:8"] !== "bq_standard:item" ||
!reward["rewards:9"]
)
continue;
for (const itemKey of Object.keys(reward["rewards:9"])) {
const item: Icon = reward["rewards:9"][itemKey];
if (item && item["id:8"] && nomiCoinMatcher.test(item["id:8"])) {
if (shouldCheck)
throw new Error(
`Expert Quest with ID ${quest["questID:3"]} has Nomi Coin Reward!`,
);
if (log)
logWarn(
`Removing Nomi Coin Reward for Expert Quest with ID ${quest["questID:3"]}...`,
);
delete reward["rewards:9"][itemKey];
}
}
if (Object.keys(reward["rewards:9"]).length === 0)
delete quest["rewards:9"][rewardKey];
else quest["rewards:9"][rewardKey] = reward;
}
}
let cachedQuestByName: Map<string, Quest>;
/**
* Finds the corresponding quest on the qb to change, using the data cache. If the quest is not found in the data cache, asks the user questions to determine it.
* @param sourceId The id of the quest on the source qb.
* @param sourceQuest The Source Quest, if it is not just `data.currentIDsToQuests.get(sourceId)`.
* @return Returns the quest that is found, or undefined if the quest should be skipped.
*/
export async function findQuest(
sourceId: number,
sourceQuest?: Quest,
): Promise<Quest | undefined> {
if (data.ignoreQuests.has(sourceId)) return undefined;
if (data.foundQuests.has(sourceId)) return data.foundQuests.get(sourceId);
// If no source quest, default behaviour
if (!sourceQuest) sourceQuest = data.currentIDsToQuests.get(sourceId);
// If still no source quest, throw
if (!sourceQuest)
throw new Error(
`Request Find Quest for id ${sourceId}, which is not in IDs to Quests!`,
);
logInfo(
colors.magenta(
`Finding Corresponding Quest for Source Quest with ID ${sourceId} and Name ${name(sourceQuest)}...`,
),
);
// Try Find by ID
const questById = data.toChangeIDsToQuests.get(sourceId);
if (questById && !isEmptyQuest(questById)) {
// Ask the client if the corresponding id on the corresponding qb is correct
const correctQuestById = await isRightQuest(
`Does the Corresponding Quest have ID ${sourceId} and Name ${name(questById)}?`,
);
if (correctQuestById === "YES") {
logInfo("Using Quest...");
await finalizeFoundQuest(sourceId, () =>
data.foundQuests.set(sourceId, questById),
);
return questById;
}
if (correctQuestById === "IGNORE") {
logInfo("Ignoring...");
await finalizeFoundQuest(sourceId, () => data.ignoreQuests.add(sourceId));
return undefined;
}
}
// Generate Quest By Name if Needed
if (!cachedQuestByName) {
cachedQuestByName = new Map<string, Quest>();
[...data.toChangeIDsToQuests.values()].forEach((item) =>
cachedQuestByName.set(removeFormatting(name(item)), item),
);
}
// Try Find by Name
const removeFormattedName = removeFormatting(name(sourceQuest));
const questByName = cachedQuestByName.get(removeFormattedName);
if (questByName && !isEmptyQuest(questByName)) {
// Ask the client if the corresponding id on the corresponding qb is correct
const correctQuestByName = await isRightQuest(
`Does the Corresponding Quest have ID ${id(questByName)} and Name ${name(questByName)}?`,
);
if (correctQuestByName === "YES") {
logInfo("Using Quest...");
await finalizeFoundQuest(sourceId, () =>
data.foundQuests.set(sourceId, questByName),
);
return questByName;
}
if (correctQuestByName === "IGNORE") {
logInfo("Cancelling...");
await finalizeFoundQuest(sourceId, () => data.ignoreQuests.add(sourceId));
return undefined;
}
}
// Finally, ask for the specific ID
let foundBySpecificID: YesIgnoreNo = "NO";
let questBySpecificID: Quest | undefined = undefined;
while (foundBySpecificID === "NO") {
const specID = parseInt(
await input({
message:
"Please Provide a Specific Quest ID to be used as the Corresponding Quest. Enter -1 to Skip/Cancel this Quest!",
validate: (value) => {
const numValue = parseInt(value);
if (numValue === -1) return true; // Allow Cancelling
if (isNaN(numValue) || numValue < 0) {
return "Please Enter a Number Value >= 0!";
}
return true;
},
}),
);
if (specID === -1) {
logInfo("Cancelling...");
foundBySpecificID = "IGNORE";
break;
}
questBySpecificID = data.toChangeIDsToQuests.get(specID);
if (!questBySpecificID) {
logError(`${specID} is not a Quest ID in the Quest Book being Changed!`);
continue;
}
if (isEmptyQuest(questBySpecificID)) {
logError(
`${specID} is an Empty Quest! Enter -1 to Skip/Cancel this Quest, not the ID of an Empty Quest!`,
);
continue;
}
foundBySpecificID = await isRightQuest(
`Are You Sure you Would Like to use Quest with ID ${specID} and Name ${name(questBySpecificID)}?`,
);
}
if (foundBySpecificID === "IGNORE" || !questBySpecificID)
await finalizeFoundQuest(sourceId, () => data.ignoreQuests.add(sourceId));
else
await finalizeFoundQuest(sourceId, () =>
data.foundQuests.set(sourceId, questBySpecificID),
);
return questBySpecificID;
}
async function finalizeFoundQuest(sourceID: number, addToList: () => void) {
if (data.alwaysAskQuests.has(sourceID)) {
logInfo(
"This Quest is set to Ask Each Time. If this is not Desirable, Change this in the Saved Porter!",
);
return;
}
const askEachTime = await booleanSelect(
"Should we Ask the Corresponding ID for this Quest Every Time?",
"Yes",
"No",
false,
);
if (askEachTime) data.alwaysAskQuests.add(sourceID);
else addToList();
}
async function isRightQuest(message: string): Promise<YesIgnoreNo> {
return (await select({
message: message,
choices: [
{
name: "Yes",
value: "YES",
},
{
name: "No",
value: "NO",
},
{
name: "Skip/Ignore this Quest",
value: "IGNORE",
},
],
})) as YesIgnoreNo;
}
export async function booleanSelect(
message: string,
trueMsg = "Yes",
falseMsg = "No",
defaultTo = true,
): Promise<boolean> {
return (
await select({
message: message,
choices: [
{
name: trueMsg,
value: true,
},
{
name: falseMsg,
value: false,
},
],
default: defaultTo,
})
).valueOf();
}
export function id(quest: Quest): number {
return quest["questID:3"];
}
export function name(quest: Quest): string {
return quest["properties:10"]["betterquesting:10"]["name:8"];
}
export function dependencies(quest: Quest): number[] {
return quest["preRequisites:11"];
}
/**
* Paths to Ignore in Quest Change Calculation.
* Prerequisites handled separately.
* Prerequisites Types handled by Prerequisites.
* Rewards not ported (too different across Quest Books)
*/
const ignoreRootPaths = new Set<string>([
"preRequisites:11",
"preRequisiteTypes:7",
"rewards:9",
]);
/**
* Special handlers for modified quests. The path added to the changes list should have -CUSTOM appended to the end, to distinguish these entries from other changes.
*/
const specialModifierHandlers: SpecialModifierHandler[] = [
(old, current, changes) => {
const diff = getUniqueToArray(
old["preRequisites:11"],
current["preRequisites:11"],
);
// Unique to old array: Removed
if (diff.arr1Unique.length > 0 || diff.arr2Unique.length > 0) {
changes.push({
path: ["preRequisites-CUSTOM"],
op: "replace",
value: diff,
});
}
},
];
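For orientation before getChanged below: just-diff reports changes as JSON-Patch-style entries, for example (illustrative keys and values):

import { diff } from "just-diff";

diff({ "name:8": "Old" }, { "name:8": "New" });
// -> [{ op: "replace", path: ["name:8"], value: "New" }]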
export function getChanged(
currentQuests: Quest[],
oldQuests: Quest[],
): Changed {
// i is current iter, j is old iter
let i = 0;
let j = 0;
const changed: Changed = { added: [], modified: [], removed: [] };
while (i < currentQuests.length && j < oldQuests.length) {
const currentQuestID = id(currentQuests[i]);
const oldQuestID = id(oldQuests[j]);
if (currentQuestID == oldQuestID) {
let questDiff = diff(oldQuests[j], currentQuests[i]) as QuestChange[];
if (questDiff.length !== 0) {
questDiff = questDiff.filter(
(change) =>
typeof change.path[0] !== "string" ||
!ignoreRootPaths.has(change.path[0]),
);
for (const handler of specialModifierHandlers) {
handler(oldQuests[j], currentQuests[i], questDiff);
}
if (isEmptyQuest(currentQuests[i])) changed.removed.push(oldQuests[j]);
else
changed.modified.push({
currentQuest: currentQuests[i],
oldQuest: oldQuests[j],
change: questDiff,
});
}
i++;
j++;
continue;
}
if (!data.currentIDsToQuests.has(oldQuestID)) {
logWarn(
`A quest has been removed directly! (ID ${id(oldQuests[j])}, Name '${name(
oldQuests[j],
)}') This is NOT recommended! IDs may overlap in the future! Replace quests with empty ones instead!`,
);
changed.removed.push(oldQuests[j]);
j++;
continue;
}
changed.added.push(currentQuests[i]);
i++;
}
if (i < currentQuests.length) {
changed.added.push(...currentQuests.slice(i));
} else if (j < oldQuests.length) {
changed.removed.push(...oldQuests.slice(j));
}
return changed;
}
export function isEmptyQuest(quest: Quest): boolean {
return (
questIsSilent(quest) &&
emptyName(quest) &&
emptyDesc(quest) &&
emptyVisibility(quest) &&
emptyIcon(quest) &&
questHasNoRewards(quest) &&
emptyTasks(quest)
);
}
function emptyName(quest: Quest): boolean {
const questName = name(quest);
return questName === emptyQuestName || !questName;
}
function emptyDesc(quest: Quest): boolean {
const questDesc = quest["properties:10"]["betterquesting:10"]["desc:8"];
return questDesc === emptyQuestDescription || !questDesc;
}
function emptyVisibility(quest: Quest): boolean {
const questVisibility =
quest["properties:10"]["betterquesting:10"]["visibility:8"];
return questVisibility === emptyQuestVisibility;
}
function emptyIcon(quest: Quest): boolean {
const questIcon = quest["properties:10"]["betterquesting:10"]["icon:10"];
return (
!questIcon || questIcon["id:8"] === emptyQuestIconId || !questIcon["id:8"]
);
}
function questIsSilent(quest: Quest): boolean {
return quest["properties:10"]["betterquesting:10"]["issilent:1"] !== 0;
}
function questHasNoRewards(quest: Quest): boolean {
return !quest["rewards:9"] || Object.keys(quest["rewards:9"]).length === 0;
}
function emptyTasks(quest: Quest): boolean {
return (
!quest["tasks:9"] ||
Object.keys(quest["tasks:9"]).length === 0 ||
(Object.keys(quest["tasks:9"]).length === 1 &&
(!quest["tasks:9"]["0:10"] ||
!quest["tasks:9"]["0:10"]["taskID:8"] ||
quest["tasks:9"]["0:10"]["taskID:8"] === emptyQuestTaskId))
);
}
export function stringifyQB(qb: QuestBook): string {
// Formatting Changes
const replacements: Replacements[] = [
{
search: /</g,
replacement: "\\u003c",
},
{
search: />/g,
replacement: "\\u003e",
},
{
search: /&/g,
replacement: "\\u0026",
},
{
search: /=/g,
replacement: "\\u003d",
},
{
search: /'/g,
replacement: "\\u0027",
},
];
qb = sortKeysRecursiveIgnoreArray(qb, (key1, key2): number => {
const defaultVal = key2 < key1 ? 1 : -1;
if (!key1.includes(":") || !key2.includes(":")) return defaultVal;
const num1 = Number.parseInt(key1.split(":")[0]);
const num2 = Number.parseInt(key2.split(":")[0]);
if (Number.isNaN(num1) || Number.isNaN(num2)) return defaultVal;
return num1 - num2;
});
let parsed = JSON.stringify(qb, null, 2).replace(
/("[a-zA-Z_]+:[56]":\s)(-?[0-9]+)(,?)$/gm,
"$1$2.0$3",
); // Add '.0' to any Float/Double Values that are Integers
for (const replacement of replacements) {
parsed = parsed.replace(replacement.search, replacement.replacement);
}
return parsed;
}
/**
* Use our own, instead of sortKeysRecursive, to ignore sorting of arrays.
*/
function sortKeysRecursiveIgnoreArray<T extends object>(
object: T,
compare: (a: string, b: string) => number,
): T {
const result = sortKeys(object as Record<string, unknown>, { compare }) as T;
// We can modify result while iterating: Object.keys returns a static array of keys
Object.keys(result).forEach(function (key) {
const current = lodash.get(result, key);
if (current) {
if (typeof current === "object") {
lodash.set(result, key, sortKeys(current, { compare }));
return;
}
}
});
return result;
}
export async function save(toSave: QuestBook): Promise<void> {
const save = await booleanSelect("Would you like to Save Changes?");
if (!save) return;
const shouldSavePorter = await booleanSelect(
"Would you like to Save the Quest Porter?",
);
if (shouldSavePorter) await savePorter();
const parsed = stringifyQB(toSave);
for (const path of data.outputPaths) {
await fs.promises.writeFile(upath.join(rootDirectory, path), parsed);
}
logInfo(`Saved Files: ${data.outputPaths.join(", ")}!`);
logInfo(
colors.green(
colors.bold(
"Remember to import the JSON Files into your Instance to format them!",
),
),
);
}
async function savePorter() {
logInfo("Saving Porter...");
let porter: SavedPorter;
// Keys of the porter are checked on import (readFromPorter),
// so the porter definitely has a value for each key.
if (!data.savedPorter) {
if (fs.existsSync(savedQuestPorter)) {
porter = await readFromPorter(false);
} else porter = defaultPorter;
} else porter = data.savedPorter;
// Save Map
porter.savedQuestMap = [];
for (const sourceID of data.foundQuests.keys()) {
const sourceQuest = data.foundQuests.get(sourceID);
if (!sourceQuest) continue;
const targetID = id(sourceQuest);
let normalID: number, expertID: number;
switch (data.type) {
case "NORMAL":
normalID = sourceID;
expertID = targetID;
break;
case "EXPERT":
normalID = targetID;
expertID = sourceID;
break;
}
porter.savedQuestMap.push({
normal: normalID,
expert: expertID,
});
porter.savedQuestMap.sort((a, b) => a.normal - b.normal);
}
// Save Ignore
const ignoreArr = [...data.ignoreQuests];
if (data.type === "NORMAL") porter.ignoreQuestsNormal = ignoreArr;
else porter.ignoreQuestsExpert = ignoreArr;
// Save Always Ask
const alwaysAskArr = [...data.alwaysAskQuests];
if (data.type === "NORMAL") porter.alwaysAskQuestsNormal = alwaysAskArr;
else porter.alwaysAskQuestsExpert = alwaysAskArr;
// Write Porter to File
return fs.promises.writeFile(
savedQuestPorter,
JSON.stringify(porter, null, 2),
);
}
export async function readFromPorter(
replaceExisting: boolean,
): Promise<SavedPorter> {
const savedPorter = JSON.parse(
await fs.promises.readFile(savedQuestPorter, "utf-8"),
) as SavedPorter;
// Make Sure Porter has Every Key
for (const key of Object.keys(defaultPorter)) {
// @ts-expect-error Cannot use String as Key
if (!savedPorter[key]) savedPorter[key] = defaultPorter[key];
}
// Add in Map
if (replaceExisting) data.foundQuests.clear();
for (const savedQuestPath of savedPorter.savedQuestMap) {
if (
Number.isNaN(savedQuestPath.normal) ||
Number.isNaN(savedQuestPath.expert)
)
throw new Error("ID must be a number!");
let sourceID: number, targetID: number;
switch (data.type) {
case "NORMAL":
sourceID = savedQuestPath.normal;
targetID = savedQuestPath.expert;
break;
case "EXPERT":
sourceID = savedQuestPath.expert;
targetID = savedQuestPath.normal;
break;
}
if (!data.currentIDsToQuests.has(sourceID))
throw new Error("ID must be a valid quest!");
const targetQuest = data.toChangeIDsToQuests.get(targetID);
if (!targetQuest) throw new Error("ID must be a valid quest!");
if (!data.foundQuests.has(sourceID))
data.foundQuests.set(sourceID, targetQuest);
}
// Ignore & Always Ask
data.ignoreQuests = addToOrReplaceSet(
replaceExisting,
data.type === "NORMAL"
? savedPorter.ignoreQuestsNormal
: savedPorter.ignoreQuestsExpert,
data.ignoreQuests,
);
data.alwaysAskQuests = addToOrReplaceSet(
replaceExisting,
data.type === "NORMAL"
? savedPorter.alwaysAskQuestsNormal
: savedPorter.alwaysAskQuestsExpert,
data.alwaysAskQuests,
);
data.savedPorter = savedPorter;
return savedPorter;
}
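/**
* e.g. addToOrReplaceSet(false, [1, 2], new Set([2, 3])) -> Set {2, 3, 1}
*      addToOrReplaceSet(true, [1, 2], new Set([2, 3])) -> Set {1, 2}
*/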
function addToOrReplaceSet<T>(
replaceExisting: boolean,
array: T[],
set: Set<T>,
): Set<T> {
if (replaceExisting) return new Set<T>(array);
array.forEach((value) => set.add(value));
return set;
}

View File

@ -0,0 +1,431 @@
import { QuestBook } from "#types/bqQuestBook.ts";
import fs from "fs";
import {
cfgExpertPath,
cfgNormalPath,
cfgOverrideExpertPath,
cfgOverrideNormalPath,
emptyQuest,
id,
name,
stringifyQB,
stripRewards,
} from "#tasks/helpers/actionQBUtils.ts";
import { input, select } from "@inquirer/prompts";
import { SourceOption } from "#types/actionQBTypes.ts";
import logInfo, { logWarn } from "#utils/log.ts";
import upath from "upath";
import { rootDirectory } from "#globals";
import colors from "colors";
import { isEnvVariableSet } from "#utils/util.ts";
import * as core from "@actions/core";
import lodash from "lodash";
const isAvailableForFormatting = /[0-9a-ek-or]/;
export const check = async () => {
try {
await checkAndFix(true);
} catch (e) {
if (isEnvVariableSet("GITHUB_STEP_SUMMARY")) {
const summary = core.summary
.addHeading("Quest Book Format Error!", 2)
.addRaw(
"Run the below Command in your Local Clone to Format the Quest Book:",
true,
)
.addCodeBlock("npm run gulp fixQB");
if (e instanceof Error) summary.addDetails("Details...", e.message);
await summary.write();
}
throw e;
}
};
export const fix = () => checkAndFix(false);
async function checkAndFix(shouldCheck: boolean) {
logInfo(colors.bold(`${shouldCheck ? "Checking" : "Fixing"} QB...`));
let checkNormalQB: QuestBook;
let checkExpertQB: QuestBook;
if (shouldCheck) {
const nml1 = await fs.promises.readFile(
upath.join(rootDirectory, cfgNormalPath),
"utf-8",
);
const nml2 = await fs.promises.readFile(
upath.join(rootDirectory, cfgOverrideNormalPath),
"utf-8",
);
if (nml1 !== nml2) throw new Error("Normal Quest Books are not the Same!");
const exp1 = await fs.promises.readFile(
upath.join(rootDirectory, cfgExpertPath),
"utf-8",
);
const exp2 = await fs.promises.readFile(
upath.join(rootDirectory, cfgOverrideExpertPath),
"utf-8",
);
if (exp1 !== exp2) throw new Error("Expert Quest Books are not the Same!");
checkNormalQB = JSON.parse(nml1) as QuestBook;
checkExpertQB = JSON.parse(exp1) as QuestBook;
} else {
const normalSrc = (await select({
message: "Which version should we use, for the Normal Source File?",
choices: [
{
name: "Main Config Dir",
value: "CFG" as SourceOption,
},
{
name: "Config Overrides",
value: "CFG-OVERRIDE" as SourceOption,
},
],
})) as SourceOption;
const expertSrc = (await select({
message: "Which version should we use, for the Expert Source File?",
choices: [
{
name: "Main Config Dir",
value: "CFG" as SourceOption,
},
{
name: "Config Overrides",
value: "CFG-OVERRIDE" as SourceOption,
},
],
})) as SourceOption;
checkNormalQB = JSON.parse(
await fs.promises.readFile(
upath.join(
rootDirectory,
normalSrc === "CFG" ? cfgNormalPath : cfgOverrideNormalPath,
),
"utf-8",
),
);
checkExpertQB = JSON.parse(
await fs.promises.readFile(
upath.join(
rootDirectory,
expertSrc === "CFG" ? cfgExpertPath : cfgOverrideExpertPath,
),
"utf-8",
),
);
}
logInfo(colors.bold("Processing Normal QB..."));
await checkAndFixQB(shouldCheck, checkNormalQB, false);
logInfo(colors.bold("Processing Expert QB..."));
await checkAndFixQB(shouldCheck, checkExpertQB, true);
if (!shouldCheck) {
logInfo("Saving...");
const normal = stringifyQB(checkNormalQB);
const expert = stringifyQB(checkExpertQB);
await Promise.all([
fs.promises.writeFile(upath.join(rootDirectory, cfgNormalPath), normal),
fs.promises.writeFile(
upath.join(rootDirectory, cfgOverrideNormalPath),
normal,
),
fs.promises.writeFile(upath.join(rootDirectory, cfgExpertPath), expert),
fs.promises.writeFile(
upath.join(rootDirectory, cfgOverrideExpertPath),
expert,
),
]);
} else logInfo(colors.green("Successful. No Formatting Errors!"));
}
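/**
* Walks the quest database in ID order: fills in (or, when checking, flags)
* missing quest IDs, strips redundant formatting from names and descriptions,
* prompts for (or, when checking, rejects) empty descriptions and NORMAL
* visibility, de-duplicates and sorts prerequisites, runs stripRewards on
* Expert quests, checks quest line name/description formatting, and turns off
* edit mode. With shouldCheck true, every problem throws instead of being fixed.
*/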
async function checkAndFixQB(
shouldCheck: boolean,
qb: QuestBook,
isExpert: boolean,
) {
let index = 0;
// Clone when shouldCheck is false, so we don't modify the underlying object.
const newQB = shouldCheck
? qb["questDatabase:9"]
: lodash.cloneDeep(qb["questDatabase:9"]);
// Checks for Quests
logInfo("Checking Quests...");
for (const questKey of Object.keys(qb["questDatabase:9"])) {
// Copy Quest if Should Check is false (So we don't modify the underlying object)
const quest = shouldCheck
? qb["questDatabase:9"][questKey]
: { ...qb["questDatabase:9"][questKey] };
const foundID = id(quest);
// Check for Missing Quests
while (foundID > index) {
if (shouldCheck) throw new Error(`Missing Quest at Index ${index}!`);
logWarn(`Adding Empty Quest at Index ${index}...`);
const newQuest = { ...emptyQuest };
newQuest["questID:3"] = index;
newQB[`${index}:10`] = newQuest;
index++;
}
index++;
// Check Name Formatting
quest["properties:10"]["betterquesting:10"]["name:8"] =
stripOrThrowExcessFormatting(
shouldCheck,
name(quest),
foundID,
"Quest",
"Name",
);
// Check for Empty Descriptions (Trim first, might be a space)
if (!quest["properties:10"]["betterquesting:10"]["desc:8"].trim()) {
if (shouldCheck)
throw new Error(`Quest with ID ${foundID} has Empty Description!`);
quest["properties:10"]["betterquesting:10"]["desc:8"] = await input({
message: `Quest with ID ${foundID} and Name ${quest["properties:10"]["betterquesting:10"]["name:8"]} has an Empty Description! What should we Replace it With?`,
default: "No Description",
validate: (value) => Boolean(value.trim()),
});
}
// Check Desc Formatting (Still check if after, as user may have entered dupe formatting)
quest["properties:10"]["betterquesting:10"]["desc:8"] =
stripOrThrowExcessFormatting(
shouldCheck,
quest["properties:10"]["betterquesting:10"]["desc:8"],
foundID,
"Quest",
"Description",
);
const trimmed =
quest["properties:10"]["betterquesting:10"]["desc:8"].trim();
// Check if Description is Trimmed (Still check if after, as user may have entered new lines)
if (quest["properties:10"]["betterquesting:10"]["desc:8"] !== trimmed) {
if (shouldCheck)
throw new Error(
`Quest with ID ${foundID} has Excess Spaces/New Lines in the Description!`,
);
logWarn(`Trimming Description of Quest with ID ${foundID}!`);
quest["properties:10"]["betterquesting:10"]["desc:8"] = trimmed;
}
// Visibility Check
if (
quest["properties:10"]["betterquesting:10"]["visibility:8"] === "NORMAL"
) {
if (shouldCheck)
throw new Error(`Quest with ID ${foundID} has Visibility Normal!`);
quest["properties:10"]["betterquesting:10"]["visibility:8"] =
await select({
message: `Quest with ID ${foundID} has Visibility Normal! What should we Replace it With?`,
choices: [
{
name: "Always",
value: "ALWAYS",
},
{
name: "Chain",
value: "CHAIN",
},
{
name: "Hidden",
value: "HIDDEN",
},
{
name: "Unlocked",
value: "UNLOCKED",
},
],
});
}
// Check the Order of Prerequisites
const oldPrerequisites = shouldCheck
? quest["preRequisites:11"]
: [...quest["preRequisites:11"]]; // Copy if Changing
let rightOrder = true;
let prev: number = -1; // Smallest ID is 0
for (let i = 0; i < oldPrerequisites.length; i++) {
const pre = oldPrerequisites[i];
if (prev < pre) {
prev = pre;
continue;
}
if (prev === pre) {
if (shouldCheck)
throw new Error(
`Duplicate Prerequisites in Quest with ID ${foundID}!`,
);
logWarn(
`Removing Duplicate Prerequisite in Quest with ID ${foundID}...`,
);
quest["preRequisites:11"].splice(i, 1);
}
rightOrder = false;
break;
}
// Sort Prerequisites if Needed
if (!rightOrder) {
if (shouldCheck)
throw new Error(
`Prerequisites in Quest with ID ${foundID} are in the Wrong Order!`,
);
logWarn(`Sorting Prerequisites in Quest with ID ${foundID}...`);
const types = quest["preRequisiteTypes:7"];
if (!types) quest["preRequisites:11"].sort((a, b) => a - b);
else {
const preRequisites = new Map<number, number>();
quest["preRequisites:11"].forEach((pre, index) =>
preRequisites.set(pre, types[index]),
);
quest["preRequisites:11"].sort((a, b) => a - b);
for (let i = 0; i < quest["preRequisites:11"].length; i++) {
types[i] = preRequisites.get(quest["preRequisites:11"][i]) ?? 0;
}
}
}
// Check for Rewards that have Nomicoins
if (isExpert) stripRewards(quest, isExpert, true);
if (!shouldCheck) newQB[`${foundID}:10`] = quest;
}
// Check for Redundant Formatting in Quest Lines
logInfo("Checking Quest Lines...");
for (const lineKey of Object.keys(qb["questLines:9"])) {
const line = qb["questLines:9"][lineKey];
line["properties:10"]["betterquesting:10"]["name:8"] =
stripOrThrowExcessFormatting(
shouldCheck,
line["properties:10"]["betterquesting:10"]["name:8"],
line["lineID:3"],
"Quest Line",
"Name",
);
line["properties:10"]["betterquesting:10"]["desc:8"] =
stripOrThrowExcessFormatting(
shouldCheck,
line["properties:10"]["betterquesting:10"]["desc:8"],
line["lineID:3"],
"Quest Line",
"Description",
);
}
if (!shouldCheck) qb["questDatabase:9"] = newQB;
logInfo("Checking Properties...");
// Check Edit Mode
if (qb["questSettings:10"]["betterquesting:10"]["editmode:1"] !== 0) {
if (shouldCheck) throw new Error("Edit Mode is On!");
logWarn("Turning off Edit Mode...");
qb["questSettings:10"]["betterquesting:10"]["editmode:1"] = 0;
}
}
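/**
* Examples of the fixes applied (when checking, the same cases throw instead):
*   "§fText"   -> "§rText"  (formatting code 'f' replaced with 'r')
*   "§r§aText" -> "§aText"  (redundant reset at the start of the string removed)
*   "§a§bText" -> "§bText"  (immediately overridden formatting code removed)
*   a '§' followed by a non-formatting character is dropped together with it.
*/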
function stripOrThrowExcessFormatting(
shouldCheck: boolean,
value: string,
id: number,
name: string,
key: string,
): string {
if (!value.includes("§")) return value;
let builder: string[] = [];
for (let i = 0; i < value.length; i++) {
const char = value.charAt(i);
if (builder.at(-1) === "§") {
if (char === "f") {
if (shouldCheck)
throw new Error(
`${name} with ID ${id} at ${key} has Formatting Code 'f'!`,
);
logWarn(
`Replacing Formatting Code 'f' with 'r' in ${name} with ID ${id} at ${key}...`,
);
builder.push("r");
continue;
}
if (!isAvailableForFormatting.test(char)) {
if (shouldCheck)
throw new Error(
`${name} with ID ${id} at ${key} has Lone Formatting Signal!`,
);
logWarn(
`Removing Lone Formatting Signal in ${name} with ID ${id} at ${key}...`,
);
// Remove Last Element
builder = builder.slice(0, -1);
continue;
}
// Start of String, Remove Formatting is NOT Needed
if (builder.length === 1 && char === "r") {
if (shouldCheck)
throw new Error(
`${name} with ID ${id} at ${key} has Redundant Formatting!`,
);
logWarn(
`Removing Redundant Formatting from ${name} with ID ${id} at ${key}...`,
);
// Remove Previous
builder = [];
continue;
}
builder.push(char);
continue;
}
if (char === "§") {
// If the character two back was not § (when builder length < 2, `.at` returns undefined)
if (builder.at(-2) !== "§") {
builder.push(char);
continue;
}
if (shouldCheck)
throw new Error(
`${name} with ID ${id} at ${key} has Redundant Formatting!`,
);
logWarn(
`Removing Redundant Formatting from ${name} with ID ${id} at ${key}...`,
);
// Remove Previous
builder = builder.slice(0, -2);
}
builder.push(char);
}
return builder.join("");
}

View File

@ -0,0 +1,104 @@
import fs from "fs";
import upath from "upath";
import { rootDirectory } from "#globals";
import { Quest, QuestBook, QuestLine } from "#types/bqQuestBook.ts";
import { getFileAtRevision } from "#utils/util.ts";
import { getChanged, id, save, setupUtils } from "../actionQBUtils.ts";
import PortQBData from "./portQBData.ts";
import {
additions,
modifications,
removals,
setupLogic,
} from "./portQBLogic.ts";
import { setupModifications } from "./portQBModifications.ts";
import logInfo, { logError } from "../../../utils/log.ts";
let data: PortQBData;
export default async function portQBChanges(): Promise<void> {
data = new PortQBData();
await data.setup();
setupLogic(data);
setupModifications(data);
setupUtils(data);
const current = JSON.parse(
await fs.promises.readFile(
upath.join(rootDirectory, data.srcPath),
"utf-8",
),
) as QuestBook;
const old = JSON.parse(
await getFileAtRevision(data.srcPath, data.ref),
) as QuestBook;
const currentQuests = Object.values(current["questDatabase:9"]);
const oldQuests = Object.values(old["questDatabase:9"]);
const toChange = JSON.parse(
await fs.promises.readFile(
upath.join(rootDirectory, data.srcPathToChange),
"utf-8",
),
) as QuestBook;
const quests = Object.values(toChange["questDatabase:9"]);
data.currentIDsToQuests = new Map<number, Quest>(
currentQuests.map((quest) => [id(quest), quest]),
);
data.toChangeIDsToQuests = new Map<number, Quest>(
quests.map((quest) => [id(quest), quest]),
);
await data.readSavedPorter();
data.changed = getChanged(currentQuests, oldQuests);
const addedQuestIDs = data.changed.added.map((quest) => id(quest));
const modifiedQuestIDs = data.changed.modified.map((mod) => id(mod.oldQuest));
const removedQuestIDs = data.changed.removed.map((quest) => id(quest));
logInfo(`Quests Added: [${addedQuestIDs.join(", ")}]`);
logInfo(`Quests Modified: [${modifiedQuestIDs.join(", ")}]`);
logInfo(`Quests Removed: [${removedQuestIDs.join(", ")}]`);
if (
addedQuestIDs.length === 0 &&
modifiedQuestIDs.length === 0 &&
removedQuestIDs.length === 0
) {
logError("Files are the Same! No Changes Detected! Exiting...");
return;
}
if (addedQuestIDs.length > 0) await additions();
if (modifiedQuestIDs.length > 0) await modifications();
if (removedQuestIDs.length > 0) {
// Set the Old IDs to Quests
data.oldIDsToQuests = new Map<number, Quest>(
oldQuests.map((quest) => [id(quest), quest]),
);
// Set the Quest Line Changeable
data.questLines = Object.values(toChange["questLines:9"]);
await removals();
}
// Save Quest Database
const obj = {} as { [key: string]: Quest };
const iter = data.toChangeIDsToQuests.values();
for (let i = 0; i < data.toChangeIDsToQuests.size; i++) {
obj[`${i}:10`] = iter.next().value;
}
toChange["questDatabase:9"] = obj;
// Save Quest Lines, if Changed
if (data.questLines) {
const obj = {} as { [key: string]: QuestLine };
for (let i = 0; i < data.questLines.length; i++) {
obj[`${i}:10`] = data.questLines[i];
}
toChange["questLines:9"] = obj;
}
return save(toChange);
}

View File

@ -0,0 +1,172 @@
import { checkbox, input, select } from "@inquirer/prompts";
import {
Changed,
PortingType,
SavedPorter,
SourceOption,
} from "#types/actionQBTypes.ts";
import {
booleanSelect,
cfgExpertPath,
cfgNormalPath,
cfgOverrideExpertPath,
cfgOverrideNormalPath,
readFromPorter,
savedQuestPorter,
} from "../actionQBUtils.ts";
import { Quest, QuestLine } from "#types/bqQuestBook.ts";
import fs from "fs";
import logInfo from "#utils/log.ts";
import { modificationParsers } from "./portQBModifications.ts";
export default class PortQBData {
ref: string;
type: PortingType;
sourceOption: SourceOption;
srcPath: string;
srcPathToChange: string;
outputPaths: string[];
// Changed Maps b/w source qb and qb to change
changed: Changed;
// Map of quest id to quest on the old source qb (Only used in Remove)
oldIDsToQuests?: Map<number, Quest>;
// Map of quest id to quest on the source qb
currentIDsToQuests: Map<number, Quest>;
// Map of quest id to quest on the qb to change
toChangeIDsToQuests: Map<number, Quest>;
// Set of Quest IDs on the source qb to ignore
ignoreQuests: Set<number>;
// Set of Quest IDs on the source qb to always ask
alwaysAskQuests: Set<number>;
// Map from quest id on the source qb to the quest obj on the qb to change
foundQuests: Map<number, Quest>;
// Quest Lines Changeable (Only used in Remove)
questLines?: QuestLine[];
// The Porter that was read from.
savedPorter?: SavedPorter;
// Advanced Settings
modifyParsersIgnore: Set<string>;
/**
* Set some default values, so we don't have to deal with nullable values.
*/
constructor() {
this.ref = "main";
this.type = "NORMAL";
this.sourceOption = "CFG";
this.srcPath = "";
this.srcPathToChange = "";
this.outputPaths = [];
this.changed = { added: [], modified: [], removed: [] };
this.currentIDsToQuests = new Map<number, Quest>();
this.toChangeIDsToQuests = new Map<number, Quest>();
this.ignoreQuests = new Set<number>();
this.foundQuests = new Map<number, Quest>();
this.alwaysAskQuests = new Set<number>();
this.modifyParsersIgnore = new Set<string>();
}
async setup(): Promise<void> {
this.ref = await input({
message:
"What Commit SHA, Tag OR Branch should we compare to? (Defaults to 'main')",
default: "main",
});
this.type = (await select({
message: "How should we port?",
choices: [
{
name: "Normal to Expert",
value: "NORMAL",
},
{
name: "Expert to Normal",
value: "EXPERT",
},
],
})) as PortingType;
this.sourceOption = (await select({
message:
"Which version should we use, for both Source Files? (We need to check both Normal and Expert to Port!)",
choices: [
{
name: "Main Config Dir",
value: "CFG" as SourceOption,
},
{
name: "Config Overrides",
value: "CFG-OVERRIDE" as SourceOption,
},
],
})) as SourceOption;
switch (this.type) {
case "NORMAL":
this.srcPath =
this.sourceOption === "CFG" ? cfgNormalPath : cfgOverrideNormalPath;
this.srcPathToChange =
this.sourceOption === "CFG" ? cfgExpertPath : cfgOverrideExpertPath;
this.outputPaths = [cfgExpertPath, cfgOverrideExpertPath];
break;
case "EXPERT":
this.srcPath =
this.sourceOption === "CFG" ? cfgExpertPath : cfgOverrideExpertPath;
this.srcPathToChange =
this.sourceOption === "CFG" ? cfgNormalPath : cfgOverrideNormalPath;
this.outputPaths = [cfgExpertPath, cfgOverrideExpertPath];
break;
}
// If We Expand This to include a lot of Boolean Options, we can use https://github.com/Bartheleway/inquirer-table-multiple
if (
await booleanSelect(
"Open Advanced Modification Settings?",
"Yes",
"No",
false,
)
) {
this.modifyParsersIgnore = new Set<string>(
await checkbox({
message:
"What Modification Parsers Should We Ignore, If Any? (Other Small Logic And Value Changes are Handled Separately)",
choices: modificationParsers.map((parser) => {
return { name: parser.name, value: parser.id };
}),
required: false,
}),
);
}
}
async readSavedPorter(): Promise<void> {
if (!fs.existsSync(savedQuestPorter)) return;
if (
!(await booleanSelect(
"Saved Quest Port Helper, Which Saves the ID Paths Between the Quest Books, is Available! Use it?",
))
) {
logInfo("Not Using...");
return;
}
await readFromPorter(true);
return;
}
}

View File

@ -0,0 +1,137 @@
import PortQBData from "./portQBData.ts";
import {
booleanSelect,
dependencies,
emptyQuest,
findQuest,
id,
name,
stripRewards,
} from "../actionQBUtils.ts";
import { Quest } from "#types/bqQuestBook.ts";
import { performModification } from "./portQBModifications.ts";
import logInfo, { logNotImportant } from "../../../utils/log.ts";
let data: PortQBData;
export function setupLogic(dataIn: PortQBData): void {
data = dataIn;
}
export async function additions(): Promise<void> {
if (!(await booleanSelect("Port Additions?"))) {
logNotImportant("Skipping...");
return;
}
let newID =
[...data.toChangeIDsToQuests.keys()].sort((a, b) => a - b).pop() ?? 0;
const addingQuests: Quest[] = [];
logNotImportant("Porting Additions...");
for (const addition of data.changed.added) {
if (
!(await booleanSelect(
`Would you like to add Quest with ID ${id(addition)} and Name ${name(addition)}?`,
"Continue",
"Cancel",
))
) {
logInfo("Skipping...");
continue;
}
const addingID = ++newID;
logInfo(`Adding New Quest with ID ${addingID}...`);
const newQuest = { ...addition } as Quest; // Copy Quest
newQuest["questID:3"] = addingID;
addingQuests.push(newQuest);
// Add to Auto Quest Detection Map
data.foundQuests.set(id(addition), newQuest);
}
// Sort out Dependencies (Do Afterwards, so if new quests dep on each other, that works)
if (addingQuests.length === 0) return;
for (const quest of addingQuests) {
logInfo(
`Modifying Deps for Quest with ID ${id(quest)} and Name ${name(quest)}...`,
);
const deps = dependencies(quest);
const depTypes = quest["preRequisiteTypes:7"];
const useDepTypes = depTypes && depTypes.length === deps.length;
for (let i = 0; i < deps.length; i++) {
const depQuest = await findQuest(deps[i]);
if (!depQuest) {
deps.splice(i, 1);
if (useDepTypes) {
depTypes.splice(i, 1);
}
// splice shifted the next element into index i; step back so it isn't skipped
i--;
} else deps[i] = id(depQuest);
}
// if we are porting TO expert, strip rewards
if (data.type === "NORMAL") {
logInfo("Stripping Rewards...");
stripRewards(quest);
}
// Push to Output
data.toChangeIDsToQuests.set(id(quest), quest);
}
}
export async function modifications(): Promise<void> {
if (!(await booleanSelect("Port Modifications?"))) {
logNotImportant("Skipping...");
return;
}
logNotImportant("Porting Modifications...");
for (const modified of data.changed.modified) {
await performModification(modified);
}
}
export async function removals(): Promise<void> {
if (!(await booleanSelect("Port Removals?"))) {
logNotImportant("Skipping...");
return;
}
logNotImportant("Porting Removals...");
for (const removal of data.changed.removed) {
if (
!(await booleanSelect(
`Would you like to remove Quest with ID ${id(removal)} and Name ${name(removal)}?`,
"Continue",
"Cancel",
))
) {
logInfo("Skipping...");
continue;
}
const quest = await findQuest(
id(removal),
data.oldIDsToQuests?.get(id(removal)),
);
if (!quest) {
logInfo("Skipping, Could not find Corresponding Quest...");
continue;
}
logInfo("Removing Quest...");
const newId = id(quest);
const newEmptyQuest = { ...emptyQuest } as Quest; // Copy Quest
newEmptyQuest["questID:3"] = newId;
data.toChangeIDsToQuests.set(newId, newEmptyQuest);
// Remove quest from Quest Lines
if (data.questLines) {
for (const line of data.questLines) {
for (const key of Object.keys(line["quests:9"])) {
const questInfo = line["quests:9"][key];
if (newId === questInfo["id:3"]) delete line["quests:9"][key];
}
}
}
}
}

View File

@ -0,0 +1,901 @@
import {
BunchedParserPath,
ChangeAndPath,
CustomDescriptionTaskTemplate,
DescriptionTaskChange,
LogicType,
Message,
Modified,
Parser,
QuestChange,
SimpleLogic,
TaskDifferentSolution,
YesIgnoreNo,
} from "#types/actionQBTypes.ts";
import PortQBData from "./portQBData.ts";
import DiffMatchPatch from "diff-match-patch";
import picomatch from "picomatch";
import { booleanSelect, findQuest, id, name } from "../actionQBUtils.ts";
import fakeDiff from "fake-diff";
import { Operation } from "just-diff";
import logInfo, { logError, logNotImportant, logWarn } from "#utils/log.ts";
import dedent from "dedent-js";
import { confirm, editor, input, select } from "@inquirer/prompts";
import colors from "colors";
import { stringify } from "javascript-stringify";
import { Quest, Task } from "#types/bqQuestBook.ts";
import { ArrayUnique } from "#utils/util.ts";
import lodash from "lodash";
let data: PortQBData;
const dmp = new DiffMatchPatch();
const taskKey = "tasks";
export function setupModifications(dataIn: PortQBData): void {
data = dataIn;
}
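/**
* Gathers every parser that matches this quest's diff paths, condenses duplicate
* change descriptions into one message with a repeat count, asks whether to
* proceed, locates the corresponding quest in the target book, then runs the
* gathered handlers against it.
*/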
export async function performModification(modify: Modified): Promise<void> {
const { formattedNames, callableFunctions } = findAllParsers(modify);
if (formattedNames.length === 0 || callableFunctions.length === 0) {
logNotImportant(
`All Modification Changes on this Quest with ID ${id(modify.currentQuest)} and Name ${name(
modify.currentQuest,
)} were Skipped.`,
);
return;
}
// Relying on the fact that the messages are sorted, condense duplicates.
const output: Message[] = [];
let lastMsg: string | undefined = undefined;
for (let i = 0; i < formattedNames.length; i++) {
if (i !== 0 && formattedNames[i] === lastMsg) {
output[i - 1].incrementRepeats();
continue;
}
output.push(new Message(formattedNames[i]));
lastMsg = formattedNames[i];
}
if (
!(await booleanSelect(
`Would you like to perform modifications on Quest with ID ${id(modify.currentQuest)} and Name ${name(
modify.currentQuest,
)}?\nChanges: ${output.map((msg) => msg.toFormattedString()).join(", ")}`,
))
) {
logNotImportant("Skipping...");
return;
}
const quest = await findQuest(id(modify.currentQuest));
if (!quest) {
logInfo("Skipping, Could not find Corresponding Quest...");
return;
}
for (const func of callableFunctions) {
await func(quest);
}
}
function getSimpleFormattedParserName(
parser: Parser,
logic: SimpleLogic,
path: string[],
op: Operation,
) {
if (!logic.formattedName) return `${parser.name} ${formatOp(op)}`;
return logic.formattedName(path, op);
}
function formatOp(operation: Operation): string {
switch (operation) {
case "add":
return "Addition";
default:
case "replace":
return "Modification";
case "remove":
return "Removal";
}
}
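/**
* Matches each individual change against the parsers below (the first matching,
* non-ignored parser wins). Simple parsers produce one handler per change
* (optionally only once per quest via applyOnce); bunched parsers group related
* changes (e.g. all task changes) with applyTogether and produce one handler
* per group.
*/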
function findAllParsers(modify: Modified): {
formattedNames: string[];
callableFunctions: ((questToModify: Quest) => Promise<void>)[];
} {
const outputFunctions: ((questToModify: Quest) => Promise<void>)[] = [];
const outputFormatted: string[] = [];
const foundBuncableParsers = new Map<string, BunchedParserPath[]>();
const foundSimpleParserIds = new Set<string>();
for (const change of modify.change) {
const pathList = (change.path as string[] | number[]).map(
(path: string | number): string => {
if (typeof path === "number") path = (path as number).toString();
return path.split(":")[0];
},
);
const path = pathList.join("/");
// Instead of filtering out ignored parsers before, we must check if the parser match is one that is ignored
// This is because otherwise the general parser would be called instead
for (const parser of modificationParsers) {
if (!parser.condition(path)) continue;
// ID Checks and Handles
if (data.modifyParsersIgnore.has(parser.id)) {
logNotImportant(
`Skipping Change with Parser with id '${parser.id}'...`,
);
break;
}
if (parser.logic.type === LogicType.Simple) {
if (foundSimpleParserIds.has(parser.id)) break;
if (parser.logic.applyOnce) foundSimpleParserIds.add(parser.id);
}
// Simple Parser Logic
if (parser.logic.type === LogicType.Simple) {
outputFunctions.push(
async (quest) =>
await (parser.logic as SimpleLogic).func(
quest,
modify,
change,
pathList,
),
);
outputFormatted.push(
getSimpleFormattedParserName(
parser,
parser.logic,
pathList,
change.op,
),
);
break;
}
// Bunched Parser Logic
const changeAndPath: ChangeAndPath = { change: change, path: pathList };
if (!foundBuncableParsers.has(parser.id)) {
foundBuncableParsers.set(parser.id, [
{ logic: parser.logic, changeAndPath: [changeAndPath] },
]);
break;
}
let foundBunch = false;
for (const parserBunch of foundBuncableParsers.get(parser.id) ?? []) {
if (
!parserBunch.logic.applyTogether(
parserBunch.changeAndPath[0].path,
pathList,
)
)
continue;
parserBunch.changeAndPath.push(changeAndPath);
foundBunch = true;
}
if (!foundBunch) {
foundBuncableParsers
.get(parser.id)
?.push({ logic: parser.logic, changeAndPath: [changeAndPath] });
}
break;
}
}
// Change Bunched Parsers Into Function
for (const bunchList of foundBuncableParsers.values()) {
for (const bunch of bunchList) {
outputFunctions.push(
async (quest) =>
await bunch.logic.func(quest, modify, bunch.changeAndPath),
);
outputFormatted.push(...bunch.logic.formattedName(bunch.changeAndPath));
}
}
return {
formattedNames: outputFormatted.sort(),
callableFunctions: outputFunctions,
};
}
function assertIsModification(change: QuestChange) {
if (change.op !== "replace")
throw new Error(dedent`
Runtime Exception: Addition/Removal Should Not Happen Here! Report this to the Core Devs of Nomi-CEu!
Path: ${change.path.toString()}
`);
}
const modifyDesc = async (
questToModify: Quest,
modify: Modified,
change: QuestChange,
) => {
assertIsModification(change);
const oldQuest =
modify.oldQuest["properties:10"]["betterquesting:10"]["desc:8"];
const newQuest =
modify.currentQuest["properties:10"]["betterquesting:10"]["desc:8"];
const originalQuest =
questToModify["properties:10"]["betterquesting:10"]["desc:8"];
logInfo(colors.bold("Change in Source Quest:"));
console.log(fakeDiff(oldQuest, newQuest));
const apply = dmp.patch_apply(
dmp.patch_make(oldQuest, newQuest),
originalQuest,
)[0];
logInfo(colors.bold("If Applied:"));
console.log(fakeDiff(originalQuest, apply));
logInfo(colors.bold("If Replaced:"));
console.log(fakeDiff(originalQuest, newQuest));
const applyMode = (await select({
message: "How Should we Apply this Change to the Description?",
choices: [
{ name: "Apply Changes on top of Existing Description", value: "APPLY" },
{ name: "Replace Existing Description", value: "REPLACE" },
{ name: "Enter Own Description", value: "CUSTOM" },
{ name: "Ignore this Change", value: "IGNORE" },
],
})) as DescriptionTaskChange;
if (applyMode === "IGNORE") {
logInfo("Ignoring...");
return;
}
let description: string;
switch (applyMode) {
case "APPLY":
logInfo("Applying Description Change...");
description = apply;
break;
case "REPLACE":
logInfo("Replacing Description...");
description = newQuest;
break;
case "CUSTOM":
const template = (await select({
message: "What Should the Original Text Be?",
choices: [
{ name: "Description with Changes Applied", value: "APPLY" },
{ name: "Description with Changes Replaced", value: "REPLACE" },
{ name: "Original Description", value: "ORIGINAL" },
],
})) as CustomDescriptionTaskTemplate;
const templateStr =
template === "APPLY"
? apply
: template === "REPLACE"
? newQuest
: originalQuest;
description = await editor({
message:
"Enter your Custom Description. Enter an Empty String to Cancel!",
default: templateStr,
});
if (!description) {
logInfo("Cancelling...");
return;
}
break;
}
questToModify["properties:10"]["betterquesting:10"]["desc:8"] = description;
};
const modifyIcon = async (
questToModify: Quest,
modify: Modified,
change: QuestChange,
) => {
assertIsModification(change);
const oldIcon =
modify.oldQuest["properties:10"]["betterquesting:10"]["icon:10"];
const newIcon =
modify.currentQuest["properties:10"]["betterquesting:10"]["icon:10"];
const currentIcon =
questToModify["properties:10"]["betterquesting:10"]["icon:10"];
const newIconString = stringify(newIcon, null, 2) ?? "";
logInfo(colors.bold("Change in Source Quest:"));
console.log(fakeDiff(stringify(oldIcon, null, 2) ?? "", newIconString));
logInfo(colors.bold("If Applied to Current Quest:"));
console.log(fakeDiff(stringify(currentIcon, null, 2) ?? "", newIconString));
if (!(await booleanSelect("Should we Apply This Icon Change?"))) {
logNotImportant("Skipping...");
return;
}
logInfo("Applying Icon Change...");
questToModify["properties:10"]["betterquesting:10"]["icon:10"] = newIcon;
};
const modifyTasks = async (
questToModify: Quest,
modify: Modified,
changeAndPaths: ChangeAndPath[],
) => {
logInfo("Performing Tasks Change...");
const oldTasks = modify.oldQuest["tasks:9"];
const newTasks = modify.currentQuest["tasks:9"];
const currentTasks = questToModify["tasks:9"];
let same: boolean = true;
let toModify: Map<number, Task>;
if (!lodash.isEqual(oldTasks, currentTasks)) {
logWarn(
"The Tasks Object in the Current Quest and the Original Source Quest is different!",
);
logInfo(colors.bold("Change:"));
console.log(
fakeDiff(
stringify(Object.values(currentTasks), null, 2) ?? "",
stringify(Object.values(oldTasks), null, 2) ?? "",
),
);
const solution = (await select({
message: "What should we do?",
choices: [
{
name: "Replace the Current Tasks with the Original Tasks",
value: "APPLY",
},
{
name: "Continue without Replacing (MAY CAUSE PROBLEMS!)",
value: "CONTINUE",
},
{
name: "Ignore this Change",
value: "IGNORE",
},
],
})) as TaskDifferentSolution;
switch (solution) {
case "APPLY":
logInfo("Replacing...");
toModify = new Map(
Object.values(oldTasks).map((task) => [task["index:3"], task]),
);
break;
case "CONTINUE":
logWarn(
"Warning: Please Check the Context of each Change in the JSON File before Applying!",
);
same = false;
break;
case "IGNORE":
logNotImportant("Skipping...");
return;
}
} else {
if (!(await booleanSelect("Should we Apply Task Changes on this Quest?"))) {
logNotImportant("Skipping...");
return;
}
}
toModify ??= new Map(
Object.values(currentTasks).map((task) => [task["index:3"], task]),
);
// Sort Changes into Map of Index to Changes
const changes = new Map<number, ChangeAndPath[]>();
changeAndPaths.forEach((change) => {
const index = getIndex(change.path, taskKey);
if (!changes.has(index)) changes.set(index, [change]);
else changes.get(index)?.push(change);
});
for (const entry of changes.entries()) {
const [index, changes] = entry;
if (index < 0)
throw new Error("Invalid Path! Report to the Core Devs of Nomi-CEu!");
// Are we adding/removing a whole task?
if (
changes.length === 1 &&
isAddingOrRemovingComplexTask(changes[0].path)
) {
let task: Task;
const change = changes[0];
if (change.change.op === "add")
task = newTasks[change.change.path.at(-1) ?? "0:10"];
else {
const foundTask = toModify.get(index);
if (!foundTask) {
logError(
`Current Task Object does not Contain Index ${index}! Skipping...`,
);
continue;
}
task = foundTask;
}
const id = task["taskID:8"];
if (
!(await booleanSelect(
`Should we ${change.change.op === "add" ? "Add" : "Remove"} Task No. ${index + 1} with ID ${id}?`,
))
) {
logNotImportant("Skipping...");
continue;
}
if (change.change.op === "add") {
const newIndex = same
? index
: (lodash.max(Array.from(toModify.keys())) ?? 0) + 1;
const newTask: Task = { ...task };
if (!same) newTask["index:3"] = newIndex;
logInfo(`Adding Task No. ${newIndex + 1} and ID ${id}...`);
toModify.set(newIndex, newTask);
} else {
logInfo(`Removing Task No. ${index + 1} and ID ${id}...`);
toModify.delete(index);
}
continue;
}
// Modification of a Task
const oldTask = Object.values(oldTasks)[index];
const newTask = Object.values(newTasks)[index];
let task = toModify.get(index);
if (same) {
if (!task) {
throw new Error(
`Current Task Object does not Contain Index ${index}! Please Report this to the Core Devs of Nomi-CEu!`,
);
}
logInfo(colors.bold("Change:"));
console.log(
fakeDiff(
stringify(task, null, 2) ?? "",
stringify(newTask, null, 2) ?? "",
),
);
if (!(await booleanSelect("Should we Apply this Change?"))) {
logNotImportant("Skipping...");
continue;
}
logInfo("Applying Change...");
toModify.set(index, { ...newTask });
continue;
}
let confirmedTask: Task | undefined = undefined;
let cancelled: boolean = false;
while (!confirmedTask) {
if (!task) {
const retrievedIndex = Number.parseInt(
await input({
message: `Corresponding Index for Task with Index ${index} is Empty! Please enter the Corresponding Index: (-1 to Cancel/Ignore)`,
validate: (value) => {
const numValue = Number.parseInt(value);
if (numValue === -1) return true; // Allow Cancelling
if (isNaN(numValue) || numValue < 0) {
return "Please Enter a Number Value >= 0!";
}
const foundTask = toModify.get(numValue);
if (!foundTask) {
return "Please Enter a Valid 0-Based Index!";
}
return true;
},
}),
);
if (retrievedIndex === -1) {
logNotImportant("Skipping...");
cancelled = true;
break;
}
task = toModify.get(retrievedIndex);
}
if (!task)
throw new Error(
"Task is Undefined! This should not Happen! Report this to the Core Devs of Nomi-CEu!",
);
logInfo(
`Does Task with Index ${index} in Source Quest Correspond to Task with Index ${task["index:3"]} in the Target Quest?`,
);
logInfo(colors.bold("Difference:"));
console.log(
fakeDiff(
stringify(oldTask, null, 2) ?? "",
stringify(task, null, 2) ?? "",
),
);
const choice = (await select({
message: "Is this Correct?",
choices: [
{
name: "Yes",
value: "YES",
},
{
name: "No",
value: "NO",
},
{
name: "Ignore",
value: "IGNORE",
},
],
})) as YesIgnoreNo;
if (choice === "IGNORE") {
logNotImportant("Skipping...");
cancelled = true;
break;
}
if (choice === "NO") {
logInfo("Please Enter the Correct Index Below.");
task = undefined;
continue;
}
confirmedTask = task;
}
if (cancelled) continue;
const oldTaskString = stringify(oldTask, null, 2) ?? "";
const newTaskString = stringify(newTask, null, 2) ?? "";
const currentTaskString = stringify(confirmedTask, null, 2) ?? "";
logInfo(colors.bold("Change in Source Quest:"));
console.log(fakeDiff(oldTaskString, newTaskString));
const apply = dmp.patch_apply(
dmp.patch_make(oldTaskString, newTaskString),
currentTaskString,
)[0];
logInfo(colors.bold("If Applied:"));
console.log(fakeDiff(currentTaskString, apply));
logInfo(colors.bold("If Replaced:"));
console.log(fakeDiff(currentTaskString, newTaskString));
const applyMode = (await select({
message: "How Should we Apply this Task Change?",
choices: [
{
name: "Apply Changes on top of Existing Task",
value: "APPLY",
},
{ name: "Replace Existing Task", value: "REPLACE" },
{ name: "Enter Own Task", value: "CUSTOM" },
{ name: "Ignore this Change", value: "IGNORE" },
],
})) as DescriptionTaskChange;
if (applyMode === "IGNORE") {
logInfo("Ignoring...");
continue;
}
let taskObj: Task | undefined;
switch (applyMode) {
case "APPLY":
logInfo("Applying Description Change...");
try {
taskObj = JSON.parse(apply) as Task;
} catch (e) {
logWarn("Invalid JSON! Enter your own Below!");
taskObj = await getCustomTasks(
currentTaskString,
newTaskString,
apply,
);
}
break;
case "REPLACE":
logInfo("Replacing Description...");
taskObj = { ...newTask };
break;
case "CUSTOM":
taskObj = await getCustomTasks(currentTaskString, newTaskString, apply);
break;
}
if (!taskObj) continue;
logInfo("Performing Task Modification...");
toModify.set(confirmedTask?.["index:3"] ?? 0, taskObj);
}
questToModify["tasks:9"] = {};
for (const entry of toModify) {
questToModify["tasks:9"][`${entry[0]}:10`] = entry[1];
}
};
async function getCustomTasks(
originalTask: string,
newTask: string,
apply: string,
): Promise<Task | undefined> {
let foundTask: Task | undefined = undefined;
while (!foundTask) {
const template = (await select({
message: "What Should the Default Text Be?",
choices: [
{ name: "Description with Changes Applied", value: "APPLY" },
{ name: "Description with Changes Replaced", value: "REPLACE" },
{ name: "Original Description", value: "ORIGINAL" },
],
})) as CustomDescriptionTaskTemplate;
const templateStr =
template === "APPLY"
? apply
: template === "REPLACE"
? newTask
: originalTask;
const taskString = await editor({
message: "Enter your Custom Task. Enter an Empty String to Cancel!",
default: templateStr,
});
if (!taskString) {
logInfo("Cancelling...");
return undefined;
}
try {
foundTask = JSON.parse(taskString) as Task;
} catch (e) {
logWarn("Invalid JSON!");
foundTask = undefined;
}
}
return foundTask;
}
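/**
* Consumes the synthetic "preRequisites-CUSTOM" change emitted by the special
* modifier handler in actionQBUtils: its value is the ArrayUnique diff of the
* prerequisite arrays, where entries unique to the current quest are additions
* and entries unique to the old quest are removals.
*/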
const modifyPrerequisites = async (
questToModify: Quest,
modify: Modified,
change: QuestChange,
) => {
logInfo("Performing Prerequisite Modifications...");
// Get Array Diff
const arrayDiff = change.value as ArrayUnique<number>;
const preRequisiteArrayCurrent = modify.currentQuest["preRequisites:11"];
const preRequisiteTypeArrayCurrent =
modify.currentQuest["preRequisiteTypes:7"];
const preRequisiteArray = questToModify["preRequisites:11"];
const preRequisiteTypeArray = questToModify["preRequisiteTypes:7"];
const preRequisites = new Map<number, number>();
preRequisiteArray.forEach((pre, index) =>
preRequisites.set(
pre,
preRequisiteTypeArray ? preRequisiteTypeArray[index] : 0,
),
);
// Unique to Current: Added.
for (const added of arrayDiff.arr2Unique) {
const toAdd = await findQuest(added);
if (!toAdd) {
logInfo("Skipping, Could not find Corresponding Quest...");
return;
}
if (preRequisites.has(id(toAdd))) {
logNotImportant("Quest Already Contains Added Prerequisite.");
return;
}
if (
!(await booleanSelect(
`Should we Add Quest with ID ${id(toAdd)} and Name ${name(toAdd)} as a Prerequisite?`,
))
) {
logNotImportant("Skipping...");
return;
}
logInfo("Adding Prerequisite...");
const index = preRequisiteArrayCurrent.indexOf(added);
preRequisites.set(
id(toAdd),
index === -1 || !preRequisiteTypeArrayCurrent
? 0
: preRequisiteTypeArrayCurrent[index] ?? 0,
);
}
// Unique to Old: Removed.
for (const removed of arrayDiff.arr1Unique) {
const toRemove = await findQuest(removed);
if (!toRemove) {
logInfo("Skipping, Could not find Corresponding Quest...");
return;
}
if (!preRequisites.has(id(toRemove))) {
logNotImportant("Quest Does Not Contain Removed Prerequisite.");
return;
}
if (
!(await booleanSelect(
`Should we Remove Quest with ID ${id(toRemove)} and Name ${name(toRemove)} as a Prerequisite?`,
))
) {
logNotImportant("Skipping...");
return;
}
logInfo("Removing Prerequisite...");
preRequisites.delete(id(toRemove));
}
// Save
questToModify["preRequisites:11"] = Array.from(preRequisites.keys()).sort(
(a, b) => a - b,
);
if (
Array.from(preRequisites.values()).findIndex((value) => value !== 0) === -1
)
return;
const types: number[] = [];
for (let i = 0; i < questToModify["preRequisites:11"].length; i++) {
types[i] = preRequisites.get(questToModify["preRequisites:11"][i]) ?? 0;
}
questToModify["preRequisiteTypes:7"] = types;
};
const modifyGeneral = async (
questToModify: Quest,
modify: Modified,
change: QuestChange,
path: string[],
): Promise<void> => {
assertIsModification(change);
logInfo(`Change in '${path.at(-1)}':`);
const newValue = lodash.get(modify.currentQuest, change.path);
const newValueAsString = stringify(newValue) ?? "";
logInfo(colors.bold("Change in Source Quest:"));
console.log(
fakeDiff(
stringify(lodash.get(modify.oldQuest, change.path)) ?? "",
newValueAsString,
),
);
logInfo(colors.bold("Change if Applied:"));
console.log(
fakeDiff(
stringify(lodash.get(questToModify, change.path)) ?? "",
newValueAsString,
),
);
const shouldContinue = await confirm({
message: "Would you like to apply the Change?",
});
if (!shouldContinue) {
logNotImportant("Skipping...");
return;
}
logInfo("Applying Change...");
lodash.set(questToModify, change.path, newValue);
};
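/**
* A path of length 2 (e.g. ["tasks", "0"]) means a whole task entry was added or
* removed; longer paths (e.g. ["tasks", "0", "taskID"]) are changes inside a task.
*/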
function isAddingOrRemovingComplexTask(path: string[]): boolean {
return path.length === 2;
}
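/**
* e.g. getIndex(["tasks", "0", "taskID"], "tasks") -> 0
*      getIndex(["properties", "betterquesting", "desc"], "tasks") -> -1
*/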
function getIndex(path: string[], pathKey: string): number {
const index = path.indexOf(pathKey) + 1;
if (index == 0 || index >= path.length) return -1; // indexOf returns -1 if not found, +1 = 0
const num = Number.parseInt(path[index]);
if (Number.isNaN(num)) return -1;
return num;
}
function getFormattedNameWithIndex(
path: string[],
op: Operation,
pathKey: string,
baseName: string,
): string {
const defaultVal = `${baseName} ${formatOp(op)}`;
if (op !== "replace") return defaultVal;
const index = getIndex(path, pathKey);
if (index === -1) return defaultVal;
return `${baseName} No. ${index + 1} Modification`;
}
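/**
* Order matters: findAllParsers stops at the first parser whose condition
* matches, so the catch-all "general" parser must stay last.
*/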
export const modificationParsers = [
{
id: "icon",
name: "Icon",
condition: picomatch("properties/betterquesting/icon/*"),
logic: {
type: LogicType.Simple,
applyOnce: true,
func: modifyIcon,
},
},
{
id: "desc",
name: "Description",
condition: picomatch("properties/betterquesting/desc"),
logic: {
type: LogicType.Simple,
applyOnce: true,
func: modifyDesc,
},
},
{
id: taskKey,
name: "Task",
condition: picomatch("tasks/**/*"),
logic: {
type: LogicType.Bunched,
applyTogether: () => true,
formattedName: (changes) => {
const result: string[] = [];
changes = lodash.uniqBy(changes, (change) =>
getIndex(change.path, taskKey),
);
for (const change of changes) {
if (
!isAddingOrRemovingComplexTask(change.path) &&
change.change.op !== "replace"
)
change.change.op = "replace";
result.push(
getFormattedNameWithIndex(
change.path,
change.change.op,
taskKey,
"Task",
),
);
}
return result;
},
func: modifyTasks,
},
},
{
id: "prerequisites",
name: "Prerequisite",
condition: picomatch("preRequisites-CUSTOM"),
logic: {
type: LogicType.Simple,
applyOnce: false,
func: modifyPrerequisites,
},
},
{
id: "general",
name: "General Changes",
condition: picomatch("**/*"),
logic: {
type: LogicType.Simple,
applyOnce: false,
formattedName: (path, op) => `'${path.at(-1)}' ${formatOp(op)}`,
func: modifyGeneral,
},
},
] as Parser[];

View File

@ -2,8 +2,12 @@ import gulp from "gulp";
import rename from "gulp-rename";
import merge from "merge-stream";
import upath from "upath";
import buildConfig from "../../buildConfig";
import { langDestDirectory, overridesFolder, sharedDestDirectory } from "../../globals";
import buildConfig from "#buildConfig";
import {
langDestDirectory,
overridesFolder,
sharedDestDirectory,
} from "#globals";
import fs from "fs";
/**
@ -16,9 +20,13 @@ async function createLangDirs() {
}
async function copyLang() {
const resourcesPath = upath.join(sharedDestDirectory, overridesFolder, "resources");
const resourcesPath = upath.join(
sharedDestDirectory,
overridesFolder,
"resources",
);
const opts = { nodir: true, base: resourcesPath };
const opts = { base: resourcesPath, resolveSymlinks: true };
const streams = [
gulp.src(upath.join(resourcesPath, "pack.mcmeta"), opts),
gulp.src(upath.join(resourcesPath, "**/*.lang"), opts).pipe(
@ -30,7 +38,11 @@ async function copyLang() {
return await new Promise((resolve) => {
merge(...streams)
.pipe(gulp.dest(upath.join(buildConfig.buildDestinationDirectory, langDestDirectory)))
.pipe(
gulp.dest(
upath.join(buildConfig.buildDestinationDirectory, langDestDirectory),
),
)
.on("end", resolve);
});
}

View File

@ -1,9 +1,14 @@
import log from "fancy-log";
import { fetchFileInfo, fetchFilesBulk, fetchProject, fetchProjectsBulk } from "../../util/curseForgeAPI";
import { modpackManifest } from "../../globals";
import { checkGitTag, getFileAtRevision } from "../../util/util";
import { ModpackManifest } from "../../types/modpackManifest";
import { CurseForgeFileInfo, CurseForgeModInfo } from "../../types/curseForge";
import {
fetchFileInfo,
fetchFilesBulk,
fetchProject,
fetchProjectsBulk,
} from "#utils/curseForgeAPI.ts";
import { modpackManifest } from "#globals";
import { checkGitTag, getFileAtRevision } from "#utils/util.ts";
import { ModpackManifest } from "#types/modpackManifest.ts";
import { CurseForgeFileInfo, CurseForgeModInfo } from "#types/curseForge.ts";
import logInfo from "#utils/log.ts";
export interface ModFileInfo {
modInfo: CurseForgeModInfo;
@ -16,13 +21,13 @@ export interface ModFileInfo {
* Fetches mod links and builds a modlist.
*/
export async function createModList(tag = ""): Promise<ModFileInfo[]> {
log("Fetching mod & file infos...");
logInfo("Fetching mod & file infos...");
let manifest: ModpackManifest = modpackManifest;
if (tag) {
checkGitTag(tag);
manifest = JSON.parse(getFileAtRevision("manifest.json", tag));
manifest = JSON.parse(await getFileAtRevision("manifest.json", tag));
}
manifest.files.sort((a, b) => a.projectID - b.projectID);
@ -37,7 +42,7 @@ export async function createModList(tag = ""): Promise<ModFileInfo[]> {
.sort((a, b) => a.fileID - b.fileID),
);
log("Fetched Infos. Creating modlist...");
logInfo("Fetched Infos. Creating modlist...");
// Create modlist
const output: ModFileInfo[] = [];

View File

@ -1,13 +1,13 @@
import { modDestDirectory, modpackManifest } from "../../globals";
import { fetchMods } from "../../util/curseForgeAPI";
import { modDestDirectory, modpackManifest } from "#globals";
import { fetchMods } from "#utils/curseForgeAPI.ts";
import upath from "upath";
import fs from "fs";
import log from "fancy-log";
import del from "del";
import { deleteAsync } from "del";
import gulp from "gulp";
import logInfo from "#utils/log.ts";
async function modCleanUp() {
return del(upath.join(modDestDirectory, "*"), { force: true });
return deleteAsync(upath.join(modDestDirectory, "*"), { force: true });
}
/**
@ -16,11 +16,15 @@ async function modCleanUp() {
async function createModDirs() {
// This also makes the base dir, as it is recursive.
if (!fs.existsSync(upath.join(modDestDirectory, "client"))) {
await fs.promises.mkdir(upath.join(modDestDirectory, "client"), { recursive: true });
await fs.promises.mkdir(upath.join(modDestDirectory, "client"), {
recursive: true,
});
}
if (!fs.existsSync(upath.join(modDestDirectory, "server"))) {
await fs.promises.mkdir(upath.join(modDestDirectory, "server"), { recursive: true });
await fs.promises.mkdir(upath.join(modDestDirectory, "server"), {
recursive: true,
});
}
}
@ -28,19 +32,19 @@ async function createModDirs() {
* Downloads mods according to manifest.json and checks hashes.
*/
export async function downloadMods(): Promise<void> {
log("Fetching Shared Mods...");
logInfo("Fetching Shared Mods...");
await fetchMods(
modpackManifest.files.filter((f) => !f.sides),
modDestDirectory,
);
log("Fetching Client Mods...");
logInfo("Fetching Client Mods...");
await fetchMods(
modpackManifest.files.filter((f) => f.sides && f.sides.includes("client")),
upath.join(modDestDirectory, "client"),
);
log("Fetching Server Mods...");
logInfo("Fetching Server Mods...");
await fetchMods(
modpackManifest.files.filter((f) => f.sides && f.sides.includes("server")),
upath.join(modDestDirectory, "server"),

View File

@ -1,11 +1,11 @@
import { modpackManifest } from "../../globals";
import { makeArtifactNameBody } from "../../util/util";
import { modpackManifest } from "#globals";
import { makeArtifactNameBody } from "#utils/util.ts";
import sanitize from "sanitize-filename";
import { setOutput } from "@actions/core";
export async function makeArtifactNames(): Promise<void> {
const body = makeArtifactNameBody(modpackManifest.name);
const names = {
const names: Record<string, string> = {
client: body + "-client",
server: body + "-server",
lang: body + "-lang",

View File

@ -1,18 +1,13 @@
import Bluebird from "bluebird";
import { modpackManifest } from "../../globals";
import { downloadOrRetrieveFileDef, getVersionManifest, libraryToPath } from "../../util/util";
import { modpackManifest } from "#globals";
import { FORGE_MAVEN, getForgeJar, getVersionManifest } from "#utils/util.ts";
import unzip from "unzipper";
import { ForgeProfile } from "../../types/forgeProfile";
import log from "fancy-log";
import { ForgeProfile } from "#types/forgeProfile.ts";
import sha1 from "sha1";
import { fetchFileInfo } from "../../util/curseForgeAPI";
import { VersionManifest } from "../../types/versionManifest";
import { fetchFileInfo } from "#utils/curseForgeAPI.ts";
import fs from "fs";
import upath from "upath";
import buildConfig from "../../buildConfig";
const FORGE_VERSION_REG = /forge-(.+)/;
const FORGE_MAVEN = "https://files.minecraftforge.net/maven/";
import buildConfig from "#buildConfig";
import logInfo from "#utils/log.ts";
/**
* Download the Forge jar.
@ -21,40 +16,12 @@ const FORGE_MAVEN = "https://files.minecraftforge.net/maven/";
* except we only download/fetch the Forge jar and enumerate the libraries it has.
*/
async function getForgeURLs() {
const minecraft = modpackManifest.minecraft;
/**
* Break down the Forge version defined in manifest.json.
*/
const parsedForgeEntry = FORGE_VERSION_REG.exec(
(minecraft.modLoaders.find((x) => x.id && x.id.indexOf("forge") != -1) || {}).id || "",
);
if (!parsedForgeEntry) {
throw new Error("Malformed Forge version in manifest.json.");
}
/**
* Transform Forge version into Maven library path.
*/
const forgeMavenLibrary = `net.minecraftforge:forge:${minecraft.version}-${parsedForgeEntry[1]}`;
const forgeInstallerPath = libraryToPath(forgeMavenLibrary) + "-installer.jar";
/**
* Fetch the Forge installer
*/
const forgeJar = await fs.promises.readFile(
(
await downloadOrRetrieveFileDef({
url: FORGE_MAVEN + forgeInstallerPath,
})
).cachePath,
);
const { forgeJar, forgeInstallerPath } = await getForgeJar();
/**
* Parse the profile manifest.
*/
let forgeProfile: ForgeProfile;
let forgeProfile: ForgeProfile | undefined = undefined;
const files = (await unzip.Open.buffer(forgeJar))?.files;
if (!files) {
@ -75,9 +42,14 @@ async function getForgeURLs() {
/**
* Finally, fetch libraries.
*/
const libraries = forgeProfile.libraries.filter((x) => Boolean(x?.downloads?.artifact?.url));
const libraries = forgeProfile.libraries.filter((x) =>
Boolean(x?.downloads?.artifact?.url),
);
return [FORGE_MAVEN + forgeInstallerPath, ...libraries.map((library) => library.downloads.artifact.url)];
return [
FORGE_MAVEN + forgeInstallerPath,
...libraries.map((library) => library.downloads.artifact.url),
];
}
/**
@ -90,13 +62,19 @@ export default async function pruneCache(): Promise<void> {
urls.push(...(await getForgeURLs()).map((url) => url));
// Fetch file infos.
const fileInfos = await Bluebird.map(modpackManifest.files, (file) => fetchFileInfo(file.projectID, file.fileID));
const fileInfos = await Promise.all(
modpackManifest.files.map(async (file) =>
fetchFileInfo(file.projectID, file.fileID),
),
);
urls.push(...fileInfos.map((fileInfo) => fileInfo.downloadUrl));
// Fetch the Minecraft server.
const versionManifest: VersionManifest = await getVersionManifest(modpackManifest.minecraft.version);
const versionManifest = await getVersionManifest(
modpackManifest.minecraft.version,
);
if (!versionManifest) {
throw new Error(`No manifest found for Minecraft ${versionManifest.id}`);
throw new Error(`No manifest found for Minecraft ${modpackManifest.minecraft.version}`);
}
urls.push(versionManifest.downloads.server.url);
@ -105,11 +83,21 @@ export default async function pruneCache(): Promise<void> {
urls.push(...modpackManifest.externalDependencies.map((dep) => dep.url));
}
const cache = (await fs.promises.readdir(buildConfig.downloaderCacheDirectory)).filter((entity) =>
fs.statSync(upath.join(buildConfig.downloaderCacheDirectory, entity)).isFile(),
const cache = (
await fs.promises.readdir(buildConfig.downloaderCacheDirectory)
).filter((entity) =>
fs
.statSync(upath.join(buildConfig.downloaderCacheDirectory, entity))
.isFile(),
);
const shaMap: { [key: string]: boolean } = urls.reduce((map, url) => ((map[sha1(url)] = true), map), {});
const shaMap: { [key: string]: boolean } = urls.reduce(
(map: Record<string, boolean>, url) => {
map[sha1(url)] = true;
return map;
},
{},
);
let count = 0,
bytes = 0;
@ -122,12 +110,12 @@ export default async function pruneCache(): Promise<void> {
if (stat && stat.isFile()) {
count += 1;
bytes += stat.size;
log(`Pruning ${sha}...`);
logInfo(`Pruning ${sha}...`);
await fs.promises.unlink(path);
}
}
}
log(`Pruned ${count} files (${(bytes / 1024 / 1024).toFixed(3)} MiB)`);
logInfo(`Pruned ${count} files (${(bytes / 1024 / 1024).toFixed(3)} MiB)`);
}
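
A minimal sketch of the pruning rule used above, assuming (as the cache filtering here suggests) that each entry in the downloader cache is a file named by the sha1 of its source URL; the directory path and URL list are illustrative stand-ins for buildConfig.downloaderCacheDirectory and the collected download URLs.

import fs from "fs";
import upath from "upath";
import sha1 from "sha1";

const wantedUrls = ["https://example.invalid/some-mod.jar"]; // illustrative
const cacheDir = "./.cache/downloads"; // stand-in for buildConfig.downloaderCacheDirectory

// Mark every sha we still want to keep.
const shaMap: Record<string, boolean> = {};
for (const url of wantedUrls) shaMap[sha1(url)] = true;

// Remove cached files whose name (sha) is no longer referenced.
for (const entry of await fs.promises.readdir(cacheDir)) {
  const path = upath.join(cacheDir, entry);
  const stat = await fs.promises.stat(path);
  if (stat.isFile() && !shaMap[entry]) {
    await fs.promises.unlink(path);
  }
}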

View File

@ -7,14 +7,14 @@ import {
serverDestDirectory,
sharedDestDirectory,
templatesFolder,
} from "../../globals";
} from "#globals";
import mustache from "mustache";
import gulp from "gulp";
import dedent from "dedent-js";
import { isEnvVariableSet } from "../../util/util";
import sortedStringify from "json-stable-stringify-without-jsonify";
import { error } from "fancy-log";
import { BuildData } from "../../types/transformFiles";
import { isEnvVariableSet } from "#utils/util.ts";
import { BuildData } from "#types/transformFiles.ts";
import { logWarn } from "#utils/log.ts";
import sortKeysRecursive from "sort-keys-recursive";
// This updates all the files, for a release.
@ -31,7 +31,9 @@ async function updateFilesSetup(): Promise<void> {
// See if current run is to update files
if (isEnvVariableSet("UPDATE_FILES")) {
try {
updateFiles = JSON.parse(process.env.UPDATE_FILES.toLowerCase());
updateFiles = JSON.parse(
(process.env.UPDATE_FILES ?? "false").toLowerCase(),
);
} catch (err) {
throw new Error("Update Files Env Variable set to Invalid Value.");
}
@ -54,7 +56,9 @@ async function updateFilesSetup(): Promise<void> {
updateFileTransformedVersion = buildData.transformedVersion;
return;
}
error("Version.txt does not exist. Creating empty file. This may be an error.");
logWarn(
"Version.txt does not exist. Creating empty file. This may be an error.",
);
// Create Versions.txt
fs.closeSync(fs.openSync(versionsFilePath, "w"));
@ -106,7 +110,9 @@ export async function updateBuildServerProperties(): Promise<void> {
// Replacement Object
const replacementObject: Record<string, unknown> = {
versionTitle: updateFiles ? updateFileTransformedVersion : buildData.transformedVersion,
versionTitle: updateFiles
? updateFileTransformedVersion
: buildData.transformedVersion,
};
// Read and Write paths for normal
@ -121,7 +127,12 @@ export async function updateBuildServerProperties(): Promise<void> {
// Read and Write paths for expert
const readPathExpert: string = upath.join(templatesFolder, fileNameExpert);
const writePathExpert: string = upath.join(serverDestDirectory, "config-overrides", "expert", fileName);
const writePathExpert: string = upath.join(
serverDestDirectory,
"config-overrides",
"expert",
fileName,
);
// Modify Expert File
await modifyFile(readPathExpert, [writePathExpert], replacementObject, false);
@ -140,7 +151,9 @@ export async function updateBuildRandomPatches(): Promise<void> {
// Replacement object
const replacementObject: Record<string, unknown> = {
versionTitle: updateFiles ? updateFileTransformedVersion : buildData.transformedVersion,
versionTitle: updateFiles
? updateFileTransformedVersion
: buildData.transformedVersion,
mode: "Normal",
};
@ -149,7 +162,12 @@ export async function updateBuildRandomPatches(): Promise<void> {
// Change values for Expert Config
replacementObject["mode"] = "Expert";
const writePathExpert = upath.join(sharedDestDirectory, configOverridesFolder, "expert", fileName);
const writePathExpert = upath.join(
sharedDestDirectory,
configOverridesFolder,
"expert",
fileName,
);
// Modify Expert File
await modifyFile(readPath, [writePathExpert], replacementObject, false);
@ -194,7 +212,10 @@ async function updateIssueTemplates(): Promise<void> {
const versionsFilePath: string = upath.join(templatesFolder, "versions.txt");
let versionList: string = await fs.promises.readFile(versionsFilePath, "utf8");
let versionList: string = await fs.promises.readFile(
versionsFilePath,
"utf8",
);
if (!updateFiles) {
if (!buildData.isVersionBuild())
@ -213,7 +234,11 @@ async function updateIssueTemplates(): Promise<void> {
// Write updated Version List
await fs.promises.writeFile(versionsFilePath, versionList);
const issueTemplatesFolder: string = upath.join(rootDirectory, ".github", "ISSUE_TEMPLATE");
const issueTemplatesFolder: string = upath.join(
rootDirectory,
".github",
"ISSUE_TEMPLATE",
);
// Write to issue templates
for (const fileName of fileNames) {
@ -234,7 +259,9 @@ async function updateRandomPatchesConfig(): Promise<void> {
// Replacement object
const replacementObject: Record<string, unknown> = {
versionTitle: updateFiles ? updateFileTransformedVersion : buildData.transformedVersion,
versionTitle: updateFiles
? updateFileTransformedVersion
: buildData.transformedVersion,
mode: "Normal",
};
@ -243,7 +270,12 @@ async function updateRandomPatchesConfig(): Promise<void> {
// Change values for Expert Config
replacementObject["mode"] = "Expert";
const writePathExpert = upath.join(rootDirectory, configOverridesFolder, "expert", fileName);
const writePathExpert = upath.join(
rootDirectory,
configOverridesFolder,
"expert",
fileName,
);
// Modify Expert File
await modifyFile(readPath, [writePathExpert], replacementObject);
@ -261,7 +293,9 @@ async function updateServerProperties(): Promise<void> {
// Replacement Object
const replacementObject: Record<string, unknown> = {
versionTitle: updateFiles ? updateFileTransformedVersion : buildData.transformedVersion,
versionTitle: updateFiles
? updateFileTransformedVersion
: buildData.transformedVersion,
};
// Read and Write paths for normal
@ -276,7 +310,12 @@ async function updateServerProperties(): Promise<void> {
// Read and Write paths for expert
const readPathExpert: string = upath.join(templatesFolder, fileNameExpert);
const writePathExpert: string = upath.join(rootDirectory, configOverridesFolder, "expert", fileName);
const writePathExpert: string = upath.join(
rootDirectory,
configOverridesFolder,
"expert",
fileName,
);
// Modify Expert File
await modifyFile(readPathExpert, [writePathExpert], replacementObject);
@ -286,7 +325,12 @@ async function updateMainMenuConfig(): Promise<void> {
// Filename & paths
const fileName = "mainmenu.json";
const readPath: string = upath.join(templatesFolder, fileName);
const writePath: string = upath.join(rootDirectory, configFolder, "CustomMainMenu", fileName);
const writePath: string = upath.join(
rootDirectory,
configFolder,
"CustomMainMenu",
fileName,
);
if (!updateFiles && !buildData.isVersionBuild())
throw new Error(
@ -302,20 +346,38 @@ async function updateMainMenuConfig(): Promise<void> {
const data: string = await fs.promises.readFile(readPath, "utf8");
// Moustache Render
const modifiedData = JSON.parse(mustache.render(data, replacementObject));
let modifiedData = JSON.parse(mustache.render(data, replacementObject));
// Add warning to not edit file
modifiedData["_comment"] =
"DO NOT EDIT THIS FILE! EDIT THE TEMPlATES INSTEAD! See https://github.com/Nomi-CEu/Nomi-CEu/wiki/Part-1:-Contributing-Information#section-5-template-information!";
// Sort keys so that comment appears first
return await fs.promises.writeFile(writePath, sortedStringify(modifiedData, { space: 2 }), "utf8");
modifiedData = sortKeysRecursive(modifiedData);
return await fs.promises.writeFile(
writePath,
JSON.stringify(modifiedData, null, 2),
"utf8",
);
}
export const updateFilesIssue = gulp.series(updateFilesSetup, updateIssueTemplates);
export const updateFilesRandomPatches = gulp.series(updateFilesSetup, updateRandomPatchesConfig);
export const updateFilesServer = gulp.series(updateFilesSetup, updateServerProperties);
export const updateFilesMainMenu = gulp.series(updateFilesSetup, updateMainMenuConfig);
export const updateFilesIssue = gulp.series(
updateFilesSetup,
updateIssueTemplates,
);
export const updateFilesRandomPatches = gulp.series(
updateFilesSetup,
updateRandomPatchesConfig,
);
export const updateFilesServer = gulp.series(
updateFilesSetup,
updateServerProperties,
);
export const updateFilesMainMenu = gulp.series(
updateFilesSetup,
updateMainMenuConfig,
);
export const updateAll = gulp.series(
updateFilesSetup,
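
For reference, a minimal sketch of the render-and-sort step used in updateMainMenuConfig() above, which replaces json-stable-stringify-without-jsonify with sort-keys-recursive; the template string and version value are illustrative.

import mustache from "mustache";
import sortKeysRecursive from "sort-keys-recursive";

const template = '{ "label": "Nomi-CEu {{versionTitle}}" }'; // illustrative template
let menu = JSON.parse(mustache.render(template, { versionTitle: "1.7" }));
menu["_comment"] = "DO NOT EDIT THIS FILE! EDIT THE TEMPLATES INSTEAD!";
// Alphabetical key sorting puts "_comment" first, since "_" sorts before letters.
menu = sortKeysRecursive(menu);
console.log(JSON.stringify(menu, null, 2));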

View File

@ -1,25 +0,0 @@
import { MessageBuilder, Webhook } from "discord-webhook-node";
import buildConfig from "../../buildConfig";
import { checkEnvironmentalVariables } from "../../util/util";
export default async function fireNightlyWebhook(): Promise<void> {
checkEnvironmentalVariables(["DISCORD_WEBHOOK", "GITHUB_RUN_ID", "GITHUB_SHA"]);
const webhook = new Webhook(process.env.DISCORD_WEBHOOK);
if (buildConfig.nightlyHookName) {
webhook.setUsername(buildConfig.nightlyHookName);
}
if (buildConfig.nightlyHookAvatar) {
webhook.setAvatar(buildConfig.nightlyHookAvatar);
}
const link = `https://github.com/${process.env.GITHUB_REPOSITORY}/actions/runs/${process.env.GITHUB_RUN_ID}`;
const embed = new MessageBuilder()
.setTitle(`New snapshot available! (**${process.env.GITHUB_SHA.substr(0, 7)}**)`)
.setDescription(`Click to [view on GitHub Actions](${link}).`);
return webhook.send(embed);
}

View File

@ -4,18 +4,20 @@ import {
mmcDestDirectory,
modpackManifest,
serverDestDirectory,
} from "../../globals";
} from "#globals";
import upath from "upath";
import zip from "gulp-zip";
import gulp from "gulp";
import buildConfig from "../../buildConfig";
import { makeArtifactNameBody } from "../../util/util";
import gulp, { src } from "gulp";
import buildConfig from "#buildConfig";
import { makeArtifactNameBody } from "#utils/util.ts";
import sanitize from "sanitize-filename";
async function zipFolder(path: string, zipName: string = upath.basename(path) + ".zip"): Promise<void> {
async function zipFolder(
path: string,
zipName: string = upath.basename(path) + ".zip",
): Promise<void> {
return new Promise((resolve) => {
gulp
.src(upath.join(path, "**"), { nodir: true, base: path, dot: true })
src(upath.join(path, "**"), { base: path, dot: true })
.pipe(zip(zipName))
.pipe(gulp.dest(buildConfig.buildDestinationDirectory))
.on("end", resolve);
@ -26,7 +28,11 @@ function makeZipper(src: string, artifactName: string) {
const zipFn = () => {
return zipFolder(
upath.join(src),
sanitize((makeArtifactNameBody(modpackManifest.name) + `-${artifactName}.zip`).toLowerCase()),
sanitize(
(
makeArtifactNameBody(modpackManifest.name) + `-${artifactName}.zip`
).toLowerCase(),
),
);
};
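
A small usage sketch of the artifact naming above; makeArtifactNameBody lives in #utils/util.ts and is not shown in this diff, so the stand-in body below is an assumption.

import sanitize from "sanitize-filename";

const makeArtifactNameBody = (name: string) => name.replace(/\s+/g, "-"); // stand-in
const artifactName = "client"; // e.g. "client", "server", "mmc"
const zipName = sanitize(
  (makeArtifactNameBody("Nomi-CEu") + `-${artifactName}.zip`).toLowerCase(),
);
console.log(zipName); // "nomi-ceu-client.zip"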

View File

@ -1,38 +1,39 @@
import { clientDestDirectory, mmcDestDirectory, modDestDirectory, modpackManifest } from "../../globals";
import { fetchMods } from "../../util/curseForgeAPI";
import {
clientDestDirectory,
mmcDestDirectory,
modDestDirectory,
modpackManifest,
} from "#globals";
import * as upath from "upath";
import { series, src, symlink } from "gulp";
import * as fs from "fs";
import gulp from "gulp";
import buildConfig from "../../buildConfig";
import gulp, { series, src, symlink } from "gulp";
import buildConfig from "#buildConfig";
import filter from "gulp-filter";
async function mmcCleanUp(cb) {
async function mmcCleanUp() {
if (fs.existsSync(mmcDestDirectory)) {
await fs.promises.rm(mmcDestDirectory, { recursive: true });
}
cb();
}
/**
* Checks and creates all necessary directories so we can build the MMC zip safely.
*/
async function createMMCDirs(cb) {
async function createMMCDirs() {
if (!fs.existsSync(mmcDestDirectory)) {
await fs.promises.mkdir(mmcDestDirectory, { recursive: true });
}
cb();
}
/**
* Copies the update notes file.
*/
function copyMMCUpdateNotes() {
return gulp.src("../UPDATENOTES.md", { allowEmpty: true }).pipe(gulp.dest(mmcDestDirectory));
return gulp
.src("../UPDATENOTES.md", { allowEmpty: true })
.pipe(gulp.dest(mmcDestDirectory));
}
/**
* Copies the license file.
*/
@ -44,24 +45,31 @@ async function copyMMCLicense() {
* Copies the changelog file.
*/
function copyMMCChangelog() {
return gulp.src(upath.join(buildConfig.buildDestinationDirectory, "CHANGELOG.md")).pipe(gulp.dest(mmcDestDirectory));
return gulp
.src(upath.join(buildConfig.buildDestinationDirectory, "CHANGELOG.md"))
.pipe(gulp.dest(mmcDestDirectory));
}
/**
* Copies modpack overrides.
*/
function copyOverrides() {
const f = filter((f) => !f.isDirectory());
return src(upath.join(clientDestDirectory, "**/*"), {
nodir: true,
resolveSymlinks: false,
}).pipe(symlink(upath.join(mmcDestDirectory)));
})
.pipe(f)
.pipe(symlink(upath.join(mmcDestDirectory)));
}
/**
* Renames copied overrides to '.minecraft'.
*/
async function renameOverrides() {
await fs.promises.rename(upath.join(mmcDestDirectory, "overrides"), upath.join(mmcDestDirectory, ".minecraft"));
await fs.promises.rename(
upath.join(mmcDestDirectory, "overrides"),
upath.join(mmcDestDirectory, ".minecraft"),
);
return fs.promises.rm(upath.join(mmcDestDirectory, "manifest.json"));
}
@ -69,14 +77,17 @@ async function renameOverrides() {
* Copies client & shared mods.
*/
async function copyMMCModJars() {
return src([upath.join(modDestDirectory, "*"), upath.join(modDestDirectory, "client", "*")], {
nodir: true,
resolveSymlinks: false,
}).pipe(symlink(upath.join(mmcDestDirectory, ".minecraft", "mods")));
const f = filter((f) => !f.isDirectory());
return src(["*", upath.join("client", "*")], {
cwd: modDestDirectory,
resolveSymlinks: true,
})
.pipe(f)
.pipe(symlink(upath.join(mmcDestDirectory, ".minecraft", "mods")));
}
async function createMMCConfig() {
const cfg = {
const cfg: Record<string, string> = {
InstanceType: "OneSix",
iconKey: "default",
name: modpackManifest.name,
@ -96,7 +107,7 @@ async function createMMCManifest() {
const manifest = {
components: [],
formatVersion: 1,
};
} as { components: unknown[]; formatVersion: number };
manifest.components.push({
cachedName: "Minecraft",
@ -128,12 +139,18 @@ async function createMMCManifest() {
});
}
return fs.promises.writeFile(upath.join(mmcDestDirectory, "mmc-pack.json"), JSON.stringify(manifest, null, "\t"));
return fs.promises.writeFile(
upath.join(mmcDestDirectory, "mmc-pack.json"),
JSON.stringify(manifest, null, "\t"),
);
}
export default series(
mmcCleanUp,
createMMCDirs,
copyMMCChangelog,
copyMMCLicense,
copyMMCUpdateNotes,
copyOverrides,
renameOverrides,
createMMCConfig,
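
For orientation, an illustrative mmc-pack.json payload like the one createMMCManifest() assembles; the uid, version and important fields follow common MultiMC conventions and are assumptions here, since the pushed component objects are truncated in this diff.

import fs from "fs";
import upath from "upath";

const manifest = {
  formatVersion: 1,
  components: [] as unknown[],
};
manifest.components.push(
  { cachedName: "Minecraft", uid: "net.minecraft", version: "1.12.2", important: true },
  { cachedName: "Forge", uid: "net.minecraftforge", version: "14.23.5.2860" },
);
await fs.promises.writeFile(
  upath.join("./build/mmc", "mmc-pack.json"), // illustrative destination
  JSON.stringify(manifest, null, "\t"),
);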

View File

@ -2,26 +2,31 @@ import upath from "upath";
import unzip from "unzipper";
import through from "through2";
import mustache from "mustache";
import log from "fancy-log";
import gulp, { src, dest, symlink } from "gulp";
import fs from "fs";
import buildConfig from "../../buildConfig";
import Bluebird from "bluebird";
import { ForgeProfile } from "../../types/forgeProfile";
import { FileDef } from "../../types/fileDef";
import { downloadOrRetrieveFileDef, getVersionManifest, libraryToPath, relative } from "../../util/util";
import { modDestDirectory, modpackManifest, serverDestDirectory, sharedDestDirectory } from "../../globals";
import del from "del";
import { VersionManifest } from "../../types/versionManifest";
import { updateBuildServerProperties } from "../misc/transformFiles";
import buildConfig from "#buildConfig";
import { ForgeProfile } from "#types/forgeProfile.ts";
import { FileDef } from "#types/fileDef.ts";
import {
downloadOrRetrieveFileDef,
getForgeJar,
getVersionManifest,
} from "#utils/util.ts";
import {
modDestDirectory,
modpackManifest,
serverDestDirectory,
sharedDestDirectory,
} from "#globals";
import { deleteAsync } from "del";
import { updateBuildServerProperties } from "../misc/transformFiles.ts";
import logInfo, { logWarn } from "#utils/log.ts";
import filter from "gulp-filter";
const FORGE_VERSION_REG = /forge-(.+)/;
const FORGE_MAVEN = "https://files.minecraftforge.net/maven/";
let g_forgeJar;
let g_forgeJar: string | undefined = undefined;
async function serverCleanUp() {
return del(upath.join(serverDestDirectory, "*"), { force: true });
return deleteAsync(upath.join(serverDestDirectory, "*"), { force: true });
}
/**
@ -39,44 +44,16 @@ async function createServerDirs() {
* Extract, parse the profile data and download required libraries.
*/
async function downloadForge() {
const minecraft = modpackManifest.minecraft;
/**
* Break down the Forge version defined in manifest.json.
*/
const parsedForgeEntry = FORGE_VERSION_REG.exec(
(minecraft.modLoaders.find((x) => x.id && x.id.indexOf("forge") != -1) || {}).id || "",
);
if (!parsedForgeEntry) {
throw new Error("Malformed Forge version in manifest.json.");
}
/**
* Transform Forge version into Maven library path.
*/
const forgeMavenLibrary = `net.minecraftforge:forge:${minecraft.version}-${parsedForgeEntry[1]}`;
const forgeInstallerPath = libraryToPath(forgeMavenLibrary) + "-installer.jar";
const forgeUniversalPath = upath.join("maven", libraryToPath(forgeMavenLibrary) + ".jar");
/**
* Fetch the Forge installer
*/
const forgeJar = await fs.promises.readFile(
(
await downloadOrRetrieveFileDef({
url: FORGE_MAVEN + forgeInstallerPath,
})
).cachePath,
);
const { forgeJar, forgeUniversalPath } = await getForgeJar();
/**
* Parse the profile manifest.
*/
let forgeUniversalJar: Buffer, forgeProfile: ForgeProfile;
let forgeUniversalJar: Buffer | undefined = undefined;
let forgeProfile: ForgeProfile | undefined = undefined;
const files = (await unzip.Open.buffer(forgeJar))?.files;
log("Extracting Forge installation profile & jar...");
logInfo("Extracting Forge installation profile & jar...");
if (!files) {
throw new Error("Malformed Forge installation jar.");
@ -102,14 +79,19 @@ async function downloadForge() {
}
if (!forgeUniversalJar) {
throw new Error("Couldn't find the universal Forge jar in the installation jar.");
throw new Error(
"Couldn't find the universal Forge jar in the installation jar.",
);
}
/**
* Move the universal jar into the dist folder.
*/
log("Extracting the Forge jar...");
await fs.promises.writeFile(upath.join(serverDestDirectory, upath.basename(forgeUniversalPath)), forgeUniversalJar);
logInfo("Extracting the Forge jar...");
await fs.promises.writeFile(
upath.join(serverDestDirectory, upath.basename(forgeUniversalPath)),
forgeUniversalJar,
);
/**
* Save the universal jar file name for later.
@ -121,12 +103,13 @@ async function downloadForge() {
/**
* Finally, fetch libraries.
*/
const libraries = forgeProfile.libraries.filter((x) => Boolean(x?.downloads?.artifact?.url));
log(`Fetching ${libraries.length} server libraries...`);
const libraries = forgeProfile.libraries.filter((x) =>
Boolean(x?.downloads?.artifact?.url),
);
logInfo(`Fetching ${libraries.length} server libraries...`);
return Bluebird.map(
libraries,
async (library) => {
await Promise.all(
libraries.map(async (library) => {
const libraryPath = library.downloads.artifact.path;
const def: FileDef = {
@ -134,15 +117,23 @@ async function downloadForge() {
};
if (library.downloads.artifact.sha1) {
def.hashes = [{ id: "sha1", hashes: [library.downloads.artifact.sha1] }];
def.hashes = [
{ id: "sha1", hashes: [library.downloads.artifact.sha1] },
];
}
const destPath = upath.join(serverDestDirectory, "libraries", libraryPath);
const destPath = upath.join(
serverDestDirectory,
"libraries",
libraryPath,
);
await fs.promises.mkdir(upath.dirname(destPath), { recursive: true });
await fs.promises.symlink(relative(destPath, (await downloadOrRetrieveFileDef(def)).cachePath), destPath);
},
{ concurrency: buildConfig.downloaderConcurrency },
return fs.promises.copyFile(
(await downloadOrRetrieveFileDef(def)).cachePath,
destPath,
);
}),
);
}
@ -150,10 +141,12 @@ async function downloadForge() {
* Download the server jar.
*/
async function downloadMinecraftServer() {
log("Fetching the Minecraft version manifest...");
const versionManifest: VersionManifest = await getVersionManifest(modpackManifest.minecraft.version);
logInfo("Fetching the Minecraft version manifest...");
const versionManifest = await getVersionManifest(
modpackManifest.minecraft.version,
);
if (!versionManifest) {
throw new Error(`No manifest found for Minecraft ${versionManifest.id}`);
throw new Error(`No manifest found for Minecraft ${modpackManifest.minecraft.version}`);
}
/**
@ -170,26 +163,37 @@ async function downloadMinecraftServer() {
throw new Error(`No server jar file found for ${versionManifest.id}`);
}
const dest = upath.join(serverDestDirectory, `minecraft_server.${versionManifest.id}.jar`);
await fs.promises.symlink(relative(dest, serverJar.cachePath), dest);
const dest = upath.join(
serverDestDirectory,
`minecraft_server.${versionManifest.id}.jar`,
);
await fs.promises.symlink(upath.resolve(serverJar.cachePath), dest);
}
/**
* Copies server & shared mods.
*/
async function copyServerMods() {
return src([upath.join(modDestDirectory, "*"), upath.join(modDestDirectory, "server", "*")], {
nodir: true,
resolveSymlinks: false,
}).pipe(symlink(upath.join(serverDestDirectory, "mods")));
const f = filter((f) => !f.isDirectory());
return src(["*", upath.join("server", "*")], {
cwd: modDestDirectory,
resolveSymlinks: true,
})
.pipe(f)
.pipe(symlink(upath.join(serverDestDirectory, "mods")));
}
/**
* Copies modpack overrides.
*/
function copyServerOverrides() {
return gulp
.src(buildConfig.copyFromSharedServerGlobs, { nodir: true, cwd: sharedDestDirectory, allowEmpty: true })
const f = filter((f) => !f.isDirectory());
return src(buildConfig.copyFromSharedServerGlobs, {
cwd: sharedDestDirectory,
allowEmpty: true,
resolveSymlinks: true,
})
.pipe(f)
.pipe(symlink(upath.join(serverDestDirectory)));
}
@ -211,14 +215,18 @@ function copyServerLicense() {
* Copies the update notes file.
*/
function copyServerUpdateNotes() {
return src("../UPDATENOTES.md", { allowEmpty: true }).pipe(dest(serverDestDirectory));
return src("../UPDATENOTES.md", { allowEmpty: true }).pipe(
dest(serverDestDirectory),
);
}
/**
* Copies the changelog file.
*/
function copyServerChangelog() {
return src(upath.join(buildConfig.buildDestinationDirectory, "CHANGELOG.md")).pipe(dest(serverDestDirectory));
return src(
upath.join(buildConfig.buildDestinationDirectory, "CHANGELOG.md"),
).pipe(dest(serverDestDirectory));
}
/**
@ -237,8 +245,8 @@ function processLaunchscripts() {
if (g_forgeJar) {
rules.forgeJar = g_forgeJar;
} else {
log.warn("No forgeJar specified!");
log.warn("Did downloadForge task fail?");
logWarn("No forgeJar specified!");
logWarn("Did downloadForge task fail?");
}
return src(["../launchscripts/**"])
@ -254,7 +262,7 @@ function processLaunchscripts() {
.pipe(dest(serverDestDirectory));
}
export default gulp.series([
export default gulp.series(
serverCleanUp,
createServerDirs,
downloadForge,
@ -267,4 +275,4 @@ export default gulp.series([
copyServerUpdateNotes,
processLaunchscripts,
updateBuildServerProperties,
]);
);
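
A minimal sketch of the installer-jar inspection downloadForge() performs with unzipper; the jar path is an illustrative stand-in for the cached installer returned by getForgeJar(), and the install_profile.json entry name is an assumption based on how Forge installers are usually laid out.

import fs from "fs";
import unzip from "unzipper";

const forgeJar = await fs.promises.readFile("./forge-installer.jar"); // illustrative path
const files = (await unzip.Open.buffer(forgeJar))?.files;
if (!files) throw new Error("Malformed Forge installation jar.");

const profileEntry = files.find((f) => f.path === "install_profile.json");
if (!profileEntry) throw new Error("No installation profile found.");

// Each unzipper entry exposes buffer() to read its contents.
const forgeProfile = JSON.parse((await profileEntry.buffer()).toString());
console.log(`Profile lists ${forgeProfile.libraries?.length ?? 0} libraries.`);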

View File

@ -1,23 +1,32 @@
import fs from "fs";
import gulp from "gulp";
import gulp, { dest, src } from "gulp";
import upath from "upath";
import buildConfig from "../../buildConfig";
import buildConfig from "#buildConfig";
import {
modDestDirectory,
modpackManifest,
overridesFolder,
rootDirectory,
sharedDestDirectory,
tempDirectory
} from "../../globals";
import del from "del";
import { FileDef } from "../../types/fileDef";
import Bluebird from "bluebird";
import { downloadFileDef, downloadOrRetrieveFileDef, isEnvVariableSet, relative } from "../../util/util";
tempDirectory,
} from "#globals";
import { deleteAsync } from "del";
import { FileDef } from "#types/fileDef.ts";
import {
downloadFileDef,
downloadOrRetrieveFileDef,
isEnvVariableSet,
} from "#utils/util.ts";
import transformVersion from "./transformVersion.ts";
import { createBuildChangelog } from "../changelog/index.ts";
import mustache from "mustache";
import { updateBuildRandomPatches } from "../misc/transformFiles.ts";
import { transformQuestBook } from "./quest.ts";
import logInfo from "#utils/log.ts";
async function sharedCleanUp() {
await del(upath.join(sharedDestDirectory, "*"), { force: true });
await del(upath.join(tempDirectory, "*"), { force: true });
await deleteAsync(upath.join(sharedDestDirectory, "*"), { force: true });
await deleteAsync(upath.join(tempDirectory, "*"), { force: true });
}
/**
@ -38,10 +47,10 @@ async function createSharedDirs() {
*/
async function copyOverrides() {
// Don't copy server.properties files in config-overrides, it is auto transformed into the server build folder
// Copy, not Symlink, so we can transform the files as we wish
return new Promise((resolve) => {
gulp
.src(buildConfig.copyToSharedDirGlobs, { cwd: upath.join(buildConfig.buildSourceDirectory) })
.pipe(gulp.dest(upath.join(sharedDestDirectory, overridesFolder)))
src(buildConfig.copyToSharedDirGlobs, { cwd: upath.join(rootDirectory) })
.pipe(dest(upath.join(sharedDestDirectory, overridesFolder)))
.on("end", resolve);
});
}
@ -73,17 +82,13 @@ async function fetchExternalDependencies() {
delete modpackManifest.externalDependencies;
return Bluebird.map(
depDefs,
async (depDef) => {
await Promise.all(
depDefs.map(async (depDef) => {
const dest = upath.join(destDirectory, upath.basename(depDef.url));
const cachePath = (await downloadOrRetrieveFileDef(depDef)).cachePath;
const rel = relative(dest, cachePath);
await fs.promises.symlink(rel, dest);
},
{ concurrency: buildConfig.downloaderConcurrency },
return fs.promises.symlink(upath.resolve(dest, cachePath), dest);
}),
);
}
}
@ -92,25 +97,41 @@ async function fetchExternalDependencies() {
* Either fetches the Changelog File, or makes one.
*/
async function fetchOrMakeChangelog() {
if (isEnvVariableSet("CHANGELOG_URL") && isEnvVariableSet("CHANGELOG_CF_URL")) {
log("Using Changelog Files from URL.");
await downloadChangelogs(process.env.CHANGELOG_URL, process.env.CHANGELOG_CF_URL);
return;
}
if (isEnvVariableSet("CHANGELOG_BRANCH")) {
log("Using Changelog Files from Branch.");
const url = "https://raw.githubusercontent.com/Nomi-CEu/Nomi-CEu/{{ branch }}/{{ filename }}";
if (
isEnvVariableSet("CHANGELOG_URL") &&
isEnvVariableSet("CHANGELOG_CF_URL")
) {
logInfo("Using Changelog Files from URL.");
await downloadChangelogs(
mustache.render(url, { branch: process.env.CHANGELOG_BRANCH, filename: "CHANGELOG.md" }),
mustache.render(url, { branch: process.env.CHANGELOG_BRNACH, filename: "CHANGELOG_CF.md" }),
process.env.CHANGELOG_URL ?? "",
process.env.CHANGELOG_CF_URL ?? "",
);
return;
}
log("Creating Changelog Files.");
if (isEnvVariableSet("CHANGELOG_BRANCH")) {
logInfo("Using Changelog Files from Branch.");
const url =
"https://raw.githubusercontent.com/Nomi-CEu/Nomi-CEu/{{ branch }}/{{ filename }}";
await downloadChangelogs(
mustache.render(url, {
branch: process.env.CHANGELOG_BRANCH,
filename: "CHANGELOG.md",
}),
mustache.render(url, {
branch: process.env.CHANGELOG_BRANCH,
filename: "CHANGELOG_CF.md",
}),
);
return;
}
logInfo("Creating Changelog Files.");
await createBuildChangelog();
}
async function downloadChangelogs(changelogURL: string, changelogCFURL: string) {
async function downloadChangelogs(
changelogURL: string,
changelogCFURL: string,
) {
const changelog = await downloadFileDef({ url: changelogURL });
const changelogCF = await downloadFileDef({ url: changelogCFURL });
@ -118,16 +139,23 @@ async function downloadChangelogs(changelogURL: string, changelogCFURL: string)
await writeToChangelog(changelogCF, "CHANGELOG_CF.md", changelogCFURL);
}
async function writeToChangelog(buffer: Buffer, changelogFile: string, url: string) {
let handle: fs.promises.FileHandle;
async function writeToChangelog(
buffer: Buffer,
changelogFile: string,
url: string,
) {
let handle: fs.promises.FileHandle | undefined = undefined;
try {
handle = await fs.promises.open(upath.join(buildConfig.buildDestinationDirectory, changelogFile), "w");
handle = await fs.promises.open(
upath.join(buildConfig.buildDestinationDirectory, changelogFile),
"w",
);
await handle.write(buffer);
await handle.close();
} catch (err) {
if (handle && (await handle.stat()).isFile()) {
log(`Couldn't download changelog from URL ${url}, cleaning up...`);
logInfo(`Couldn't download changelog from URL ${url}, cleaning up...`);
await handle.close();
}
@ -135,13 +163,6 @@ async function writeToChangelog(buffer: Buffer, changelogFile: string, url: stri
}
}
import transformVersion from "./transformVersion";
import { createBuildChangelog } from "../changelog/createChangelog";
import mustache from "mustache";
import log from "fancy-log";
import { updateBuildRandomPatches } from "../misc/transformFiles";
import { transformQuestBook } from "./quest";
export default gulp.series(
sharedCleanUp,
createSharedDirs,
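
A quick sketch of the branch-based changelog URL rendering in fetchOrMakeChangelog() above; the branch value is an example, the real task reads it from the CHANGELOG_BRANCH environment variable.

import mustache from "mustache";

const url =
  "https://raw.githubusercontent.com/Nomi-CEu/Nomi-CEu/{{ branch }}/{{ filename }}";
const changelogURL = mustache.render(url, {
  branch: "main", // example value
  filename: "CHANGELOG.md",
});
console.log(changelogURL);
// https://raw.githubusercontent.com/Nomi-CEu/Nomi-CEu/main/CHANGELOG.md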

View File

@ -1,10 +1,26 @@
import fs from "fs";
import upath from "upath";
import { overridesFolder, configFolder, configOverridesFolder, sharedDestDirectory } from "../../globals";
import { Quest, QuestBook, QuestLines as QuestLine } from "../../types/bqQuestBook";
import {
overridesFolder,
configFolder,
configOverridesFolder,
sharedDestDirectory,
} from "#globals";
import {
Quest,
QuestBook,
QuestLine,
} from "#types/bqQuestBook.ts";
const sharedQBDefaults = upath.join(sharedDestDirectory, configFolder, "betterquesting");
const sharedConfigOverrides = upath.join(sharedDestDirectory, configOverridesFolder);
const sharedQBDefaults = upath.join(
sharedDestDirectory,
configFolder,
"betterquesting",
);
const sharedConfigOverrides = upath.join(
sharedDestDirectory,
configOverridesFolder,
);
const langFileLocation = "resources/questbook/lang";
@ -49,7 +65,7 @@ function transformKeyPairs(
*
* Interesting, huh?
*/
const uselessProps = {
const uselessProps: Record<string, string | number> = {
"simultaneous:1": 0,
"ismain:1": 0,
"repeat_relative:1": 1,
@ -77,7 +93,7 @@ const uselessProps = {
"ignoresview:1": 0,
};
function stripUselessMetadata(object: unknown) {
function stripUselessMetadata(object: Record<string, unknown>) {
Object.keys(object).forEach((propName) => {
const prop = object[propName];
if (prop === uselessProps[propName]) {
@ -89,9 +105,9 @@ function stripUselessMetadata(object: unknown) {
return delete object[propName];
}
stripUselessMetadata(prop);
stripUselessMetadata(prop as Record<string, unknown>);
if (Object.keys(prop).length === 0) {
if (Object.keys(prop as Record<string, unknown>).length === 0) {
return delete object[propName];
}
}
@ -103,22 +119,54 @@ function stripUselessMetadata(object: unknown) {
*/
export async function transformQuestBook(): Promise<void> {
// Source Quest Book File Locations
const questPathNormalSource = upath.join(sharedQBDefaults, "DefaultQuests.json");
const questPathExpertSource = upath.join(sharedQBDefaults, "saved_quests", "ExpertQuests.json");
const questPathNormalSource = upath.join(
sharedQBDefaults,
"DefaultQuests.json",
);
const questPathExpertSource = upath.join(
sharedQBDefaults,
"saved_quests",
"ExpertQuests.json",
);
// Quest Book Objects
const questBookNormal: QuestBook = JSON.parse(await fs.promises.readFile(questPathNormalSource, "utf-8"));
const questBookExpert: QuestBook = JSON.parse(await fs.promises.readFile(questPathExpertSource, "utf-8"));
const questBookNormal: QuestBook = JSON.parse(
await fs.promises.readFile(questPathNormalSource, "utf-8"),
);
const questBookExpert: QuestBook = JSON.parse(
await fs.promises.readFile(questPathExpertSource, "utf-8"),
);
// Quest Book Paths
const questPathNormalDefault = upath.join(sharedQBDefaults, "DefaultQuests.json");
const questPathNormalOverride = upath.join(sharedConfigOverrides, "normal", "betterquesting", "DefaultQuests.json");
const questPathNormalDefault = upath.join(
sharedQBDefaults,
"DefaultQuests.json",
);
const questPathNormalOverride = upath.join(
sharedConfigOverrides,
"normal",
"betterquesting",
"DefaultQuests.json",
);
const questPathExpertDefault = upath.join(sharedQBDefaults, "saved_quests", "ExpertQuests.json");
const questPathExpertOverride = upath.join(sharedConfigOverrides, "expert", "betterquesting", "DefaultQuests.json");
const questPathExpertDefault = upath.join(
sharedQBDefaults,
"saved_quests",
"ExpertQuests.json",
);
const questPathExpertOverride = upath.join(
sharedConfigOverrides,
"expert",
"betterquesting",
"DefaultQuests.json",
);
// Quest Lang Location
const questLangLocation = upath.join(sharedDestDirectory, overridesFolder, langFileLocation);
const questLangLocation = upath.join(
sharedDestDirectory,
overridesFolder,
langFileLocation,
);
// Traverse through the quest book and rewrite titles/descriptions.
// Extract title/desc pairs into a lang file.
@ -149,16 +197,31 @@ export async function transformQuestBook(): Promise<void> {
// Write lang file.
await fs.promises.mkdir(questLangLocation, { recursive: true });
await fs.promises.writeFile(upath.join(questLangLocation, "en_us.lang"), lines.join("\n"));
await fs.promises.writeFile(
upath.join(questLangLocation, "en_us.lang"),
lines.join("\n"),
);
// Strip useless metadata.
stripUselessMetadata(questBookNormal);
stripUselessMetadata(questBookExpert);
stripUselessMetadata(questBookNormal as unknown as Record<string, unknown>);
stripUselessMetadata(questBookExpert as unknown as Record<string, unknown>);
// Write QB files.
await fs.promises.writeFile(questPathNormalDefault, JSON.stringify(questBookNormal, null, 2));
await fs.promises.writeFile(questPathNormalOverride, JSON.stringify(questBookNormal, null, 2));
await fs.promises.writeFile(
questPathNormalDefault,
JSON.stringify(questBookNormal, null, 2),
);
await fs.promises.writeFile(
questPathNormalOverride,
JSON.stringify(questBookNormal, null, 2),
);
await fs.promises.writeFile(questPathExpertDefault, JSON.stringify(questBookExpert, null, 2));
return await fs.promises.writeFile(questPathExpertOverride, JSON.stringify(questBookExpert, null, 2));
await fs.promises.writeFile(
questPathExpertDefault,
JSON.stringify(questBookExpert, null, 2),
);
return await fs.promises.writeFile(
questPathExpertOverride,
JSON.stringify(questBookExpert, null, 2),
);
}
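
A heavily simplified sketch of the title extraction that transformKeyPairs (referenced but not shown in this diff) performs when rewriting quest titles and descriptions into en_us.lang entries; the lang-key naming scheme below is purely illustrative.

const lines: string[] = [];

function extractTitle(questID: number, props: Record<string, string>): void {
  const key = `nomiceu.quest.quest${questID}.title`; // illustrative key format
  lines.push(`${key}=${props["name"]}`);
  props["name"] = key; // the quest book entry now points at the lang key
}

extractTitle(12, { name: "Getting Started" });
console.log(lines.join("\n")); // nomiceu.quest.quest12.title=Getting Started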

View File

@ -1,4 +1,4 @@
import { modpackManifest } from "../../globals";
import { modpackManifest } from "#globals";
/**
* Transform the version field of manifest.json.
@ -14,7 +14,11 @@ export default async function transformManifestVersion(): Promise<void> {
modpackManifest.version = `${process.env.GITHUB_HEAD_REF}-${shortCommit}`;
}
// If SHA and ref is provided, append both the branch and short SHA.
else if (process.env.GITHUB_SHA && process.env.GITHUB_REF && process.env.GITHUB_REF.startsWith("refs/heads/")) {
else if (
process.env.GITHUB_SHA &&
process.env.GITHUB_REF &&
process.env.GITHUB_REF.startsWith("refs/heads/")
) {
const shortCommit = process.env.GITHUB_SHA.substring(0, 7);
const branch = /refs\/heads\/(.+)/.exec(process.env.GITHUB_REF)?.[1];
if (!branch) {
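
Worked examples of the version strings transformManifestVersion() produces for typical CI environments; the env values below are examples only.

// Pull request build: GITHUB_HEAD_REF="feature/qb", short SHA "0123abc"
//   -> modpackManifest.version === "feature/qb-0123abc"
// Branch push build: GITHUB_REF="refs/heads/main", short SHA "0123abc"
//   -> modpackManifest.version === "main-0123abc"
const sha = "0123abcdef0123abcdef0123abcdef0123abcd";
const branch = /refs\/heads\/(.+)/.exec("refs/heads/main")?.[1];
console.log(`${branch}-${sha.substring(0, 7)}`); // main-0123abc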

View File

@ -1,150 +1,150 @@
{
"images": {
"title": {
"image": "minecraft:textures/gui/title/top.png",
"posX": -150,
"posY": 4,
"width": 300,
"height": 79,
"alignment": "top_center"
}
},
"buttons": {
"singleplayer": {
"text": "menu.singleplayer",
"posX": 4,
"posY": -46,
"width": 120,
"height": 20,
"alignment": "left_center",
"action": {
"type": "openGui",
"gui": "singleplayer"
}
},
"multiplayer": {
"text": "menu.multiplayer",
"posX": 125,
"posY": -46,
"width": 120,
"height": 20,
"alignment": "left_center",
"action": {
"type": "openGui",
"gui": "multiplayer"
}
},
"mods": {
"text": "fml.menu.mods",
"posX": 4,
"posY": -22,
"width": 120,
"height": 20,
"alignment": "left_center",
"action": {
"type": "openGui",
"gui": "mods"
}
},
"options": {
"text": "menu.options",
"posX": 125,
"posY": -22,
"width": 120,
"height": 20,
"alignment": "left_center",
"action": {
"type": "openGui",
"gui": "options"
}
},
"quit": {
"text": "menu.quit",
"posX": 4,
"posY": 74,
"width": 240,
"height": 20,
"alignment": "left_center",
"action": {
"type": "quit"
}
},
"update": {
"text": "Update Changes",
"posX": 125,
"posY": 26,
"width": 120,
"height": 20,
"alignment": "left_center",
"action": {
"type": "openLink",
"link": "https://github.com/Nomi-CEu/Nomi-CEu/releases/tag/{{{version}}}"
}
},
"discord": {
"text": "Join Us On Discord!",
"posX": 4,
"posY": 26,
"width": 120,
"height": 20,
"alignment": "left_center",
"action": {
"type": "openLink",
"link": "https://discord.gg/zwQzqP8b6q"
}
},
"akliz": {
"text": "Rent Your Own Nomifactory Server!",
"posX": 4,
"posY": 50,
"width": 240,
"height": 20,
"alignment": "left_center",
"action": {
"type": "openLink",
"link": "https://www.akliz.net/nomifactory"
}
}
},
"labels": {
"mojang": {
"text": "Copyright Mojang AB. Do not distribute!",
"posX": -197,
"posY": -10,
"color": -1,
"alignment": "bottom_right"
}
},
"other": {
"background": {
"image": "",
"slideshow": {
"displayDuration": 100,
"fadeDuration": 40,
"shuffle" : true,
"images": [
"minecraft:textures/gui/title/background/besoiobiy_1.png",
"minecraft:textures/gui/title/background/besoiobiy_2.png",
"minecraft:textures/gui/title/background/cactus_cool.png",
"minecraft:textures/gui/title/background/cobracreeper1.png",
"minecraft:textures/gui/title/background/darkarkangel.png",
"minecraft:textures/gui/title/background/ely_1.png",
"minecraft:textures/gui/title/background/ely_2.png",
"minecraft:textures/gui/title/background/ely_3.png",
"minecraft:textures/gui/title/background/emiuna.png",
"minecraft:textures/gui/title/background/extracoolcat_1.png",
"minecraft:textures/gui/title/background/extracoolcat_2.png",
"minecraft:textures/gui/title/background/extracoolcat_3.png",
"minecraft:textures/gui/title/background/extracoolcat_4.png",
"minecraft:textures/gui/title/background/gaboggamer.png",
"minecraft:textures/gui/title/background/itstheguywhoasked.png",
"minecraft:textures/gui/title/background/lyeo.png",
"minecraft:textures/gui/title/background/pgs_1.png",
"minecraft:textures/gui/title/background/pgs_2.png",
"minecraft:textures/gui/title/background/qr_est.png",
"minecraft:textures/gui/title/background/supasem.png"
]
}
}
}
"images": {
"title": {
"image": "minecraft:textures/gui/title/top.png",
"posX": -150,
"posY": 4,
"width": 300,
"height": 79,
"alignment": "top_center"
}
},
"buttons": {
"singleplayer": {
"text": "menu.singleplayer",
"posX": 4,
"posY": -46,
"width": 120,
"height": 20,
"alignment": "left_center",
"action": {
"type": "openGui",
"gui": "singleplayer"
}
},
"multiplayer": {
"text": "menu.multiplayer",
"posX": 125,
"posY": -46,
"width": 120,
"height": 20,
"alignment": "left_center",
"action": {
"type": "openGui",
"gui": "multiplayer"
}
},
"mods": {
"text": "fml.menu.mods",
"posX": 4,
"posY": -22,
"width": 120,
"height": 20,
"alignment": "left_center",
"action": {
"type": "openGui",
"gui": "mods"
}
},
"options": {
"text": "menu.options",
"posX": 125,
"posY": -22,
"width": 120,
"height": 20,
"alignment": "left_center",
"action": {
"type": "openGui",
"gui": "options"
}
},
"quit": {
"text": "menu.quit",
"posX": 4,
"posY": 74,
"width": 240,
"height": 20,
"alignment": "left_center",
"action": {
"type": "quit"
}
},
"update": {
"text": "Update Changes",
"posX": 125,
"posY": 26,
"width": 120,
"height": 20,
"alignment": "left_center",
"action": {
"type": "openLink",
"link": "https://github.com/Nomi-CEu/Nomi-CEu/releases/tag/{{{version}}}"
}
},
"discord": {
"text": "Join Us On Discord!",
"posX": 4,
"posY": 26,
"width": 120,
"height": 20,
"alignment": "left_center",
"action": {
"type": "openLink",
"link": "https://discord.gg/zwQzqP8b6q"
}
},
"akliz": {
"text": "Rent Your Own Nomifactory Server!",
"posX": 4,
"posY": 50,
"width": 240,
"height": 20,
"alignment": "left_center",
"action": {
"type": "openLink",
"link": "https://www.akliz.net/nomifactory"
}
}
},
"labels": {
"mojang": {
"text": "Copyright Mojang AB. Do not distribute!",
"posX": -197,
"posY": -10,
"color": -1,
"alignment": "bottom_right"
}
},
"other": {
"background": {
"image": "",
"slideshow": {
"displayDuration": 100,
"fadeDuration": 40,
"shuffle": true,
"images": [
"minecraft:textures/gui/title/background/besoiobiy_1.jpg",
"minecraft:textures/gui/title/background/besoiobiy_2.jpg",
"minecraft:textures/gui/title/background/cactus_cool.jpg",
"minecraft:textures/gui/title/background/cobracreeper1.jpg",
"minecraft:textures/gui/title/background/darkarkangel.jpg",
"minecraft:textures/gui/title/background/ely_1.jpg",
"minecraft:textures/gui/title/background/ely_2.jpg",
"minecraft:textures/gui/title/background/ely_3.jpg",
"minecraft:textures/gui/title/background/emiuna.jpg",
"minecraft:textures/gui/title/background/extracoolcat_1.jpg",
"minecraft:textures/gui/title/background/extracoolcat_2.jpg",
"minecraft:textures/gui/title/background/extracoolcat_3.jpg",
"minecraft:textures/gui/title/background/extracoolcat_4.jpg",
"minecraft:textures/gui/title/background/gaboggamer.jpg",
"minecraft:textures/gui/title/background/itstheguywhoasked.jpg",
"minecraft:textures/gui/title/background/lyeo.jpg",
"minecraft:textures/gui/title/background/pgs_1.jpg",
"minecraft:textures/gui/title/background/pgs_2.jpg",
"minecraft:textures/gui/title/background/qr_est.jpg",
"minecraft:textures/gui/title/background/supasem.jpg"
]
}
}
}
}

View File

@ -0,0 +1,4 @@
import { register } from "node:module";
import { pathToFileURL } from "node:url";
register("ts-node/esm", pathToFileURL("./"));

View File

@ -1,10 +1,25 @@
{
"compilerOptions": {
"sourceMap": true,
"module": "commonjs",
"target": "es5",
"lib": ["es2023"],
"module": "nodenext",
"moduleResolution": "nodenext",
"target": "es2022",
"strict": true,
"noEmit": true,
"outDir": ".dest",
"rootDir": ".",
"allowImportingTsExtensions": true,
"resolvePackageJsonImports": true,
"esModuleInterop": true,
"resolveJsonModule": true,
"downlevelIteration": true,
}
}
"skipLibCheck": true,
"resolveJsonModule": true
},
"include": [
"gulpfile.ts",
"globals.ts",
"buildConfig.ts",
"tasks/**/*",
"types/**/*",
"utils/**/*"
]
}
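
With moduleResolution nodenext plus allowImportingTsExtensions and noEmit above, the buildscript can use the package-internal "#"-prefixed specifiers with explicit .ts extensions seen throughout this diff. The mapping sketched below is an assumed illustration of the imports field that would live in tools/package.json (not shown here).

// Assumed package.json "imports" entries backing the aliases used above:
//   "#globals": "./globals.ts", "#buildConfig": "./buildConfig.ts",
//   "#utils/*": "./utils/*", "#types/*": "./types/*"
import { isEnvVariableSet } from "#utils/util.ts";
import { modpackManifest } from "#globals";
import buildConfig from "#buildConfig";

console.log(isEnvVariableSet("CI"), modpackManifest.name, buildConfig !== undefined);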

View File

@ -0,0 +1,122 @@
import { Quest } from "./bqQuestBook.ts";
import { Operation } from "just-diff";
import { Matcher } from "picomatch";
// QB Porting From.
export type PortingType = "NORMAL" | "EXPERT";
// Which File to Source From
export type SourceOption = "CFG" | "CFG-OVERRIDE";
export type YesIgnoreNo = "YES" | "IGNORE" | "NO";
// How should we apply description change?
export type DescriptionTaskChange = "APPLY" | "REPLACE" | "CUSTOM" | "IGNORE";
export type CustomDescriptionTaskTemplate = "APPLY" | "REPLACE" | "ORIGINAL";
export type TaskDifferentSolution = "APPLY" | "CONTINUE" | "IGNORE";
export interface Changed {
added: Quest[];
modified: Modified[];
removed: Quest[];
}
export interface Modified {
currentQuest: Quest;
oldQuest: Quest;
change: QuestChange[];
}
export interface QuestChange {
op: Operation;
path: number[] | string[];
value?: unknown;
}
export interface Replacements {
search: RegExp;
replacement: string;
}
export interface Parser {
id: string;
name: string;
condition: Matcher;
logic: SimpleLogic | BunchedLogic;
}
export enum LogicType {
Simple = "SIMPLE",
Bunched = "BUNCHED",
}
export interface SimpleLogic {
type: LogicType.Simple;
applyOnce: boolean;
formattedName?: (path: string[], op: Operation) => string;
func: (
questToModify: Quest,
modify: Modified,
change: QuestChange,
path: string[],
) => Promise<void>;
}
export interface BunchedLogic {
type: LogicType.Bunched;
applyTogether: (path1: string[], path2: string[]) => boolean;
formattedName: (changeAndPaths: ChangeAndPath[]) => string[];
func: (
questToModify: Quest,
modify: Modified,
changeAndPaths: ChangeAndPath[],
) => Promise<void>;
}
export interface SavedPorter {
savedQuestMap: SavedQuestPath[];
alwaysAskQuestsNormal: number[];
alwaysAskQuestsExpert: number[];
ignoreQuestsNormal: number[];
ignoreQuestsExpert: number[];
}
export interface SavedQuestPath {
normal: number;
expert: number;
}
export interface BunchedParserPath {
logic: BunchedLogic;
changeAndPath: ChangeAndPath[];
}
export type ChangeAndPath = {
change: QuestChange;
path: string[];
};
export type SpecialModifierHandler = (
oldQuest: Quest,
currentQuest: Quest,
questDiff: QuestChange[],
) => void;
export class Message {
private readonly message: string;
private repeats: number;
constructor(message: string) {
this.message = message;
this.repeats = 1;
}
incrementRepeats(): void {
this.repeats++;
}
toFormattedString(): string {
if (this.repeats === 1) return this.message;
else return `${this.message} (x${this.repeats})`;
}
}
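
An illustrative Parser built against the interfaces above; the import path, id, glob pattern and porting logic are all hypothetical, since the real parsers live in the port QB task that this excerpt does not show.

import picomatch from "picomatch";
import { LogicType, type Parser } from "#types/portQBData.ts"; // path assumed

const titleParser: Parser = {
  id: "title", // hypothetical id
  name: "Quest Title",
  condition: picomatch("properties/*/name*"), // hypothetical path glob
  logic: {
    type: LogicType.Simple,
    applyOnce: false,
    formattedName: (path, op) => `${op} at ${path.join("/")}`,
    func: async (questToModify, modify, change) => {
      // Hypothetical: carry the changed title value across to the ported quest.
    },
  },
};

console.log(titleParser.condition("properties/betterquesting/name:8"));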

Some files were not shown because too many files have changed in this diff.