Add Pack Mode Switcher to GitHub and Nightly Builds (#977)

[FEATURE]
This commit is contained in:
Integer Limit 2024-09-01 15:32:04 +10:00 committed by GitHub
parent efe23ea4d0
commit ca47584704
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
8 changed files with 93 additions and 25 deletions

View File

@@ -150,7 +150,7 @@ jobs:
with:
name: Built Pack
path: |
./build/*.zip
./build/**/*.zip
./build/*.md
if-no-files-found: error
compression-level: 0

View File

@@ -52,6 +52,8 @@ export const zipClient = zip.zipClient;
export const zipServer = zip.zipServer;
export const zipLang = zip.zipLang;
export const zipMMC = zip.zipMMC;
export const zipClientCF = zip.zipClientCF;
export const zipServerCF = zip.zipServerCF;
export const zipAll = zip.zipAll;
export default gulp.series(buildAll, zipAll);

View File

@@ -52,7 +52,7 @@ async function createChangelog(): Promise<ChangelogData> {
}
return data;
}
logInfo("No Iterations Detected.");
logInfo("Default Iteration Detected.");
categoriesSetup();
specialParserSetup(data);

View File

@@ -30,7 +30,11 @@ const variablesToCheck = [
async function upload(files: { name: string; displayName: string }[]) {
files.forEach((file) => {
const path = upath.join(buildConfig.buildDestinationDirectory, file.name);
const path = upath.join(
buildConfig.buildDestinationDirectory,
"cf",
file.name,
);
if (!fs.existsSync(path)) {
throw new Error(`File ${path} doesn't exist!`);
}
@@ -120,7 +124,11 @@ async function upload(files: { name: string; displayName: string }[]) {
const response: { id: number } = (await getAxios()(options)).data;
if (response && response.id) {
uploadedIDs.push({ filePath: path, displayName: file.displayName, id: response.id });
uploadedIDs.push({
filePath: path,
displayName: file.displayName,
id: response.id,
});
if (!parentID) {
parentID = response.id;
}

View File

@@ -14,20 +14,26 @@ import sanitize from "sanitize-filename";
async function zipFolder(
path: string,
zipName: string = upath.basename(path) + ".zip",
globs: string[],
dest: string,
zipName: string,
): Promise<void> {
return new Promise((resolve) => {
src(upath.join(path, "**"), { base: path, dot: true, encoding: false })
src(globs, { cwd: path, dot: true, encoding: false })
.pipe(zip(zipName))
.pipe(gulp.dest(buildConfig.buildDestinationDirectory))
.pipe(gulp.dest(dest))
.on("end", resolve);
});
}
function makeZipper(src: string, artifactName: string) {
function makeZipper(src: string, artifactName: string, isCFZip = false) {
const zipFn = () => {
return zipFolder(
upath.join(src),
src,
isCFZip ? buildConfig.cfZipGlobs : buildConfig.normalZipGlobs,
isCFZip
? upath.join(buildConfig.buildDestinationDirectory, "cf")
: buildConfig.buildDestinationDirectory,
sanitize(
(
makeArtifactNameBody(modpackManifest.name) + `-${artifactName}.zip`
@@ -37,7 +43,7 @@ function makeZipper(src: string, artifactName: string) {
};
Object.defineProperty(zipFn, "name", {
value: `zip${artifactName}`,
value: `zip${artifactName}${isCFZip ? "CF" : ""}`,
configurable: true,
});
@@ -48,5 +54,13 @@ export const zipServer = makeZipper(serverDestDirectory, "Server");
export const zipClient = makeZipper(clientDestDirectory, "Client");
export const zipLang = makeZipper(langDestDirectory, "Lang");
export const zipMMC = makeZipper(mmcDestDirectory, "MMC");
export const zipServerCF = makeZipper(serverDestDirectory, "Server", true);
export const zipClientCF = makeZipper(clientDestDirectory, "Client", true);
export const zipAll = gulp.parallel(zipServer, zipClient, zipLang);
export const zipAll = gulp.parallel(
zipServer,
zipClient,
zipLang,
zipServerCF,
zipClientCF,
);

View File

@@ -60,6 +60,19 @@ async function copyOverrides() {
});
}
/**
 * Copies Modpack Pack Mode Switcher Scripts into the shared overrides folder.
 */
async function copyPackModeSwitchers() {
	return new Promise((resolve) => {
		const switcherStream = src(buildConfig.packModeSwitcherGlobs, {
			cwd: upath.join(rootDirectory),
		});
		switcherStream
			.pipe(dest(upath.join(sharedDestDirectory, overridesFolder)))
			.on("end", resolve);
	});
}
/**
* Fetch external dependencies and remove the field from the manifest file.
*/
@@ -176,6 +189,7 @@ export default gulp.series(
sharedCleanUp,
createSharedDirs,
copyOverrides,
copyPackModeSwitchers,
fetchOrMakeChangelog,
fetchExternalDependencies,
updateFilesBuildSetup,

View File

@@ -4,12 +4,16 @@
"downloaderConcurrency": 50,
"downloaderCheckHashes": true,
"downloaderCacheDirectory": "../.cache",
"changelogCacheMaxPages": 5,
"launchscriptsMinRAM": "2048M",
"launchscriptsMaxRAM": "2048M",
"launchscriptsJVMArgs": "",
"copyToSharedDirGlobs": ["overrides/**/*"],
"packModeSwitcherGlobs": ["pack-mode-switcher.*"],
"copyFromSharedServerGlobs": ["overrides/**/*", "!overrides/resources/**/*"],
"copyFromSharedClientGlobs": ["overrides/**/*"],
"normalZipGlobs": ["**/*"],
"cfZipGlobs": ["**/*", "!**/pack-mode-switcher.*"],
"buildDestinationDirectory": "../build",
"nightlyHookAvatar": "",
"nightlyHookName": ""

View File

@@ -642,13 +642,26 @@ const issueURLCache: Map<number, string> = new Map<number, string>();
export async function getIssueURLs(): Promise<void> {
if (issueURLCache.size > 0) return;
try {
const issues = await octokit.paginate(octokit.issues.listForRepo, {
let page = 1;
const issues = await octokit.paginate(
octokit.issues.listForRepo,
{
owner: repoOwner,
repo: repoName,
per_page: 100,
state: "closed",
sort: "updated",
});
},
(response, done) => {
if (page++ >= buildConfig.changelogCacheMaxPages) {
logInfo(
`Fetched ${buildConfig.changelogCacheMaxPages} Pages of 100 Issues! Final Issue Fetched: #${response.data.at(-1)?.number ?? 0}`,
);
done();
}
return response.data;
},
);
issues.forEach((issue) => {
if (!issueURLCache.has(issue.number))
issueURLCache.set(issue.number, issue.html_url);
@@ -703,11 +716,24 @@ const commitAuthorCache: Map<string, string> = new Map<string, string>();
export async function getCommitAuthors(): Promise<void> {
if (commitAuthorCache.size > 0) return;
try {
const commits = await octokit.paginate(octokit.repos.listCommits, {
let page = 1;
const commits = await octokit.paginate(
octokit.repos.listCommits,
{
owner: repoOwner,
repo: repoName,
per_page: 100,
});
},
(response, done) => {
if (page++ >= buildConfig.changelogCacheMaxPages) {
logInfo(
`Fetched ${buildConfig.changelogCacheMaxPages} Pages of 100 Commits! Final Commit Fetched: ${response.data.at(-1)?.sha ?? ""}`,
);
done();
}
return response.data;
},
);
commits.forEach((commit) => {
if (!commitAuthorCache.has(commit.sha))
commitAuthorCache.set(commit.sha, commit.author?.login ?? "");