+ * The order that the parsers appear here is the order they are run in.
+ * Note that unless `addSHA` of the parser is set to false, a commit parsed by a previous parser will not be allowed to be parsed by later parsers,
+ * even if it fits in their dirs.
+ */
+export const parsers: Parser[] = [fixupParsing, overridesParsing, manifestParsing, finalParsing];
+
+/* Parsing Information / Allocations for Mod Changes */
+
+export type ModChangesType = "added" | "removed" | "updated";
+
+/**
+ * An allocation describing where a mod change goes: the category and sub-category to place it in, and the template used to format it.
+ */
+export interface ModChangesAllocation {
+ /**
+ * The category to put the mod change in.
+ */
+ category: Category;
+
+ /**
+ * The sub-category (of the above category) to put the mod change in.
+ */
+ subCategory: SubCategory;
+
+ /**
+ * The template to use.
+ * Keys:
+ * `{{ modName }}`: replaced by the mod name,
+ * `{{ oldVersion }}`: replaced by the old version (if applicable),
+ * `{{ newVersion }}`: replaced by the new version (if applicable).
+ */
+ template: string;
+}
+
+export const modChangesAllocations: Record<ModChangesType, ModChangesAllocation> = {
+ added: {
+ category: generalCategory,
+ subCategory: modAdditions,
+ template: "{{ modName }}: *v{{ newVersion }}*",
+ },
+ updated: {
+ category: generalCategory,
+ subCategory: modUpdates,
+ template: "{{ modName }}: *v{{ oldVersion }} ⇥ v{{ newVersion }}*",
+ },
+ removed: {
+ category: generalCategory,
+ subCategory: modRemovals,
+ template: "{{ modName }}: *v{{ oldVersion }}*",
+ },
+};
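For reference, the `template` strings above are ordinary mustache templates, rendered by `getModChangeMessage` in the next file. A minimal sketch with hypothetical mod name and versions:

```ts
import mustache from "mustache";

// Hypothetical values, for illustration only.
const rendered = mustache.render("{{ modName }}: *v{{ oldVersion }} ⇥ v{{ newVersion }}*", {
	modName: "Example Mod",
	oldVersion: "1.2.0",
	newVersion: "1.3.0",
});

console.log(rendered); // Example Mod: *v1.2.0 ⇥ v1.3.0*
```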
diff --git a/tools/tasks/changelog/generateModChanges.ts b/tools/tasks/changelog/generateModChanges.ts
new file mode 100644
index 0000000..ccf7476
--- /dev/null
+++ b/tools/tasks/changelog/generateModChanges.ts
@@ -0,0 +1,177 @@
+import { cleanupVersion, compareAndExpandManifestDependencies, getChangelog, getFileAtRevision } from "../../util/util";
+import { ModpackManifest, ModpackManifestFile } from "../../types/modpackManifest";
+import { Commit, ModChangeInfo } from "../../types/changelogTypes";
+import ListDiffer, { DiffResult } from "@egjs/list-differ";
+import dedent from "dedent-js";
+import mustache from "mustache";
+import { defaultIndentation, modChangesAllocations, repoLink } from "./definitions";
+import ChangelogData from "./changelogData";
+import { SpecialChangelogFormatting } from "../../types/changelogTypes";
+
+/**
+ * Mod Changes special formatting
+ */
+const getModChangesFormatting: (commits: Commit[]) => SpecialChangelogFormatting<Commit[]> = (commits) => {
+ return {
+ formatting: (changelogMessage, commits) => {
+ const indentation = changelogMessage.indentation == undefined ? defaultIndentation : changelogMessage.indentation;
+ const message = changelogMessage.commitMessage.trim();
+ if (commits.length > 1) {
+ const authors: string[] = [];
+ const formattedCommits: string[] = [];
+ commits.forEach((commit) => {
+ if (!authors.includes(commit.author_name)) authors.push(commit.author_name);
+ formattedCommits.push(`[\`${commit.hash.substring(0, 7)}\`](${repoLink}commit/${commit.hash})`);
+ });
+ authors.sort();
+ return `${indentation}* ${message} - **${authors.join("**, **")}** (${formattedCommits.join(", ")})`;
+ }
+
+ const commit = commits[0];
+ const shortSHA = commit.hash.substring(0, 7);
+ const author = commit.author_name;
+
+ return `${indentation}* ${message} - **${author}** ([\`${shortSHA}\`](${repoLink}commit/${commit.hash}))`;
+ },
+ storage: commits,
+ } as SpecialChangelogFormatting<Commit[]>;
+};
+
+/**
+ * Pushes the mod changes, with their related commits, to their respective sub-categories in the allocated categories.
+ */
+export default async function generateModChanges(data: ChangelogData): Promise<void> {
+ const oldManifest: ModpackManifest = JSON.parse(getFileAtRevision("manifest.json", data.since));
+ const newManifest: ModpackManifest = JSON.parse(getFileAtRevision("manifest.json", data.to));
+ const comparisonResult = await compareAndExpandManifestDependencies(oldManifest, newManifest);
+
+ const commitList = await getChangelog(data.since, data.to, ["manifest.json"]);
+ const projectIDsToCommits: Map<number, Commit[]> = new Map();
+
+ commitList.forEach((commit) => {
+ const projectIDs = getChangedProjectIDs(commit.hash);
+ projectIDs.forEach((id) => {
+ if (projectIDsToCommits.has(id)) projectIDsToCommits.get(id).push(commit);
+ else projectIDsToCommits.set(id, [commit]);
+ });
+ });
+
+ [
+ {
+ allocation: modChangesAllocations.added,
+ list: comparisonResult.added,
+ },
+ {
+ allocation: modChangesAllocations.updated,
+ list: comparisonResult.modified,
+ },
+ {
+ allocation: modChangesAllocations.removed,
+ list: comparisonResult.removed,
+ },
+ ].forEach((block) => {
+ if (block.list.length == 0) {
+ return;
+ }
+ const list = block.list
+ // Yeet invalid project names.
+ .filter((project) => !/project-\d*/.test(project.modName))
+ .sort((a, b) => a.modName.localeCompare(b.modName));
+
+ list.forEach((info) => {
+ let commits: Commit[] = undefined;
+ if (info.projectID && projectIDsToCommits.has(info.projectID)) {
+ commits = projectIDsToCommits.get(info.projectID);
+ }
+ block.allocation.category.changelogSection.get(block.allocation.subCategory).push({
+ commitMessage: getModChangeMessage(info, block.allocation.template),
+ specialFormatting: getModChangesFormatting(commits),
+ });
+ });
+ });
+}
+
+/**
+ * Returns the rendered mod change message, built from the given info and template.
+ * @param info The mod change info, containing the mod name and versions.
+ * @param template The message template to render.
+ */
+function getModChangeMessage(info: ModChangeInfo, template: string): string {
+ const oldVersion = cleanupVersion(info.oldVersion);
+ const newVersion = cleanupVersion(info.newVersion);
+
+ // If not provided with either version, return just mod name
+ if (!oldVersion && !newVersion) return info.modName;
+
+ // Replace in template
+ return mustache.render(template, {
+ modName: info.modName,
+ oldVersion: oldVersion,
+ newVersion: newVersion,
+ });
+}
+
+/**
+ * Gets what project IDs, in manifest.json, a commit changed.
+ * @param SHA The sha of the commit
+ */
+function getChangedProjectIDs(SHA: string): number[] {
+ const change = getCommitChange(SHA);
+ const projectIDs: number[] = [];
+
+ if (!change || !change.diff) {
+ return projectIDs;
+ }
+
+ // Add all unique IDs from both diff lists
+ change.diff.added.forEach((index) => {
+ const id = change.newManifest.files[index].projectID;
+ if (!projectIDs.includes(id)) projectIDs.push(id);
+ });
+
+ change.diff.removed.forEach((index) => {
+ const id = change.oldManifest.files[index].projectID;
+ if (!projectIDs.includes(id)) projectIDs.push(id);
+ });
+
+ return projectIDs;
+}
+
+/**
+ * A storage of what parts of the 'manifest.json' file a commit changed.
+ */
+interface CommitChange {
+ diff: DiffResult<ModpackManifestFile>;
+ oldManifest: ModpackManifest;
+ newManifest: ModpackManifest;
+}
+
+/**
+ * Gets what parts of the 'manifest.json' file a commit changed.
+ * @param SHA The sha of the commit
+ */
+function getCommitChange(SHA: string): CommitChange {
+ let oldManifest: ModpackManifest, newManifest: ModpackManifest;
+ try {
+ oldManifest = JSON.parse(getFileAtRevision("manifest.json", `${SHA}^`)) as ModpackManifest;
+ newManifest = JSON.parse(getFileAtRevision("manifest.json", SHA)) as ModpackManifest;
+ } catch (e) {
+ console.error(dedent`
+ Failed to parse the manifest.json file at commit ${SHA} or the commit before!
+ Skipping...`);
+ return;
+ }
+
+ let result: DiffResult<ModpackManifestFile>;
+ if (oldManifest && newManifest) {
+ const differ = new ListDiffer(oldManifest.files, (e) => e.fileID);
+ result = differ.update(newManifest.files);
+ }
+
+ return {
+ diff: result,
+ oldManifest: oldManifest,
+ newManifest: newManifest,
+ };
+}
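A self-contained sketch of the `@egjs/list-differ` step that `getCommitChange` performs above, with made-up manifest entries keyed by `fileID`:

```ts
import ListDiffer from "@egjs/list-differ";

// Hypothetical manifest file entries.
const oldFiles = [
	{ projectID: 11, fileID: 100 },
	{ projectID: 22, fileID: 200 },
];
const newFiles = [
	{ projectID: 22, fileID: 200 },
	{ projectID: 33, fileID: 300 },
];

const differ = new ListDiffer(oldFiles, (e) => e.fileID);
const result = differ.update(newFiles);

// `added` holds indices into the new list, `removed` indices into the old list,
// which is why getChangedProjectIDs reads newManifest.files and oldManifest.files respectively.
console.log(result.added.map((i) => newFiles[i].projectID)); // [ 33 ]
console.log(result.removed.map((i) => oldFiles[i].projectID)); // [ 11 ]
```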
diff --git a/tools/tasks/changelog/parser.ts b/tools/tasks/changelog/parser.ts
new file mode 100644
index 0000000..5fcc7d6
--- /dev/null
+++ b/tools/tasks/changelog/parser.ts
@@ -0,0 +1,117 @@
+import { Category, Commit, Parser, SubCategory } from "../../types/changelogTypes";
+import { categories, combineKey, defaultIndentation, detailsKey, expandKey, noCategoryKey } from "./definitions";
+import { parseCombine, parseDetails, parseExpand } from "./specialParser";
+import { getChangelog } from "../../util/util";
+import ChangelogData from "./changelogData";
+
+export default async function parseParser(data: ChangelogData, parser: Parser): Promise<void> {
+ const commits = await getChangelog(data.since, data.to, parser.dirs);
+
+ for (const commit of commits) {
+ if (data.shaList.has(commit.hash)) continue;
+
+ if (data.commitFixes.has(commit.hash)) {
+ const fixUpInfo = data.commitFixes.get(commit.hash);
+ commit.message = fixUpInfo.newTitle;
+ commit.body = fixUpInfo.newBody;
+ }
+
+ if (parser.skipCallback(commit, commit.message, commit.body)) {
+ if (!parser.addSHACallback || parser.addSHACallback(commit, true)) data.shaList.add(commit.hash);
+ continue;
+ }
+
+ const parsed = await parser.itemCallback(parser, commit, commit.message, commit.body);
+
+ if (!parsed && parser.leftOverCallback) parser.leftOverCallback(commit, commit.message, commit.body, []);
+ if (!parser.addSHACallback || parser.addSHACallback(commit, parsed)) data.shaList.add(commit.hash);
+
+ if (parser.addCommitListCallback(commit, parsed)) data.commitList.push(commit);
+ }
+}
+
+/**
+ * Parses a commit body.
+ * @param commitMessage The commit message to put into the changelog.
+ * @param commitBody The commit body to parse with.
+ * @param commitObject The commit object.
+ * @param parser The parser object to use for parse expand/details.
+ * @return parsed Returns true if contains parsing keys, false if not.
+ */
+export async function parseCommitBody(
+ commitMessage: string,
+ commitBody: string,
+ commitObject: Commit,
+ parser: Parser,
+): Promise<boolean> {
+ if (commitBody.includes(expandKey)) {
+ await parseExpand(commitBody, commitObject, parser);
+ return true;
+ }
+ if (commitBody.includes(detailsKey)) {
+ await parseDetails(commitMessage, commitBody, commitObject, parser);
+ return true;
+ }
+ if (commitBody.includes(noCategoryKey)) {
+ return true;
+ }
+ if (commitBody.includes(combineKey)) {
+ await parseCombine(commitBody, commitObject);
+ return true;
+ }
+ return sortCommit(commitMessage, commitBody, commitObject);
+}
+
+/**
+ * Adds the (commit) message to its correct category. Does not parse special effect tags.
+ * @param message The message to add
+ * @param commitBody The body to use to sort
+ * @param commit The commit object to grab date, author and SHA from
+ * @param indentation The indentation of the message, if needed. Defaults to `defaultIndentation`.
+ * @return added If the commit message was added to a category
+ */
+function sortCommit(message: string, commitBody: string, commit: Commit, indentation = defaultIndentation): boolean {
+ const sortedCategories: Category[] = findCategories(commitBody);
+ if (sortedCategories.length === 0) return false;
+
+ sortedCategories.forEach((category) => {
+ const subCategory = findSubCategory(commitBody, category);
+ category.changelogSection.get(subCategory).push({
+ commitMessage: message,
+ commitObject: commit,
+ indentation: indentation,
+ });
+ });
+ return true;
+}
+
+/**
+ * Finds the categories that a commit fits in.
+ * @param commitBody The commit body to sort with
+ * @return categoryList The categories the commit belongs in. Empty if no category is specified via keys.
+ */
+export function findCategories(commitBody: string): Category[] | undefined {
+ const sortedCategories: Category[] = [];
+ for (const category of categories) {
+ if (category.commitKey !== undefined) {
+ if (commitBody.includes(category.commitKey)) {
+ sortedCategories.push(category);
+ }
+ }
+ }
+ return sortedCategories;
+}
+
+/**
+ * Finds the correct Sub Category a commit should go in. Must be given the Category first!
+ */
+export function findSubCategory(commitBody: string, category: Category): SubCategory {
+ for (const subCategory of category.subCategories) {
+ if (subCategory.commitKey !== undefined) {
+ if (commitBody.includes(subCategory.commitKey)) {
+ return subCategory;
+ }
+ }
+ }
+ return category.defaultSubCategory;
+}
diff --git a/tools/tasks/changelog/pusher.ts b/tools/tasks/changelog/pusher.ts
new file mode 100644
index 0000000..c139891
--- /dev/null
+++ b/tools/tasks/changelog/pusher.ts
@@ -0,0 +1,193 @@
+import ChangelogData from "./changelogData";
+import { categories, defaultIndentation } from "./definitions";
+import { Category, ChangelogMessage, Commit } from "../../types/changelogTypes";
+import { repoLink } from "./definitions";
+
+let data: ChangelogData;
+
+export default function pushAll(inputData: ChangelogData): void {
+ data = inputData;
+ // Push the titles.
+ // Center Align is replaced by the correct center align style in the respective deployments.
+ // Must be triple bracketed, to make mustache not html escape it.
+ if (data.releaseType === "Cutting Edge Build") {
+ const date = new Date().toLocaleDateString("en-us", {
+ year: "numeric",
+ month: "short",
+ day: "numeric",
+ hour12: true,
+ hour: "numeric",
+ minute: "numeric",
+ });
+ // noinspection HtmlUnknownAttribute
+ data.builder.push(`
`,
+ });
const tokenHeaders = {
"X-Api-Token": process.env.CURSEFORGE_API_TOKEN,
@@ -97,6 +59,8 @@ async function upload(files: { name: string; displayName: string }[], opts?: CFU
let clientFileID: number | null;
+ const releaseType: DeployReleaseType = inputToDeployReleaseTypes[process.env.RELEASE_TYPE];
+
// Upload artifacts.
for (const file of files) {
const options = {
@@ -109,9 +73,9 @@ async function upload(files: { name: string; displayName: string }[], opts?: CFU
formData: {
metadata: JSON.stringify({
changelog: changelog,
- changelogType: "markdown",
- releaseType: opts.releaseType || "release",
- parentFileID: clientFileID,
+ changelogType: "html",
+ releaseType: releaseType ? releaseType.cfReleaseType : "release",
+ parentFileID: clientFileID ? clientFileID : undefined,
gameVersions: clientFileID ? undefined : [version.id],
displayName: file.displayName,
}),
@@ -148,9 +112,7 @@ export async function deployCurseForge(): Promise<void> {
}
});
- const tag = process.env.GITHUB_TAG;
- const flavorTitle = process.env.BUILD_FLAVOR_TITLE;
- const displayName = [modpackManifest.name, tag.replace(/^v/, ""), flavorTitle].filter(Boolean).join(" - ");
+ const displayName = process.env.GITHUB_TAG;
const files = [
{
@@ -159,9 +121,9 @@ export async function deployCurseForge(): Promise {
},
{
name: sanitize((makeArtifactNameBody(modpackManifest.name) + "-server.zip").toLowerCase()),
- displayName: `${displayName} Server`,
+ displayName: `${displayName}-server`,
},
];
- upload(files);
+ await upload(files);
}
diff --git a/tools/tasks/deploy/releases.ts b/tools/tasks/deploy/releases.ts
index 489d478..e2339d2 100644
--- a/tools/tasks/deploy/releases.ts
+++ b/tools/tasks/deploy/releases.ts
@@ -1,4 +1,4 @@
-import { modpackManifest, sharedDestDirectory } from "../../globals";
+import { modpackManifest } from "../../globals";
import fs from "fs";
import upath from "upath";
@@ -7,8 +7,10 @@ import { makeArtifactNameBody } from "../../util/util";
import Bluebird from "bluebird";
import { Octokit } from "@octokit/rest";
import sanitize from "sanitize-filename";
+import mustache from "mustache";
+import { DeployReleaseType, inputToDeployReleaseTypes } from "../../types/changelogTypes";
-const variablesToCheck = ["GITHUB_TAG", "GITHUB_TOKEN", "GITHUB_REPOSITORY"];
+const variablesToCheck = ["GITHUB_TAG", "GITHUB_TOKEN", "GITHUB_REPOSITORY", "RELEASE_TYPE"];
/**
* Uploads build artifacts to GitHub Releases.
@@ -24,7 +26,7 @@ async function deployReleases(): Promise<void> {
});
const body = makeArtifactNameBody(modpackManifest.name);
- const files = ["client", "server", "lang"].map((file) => sanitize(`${body}-${file}.zip`.toLowerCase()));
+ const files = ["client", "server", "lang", "mmc"].map((file) => sanitize(`${body}-${file}.zip`.toLowerCase()));
/**
* Obligatory file check.
@@ -51,16 +53,21 @@ async function deployReleases(): Promise {
};
const tag = process.env.GITHUB_TAG;
- const flavorTitle = process.env.BUILD_FLAVOR_TITLE;
+ const releaseType: DeployReleaseType = inputToDeployReleaseTypes[process.env.RELEASE_TYPE];
+ const preRelease = releaseType ? releaseType.isPreRelease : false;
- // Since we've built everything beforehand, the changelog must be available in the shared directory.
- const changelog = await (await fs.promises.readFile(upath.join(sharedDestDirectory, "CHANGELOG.md"))).toString();
+ // Since we've grabbed, or built, everything beforehand, the Changelog file should be in the build dir
+ let changelog = (
+ await fs.promises.readFile(upath.join(buildConfig.buildDestinationDirectory, "CHANGELOG.md"))
+ ).toString();
+
+ changelog = mustache.render(changelog, { CENTER_ALIGN: 'align="center"', CF_REDIRECT: "" });
// Create a release.
const release = await octokit.repos.createRelease({
tag_name: tag || "latest-dev-preview",
- prerelease: !tag,
- name: [modpackManifest.name, tag.replace(/^v/, ""), flavorTitle].filter(Boolean).join(" - "),
+ prerelease: preRelease,
+ name: tag || "latest-dev-preview",
body: changelog,
...repo,
});
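The `CENTER_ALIGN`/`CF_REDIRECT` placeholders above are plain mustache keys baked into the generated changelog; each deployment fills them in differently. A minimal sketch with a hypothetical changelog fragment (triple braces keep mustache from HTML-escaping the substituted attribute):

```ts
import mustache from "mustache";

// Hypothetical fragment of a generated CHANGELOG.md.
const changelogFragment = '<h1 {{{ CENTER_ALIGN }}}>Changelog</h1>{{{ CF_REDIRECT }}}';

// GitHub Releases variant, as in deployReleases above: real align attribute, empty CF redirect.
console.log(mustache.render(changelogFragment, { CENTER_ALIGN: 'align="center"', CF_REDIRECT: "" }));
// => <h1 align="center">Changelog</h1>
```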
diff --git a/tools/tasks/github/quest.ts b/tools/tasks/github/quest.ts
index cef936a..4e62a25 100644
--- a/tools/tasks/github/quest.ts
+++ b/tools/tasks/github/quest.ts
@@ -105,7 +105,7 @@ export async function transformQuestBook(): Promise<void> {
// Source Quest Book File Locations
const questPathNormalDev = upath.join(sharedQBDefaults, "saved_quests", "NormalQuestsDev.json");
const questPathExpertDev = upath.join(sharedQBDefaults, "saved_quests", "ExpertQuestsDev.json");
-
+
// Quest Book Objects
const questBookNormal: QuestBook = JSON.parse((await fs.promises.readFile(questPathNormalDev)).toString());
const questBookExpert: QuestBook = JSON.parse((await fs.promises.readFile(questPathExpertDev)).toString());
@@ -113,31 +113,27 @@ export async function transformQuestBook(): Promise<void> {
// Quest Book Paths
const questPathNormalDefault = upath.join(sharedQBDefaults, "DefaultQuests.json");
const questPathNormalOverride = upath.join(sharedConfigOverrides, "normal", "betterquesting", "DefaultQuests.json");
-
+
const questPathExpertDefault = upath.join(sharedQBDefaults, "saved_quests", "ExpertQuests.json");
const questPathExpertOverride = upath.join(sharedConfigOverrides, "expert", "betterquesting", "DefaultQuests.json");
-
+
// Quest Lang Location
const questLangLocation = upath.join(buildConfig.buildSourceDirectory, overridesFolder, langFileLocation);
// Traverse through the quest book and rewrite titles/descriptions.
// Extract title/desc pairs into a lang file.
const lines: string[] = [];
-
- lines.push("Normal Quest Lang Entries:",
- "",
- );
+
+ lines.push("Normal Quest Lang Entries:", "");
// Normal Mode Quest lines.
transformKeyPairs(questBookNormal["questLines:9"], "normal", "line", lines);
// Normal Mode Quests themselves.
transformKeyPairs(questBookNormal["questDatabase:9"], "normal", "db", lines);
-
- lines.push("Expert Quest Lang Entries:",
- "",
- );
-
+
+ lines.push("Expert Quest Lang Entries:", "");
+
// Expert Mode Quest lines.
transformKeyPairs(questBookExpert["questLines:9"], "expert", "line", lines);
@@ -151,13 +147,11 @@ export async function transformQuestBook(): Promise<void> {
// Strip useless metadata.
stripUselessMetadata(questBookNormal);
stripUselessMetadata(questBookExpert);
-
+
// Write QB files.
- fs.promises.writeFile(questPathNormalDefault, JSON.stringify(questBookNormal, null, 2));
- fs.promises.writeFile(questPathNormalOverride, JSON.stringify(questBookNormal, null, 2));
-
- fs.promises.writeFile(questPathExpertDefault, JSON.stringify(questBookExpert, null, 2));
- fs.promises.writeFile(questPathExpertOverride, JSON.stringify(questBookExpert, null, 2));
+ await fs.promises.writeFile(questPathNormalDefault, JSON.stringify(questBookNormal, null, 2));
+ await fs.promises.writeFile(questPathNormalOverride, JSON.stringify(questBookNormal, null, 2));
+
+ await fs.promises.writeFile(questPathExpertDefault, JSON.stringify(questBookExpert, null, 2));
+ return await fs.promises.writeFile(questPathExpertOverride, JSON.stringify(questBookExpert, null, 2));
}
-
-
diff --git a/tools/tasks/misc/createModList.ts b/tools/tasks/misc/createModList.ts
new file mode 100644
index 0000000..325f4ac
--- /dev/null
+++ b/tools/tasks/misc/createModList.ts
@@ -0,0 +1,69 @@
+import log from "fancy-log";
+import { fetchFileInfo, fetchFilesBulk, fetchProject, fetchProjectsBulk } from "../../util/curseForgeAPI";
+import { modpackManifest } from "../../globals";
+import { checkGitTag, getFileAtRevision } from "../../util/util";
+import { ModpackManifest } from "../../types/modpackManifest";
+import { CurseForgeFileInfo, CurseForgeModInfo } from "../../types/curseForge";
+
+export interface ModFileInfo {
+ modInfo: CurseForgeModInfo;
+ fileInfo: CurseForgeFileInfo;
+ inClient: boolean;
+ inServer: boolean;
+}
+
+/**
+ * Fetches mod links and builds a modlist.
+ */
+export async function createModList(tag = ""): Promise<ModFileInfo[]> {
+ log("Fetching mod & file infos...");
+
+ let manifest: ModpackManifest = modpackManifest;
+
+ if (tag) {
+ checkGitTag(tag);
+ manifest = JSON.parse(getFileAtRevision("manifest.json", tag));
+ }
+
+ manifest.files.sort((a, b) => a.projectID - b.projectID);
+
+ // Fetch mod/addon & file infos, discard result. Further calls will hit cache.
+ await fetchProjectsBulk(manifest.files.map((mod) => mod.projectID));
+ await fetchFilesBulk(
+ // Use this instead of referencing the original array, as .sort sorts the input array
+ [...manifest.files]
+ .sort((a, b) => a.fileID - b.fileID)
+ .map((mod) => {
+ return { projectID: mod.projectID, fileID: mod.fileID };
+ }),
+ );
+
+ log("Fetched Infos. Creating modlist...");
+
+ // Create modlist
+ const output: ModFileInfo[] = [];
+
+ for (const file of manifest.files) {
+ const itemModInfo = await fetchProject(file.projectID);
+ const itemFileInfo = await fetchFileInfo(file.projectID, file.fileID);
+
+ let itemInClient = false;
+ let itemInServer = false;
+ if (file.sides) {
+ if (file.sides.includes("client")) itemInClient = true;
+
+ if (file.sides.includes("server")) itemInServer = true;
+ } else {
+ itemInClient = true;
+ itemInServer = true;
+ }
+
+ output.push({
+ modInfo: itemModInfo,
+ fileInfo: itemFileInfo,
+ inClient: itemInClient,
+ inServer: itemInServer,
+ });
+ }
+ return output;
+}
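A hedged usage sketch for the list returned above; it only relies on the fields defined in this file (`inClient`/`inServer`), since the exact shape of `modInfo`/`fileInfo` comes from the CurseForge API types:

```ts
import { createModList, ModFileInfo } from "./createModList"; // assumed sibling import path

async function summarizeSides(tag = ""): Promise<void> {
	const mods: ModFileInfo[] = await createModList(tag);
	const clientOnly = mods.filter((m) => m.inClient && !m.inServer).length;
	const serverOnly = mods.filter((m) => m.inServer && !m.inClient).length;
	console.log(`${mods.length} mods total, ${clientOnly} client-only, ${serverOnly} server-only.`);
}

void summarizeSides();
```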
diff --git a/tools/tasks/misc/downloadMods.ts b/tools/tasks/misc/downloadMods.ts
new file mode 100644
index 0000000..28ff223
--- /dev/null
+++ b/tools/tasks/misc/downloadMods.ts
@@ -0,0 +1,50 @@
+import { modDestDirectory, modpackManifest } from "../../globals";
+import { fetchMods } from "../../util/curseForgeAPI";
+import upath from "upath";
+import fs from "fs";
+import log from "fancy-log";
+import del from "del";
+import gulp from "gulp";
+
+async function modCleanUp() {
+ return del(upath.join(modDestDirectory, "*"), { force: true });
+}
+
+/**
+ * Checks and creates all necessary directories so we can download the mods safely.
+ */
+async function createModDirs() {
+ // This also makes the base dir, as it is recursive.
+ if (!fs.existsSync(upath.join(modDestDirectory, "client"))) {
+ await fs.promises.mkdir(upath.join(modDestDirectory, "client"), { recursive: true });
+ }
+
+ if (!fs.existsSync(upath.join(modDestDirectory, "server"))) {
+ await fs.promises.mkdir(upath.join(modDestDirectory, "server"), { recursive: true });
+ }
+}
+
+/**
+ * Downloads mods according to manifest.json and checks hashes.
+ */
+export async function downloadMods(): Promise<void> {
+ log("Fetching Shared Mods...");
+ await fetchMods(
+ modpackManifest.files.filter((f) => !f.sides),
+ modDestDirectory,
+ );
+
+ log("Fetching Client Mods...");
+ await fetchMods(
+ modpackManifest.files.filter((f) => f.sides && f.sides.includes("client")),
+ upath.join(modDestDirectory, "client"),
+ );
+
+ log("Fetching Server Mods...");
+ await fetchMods(
+ modpackManifest.files.filter((f) => f.sides && f.sides.includes("server")),
+ upath.join(modDestDirectory, "server"),
+ );
+}
+
+export default gulp.series(modCleanUp, createModDirs, downloadMods);
diff --git a/tools/tasks/misc/releaseCommit.ts b/tools/tasks/misc/releaseCommit.ts
new file mode 100644
index 0000000..581350a
--- /dev/null
+++ b/tools/tasks/misc/releaseCommit.ts
@@ -0,0 +1,207 @@
+import fs from "fs";
+import upath from "upath";
+import { configFolder, configOverridesFolder, rootDirectory, templatesFolder } from "../../globals";
+import mustache from "mustache";
+import gulp from "gulp";
+import dedent from "dedent-js";
+import { checkEnvironmentalVariables } from "../../util/util";
+
+// This updates all the files, for a release.
+
+// IF DEBUGGING:
+// Change debug value to true
+// Change version to a string
+const debug = false;
+const version: string = process.env.VERSION;
+
+// If it is not a release, and thus no changes to versions need to be made.
+// This occurs when the files are to be updated from the templates outside of a release.
+// Optional variable to set.
+let notRelease = false;
+
+/**
+ * Checks if env variables are set, creates versions.txt if it does not exist, and checks whether the new version already exists in versions.txt.
+ */
+export async function check(): Promise<void> {
+ if (!debug) {
+ checkEnvironmentalVariables(["VERSION"]);
+ }
+ const versionsFilePath: string = upath.join(templatesFolder, "versions.txt");
+
+ if (notRelease) {
+ console.log("Detected that this is not a release commit.");
+ console.log("Version info will not change, but the files will be updated from the template.");
+ await checkNotRelease(versionsFilePath);
+ } else {
+ console.log("Detected that this is a release commit.");
+ await checkRelease(versionsFilePath);
+ }
+}
+
+/**
+ * Sets this workflow as a non-release.
+ */
+export async function setNotRelease(): Promise<void> {
+ notRelease = true;
+}
+
+// Checks for non-release commits
+async function checkNotRelease(versionsFilePath: string) {
+ // Check if versions.txt exists
+ if (!fs.existsSync(versionsFilePath)) {
+ console.error(
+ `versions.txt does not exist. Creating empty file, and adding ${version} to it. This may be an error.`,
+ );
+
+ // Create Versions.txt, with version
+ await fs.promises.writeFile(versionsFilePath, ` - ${version}`);
+ } else {
+ // Check for duplicate entries
+ let versionList = await fs.promises.readFile(versionsFilePath, "utf8");
+
+ // No Duplicate Key
+ if (!versionList.includes(version)) {
+ console.error(`Version is not in versions.txt. Adding ${version} to versions.txt. This may be an error.`);
+
+ versionList = ` - ${version}\n${versionList}`;
+ await fs.promises.writeFile(versionsFilePath, versionList);
+ }
+ }
+}
+
+// Checks for release Commits
+async function checkRelease(versionsFilePath: string) {
+ // Check if versions.txt exists
+ if (!fs.existsSync(versionsFilePath)) {
+ console.error("Version.txt does not exist. Creating empty file. This may be an error.");
+
+ // Create Versions.txt
+ fs.closeSync(fs.openSync(versionsFilePath, "w"));
+ } else {
+ // Check for duplicate entries
+ const versionList = await fs.promises.readFile(versionsFilePath, "utf8");
+
+ // Duplicate Key
+ if (versionList.includes(`${version}\n`)) {
+ throw new Error("Version already exists in version.txt. Exiting...");
+ }
+ }
+}
+
+/**
+ * @param readPath The filepath to read from. (Template)
+ * @param writePaths The filepaths to write to.
+ * @param replacementObject A record (string to unknown) mapping template keys to their replacements.
+ *
+ * A warning not to edit the file will also be added to the start of the file.
+ */
+async function modifyFile(readPath: string, writePaths: string[], replacementObject: Record<string, unknown>) {
+ // Read the file content
+ const data: string = await fs.promises.readFile(readPath, "utf8");
+
+ // Moustache Render
+ let modifiedData: string = mustache.render(data, replacementObject);
+
+ // Add warning to not edit file
+ modifiedData = dedent`# DO NOT EDIT THIS FILE! EDIT THE TEMPLATES INSTEAD!
+ # See https://github.com/Nomi-CEu/Nomi-CEu/wiki/Part-1:-Contributing-Information#section-5-template-information!
+ ${modifiedData}`;
+
+ // Write the modified content back to the file
+ for (const filename of writePaths) {
+ await fs.promises.writeFile(filename, modifiedData, "utf8");
+ }
+}
+
+export async function updateIssueTemplates(): Promise<void> {
+ // Filenames
+ const fileNames: string[] = ["001-bug-report.yml", "002-feature-request.yml"];
+
+ const versionsFilePath: string = upath.join(templatesFolder, "versions.txt");
+
+ let versionList: string = await fs.promises.readFile(versionsFilePath, "utf8");
+
+ if (!notRelease) {
+ // Add new version to list, with indent
+ versionList = ` - ${version}\n${versionList}`;
+ }
+
+ // Replacement Object
+ const replacementObject: Record<string, unknown> = {
+ versions: versionList,
+ };
+
+ // Write updated Version List
+ await fs.promises.writeFile(versionsFilePath, versionList);
+
+ const issueTemplatesFolder: string = upath.join(rootDirectory, ".github", "ISSUE_TEMPLATE");
+
+ // Write to issue templates
+ for (const fileName of fileNames) {
+ const readPath = upath.join(templatesFolder, fileName);
+ const writePath = upath.join(issueTemplatesFolder, fileName);
+ await modifyFile(readPath, [writePath], replacementObject);
+ }
+}
+
+export async function updateRandomPatchesConfig(): Promise<void> {
+ // Filename & paths
+ const fileName = "randompatches.cfg";
+ const readPath: string = upath.join(templatesFolder, fileName);
+ const writePathsNormal: string[] = [
+ upath.join(rootDirectory, configFolder, fileName),
+ upath.join(rootDirectory, configOverridesFolder, "normal", fileName),
+ ];
+
+ // Replacement object
+ const replacementObject: Record<string, unknown> = {
+ version: version,
+ mode: "Normal",
+ };
+
+ // Modify Normal File
+ await modifyFile(readPath, writePathsNormal, replacementObject);
+
+ // Change values for Expert Config
+ replacementObject["mode"] = "Expert";
+ const writePathExpert = upath.join(rootDirectory, configOverridesFolder, "expert", fileName);
+
+ // Modify Expert File
+ await modifyFile(readPath, [writePathExpert], replacementObject);
+}
+
+export async function updateServerProperties(): Promise<void> {
+ // File name of the output files
+ const fileName = "server.properties";
+
+ // File name of the Normal Template File
+ const fileNameNormal = "server_normal.properties";
+
+ // File name of the Expert Template File
+ const fileNameExpert = "server_expert.properties";
+
+ // Replacement Object
+ const replacementObject: Record<string, unknown> = {
+ version: version,
+ };
+
+ // Read and Write paths for normal
+ const readPathNormal: string = upath.join(templatesFolder, fileNameNormal);
+ const writePathsNormal: string[] = [
+ upath.join(rootDirectory, "serverfiles", fileName),
+ upath.join(rootDirectory, configOverridesFolder, "normal", fileName),
+ ];
+
+ // Modify Normal File
+ await modifyFile(readPathNormal, writePathsNormal, replacementObject);
+
+ // Read and Write paths for expert
+ const readPathExpert: string = upath.join(templatesFolder, fileNameExpert);
+ const writePathExpert: string = upath.join(rootDirectory, configOverridesFolder, "expert", fileName);
+
+ // Modify Expert File
+ await modifyFile(readPathExpert, [writePathExpert], replacementObject);
+}
+
+export const updateAll = gulp.series(updateIssueTemplates, updateRandomPatchesConfig, updateServerProperties);
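For reference, the substitution `modifyFile` performs is plain mustache rendering; a minimal sketch with a hypothetical config line in the same style as the templates further down:

```ts
import mustache from "mustache";

// Same replacement-object shape as updateRandomPatchesConfig above; the values are hypothetical.
const replacementObject: Record<string, unknown> = { version: "1.6.1", mode: "Normal" };

const templateLine = "S:title=Nomi CEu (v{{version}}, {{mode}} Mode)";
console.log(mustache.render(templateLine, replacementObject));
// => S:title=Nomi CEu (v1.6.1, Normal Mode)
```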
diff --git a/tools/tasks/misc/zip.ts b/tools/tasks/misc/zip.ts
index 3d95d7c..ec50e64 100644
--- a/tools/tasks/misc/zip.ts
+++ b/tools/tasks/misc/zip.ts
@@ -43,4 +43,4 @@ export const zipClient = makeZipper(clientDestDirectory, "Client");
export const zipLang = makeZipper(langDestDirectory, "Lang");
export const zipMMC = makeZipper(mmcDestDirectory, "MMC");
-export const zipAll = gulp.series(zipServer, zipClient, zipLang);
+export const zipAll = gulp.series(zipServer, zipClient, zipLang, zipMMC);
diff --git a/tools/tasks/mmc/index.ts b/tools/tasks/mmc/index.ts
index 961dafb..a8812a7 100644
--- a/tools/tasks/mmc/index.ts
+++ b/tools/tasks/mmc/index.ts
@@ -1,8 +1,10 @@
-import { clientDestDirectory, mmcDestDirectory, modpackManifest } from "../../globals";
+import { clientDestDirectory, mmcDestDirectory, modDestDirectory, modpackManifest } from "../../globals";
import { fetchMods } from "../../util/curseForgeAPI";
import * as upath from "upath";
import { series, src, symlink } from "gulp";
import * as fs from "fs";
+import gulp from "gulp";
+import buildConfig from "../../buildConfig";
async function mmcCleanUp(cb) {
if (fs.existsSync(mmcDestDirectory)) {
@@ -13,7 +15,7 @@ async function mmcCleanUp(cb) {
}
/**
- * Checks and creates all necessary directories so we can build the client safely.
+ * Checks and creates all necessary directories so we can build the MMC zip safely.
*/
async function createMMCDirs(cb) {
if (!fs.existsSync(mmcDestDirectory)) {
@@ -23,6 +25,28 @@ async function createMMCDirs(cb) {
cb();
}
+/**
+ * Copies the update notes file.
+ */
+function copyMMCUpdateNotes() {
+ return gulp.src("../UPDATENOTES.md", { allowEmpty: true }).pipe(gulp.dest(mmcDestDirectory));
+}
+
+/**
+ * Copies the license file.
+ */
+function copyMMCLicense() {
+ return gulp.src("../LICENSE.md").pipe(gulp.dest(mmcDestDirectory));
+}
+
+/**
+ * Copies the changelog file.
+ */
+function copyMMCChangelog() {
+ return gulp.src(upath.join(buildConfig.buildDestinationDirectory, "CHANGELOG.md")).pipe(gulp.dest(mmcDestDirectory));
+}
+
/**
* Copies modpack overrides.
*/
@@ -34,7 +58,7 @@ function copyOverrides() {
}
/**
- * Copies modpack overrides.
+ * Renames copied overrides to '.minecraft'.
*/
async function renameOverrides() {
await fs.promises.rename(upath.join(mmcDestDirectory, "overrides"), upath.join(mmcDestDirectory, ".minecraft"));
@@ -42,13 +66,13 @@ async function renameOverrides() {
}
/**
- * Downloads client mods according to manifest.json and checks hashes.
+ * Copies client & shared mods.
*/
-async function fetchModJars() {
- return fetchMods(
- modpackManifest.files.filter((f) => !f.sides || f.sides.includes("client")),
- upath.join(mmcDestDirectory, ".minecraft"),
- );
+async function copyMMCModJars() {
+ return src([upath.join(modDestDirectory, "*"), upath.join(modDestDirectory, "client", "*")], {
+ nodir: true,
+ resolveSymlinks: false,
+ }).pipe(symlink(upath.join(mmcDestDirectory, ".minecraft", "mods")));
}
async function createMMCConfig() {
@@ -114,5 +138,5 @@ export default series(
renameOverrides,
createMMCConfig,
createMMCManifest,
- fetchModJars,
+ copyMMCModJars,
);
diff --git a/tools/tasks/server/index.ts b/tools/tasks/server/index.ts
index 0f97541..7301185 100644
--- a/tools/tasks/server/index.ts
+++ b/tools/tasks/server/index.ts
@@ -10,7 +10,13 @@ import Bluebird from "bluebird";
import { ForgeProfile } from "../../types/forgeProfile";
import { FileDef } from "../../types/fileDef";
import { downloadOrRetrieveFileDef, getVersionManifest, libraryToPath, relative } from "../../util/util";
-import { modpackManifest, serverDestDirectory, sharedDestDirectory } from "../../globals";
+import {
+ mmcDestDirectory,
+ modDestDirectory,
+ modpackManifest,
+ serverDestDirectory,
+ sharedDestDirectory
+} from "../../globals";
import del from "del";
import { VersionManifest } from "../../types/versionManifest";
import { fetchMods } from "../../util/curseForgeAPI";
@@ -25,7 +31,7 @@ async function serverCleanUp() {
}
/**
- * Checks and creates all necessary directories so we can build the client safely.
+ * Checks and creates all necessary directories so we can build the server safely.
*/
async function createServerDirs() {
if (!fs.existsSync(serverDestDirectory)) {
@@ -175,13 +181,13 @@ async function downloadMinecraftServer() {
}
/**
- * Downloads mods according to manifest.json and checks hashes.
+ * Copies server & shared mods.
*/
-async function downloadMods() {
- return fetchMods(
- modpackManifest.files.filter((f) => !f.sides || f.sides.includes("server")),
- serverDestDirectory,
- );
+async function copyServerMods() {
+ return src([upath.join(modDestDirectory, "*"), upath.join(modDestDirectory, "server", "*")], {
+ nodir: true,
+ resolveSymlinks: false,
+ }).pipe(symlink(upath.join(serverDestDirectory, "mods")));
}
/**
@@ -218,7 +224,7 @@ function copyServerUpdateNotes() {
* Copies the changelog file.
*/
function copyServerChangelog() {
- return src(upath.join(sharedDestDirectory, "CHANGELOG.md")).pipe(dest(serverDestDirectory));
+ return src(upath.join(buildConfig.buildDestinationDirectory, "CHANGELOG.md")).pipe(dest(serverDestDirectory));
}
/**
@@ -259,7 +265,7 @@ export default gulp.series([
createServerDirs,
downloadForge,
downloadMinecraftServer,
- downloadMods,
+ copyServerMods,
copyServerOverrides,
copyServerfiles,
copyServerLicense,
diff --git a/tools/tasks/shared/index.ts b/tools/tasks/shared/index.ts
index e6cac32..4148055 100644
--- a/tools/tasks/shared/index.ts
+++ b/tools/tasks/shared/index.ts
@@ -2,18 +2,11 @@ import fs from "fs";
import gulp from "gulp";
import upath from "upath";
import buildConfig from "../../buildConfig";
-import { modpackManifest, overridesFolder, sharedDestDirectory, tempDirectory } from "../../globals";
+import { modDestDirectory, modpackManifest, overridesFolder, sharedDestDirectory, tempDirectory } from "../../globals";
import del from "del";
import { FileDef } from "../../types/fileDef";
import Bluebird from "bluebird";
-import {
- compareAndExpandManifestDependencies,
- downloadOrRetrieveFileDef,
- getChangeLog,
- getFileAtRevision,
- getLastGitTag,
- relative,
-} from "../../util/util";
+import { downloadFileDef, downloadOrRetrieveFileDef, isEnvVariableSet, relative } from "../../util/util";
async function sharedCleanUp() {
await del(upath.join(sharedDestDirectory, "*"), { force: true });
@@ -51,7 +44,7 @@ async function copyOverrides() {
async function fetchExternalDependencies() {
const dependencies = modpackManifest.externalDependencies;
if (dependencies) {
- const destDirectory = upath.join(sharedDestDirectory, overridesFolder, "mods");
+ const destDirectory = upath.join(modDestDirectory, "mods");
if (!fs.existsSync(destDirectory)) {
await fs.promises.mkdir(destDirectory, { recursive: true });
@@ -88,90 +81,62 @@ async function fetchExternalDependencies() {
}
/**
- * Generates a changelog based on environmental variables.
+ * Either fetches the Changelog File, or makes one.
*/
-async function makeChangelog() {
- let since = getLastGitTag(),
- to: string;
-
- // If this is a tagged build, fetch the tag before last.
- if (process.env.GITHUB_TAG) {
- since = getLastGitTag(process.env.GITHUB_TAG);
- to = process.env.GITHUB_TAG;
+async function fetchOrMakeChangelog() {
+ if (isEnvVariableSet("CHANGELOG_URL") && isEnvVariableSet("CHANGELOG_CF_URL")) {
+ console.log("Using Changelog Files from URL.");
+ await downloadChangelogs(process.env.CHANGELOG_URL, process.env.CHANGELOG_CF_URL);
+ return;
}
- // Back-compat in case this crap is still around.
- else if (since == "latest-dev-preview") {
- since = getLastGitTag(since);
- }
-
- const old = JSON.parse(getFileAtRevision("manifest.json", since)) as ModpackManifest;
- const current = modpackManifest;
- const commitList = getChangeLog(since, to, [upath.join("..", modpackManifest.overrides), "manifest.json"]);
-
- const builder: string[] = [];
- // If the UPDATENOTES.md file is present, prepend it verbatim.
- if (fs.existsSync("../UPDATENOTES.md")) {
- builder.push((await fs.promises.readFile("../UPDATENOTES.md")).toString());
- }
-
- // Push the title.
- builder.push(`# Changes since ${since}`);
-
- const comparisonResult = await compareAndExpandManifestDependencies(old, current);
-
- // Push mod update blocks.
- [
- {
- name: "## New mods",
- list: comparisonResult.added,
- },
- {
- name: "## Updated mods",
- list: comparisonResult.modified,
- },
- {
- name: "## Removed mods",
- list: comparisonResult.removed,
- },
- ].forEach((block) => {
- if (block.list.length == 0) {
- return;
- }
-
- builder.push("");
- builder.push(block.name);
- builder.push(
- ...block.list
- // Yeet invalid project names.
- .filter((project) => !/project-\d*/.test(project))
- .sort()
- .map((name) => `* ${name}`),
+ if (isEnvVariableSet("CHANGELOG_BRANCH")) {
+ console.log("Using Changelog Files from Branch.");
+ const url = "https://raw.githubusercontent.com/Nomi-CEu/Nomi-CEu/{{ branch }}/{{ filename }}";
+ await downloadChangelogs(
+ mustache.render(url, { branch: process.env.CHANGELOG_BRANCH, filename: "CHANGELOG.md" }),
+ mustache.render(url, { branch: process.env.CHANGELOG_BRANCH, filename: "CHANGELOG_CF.md" }),
);
- });
-
- // Push the changelog itself.
- if (commitList) {
- builder.push("");
- builder.push("## Commits");
- builder.push(commitList);
+ return;
}
-
- // Check if the builder only contains the title.
- if (builder.length == 1) {
- builder.push("");
- builder.push("There haven't been any changes.");
- }
-
- return fs.promises.writeFile(upath.join(sharedDestDirectory, "CHANGELOG.md"), builder.join("\n"));
+ console.log("Creating Changelog Files.");
+ await createBuildChangelog();
}
-import transforms from "./transforms";
-import { ModpackManifest } from "../../types/modpackManifest";
+async function downloadChangelogs(changelogURL: string, changelogCFURL: string) {
+ const changelog = await downloadFileDef({ url: changelogURL });
+ const changelogCF = await downloadFileDef({ url: changelogCFURL });
+
+ await writeToChangelog(changelog, "CHANGELOG.md", changelogURL);
+ await writeToChangelog(changelogCF, "CHANGELOG_CF.md", changelogCFURL);
+}
+
+async function writeToChangelog(buffer: Buffer, changelogFile: string, url: string) {
+ let handle: fs.promises.FileHandle;
+ try {
+ handle = await fs.promises.open(upath.join(buildConfig.buildDestinationDirectory, changelogFile), "w");
+
+ await handle.write(buffer);
+ await handle.close();
+ } catch (err) {
+ if (handle && (await handle.stat()).isFile()) {
+ log(`Couldn't download changelog from URL ${url}, cleaning up...`);
+
+ await handle.close();
+ }
+ throw err;
+ }
+}
+
+import transformVersion from "./transformVersion";
+import { createBuildChangelog } from "../changelog/createChangelog";
+import mustache from "mustache";
+import log from "fancy-log";
+
export default gulp.series(
sharedCleanUp,
createSharedDirs,
copyOverrides,
- makeChangelog,
+ fetchOrMakeChangelog,
fetchExternalDependencies,
- ...transforms,
+ transformVersion,
);
diff --git a/tools/tasks/shared/transformVersion.ts b/tools/tasks/shared/transformVersion.ts
new file mode 100644
index 0000000..4b03517
--- /dev/null
+++ b/tools/tasks/shared/transformVersion.ts
@@ -0,0 +1,25 @@
+import { modpackManifest } from "../../globals";
+
+/**
+ * Transform the version field of manifest.json.
+ */
+export default async function transformManifestVersion(): Promise<void> {
+ // We're building a tag.
+ if (process.env.GITHUB_TAG) {
+ modpackManifest.version = process.env.GITHUB_TAG.replace(/^v/, "");
+ }
+ // If SHA is provided and the build isn't tagged, append both the branch and short SHA.
+ else if (process.env.GITHUB_SHA && process.env.GITHUB_REF && process.env.GITHUB_REF.startsWith("refs/heads/")) {
+ const shortCommit = process.env.GITHUB_SHA.substring(0, 7);
+ const branch = /refs\/heads\/(.+)/.exec(process.env.GITHUB_REF)?.[1];
+ if (!branch) {
+ throw new Error(`Invalid git ref: ${process.env.GITHUB_REF}`);
+ }
+
+ modpackManifest.version = `${branch}-${shortCommit}`;
+ } else {
+ modpackManifest.version = "manual-build";
+ }
+
+ modpackManifest.name = "";
+}
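With hypothetical CI values, the untagged-branch fallback above resolves like this:

```ts
// Hypothetical CI environment values.
const GITHUB_REF = "refs/heads/main";
const GITHUB_SHA = "ccf7476aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";

const branch = /refs\/heads\/(.+)/.exec(GITHUB_REF)?.[1]; // "main"
const shortCommit = GITHUB_SHA.substring(0, 7); // "ccf7476"
console.log(`${branch}-${shortCommit}`); // main-ccf7476
```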
diff --git a/tools/tasks/shared/transforms/index.ts b/tools/tasks/shared/transforms/index.ts
deleted file mode 100644
index b22cb50..0000000
--- a/tools/tasks/shared/transforms/index.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-import scannable from "./scannable";
-import version from "./version";
-
-export default [scannable, version];
diff --git a/tools/tasks/shared/transforms/scannable.ts b/tools/tasks/shared/transforms/scannable.ts
deleted file mode 100644
index 00cf5c1..0000000
--- a/tools/tasks/shared/transforms/scannable.ts
+++ /dev/null
@@ -1,28 +0,0 @@
-import { overridesFolder, sharedDestDirectory } from "../../../globals";
-
-import upath from "upath";
-import fs from "fs";
-
-const scannableConfigFile = "config/scannable.cfg";
-
-/**
- * Transform the scannable config.
- * Trim excess newlines and remove comments.
- */
-export default async function transformScannable(): Promise<void> {
- const configPath = upath.join(sharedDestDirectory, overridesFolder, scannableConfigFile);
-
- const contents = (await fs.promises.readFile(configPath))
- .toString()
- // Match arrays (S:array < ... >)
- .replace(/([ \t]+)(S:\w+) <([^>]+)>/g, (_, g0, g1, g2) => {
- const body = g2
- .replace(/#[^\r\n]+/gm, "") // Comments
- .replace(/^\s+$/gm, "") // Trailing whitespaces
- .replace(/[\r\n]{2,}/gm, "\n"); // Extra newlines
-
- return g0 + g1 + " <" + body + (body ? "" : "\n") + g0 + " >";
- });
-
- return fs.promises.writeFile(configPath, contents);
-}
diff --git a/tools/tasks/shared/transforms/version.ts b/tools/tasks/shared/transforms/version.ts
deleted file mode 100644
index 25d9d91..0000000
--- a/tools/tasks/shared/transforms/version.ts
+++ /dev/null
@@ -1,64 +0,0 @@
-import fs from "fs";
-import upath from "path";
-import mustache from "mustache";
-import { modpackManifest, overridesFolder, sharedDestDirectory } from "../../../globals";
-
-const randomPatchesConfigFile = "config/randompatches.cfg";
-
-/**
- * Transform the version field of manifest.json.
- */
-export default async function transformManifestVersion(): Promise<void> {
- let versionTitle;
-
- // We're building a tag.
- if (process.env.GITHUB_TAG) {
- const flavorTitle = process.env.BUILD_FLAVOR_TITLE;
- const tag = process.env.GITHUB_TAG.replace(/^v/, "");
-
- versionTitle = [modpackManifest.name, tag, flavorTitle].filter(Boolean).join(" - ");
-
- modpackManifest.version = tag;
- }
- // If we're building a release candidate, transform it appropriately.
- else if (process.env.RC_VERSION) {
- const rcVer = process.env.RC_VERSION;
- const flavorTitle = process.env.BUILD_FLAVOR_TITLE;
- const tag = rcVer.replace(/^v/, "");
-
- versionTitle = [modpackManifest.name, [tag, "Release Candidate"].join(" "), flavorTitle]
- .filter(Boolean)
- .join(" - ");
-
- modpackManifest.version = rcVer;
- // modpackManifest.version = [rcVer, "rc"].join("-"); // No need for rc at end of name
- }
- // If SHA is provided and the build isn't tagged, append both the branch and short SHA.
- else if (process.env.GITHUB_SHA && process.env.GITHUB_REF && process.env.GITHUB_REF.startsWith("refs/heads/")) {
- const shortCommit = process.env.GITHUB_SHA.substr(0, 7);
- const branch = /refs\/heads\/(.+)/.exec(process.env.GITHUB_REF)?.[1];
- if (!branch) {
- throw new Error(`Invalid git ref: ${process.env.GITHUB_REF}`);
- }
-
- versionTitle = `${modpackManifest.name} (${branch} branch, ${shortCommit})`;
-
- modpackManifest.version = `${branch}-${shortCommit}`;
- } else {
- versionTitle = `${modpackManifest.name} (manual build)`;
-
- modpackManifest.version = "manual-build";
- }
-
- modpackManifest.name = versionTitle;
-
- const randomPatchesConfigFilePath = upath.join(sharedDestDirectory, overridesFolder, randomPatchesConfigFile);
- const randomPatchesFile = (await fs.promises.readFile(randomPatchesConfigFilePath)).toString();
-
- return fs.promises.writeFile(
- randomPatchesConfigFilePath,
- mustache.render(randomPatchesFile, {
- title: versionTitle,
- }),
- );
-}
diff --git a/tools/templates/001-bug-report.yml b/tools/templates/001-bug-report.yml
new file mode 100644
index 0000000..c1b38fa
--- /dev/null
+++ b/tools/templates/001-bug-report.yml
@@ -0,0 +1,103 @@
+name: Bug Report
+description: "Crashes or unintended behaviors arising from Nomi CEu's mods, configurations, or custom scripts."
+labels: bug
+body:
+ - type: markdown
+ attributes:
+ value: "Note: If you need general tech support for things like server configuration, or general game support (how to I make this?), Discord is a better venue. Please open an issue only if there is a clear bug with the pack or if you have been asked to by one of the Discord staff."
+ - type: dropdown
+ id: version
+ attributes:
+ label: Nomi CEu Version
+ description: The version of Nomi CEu you were using when this bug was encountered. If you do not know what it is, check the title of your instance window. If you do not see your version here, please update to the newest version of the pack, which currently is 1.6, or the newest alpha/beta, which currently is 1.6.1-beta-2.
+ options:
+{{versions}}
+
+ validations:
+ required: true
+ - type: input
+ id: launcher
+ attributes:
+ label: Launcher
+ description: "What launcher you were using when you experienced this issue. If you were using the CurseForge Launcher, please try to see if the issue occurs on a different launcher."
+ placeholder: "Example: Prism Launcher"
+ validations:
+ required: true
+ - type: textarea
+ id: changed
+ attributes:
+ label: Configurations or Mods Changed
+ description: Any changed configs, and removed or added mods. Make sure to also include the version of any mods you have added. If you have not changed any configurations, and not removed or added any mods, please leave this field blank.
+ placeholder: |
+ Example:
+ Changed Loliasm Config: B:onDemandAnimatedTextures to false.
+ Added AE2 Fluid Crafting Rework: Version 2.4.18-r.
+ validations:
+ required: false
+ - type: input
+ id: environment
+ attributes:
+ label: Environment
+ description: "How you were playing on the world. Typical answers include: Singleplayer, Open to LAN, Forge Server, Sponge Server, or Mohist Server."
+ placeholder: "Example: Singleplayer"
+ validations:
+ required: true
+ - type: dropdown
+ id: mode
+ attributes:
+ label: Mode
+ description: "What pack mode were you using when you came across this error?"
+ options:
+ - "Normal Mode"
+ - "Expert Mode"
+ - "Both Modes"
+ - "N/A"
+ validations:
+ required: true
+ - type: textarea
+ id: problem
+ attributes:
+ label: What Happened
+ description: What happened that you believe is a bug. Attach screenshots here as necessary.
+ placeholder: "Example: Produced one X but Y was not consumed."
+ validations:
+ required: true
+ - type: textarea
+ id: expected
+ attributes:
+ label: Expected Behavior
+ description: What you expected to happen. Attach screenshots here as necessary.
+ placeholder: "Example: Expected to produce X by consuming Y."
+ validations:
+ required: true
+ - type: textarea
+ id: reproduction
+ attributes:
+ label: Reproduction Steps
+ description: "How do you trigger this bug? Please walk us through it step by step."
+ placeholder: |
+ 1.
+ 2.
+ 3.
+ ...
+ validations:
+ required: true
+ - type: textarea
+ id: logs
+ attributes:
+ label: Logs
+ description: "If your client crashed as a result of this bug, please upload the generated crash log. This is found in your launcher's Nomi CEu instance, in a folder called `crash-reports`. If there was no crash, but instead an error screen, please upload your `latest.log`, found in a folder called `logs`. If there was a script error, please upload your `crafttweaker.log`, found in your base instance folder, instead. Otherwise, please leave this field blank."
+ placeholder: "You can upload into a external site like paste-bin, and send the link, or just drag the file into this text field."
+ validations:
+ required: false
+ - type: textarea
+ id: additional-info
+ attributes:
+ label: Additional Information
+ description: Any additional information you wish to provide. Please add anything which did not fit into the other sections here.
+ placeholder: "Example: This is likely caused by X because..."
+ validations:
+ required: false
+ - type: markdown
+ attributes:
+ value: Thank you for taking the time to fill out this bug report.
diff --git a/tools/templates/002-feature-request.yml b/tools/templates/002-feature-request.yml
new file mode 100644
index 0000000..4455531
--- /dev/null
+++ b/tools/templates/002-feature-request.yml
@@ -0,0 +1,62 @@
+name: Feature Request
+description: Suggest an idea, including mod additions or addon scripts, for Nomi CEu.
+labels: enhancement
+body:
+ - type: markdown
+ attributes:
+ value: "Note: balancing ideas should use this template, but only for changes. However, if its a problem, not a change, like `X material always runs out`, please use the `Bug Report` Template."
+ - type: dropdown
+ id: version
+ attributes:
+ label: Nomi CEu Version
+ description: The version of Nomi CEu you are using as the basis for this feature request. If you do not know what it is, check the title of your instance window. If you do not see your version here, please update to the newest version of the pack, which currently is 1.6, or the newest alpha/beta, which currently is 1.6.1-beta-2.
+ options:
+{{versions}}
+
+ validations:
+ required: true
+ - type: textarea
+ id: problem
+ attributes:
+ label: Related Problem
+ description: If the feature you wish to change is related to a problem, please describe it. Leave this field blank if it is not related to a problem.
+ placeholder: "Example: I'm always frustrated when..."
+ validations:
+ required: false
+ - type: textarea
+ id: solution
+ attributes:
+ label: Your Solution
+ description: Describe the solution you would like to have happen.
+ placeholder: "Example: If I could..."
+ validations:
+ required: true
+ - type: textarea
+ id: work
+ attributes:
+ label: How Will Your Solution Work
+ description: |
+ Describe how the solution will fix the problem, or add to the pack.
+ If you are suggesting a mod, please describe what the mod does, and how it will add to the pack.
+ placeholder: "Example: If X was done, than Y will get a use, and..."
+ validations:
+ required: true
+ - type: textarea
+ id: alternatives
+ attributes:
+ label: "Alternatives You've Considered"
+ description: "What alternatives have you considered that might help solve the problem? If you aren't sure, or this is not related to a problem, please leave this field blank."
+ placeholder: "Example: Otherwise, we can also do Z..."
+ validations:
+ required: true
+ - type: textarea
+ id: additional-info
+ attributes:
+ label: Additional Information
+ description: "Add any other context or screenshots about the feature request here. If you are suggesting a mod, please also add a link to the mod's CurseForge page."
+ placeholder: "Example: This main motive for this idea is because X..."
+ validations:
+ required: false
+ - type: markdown
+ attributes:
+ value: Thank you for taking the time to fill out this feature request.
diff --git a/tools/templates/randompatches.cfg b/tools/templates/randompatches.cfg
new file mode 100644
index 0000000..5fc834a
--- /dev/null
+++ b/tools/templates/randompatches.cfg
@@ -0,0 +1,272 @@
+# Configuration file
+
+##########################################################################################################
+# boats
+#--------------------------------------------------------------------------------------------------------#
+# Options related to boats.
+##########################################################################################################
+
+boats {
+ # Whether to patch EntityBoat.
+ # Default: true
+ B:patchEntityBoat=true
+
+ # Prevents underwater boat passengers from being ejected after 60 ticks (3 seconds).
+ # Default: false
+ B:preventUnderwaterBoatPassengerEjection=false
+
+ # The buoyancy of boats when they are under flowing water.
+ # The vanilla default is -0.0007.
+ # Min: -1.7976931348623157E308
+ # Max: 1.7976931348623157E308
+ # Default: 0.023
+ D:underwaterBoatBuoyancy=0.023
+}
+
+
+##########################################################################################################
+# client
+#--------------------------------------------------------------------------------------------------------#
+# Options related to client-sided features.
+##########################################################################################################
+
+client {
+ # Adds a separate keybind for dismounting.
+ # Default: true
+ B:dismountKeybind=false
+
+ # Speeds up language switching.
+ # Default: true
+ B:fastLanguageSwitch=true
+
+ # Forces Minecraft to show the title screen after disconnecting rather than the Multiplayer or Realms menu.
+ # Default: false
+ B:forceTitleScreenOnDisconnect=true
+
+ # The framerate limit slider step size.
+ # If this is set to 10.0, vanilla behavior is not changed.
+ # Min: 4.9E-324
+ # Max: 260.0
+ # Default: 1.0
+ D:framerateLimitSliderStepSize=1.0
+
+ # Whether to fix the player model occasionally disappearing when flying with elytra in a straight line in third-person mode.
+ # Default: true
+ B:invisiblePlayerModelFix=true
+
+ # Whether to add the Toggle Narrator keybind to the controls.
+ # Default: true
+ B:narratorKeybind=true
+
+ # Set this to false to disable the Minecraft class patches (the Toggle Narrator keybind and custom window title/icon).
+ # Default: true
+ B:patchMinecraftClass=true
+
+ # Set this to false to force disable the "force title screen on disconnect" patch.
+ # Default: true
+ B:patchTitleScreenOnDisconnect=true
+
+ # Whether to apply the potion glint patch so that the potion glowing effect can be toggled.
+ # Default: true
+ B:patchPotionGlint=false
+
+ # Whether to remove the glowing effect from potions.
+ # Default: false
+ B:removePotionGlint=false
+
+ # Backports the smooth eye level change animations from Minecraft 1.13 and newer.
+ # Default: true
+ B:patchSmoothEyeLevelChanges=true
+
+ # Whether smooth eye level change animations should be enabled.
+ # Default: true
+ B:smoothEyeLevelChanges=true
+
+ # Enables the /rpreloadclient command.
+ # Default: true
+ B:rpreloadclient=true
+
+ ##########################################################################################################
+ # window
+ #--------------------------------------------------------------------------------------------------------#
+ # Options related to the Minecraft window.
+ ##########################################################################################################
+
+ window {
+ # The path to the 16x16 Minecraft window icon.
+ # Leave this and the 32x32 icon blank to use the default icon.
+ # Default:
+ S:icon16=resources/modpack/textures/logo/256x.png
+
+ # The path to the 32x32 Minecraft window icon.
+ # Leave this and the 16x16 icon blank to use the default icon.
+ # Default:
+ S:icon32=resources/modpack/textures/logo/256x.png
+
+ # The path to the 256x256 window icon which is used on Mac OS X.
+ # Leave this, the 16x16 icon and the 32x32 icon blank to use the default icon.
+ # Default:
+ S:icon256=resources/modpack/textures/logo/256x.png
+
+ # The Minecraft window title.
+ # Default: Minecraft 1.12.2
+ S:title=Nomifactory CEu, v{{version}}, {{mode}} Mode
+ }
+
+}
+
+
+##########################################################################################################
+# misc
+#--------------------------------------------------------------------------------------------------------#
+# Options that don't fit into any other categories.
+##########################################################################################################
+
+misc {
+ # Whether to prevent the observer from emitting a signal when it is placed.
+ # This fixes MC-109832.
+ # Default: true
+ B:disableObserverSignalOnPlace=true
+
+ # Whether to fix dismount positions being too high.
+ # This fixes MC-3328 and MC-111726.
+ # Default: true
+ B:dismountPositionFix=true
+
+ # Fixes the End portal and End gateway break particle textures and improves End portal rendering.
+ # Default: true
+ B:endPortalTweaks=true
+
+ # Whether to patch WorldServer to prevent a "TickNextTick list out of synch" IllegalStateException.
+ # Default: true
+ B:fixTickNextTickListOutOfSynch=true
+
+ # Fixes MC-2025.
+ # More information can be found here: https://www.reddit.com/r/Mojira/comments/8pgd4q/final_and_proper_fix_to_mc2025_simple_reliable/
+ # Default: true
+ B:mc2025Fix=true
+
+ # Fixes MC-64836, which causes non-player entities to be allowed to control minecarts using their AI.
+ # Default: true
+ B:minecartAIFix=true
+
+ # Fixes MC-5694, which causes fast mining to sometimes only destroy blocks client-side only.
+ # Default: true
+ B:miningGhostBlocksFix=true
+
+ # Fixes MC-10369 (server-side particle spawning not creating particles for clients) and MC-93826 (breeding hearts only showing once instead of all of the time an animal can breed).
+ # Default: true
+ B:particleFixes=true
+
+ # Set this to false to disable the NetHandlerPlayServer patches (the speed limits and disconnect timeouts).
+ # Default: true
+ B:patchNetHandlerPlayServer=true
+
+ # Whether to patch the packet size limit.
+ # Default: true
+ B:patchPacketSizeLimit=false
+
+ # The packet size limit.
+ # The vanilla limit is 2097152.
+ # Min: 257
+ # Max: 2147483647
+ # Default: 16777216
+ I:packetSizeLimit=16777216
+
+ # Fixes MC-54026, which causes blocks attached to slime blocks in some circumstances to create ghost blocks if a piston pushes the slime block.
+ # Default: true
+ B:pistonGhostBlocksFix=true
+
+ # Fixes MC-11944, which allows players to replace End portals, End gateways and Nether portals using buckets.
+ # Default: true
+ B:portalBucketReplacementFix=true
+
+ # Enables the portal bucket replacement fix for Nether portals.
+ # Default: false
+ B:portalBucketReplacementFixForNetherPortals=false
+
+ # Fixes MC-129057, which prevents ingredients with NBT data from being transferred to the crafting grid when a recipe is clicked in the recipe book.
+ # Default: true
+ B:recipeBookNBTFix=true
+
+ # Enables the /rpreload command.
+ # Default: true
+ B:rpreload=true
+
+ # Fixes player skull stacking.
+ # Default: true
+ B:skullStackingFix=true
+
+ # Whether skull stacking requires the same textures or just the same player profile.
+ # Default: true
+ B:skullStackingRequiresSameTextures=true
+}
+
+
+##########################################################################################################
+# speedLimits
+#--------------------------------------------------------------------------------------------------------#
+# Options related to the movement speed limits.
+##########################################################################################################
+
+speedLimits {
+ # The maximum player speed.
+ # The vanilla default is 100.0.
+ # Min: 1.0
+ # Max: 3.4028234663852886E38
+ # Default: 1000000.0
+ D:maxPlayerSpeed=1000000.0
+
+ # The maximum player elytra speed.
+ # The vanilla default is 300.0.
+ # Min: 1.0
+ # Max: 3.4028234663852886E38
+ # Default: 1000000.0
+ D:maxPlayerElytraSpeed=1000000.0
+
+ # The maximum player vehicle speed.
+ # The vanilla default is 100.0.
+ # Min: 1.0
+ # Max: 1.7976931348623157E308
+ # Default: 1000000.0
+ D:maxPlayerVehicleSpeed=1000000.0
+}
+
+
+##########################################################################################################
+# timeouts
+#--------------------------------------------------------------------------------------------------------#
+# Options related to the disconnect timeouts.
+##########################################################################################################
+
+timeouts {
+ # The interval at which the server sends the KeepAlive packet.
+ # Min: 1
+ # Max: 2147483647
+ # Default: 15
+ I:keepAlivePacketInterval=15
+
+ # The login timeout in ticks.
+ # Min: 1
+ # Max: 2147483647
+ # Default: 1800
+ I:loginTimeout=1800
+
+ # Whether to apply the login timeout.
+ # Default: true
+ B:patchLoginTimeout=true
+
+ # The read timeout in seconds.
+ # This is the time it takes for a player to be disconnected after not responding to a KeepAlive packet.
+ # This value is automatically rounded up to a product of keepAlivePacketInterval.
+ # Min: 1
+ # Max: 2147483647
+ # Default: 90
+ I:readTimeout=90
+
+ # Whether to patch NetworkManager to apply the client-sided read timeout.
+ # Default: true
+ B:patchNetworkManager=true
+}
+
+
diff --git a/tools/templates/server_expert.properties b/tools/templates/server_expert.properties
new file mode 100644
index 0000000..c16a01a
--- /dev/null
+++ b/tools/templates/server_expert.properties
@@ -0,0 +1,32 @@
+#Minecraft server properties
+op-permission-level=4
+level-name=world
+allow-flight=true
+prevent-proxy-connections=false
+server-port=25565
+max-world-size=29999984
+level-seed=
+force-gamemode=false
+server-ip=
+network-compression-threshold=256
+max-build-height=256
+spawn-npcs=true
+white-list=false
+spawn-animals=true
+hardcore=false
+snooper-enabled=true
+resource-pack-sha1=
+online-mode=true
+resource-pack=
+pvp=true
+difficulty=0
+enable-command-block=false
+gamemode=0
+player-idle-timeout=0
+max-players=20
+spawn-monsters=true
+view-distance=10
+generate-structures=true
+motd=Nomi CEu Server, v{{version}}, Expert Mode
+level-type=lostcities
+generator-settings=
diff --git a/tools/templates/server_normal.properties b/tools/templates/server_normal.properties
new file mode 100644
index 0000000..e11f813
--- /dev/null
+++ b/tools/templates/server_normal.properties
@@ -0,0 +1,32 @@
+#Minecraft server properties
+op-permission-level=4
+level-name=world
+allow-flight=true
+prevent-proxy-connections=false
+server-port=25565
+max-world-size=29999984
+level-seed=
+force-gamemode=false
+server-ip=
+network-compression-threshold=256
+max-build-height=256
+spawn-npcs=true
+white-list=false
+spawn-animals=true
+hardcore=false
+snooper-enabled=true
+resource-pack-sha1=
+online-mode=true
+resource-pack=
+pvp=true
+difficulty=1
+enable-command-block=false
+gamemode=0
+player-idle-timeout=0
+max-players=20
+spawn-monsters=true
+view-distance=10
+generate-structures=true
+motd=Nomi CEu Server, v{{version}}, Normal Mode
+level-type=lostcities
+generator-settings=
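The `{{version}}` placeholder in these templates (and `{{mode}}` in `randompatches.cfg` above) is a mustache token. A minimal sketch of how the build tooling might render such a template is shown below; the file path and the substituted values are illustrative assumptions, not taken from this patch.

import fs from "fs";
import mustache from "mustache";

// Illustrative only: render a server properties template with a concrete version.
const template = fs.readFileSync("tools/templates/server_normal.properties", "utf8");
const rendered = mustache.render(template, { version: "1.6.1a" });
// The motd line of the rendered output would read: "motd=Nomi CEu Server, v1.6.1a, Normal Mode"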
diff --git a/tools/templates/versions.txt b/tools/templates/versions.txt
new file mode 100644
index 0000000..9e852e6
--- /dev/null
+++ b/tools/templates/versions.txt
@@ -0,0 +1,13 @@
+ - 1.6.1a
+ - 1.6.1-beta-4
+ - 1.6.1-beta-3a
+ - 1.6.1-beta-2
+ - 1.6.1-alpha-1
+ - 1.6
+ - 1.5.2
+ - 1.5.1
+ - 1.5
+ - 1.4.3
+ - 1.4.2
+ - 1.4.1a
+ - 1.4
\ No newline at end of file
diff --git a/tools/types/changelogTypes.ts b/tools/types/changelogTypes.ts
new file mode 100644
index 0000000..1c96792
--- /dev/null
+++ b/tools/types/changelogTypes.ts
@@ -0,0 +1,242 @@
+export interface Commit {
+ hash: string;
+ date: string;
+ message: string;
+ refs: string;
+ body: string;
+ author_name: string;
+ author_email: string;
+}
+
+/**
+ * A Changelog Category.
+ */
+export interface Category {
+ /**
+ * Commit Key: The key used in the commit's body.
+ *
+ * Optional. If not set, then commits cannot be added to this category during the parse commit task.
+ * Can still be added manually.
+ */
+ commitKey?: string;
+
+ /**
+ * Key Name: The title of this Category in the changelog.
+ *
+ * Can be set to "" to have no title.
+ */
+ categoryName: string;
+
+ /**
+ * Changelog Section: The changelog section map that the key should push to.
+ *
+ * Will be initialized later, if put into categoryKeys.
+ */
+ changelogSection?: Map;
+
+ /**
+ * Default Sub Category. Any commits not placed into other sub-categories will be placed in here.
+ *
+ * Should be a Sub Category added to subCategoryKeys, as otherwise the category would not appear in the changelog.
+ *
+ * This can also be done with a SubCategoryKey placed at the end, with the commitKey set to `""`.
+ * However, this is useful for places where the Default Sub Category should not be at the end.
+ *
+ * This is also needed for certain parsing operations.
+ */
+ defaultSubCategory: SubCategory;
+
+ /**
+ * Sub Category Keys: The list of sub-category keys.
+ *
+ * Commits being added can only be in one sub-category, and the priority will be in the order provided.
+ * Furthermore, the order provided will also be the order the commits appear in.
+ *
+ * The last item in this list should have its `commitKey` set to "", so that any commits not placed into previous sub-categories are still captured; otherwise they would be ignored.
+ * However, this can also be done by setting the defaultSubCategory.
+ */
+ subCategories: SubCategory[];
+}
+
+/**
+ * A Sub Category.
+ */
+export interface SubCategory {
+ /**
+ * Commit Key: The key used in the commit's body.
+ *
+ * This can be set to "" to allow any commit in.
+ *
+ * Optional. If not set, then no commit will be allowed in during the parse commit task.
+ * Can still be added to by DefaultSubCategory, or manually.
+ */
+ commitKey?: string;
+
+ /**
+ * Key Name: The key to be used in the changelogSection. Also will be the title of this subCategory in the changelog.
+ *
+ * Can be set to "" to have no title.
+ */
+ keyName: string;
+}
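For illustration, a Category and its SubCategories defined against these interfaces might look like the sketch below; the identifiers, titles and commit keys are hypothetical examples, not values defined elsewhere in this patch.

// Hypothetical sub-categories; the last one acts as the catch-all default.
const recipeChanges: SubCategory = { commitKey: "[Recipes]", keyName: "Recipe Changes" };
const otherChanges: SubCategory = { commitKey: "", keyName: "Other Changes" };

// Hypothetical category; sub-category order decides both match priority and output order.
const generalChanges: Category = {
	commitKey: "[General]",
	categoryName: "General Changes",
	defaultSubCategory: otherChanges,
	subCategories: [recipeChanges, otherChanges],
};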
+
+/**
+ * A Changelog Message Object.
+ */
+export interface ChangelogMessage {
+ /**
+ * Commit Message
+ */
+ commitMessage: string;
+
+ /**
+ * Commit Object
+ *
+ * Provides the Commit SHA, the Commit Author, and the Commit Date.
+ */
+ commitObject?: Commit;
+
+ /**
+ * Sub Changelog Messages
+ */
+ subChangelogMessages?: ChangelogMessage[];
+
+ /**
+ * Indentation
+ *
+ * Optional. Defaults to "".
+ */
+ indentation?: string;
+
+ /**
+ * If this changelog message is special. This is special formatting for it.
+ */
+ specialFormatting?: SpecialChangelogFormatting;
+}
+
+/**
+ * A special changelog message object, for special formatting.
+ */
+export interface SpecialChangelogFormatting {
+ /**
+ * Formatting Function
+ */
+ formatting: (message: ChangelogMessage, storage?: T) => string;
+
+ /**
+ * Storage
+ */
+ storage: T;
+}
+
+/**
+ * A parsing category, which defines parsing rules and callbacks for different dirs.
+ */
+export interface Parser {
+ /**
+ * Dirs to parse. If not set, will just parse commit list of all changes.
+ */
+ dirs?: string[];
+
+ /**
+ * The callback to perform on each commit, parsing it into the changelog.
+ *
+ * If parsing fails, all further parsing of that commit stops. Commits already in the SHA list never reach this callback; they are skipped automatically.
+ *
+ * Expanded Commits from parseExpand go here too!
+ * parser: This parser object, for convenience of use when calling parseCommitBody.
+ * commit: The commit object.
+ * commitMessage: The message of the commit.
+ * commitBody: The body of the commit. Might be undefined.
+ * return: True if parsing was successful, false if not.
+ */
+ itemCallback: (parser: Parser, commit: Commit, commitMessage: string, commitBody?: string) => Promise<boolean>;
+
+ /**
+ * The callback to perform on any commits, which did not pass parsing. If not set, no callback will be performed, and those commits will be discarded.
+ *
+ * Expanded Commits from parseExpand and parseDetails go here too!
+ * commit: The commit object.
+ * commitMessage: The message of the commit.
+ * commitBody: The body of the commit. Might be undefined.
+ * subMessages: Any sub-messages, coming from parseDetails. Might be undefined.
+ */
+ leftOverCallback?: (
+ commit: Commit,
+ commitMessage: string,
+ commitBody?: string,
+ subMessages?: ChangelogMessage[],
+ ) => void;
+
+ /**
+ * Callback to determine whether to add the sha of that commit into the sha list, forbidding further parsing of it.
+ *
+ * If not set, will just add SHA of every commit included in `dirs`.
+ * commit: The commit object.
+ * parsed: If parsing was successful. This is also true if the commit was skipped.
+ * return: True to add the SHA, false to not.
+ */
+ addSHACallback?: (commit: Commit, parsed: boolean) => boolean;
+
+ /**
+ * Callback to determine whether or not the commit should be added to the commit list.
+ *
+ * commit: The commit to determine.
+ * parsed: If parsing was successful.
+ * return: True to add the commit, false to not.
+ */
+ addCommitListCallback: (commit: Commit, parsed: boolean) => boolean;
+}
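A minimal Parser satisfying this interface could look like the sketch below; `configParsing` and the `config` directory filter are hypothetical, and a real parser would do actual commit-body parsing inside `itemCallback`.

const configParsing: Parser = {
	dirs: ["config"],
	// Treat any non-empty commit message as successfully parsed (illustrative only).
	itemCallback: async (_parser, _commit, commitMessage) => commitMessage.trim().length > 0,
	// Keep only successfully parsed commits in the commit list.
	addCommitListCallback: (_commit, parsed) => parsed,
};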
+
+export interface ModChangeInfo {
+ modName: string;
+ projectID?: number;
+ oldVersion?: string;
+ newVersion?: string;
+}
+
+export interface ExpandedMessage {
+ messageTitle: string;
+ messageBody?: string;
+}
+
+export interface FixUpInfo {
+ sha: string;
+ newTitle: string;
+ newBody?: string;
+}
+
+export type InputReleaseType = "Release" | "Beta Release" | "Alpha Release" | "Cutting Edge Build";
+
+export interface DeployReleaseType {
+ isPreRelease: boolean;
+ cfReleaseType: "release" | "beta" | "alpha";
+}
+
+// Cutting Edge Build is not needed here, as this type is only used for deploying, and not building.
+export const inputToDeployReleaseTypes: Record<InputReleaseType, DeployReleaseType | undefined> = {
+ Release: {
+ isPreRelease: false,
+ cfReleaseType: "release",
+ },
+ "Beta Release": {
+ isPreRelease: true,
+ cfReleaseType: "beta",
+ },
+ "Alpha Release": {
+ isPreRelease: true,
+ cfReleaseType: "alpha",
+ },
+ "Cutting Edge Build": undefined,
+};
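A deploy task consuming this record might branch on the `undefined` entry roughly as follows; the variable names and the error message are illustrative assumptions.

const releaseType: InputReleaseType = "Beta Release";
const deployInfo = inputToDeployReleaseTypes[releaseType];
if (!deployInfo) {
	// "Cutting Edge Build" maps to undefined: nothing is deployed for it.
	throw new Error(`Release type "${releaseType}" is not deployable.`);
}
// For "Beta Release": deployInfo.isPreRelease === true, deployInfo.cfReleaseType === "beta"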
diff --git a/tools/types/curseForge.ts b/tools/types/curseForge.ts
index 7eded33..1c982d4 100644
--- a/tools/types/curseForge.ts
+++ b/tools/types/curseForge.ts
@@ -1,154 +1,121 @@
-interface Author {
+interface CurseForgeAuthor {
+ id: number;
name: string;
url: string;
- projectId: number;
- id: number;
- projectTitleId?: number;
- projectTitleTitle: string;
- userId: number;
- twitchId?: number;
-}
-
-interface Attachment {
- id: number;
- projectId: number;
- description: string;
- isDefault: boolean;
- thumbnailUrl: string;
- title: string;
- url: string;
- status: number;
}
export interface CurseForgeFileInfo {
id: number;
+ gameId: number;
+ modId: number;
+ isAvailable: boolean;
displayName: string;
fileName: string;
- fileDate: Date;
- fileLength: number;
releaseType: number;
fileStatus: number;
+ hashes: CurseForgeHash[];
+ fileDate: Date;
+ fileLength: number;
+ downloadCount: number;
+ fileSizeOnDisk: number | undefined;
downloadUrl: string;
- isAlternate: boolean;
- alternateFileId: number;
- dependencies: Dependency[];
- isAvailable: boolean;
- modules: Module[];
- gameVersion: string[];
- sortableGameVersion: SortableGameVersion[];
- hasInstallScript: boolean;
- isCompatibleWithClient: boolean;
- categorySectionPackageType: number;
- restrictProjectFileAccess: number;
- projectStatus: number;
- renderCacheId: number;
- packageFingerprintId: number;
- gameVersionDateReleased: Date;
- gameVersionMappingId: number;
- gameVersionId: number;
- gameId: number;
- isServerPack: boolean;
+ gameVersions: string[];
+ sortableGameVersions: CurseForgeSortableGameVersion[];
+ dependencies: CurseForgeDependency[];
+ exposeAsAlternative: boolean | undefined;
+ parentProjectFileId: number | undefined;
+ alternateFileId: number | undefined;
+ isServerPack: boolean | undefined;
+ serverPackFileId: number | undefined;
+ isEarlyAccessContent: boolean | undefined;
+ earlyAccessEndDate: Date | undefined;
+ fileFingerprint: number;
+ modules: CurseForgeModule[];
}
-interface Category {
- categoryId: number;
- name: string;
- url: string;
- avatarUrl: string;
- parentId: number;
- rootId: number;
- projectId: number;
- avatarId: number;
- gameId: number;
-}
-
-interface CategorySection {
+interface CurseForgeCategory {
id: number;
gameId: number;
name: string;
- packageType: number;
- path: string;
- initialInclusionPattern: string;
- gameCategoryId: number;
+ slug: string;
+ url: string;
+ iconUrl: string;
+ dateModified: Date;
+ isClass: boolean;
+ classId: number;
+ parentCategoryId: number;
}
-interface GameVersionLatestFile {
+interface CurseForgeFileIndex {
gameVersion: string;
- projectFileId: number;
- projectFileName: string;
- fileType: number;
+ fileId: number;
+ fileName: string;
+ releaseType: number;
+ gameVersionTypeId: number;
+ modLoader: number;
+}
+
+interface CurseForgeLinkInfo {
+ websiteUrl: string;
+ wikiUrl: string;
+ issuesUrl: string;
+ sourceUrl: string;
+}
+
+interface CurseForgeAsset {
+ id: number;
+ modID: number;
+ title: string;
+ description: string;
+ thumbnailUrl: string;
+ url: string;
}
export interface CurseForgeModInfo {
id: number;
+ gameId: number;
name: string;
- authors: Author[];
- attachments: Attachment[];
- websiteUrl: string;
- gameId: number;
- summary: string;
- defaultFileId: number;
- downloadCount: number;
- latestFiles: CurseForgeFileInfo[];
- categories: Category[];
- status: number;
- primaryCategoryId: number;
- categorySection: CategorySection;
slug: string;
- gameVersionLatestFiles: GameVersionLatestFile[];
- isFeatured: boolean;
- popularityScore: number;
- gamePopularityRank: number;
- primaryLanguage: string;
- gameSlug: string;
- gameName: string;
- portalName: string;
- dateModified: Date;
- dateCreated: Date;
- dateReleased: Date;
- isAvailable: boolean;
- isExperiemental: boolean;
-}
-
-export interface CurseForgeFetchedFileInfo {
- id: number;
- gameId: number;
- modId: number;
- isAvailable: boolean;
- displayName: string;
- fileName: string;
- releaseType: number;
- fileStatus: number;
- hashes?: Hash[];
- fileDate: Date;
- fileLength: number;
+ links: CurseForgeLinkInfo;
+ summary: string;
+ status: number;
downloadCount: number;
- downloadUrl?: string;
- gameVersions: string[];
- sortableGameVersions: SortableGameVersion[];
- dependencies: Dependency[];
- alternateFileId: number;
- isServerPack: boolean;
- fileFingerprint: number;
- modules: Module[];
+ isFeatured: boolean;
+ primaryCategoryId: number;
+ categories: CurseForgeCategory[];
+ classId: number;
+ authors: CurseForgeAuthor[];
+ logo: CurseForgeAsset;
+ screenshots: CurseForgeAsset[];
+ mainFileId: number;
+ latestFiles: CurseForgeFileInfo[];
+ latestFilesIndexes: CurseForgeFileIndex[];
+ latestEarlyAccessFilesIndexes: CurseForgeFileIndex[];
+ dateCreated: Date;
+ dateModified: Date;
+ dateReleased: Date;
+ allowModDistribution: boolean;
+ gamePopularityRank: number;
+ isAvailable: boolean;
+ thumbsUpCount: number;
}
-interface Dependency {
+interface CurseForgeDependency {
modId: number;
relationType: number;
}
-interface Hash {
+interface CurseForgeHash {
value: string;
algo: number;
}
-interface Module {
+interface CurseForgeModule {
name: string;
fingerprint: number;
}
-interface SortableGameVersion {
+interface CurseForgeSortableGameVersion {
gameVersionName: string;
gameVersionPadded: string;
gameVersion: string;
diff --git a/tools/util/buildConfig.default.json b/tools/util/buildConfig.default.json
index da0498a..7872bdd 100644
--- a/tools/util/buildConfig.default.json
+++ b/tools/util/buildConfig.default.json
@@ -17,5 +17,6 @@
"buildDestinationDirectory": "../build",
"buildSourceDirectory": "../",
"nightlyHookAvatar": "",
- "nightlyHookName": ""
-}
\ No newline at end of file
+ "nightlyHookName": "",
+ "screenshotsQuality": 90
+}
diff --git a/tools/util/curseForgeAPI.ts b/tools/util/curseForgeAPI.ts
index c0cacab..c79ddef 100644
--- a/tools/util/curseForgeAPI.ts
+++ b/tools/util/curseForgeAPI.ts
@@ -1,5 +1,5 @@
import bluebird from "bluebird";
-import { CurseForgeFetchedFileInfo, CurseForgeModInfo as CurseForgeProject } from "../types/curseForge";
+import { CurseForgeFileInfo, CurseForgeModInfo as CurseForgeProject } from "../types/curseForge";
import log from "fancy-log";
import request from "requestretry";
import { ModpackManifestFile } from "../types/modpackManifest";
@@ -43,22 +43,20 @@ export async function fetchProject(toFetch: number): Promise
throw new Error(`Failed to fetch project ${toFetch}`);
}
- if (project) {
- curseForgeProjectCache[toFetch] = project;
- }
+ curseForgeProjectCache[toFetch] = project;
return project;
}
-const fetchedFileInfoCache: { [key: string]: CurseForgeFetchedFileInfo } = {};
-export async function fetchFileInfo(projectID: number, fileID: number): Promise {
+const fetchedFileInfoCache: { [key: string]: CurseForgeFileInfo } = {};
+export async function fetchFileInfo(projectID: number, fileID: number): Promise<CurseForgeFileInfo> {
const slug = `${projectID}/${fileID}`;
if (fetchedFileInfoCache[slug]) {
return fetchedFileInfoCache[slug];
}
- const fileInfo: CurseForgeFetchedFileInfo = (
+ const fileInfo: CurseForgeFileInfo = (
await request({
uri: `${buildConfig.cfCoreApiEndpoint}/v1/mods/${projectID}/files/${fileID}`,
json: true,
@@ -73,17 +71,126 @@ export async function fetchFileInfo(projectID: number, fileID: number): Promise<
throw new Error(`Failed to download file ${projectID}/file/${fileID}`);
}
- if (fileInfo) {
- fetchedFileInfoCache[slug] = fileInfo;
+ fetchedFileInfoCache[slug] = fileInfo;
- if (!fileInfo.downloadUrl) {
- const fid = `${Math.floor(fileInfo.id / 1000)}/${fileInfo.id % 1000}`;
+ return fileInfo;
+}
- fileInfo.downloadUrl = `https://edge.forgecdn.net/files/${fid}/${fileInfo.fileName}`;
+export interface ProjectToFileId {
+ projectID: number;
+ fileID: number;
+}
+
+/**
+ * Fetches multiple CurseForge files.
+ * Falls back to fetchFileInfo in case it's impossible to bulk-fetch some files.
+ *
+ * @param toFetch List of Project IDs to File IDs, to fetch.
+ * @returns CurseForge file infos.
+ */
+export async function fetchFilesBulk(toFetch: ProjectToFileId[]): Promise<CurseForgeFileInfo[]> {
+ const fileInfos: CurseForgeFileInfo[] = [];
+ // Map of file ids not fetched (project ID to file ID)
+ const unfetched: ProjectToFileId[] = [];
+
+ // Determine projects that have been fetched already.
+ toFetch.forEach((file) => {
+ const slug = `${file.projectID}/${file.fileID}`;
+ const cached = fetchedFileInfoCache[slug];
+ if (cached) fileInfos.push(cached);
+ else unfetched.push(file);
+ });
+
+ // Sort list (reduces risk of duplicate entries)
+ unfetched.sort((a, b) => a.fileID - b.fileID);
+
+ if (unfetched.length > 0) {
+ // Augment the array of known files with new info.
+ const fetched: CurseForgeFileInfo[] = (
+ await request.post({
+ uri: `${buildConfig.cfCoreApiEndpoint}/v1/mods/files`,
+ json: {
+ fileIds: unfetched.map((file) => file.fileID),
+ },
+ fullResponse: false,
+ maxAttempts: 5,
+ headers: {
+ "X-Api-Key": getCurseForgeToken(),
+ },
+ })
+ )?.data;
+
+ if (!fetched) {
+ throw new Error(
+ `Failed to bulk-fetch files:\n${unfetched
+ .map((file) => `File ${file.fileID} of mod ${file.projectID},`)
+ .join("\n")}`,
+ );
+ }
+
+ // Remove duplicate entries (Batch Fetch sometimes returns duplicate inputs... for some reason)
+ if (fetched.length > unfetched.length) {
+ // Can't directly use Set, as Set compares object ref, not object data
+ const uniqueFileIDs: number[] = [];
+ fetched.forEach((file) => {
+ if (!uniqueFileIDs.includes(file.id)) {
+ fileInfos.push(file);
+ uniqueFileIDs.push(file.id);
+ }
+ });
+ } else {
+ fileInfos.push(...fetched);
+ }
+
+ // Cache fetched stuff.
+ fetched.forEach((info) => {
+ fetchedFileInfoCache[`${info.modId}/${info.id}`] = info;
+ });
+
+ // In case we haven't received the proper amount of mod infos,
+ // try requesting them individually.
+ if (fileInfos.length < toFetch.length) {
+ // Set of fetched fileIDs.
+ const fileInfoIDs: Set<number> = new Set(
+ fileInfos.map((file) => {
+ return file.id;
+ }),
+ );
+ const toFetchMissing = [...new Set(toFetch.filter((x) => !fileInfoIDs.has(x.fileID)))];
+
+ if (toFetchMissing.length > 0) {
+ log.warn(
+ `Couldn't fetch the following files in bulk:\n${toFetchMissing
+ .map((file) => `File ${file.fileID} of mod ${file.projectID},`)
+ .join("\n")}`,
+ );
+
+ // Try fetching files individually, in case they've been deleted.
+ let count = 0;
+ const missingFileInfos: CurseForgeFileInfo[] = await bluebird.map(toFetchMissing, async (file) => {
+ log.info(
+ `Fetching file ${file.fileID} of mod ${file.projectID} directly... (${++count} / ${toFetchMissing.length})`,
+ );
+
+ try {
+ // In case something fails to download; catch, rewrite, rethrow.
+ return await fetchFileInfo(file.projectID, file.fileID);
+ } catch (err) {
+ err.message = `Couldn't fetch file ${file.fileID} of mod ${file.projectID}. ${
+ err.message || "Unknown error"
+ }`;
+ throw err;
+ }
+ });
+
+ // The code above is expected to throw on failure, terminating further execution,
+ // so any file infos that reach this point can safely be pushed.
+ fileInfos.push(...missingFileInfos);
+ }
}
}
- return fileInfo;
+ return fileInfos;
}
/**
@@ -136,11 +243,11 @@ export async function fetchProjectsBulk(toFetch: number[]): Promise mi.id));
const toFetchMissing = [...new Set(toFetch.filter((x) => !modInfoIDs.has(x)))];
- log.warn(`Couldn't fetch next project IDs in bulk: ${toFetchMissing.join(", ")}`);
+ log.warn(`Couldn't fetch some project IDs in bulk: ${toFetchMissing.join(", ")}`);
// Try fetching mods individually, in case they've been deleted.
let count = 0;
@@ -165,19 +272,20 @@ export async function fetchProjectsBulk(toFetch: number[]): Promise {
if (toFetch.length > 0) {
log(`Fetching ${toFetch.length} mods...`);
- const modsPath = upath.join(destination, "mods");
- await fs.promises.mkdir(modsPath, { recursive: true });
-
let fetched = 0;
return Bluebird.map(
toFetch,
async (file) => {
const fileInfo = await fetchFileInfo(file.projectID, file.fileID);
-
const fileDef: FileDef = {
url: fileInfo.downloadUrl,
};
@@ -199,7 +307,7 @@ export async function fetchMods(toFetch: ModpackManifestFile[], destination: str
log(`Fetched ${upath.basename(fileDef.url)} from cache... (${fetched} / ${toFetch.length})`);
}
- const dest = upath.join(destination, "mods", fileInfo.fileName);
+ const dest = upath.join(destination, fileInfo.fileName);
await fs.promises.symlink(relative(dest, modFile.cachePath), dest);
},
diff --git a/tools/util/util.ts b/tools/util/util.ts
index 96de220..914545d 100644
--- a/tools/util/util.ts
+++ b/tools/util/util.ts
@@ -4,19 +4,25 @@ import fs from "fs";
import buildConfig from "../buildConfig";
import upath from "upath";
import requestretry from "requestretry";
+import request from "requestretry";
import http from "http";
import { compareBufferToHashDef } from "./hashes";
import { execSync } from "child_process";
-import { ModpackManifest, ModpackManifestFile, ExternalDependency } from "../types/modpackManifest";
-import { fetchProject, fetchProjectsBulk } from "./curseForgeAPI";
+import { ExternalDependency, ModpackManifest, ModpackManifestFile } from "../types/modpackManifest";
+import { fetchFileInfo, fetchProject, fetchProjectsBulk } from "./curseForgeAPI";
import Bluebird from "bluebird";
import { VersionManifest } from "../types/versionManifest";
import { VersionsManifest } from "../types/versionsManifest";
-import request from "requestretry";
import log from "fancy-log";
+import { pathspec, SimpleGit, simpleGit } from "simple-git";
+import { Commit, ModChangeInfo } from "../types/changelogTypes";
+import { rootDirectory } from "../globals";
const LIBRARY_REG = /^(.+?):(.+?):(.+?)$/;
+// Make git commands run in root dir
+const git: SimpleGit = simpleGit(rootDirectory);
+
/**
* Parses the library name into path following the standard package naming convention.
*
@@ -29,9 +35,7 @@ export const libraryToPath = (library: string): string => {
const name = parsedLibrary[2];
const version = parsedLibrary[3];
- const newURL = `${pkg}/${name}/${version}/${name}-${version}`;
-
- return newURL;
+ return `${pkg}/${name}/${version}/${name}-${version}`;
}
};
@@ -40,12 +44,31 @@ export const libraryToPath = (library: string): string => {
*/
export const checkEnvironmentalVariables = (vars: string[]): void => {
vars.forEach((vari) => {
- if (!process.env[vari] || process.env[vari] == "") {
+ if (!isEnvVariableSet(vari)) {
throw new Error(`Environmental variable ${vari} is unset.`);
}
});
};
+/**
+ * Returns true if given variable set, false otherwise.
+ */
+export const isEnvVariableSet = (env: string): boolean => {
+ return process.env[env] !== undefined && process.env[env] !== "";
+};
+
+/**
+ * Check if given git tag exists. Throws otherwise.
+ */
+export const checkGitTag = (tag: string): void => {
+ // The below command returns an empty buffer if the given tag does not exist.
+ const tagBuffer = execSync(`git tag --list ${tag}`);
+
+ if (!tagBuffer || tagBuffer.toString().trim() != tag) {
+ throw new Error(`Tag ${tag} could not be found.`);
+ }
+};
+
export enum RetrievedFileDefReason {
Downloaded,
CacheHit,
@@ -61,6 +84,8 @@ export interface RetrievedFileDef {
*
* Internally hashes the URL of the provided FileDef and looks it up in the cache directory.
* In case of no cache hit, downloads the file and stores within the cache directory for later use.
+ *
+ * @param fileDef The file def to download or retrieve.
*/
export async function downloadOrRetrieveFileDef(fileDef: FileDef): Promise {
const fileNameSha = sha1(fileDef.url);
@@ -98,37 +123,7 @@ export async function downloadOrRetrieveFileDef(fileDef: FileDef): Promise {
- // Verify hashes.
- if (!err && fileDef.hashes && body) {
- const success = fileDef.hashes.every((hashDef) => {
- return compareBufferToHashDef(body as Buffer, hashDef);
- });
-
- if (!success) {
- if (hashFailed) {
- throw new Error(`Couldn't verify checksums of ${upath.basename(fileDef.url)}`);
- }
-
- hashFailed = true;
- return true;
- }
- }
- return requestretry.RetryStrategies.HTTPOrNetworkError(err, response, body);
- };
-
- const data: Buffer = Buffer.from(
- await requestretry({
- url: fileDef.url,
- fullResponse: false,
- encoding: null,
- retryStrategy: retryStrategy,
- maxAttempts: 5,
- }),
- );
-
- await handle.write(data);
+ await handle.write(await downloadFileDef(fileDef));
await handle.close();
return {
@@ -147,6 +142,44 @@ export async function downloadOrRetrieveFileDef(fileDef: FileDef): Promise {
+ let hashFailed = false;
+ const retryStrategy = (err: Error, response: http.IncomingMessage, body: unknown) => {
+ if (response.statusCode === 404) {
+ throw new Error(`URL ${fileDef.url} returned status 404.`);
+ }
+ // Verify hashes.
+ if (!err && fileDef.hashes && body) {
+ const success = fileDef.hashes.every((hashDef) => {
+ return compareBufferToHashDef(body as Buffer, hashDef);
+ });
+
+ if (!success) {
+ if (hashFailed) {
+ throw new Error(`Couldn't verify checksums of ${upath.basename(fileDef.url)}`);
+ }
+
+ hashFailed = true;
+ return true;
+ }
+ }
+ return requestretry.RetryStrategies.HTTPOrNetworkError(err, response, body);
+ };
+
+ return Buffer.from(
+ await requestretry({
+ url: fileDef.url,
+ fullResponse: false,
+ encoding: null,
+ retryStrategy: retryStrategy,
+ maxAttempts: 5,
+ }),
+ );
+}
+
/**
* Returns artifact name body depending on environment variables.
* Mostly intended to be called by CI/CD.
@@ -156,13 +189,9 @@ export function makeArtifactNameBody(baseName: string): string {
if (process.env.GITHUB_TAG) {
return `${baseName}-${process.env.GITHUB_TAG}`;
}
- // RC.
- else if (process.env.RC_VERSION) {
- return `${baseName}-${process.env.RC_VERSION.replace(/^v/, "")}`;
- }
// If SHA is provided and the build isn't tagged, append both the branch and short SHA.
else if (process.env.GITHUB_SHA && process.env.GITHUB_REF && process.env.GITHUB_REF.startsWith("refs/heads/")) {
- const shortCommit = process.env.GITHUB_SHA.substr(0, 7);
+ const shortCommit = process.env.GITHUB_SHA.substring(0, 7);
const branch = /refs\/heads\/(.+)/.exec(process.env.GITHUB_REF);
return `${baseName}-${branch[1]}-${shortCommit}`;
} else {
@@ -171,12 +200,18 @@ export function makeArtifactNameBody(baseName: string): string {
}
/**
- * Fetches the last tag known to Git using the current branch.
- * @param {string | nil} before Tag to get the tag before.
+ * Returns the COMPARE_TAG env if set, else fetches the last tag known to Git using the current branch.
+ * @param before Tag to get the tag before.
* @returns string Git tag.
* @throws
*/
export function getLastGitTag(before?: string): string {
+ if (isEnvVariableSet("COMPARE_TAG")) {
+ checkGitTag(process.env["COMPARE_TAG"]);
+
+ return process.env["COMPARE_TAG"];
+ }
+
if (before) {
before = `"${before}^"`;
}
@@ -188,40 +223,46 @@ export function getLastGitTag(before?: string): string {
/**
* Generates a changelog based on the two provided Git refs.
- * @param {string} since Lower boundary Git ref.
- * @param {string} to Upper boundary Git ref.
- * @param {string[]} dirs Optional scopes.
+ * @param since Lower boundary Git ref.
+ * @param to Upper boundary Git ref.
+ * @param dirs Optional scopes. These are of the perspective of the root dir.
+ * @returns Array of Commit objects making up the changelog.
*/
-export function getChangeLog(since = "HEAD", to = "HEAD", dirs: string[] = undefined): string {
- const command = [
- "git log",
- "--no-merges",
- '--date="format:%d %b %Y"',
- '--pretty="* %s - **%an** (%ad)"',
- `${since}..${to}`,
- ];
-
+export async function getChangelog(since = "HEAD", to = "HEAD", dirs: string[] = undefined): Promise<Commit[]> {
+ const options: string[] = ["--no-merges", `${since}..${to}`];
if (dirs) {
- command.push("--", dirs.join(" -- "));
+ dirs.forEach((dir) => {
+ options.push(pathspec(dir));
+ });
}
- return execSync(command.join(" ")).toString().trim();
+ const commitList: Commit[] = [];
+ await git.log(options, (err, output) => {
+ if (err) {
+ console.error(err);
+ throw new Error("Failed to generate the git log.");
+ }
+
+ // output.all is read-only, so it cannot be assigned to commitList directly; copy each commit instead.
+ output.all.forEach((commit) => commitList.push(commit));
+ });
+
+ return commitList;
}
/**
- * Generates a changelog based on the two provided Git refs.
- * @param {string} since Lower boundary Git ref.
- * @param {string} to Upper boundary Git ref.
- * @param {string[]} dirs Optional scopes.
+ * Gets the file at a certain point in time.
+ * @param path The path to the file
+ * @param revision The git ref point. Can also be a commit SHA
*/
export function getFileAtRevision(path: string, revision = "HEAD"): string {
return execSync(`git show ${revision}:"${path}"`).toString().trim();
}
export interface ManifestFileListComparisonResult {
- removed: string[];
- modified: string[];
- added: string[];
+ removed: ModChangeInfo[];
+ modified: ModChangeInfo[];
+ added: ModChangeInfo[];
}
export async function compareAndExpandManifestDependencies(
@@ -229,18 +270,18 @@ export async function compareAndExpandManifestDependencies(
newFiles: ModpackManifest,
): Promise {
// Map inputs for efficient joining.
- const oldFileMap: { [key: number]: ModpackManifestFile } = oldFiles.files.reduce(
- (map, file) => ((map[file.projectID] = file), map),
- {},
- );
- const newFileMap: { [key: number]: ModpackManifestFile } = newFiles.files.reduce(
- (map, file) => ((map[file.projectID] = file), map),
- {},
- );
+ const oldFileMap: { [key: number]: ModpackManifestFile } = oldFiles.files.reduce((map, file) => {
+ map[file.projectID] = file;
+ return map;
+ }, {});
+ const newFileMap: { [key: number]: ModpackManifestFile } = newFiles.files.reduce((map, file) => {
+ map[file.projectID] = file;
+ return map;
+ }, {});
- const removed: string[] = [],
- modified: string[] = [],
- added: string[] = [];
+ const removed: ModChangeInfo[] = [],
+ modified: ModChangeInfo[] = [],
+ added: ModChangeInfo[] = [];
// Create a distinct map of project IDs.
const projectIDs = Array.from(
@@ -259,15 +300,28 @@ export async function compareAndExpandManifestDependencies(
// Doesn't exist in new, but exists in old. Removed. Left outer join.
if (!newFileInfo && oldFileInfo) {
- removed.push((await fetchProject(oldFileInfo.projectID)).name);
+ removed.push({
+ modName: (await fetchProject(oldFileInfo.projectID)).name,
+ projectID: projectID,
+ oldVersion: (await fetchFileInfo(oldFileInfo.projectID, oldFileInfo.fileID)).displayName,
+ });
}
// Doesn't exist in old, but exists in new. Added. Right outer join.
else if (newFileMap[projectID] && !oldFileMap[projectID]) {
- added.push((await fetchProject(newFileInfo.projectID)).name);
+ added.push({
+ modName: (await fetchProject(newFileInfo.projectID)).name,
+ projectID: projectID,
+ newVersion: (await fetchFileInfo(newFileInfo.projectID, newFileInfo.fileID)).displayName,
+ });
}
// Exists in both. Modified? Inner join.
else if (oldFileInfo.fileID != newFileInfo.fileID) {
- modified.push((await fetchProject(newFileInfo.projectID)).name);
+ modified.push({
+ modName: (await fetchProject(newFileInfo.projectID)).name,
+ projectID: projectID,
+ oldVersion: (await fetchFileInfo(newFileInfo.projectID, oldFileInfo.fileID)).displayName,
+ newVersion: (await fetchFileInfo(newFileInfo.projectID, newFileInfo.fileID)).displayName,
+ });
}
},
{ concurrency: buildConfig.downloaderConcurrency },
@@ -275,11 +329,17 @@ export async function compareAndExpandManifestDependencies(
// Compare external dependencies the same way.
const oldExternalMap: { [key: string]: ExternalDependency } = (oldFiles.externalDependencies || []).reduce(
- (map, file) => ((map[file.name] = file), map),
+ (map, file) => {
+ map[file.name] = file;
+ return map;
+ },
{},
);
const newExternalMap: { [key: string]: ExternalDependency } = (newFiles.externalDependencies || []).reduce(
- (map, file) => ((map[file.name] = file), map),
+ (map, file) => {
+ map[file.name] = file;
+ return map;
+ },
{},
);
@@ -290,21 +350,21 @@ export async function compareAndExpandManifestDependencies(
]),
);
- externalNames.forEach(async (name) => {
+ externalNames.forEach((name) => {
const oldDep = oldExternalMap[name];
const newDep = newExternalMap[name];
// Doesn't exist in new, but exists in old. Removed. Left outer join.
if (!newDep && oldDep) {
- removed.push(oldDep.name);
+ removed.push({ modName: oldDep.name });
}
// Doesn't exist in old, but exists in new. Added. Right outer join.
else if (newDep && !oldDep) {
- added.push(newDep.name);
+ added.push({ modName: newDep.name });
}
// Exists in both. Modified? Inner join.
else if (oldDep.url != newDep.url || oldDep.name != newDep.name) {
- modified.push(newDep.name);
+ modified.push({ modName: newDep.name });
}
});
@@ -338,17 +398,12 @@ export async function getVersionManifest(minecraftVersion: string): Promise