fix gulpfile

parent 3bf2612dd0
commit a8a1e6e133

gulpfile.ts (546 changed lines)
@@ -1,29 +1,28 @@
-import * as gulp from "gulp"
-import fs from "fs-extra"
-import * as path from "path"
-import archiver from "archiver"
-import stringify from "json-stringify-pretty-compact"
+import * as gulp from "gulp";
+import fs from "fs-extra";
+import * as path from "path";
+import archiver from "archiver";
+import stringify from "json-stringify-pretty-compact";

-const sourcemaps = require('gulp-sourcemaps')
-const uglify = require('gulp-uglify')
-const concat = require("gulp-concat")
-const buffer = require('vinyl-buffer')
-const source = require('vinyl-source-stream')
-const through = require('through2')
-const jsonminify = require('gulp-jsonminify')
-const merge2 = require('merge2')
+const sourcemaps = require("gulp-sourcemaps");
+const uglify = require("gulp-uglify");
+const concat = require("gulp-concat");
+const buffer = require("vinyl-buffer");
+const source = require("vinyl-source-stream");
+const through = require("through2");
+const jsonminify = require("gulp-jsonminify");
+const merge2 = require("merge2");

-const git = require('gulp-git-streamed')
+const git = require("gulp-git-streamed");

 const loadJson = (path: string): any => {
     try {
-        let str = fs.readFileSync(path).toString()
-        return JSON.parse(str)
-    }
-    catch {
-        throw Error("Unable to load " + path)
-    }
+        let str = fs.readFileSync(path).toString();
+        return JSON.parse(str);
+    } catch {
+        throw Error("Unable to load " + path);
+    }
+};

 import {
     createLiteral,
@@ -38,36 +37,37 @@ import {
     TransformerFactory,
     visitEachChild,
     visitNode,
-} from "typescript"
-import less from "gulp-less"
+} from "typescript";
+import less from "gulp-less";

-import Logger from "./source/utils/Logger"
-import {ModuleData} from "@league-of-foundry-developers/foundry-vtt-types/src/foundry/common/packages.mjs"
-import browserify from "browserify"
-const tsify = require("tsify")
+import Logger from "./source/utils/Logger";
+import { ModuleData } from "@league-of-foundry-developers/foundry-vtt-types/src/foundry/common/packages.mjs";
+import browserify from "browserify";
+import { data } from "jquery";
+const tsify = require("tsify");

-const ts = require("gulp-typescript")
+const ts = require("gulp-typescript");

-const argv = require("yargs").argv
+const argv = require("yargs").argv;

-let distPath = "dist"
+let distPath = "dist";

 function getConfig() {
-    const configPath = path.resolve(process.cwd(), "foundryconfig.json")
-    let config
+    const configPath = path.resolve(process.cwd(), "foundryconfig.json");
+    let config;

     if (fs.existsSync(configPath)) {
-        config = loadJson(configPath)
-        return config
+        config = loadJson(configPath);
+        return config;
     } else {
-        return
+        return;
     }
 }

 interface Manifest {
-    root: string
-    file: ModuleData
-    name: string
+    root: string;
+    file: ModuleData;
+    name: string;
 }

 const getManifest = (): Manifest | null => {
@@ -75,237 +75,211 @@ const getManifest = (): Manifest | null => {
         root: "",
         // @ts-ignore
         file: {},
-        name: ""
-    }
+        name: "",
+    };

     if (fs.existsSync("source")) {
-        json.root = "source"
+        json.root = "source";
     } else {
-        json.root = distPath
+        json.root = distPath;
     }

-    const modulePath = path.join(json.root, "module.json")
-    const systemPath = path.join(json.root, "system.json")
-
+    const modulePath = path.join(json.root, "module.json");
+    const systemPath = path.join(json.root, "system.json");
     if (fs.existsSync(modulePath)) {
-        json.file = loadJson(modulePath) as ModuleData
-        json.name = "module.json"
+        json.file = loadJson(modulePath) as ModuleData;
+        json.name = "module.json";
     } else if (fs.existsSync(systemPath)) {
-        json.file = loadJson(systemPath) as ModuleData
-        json.name = "system.json"
+        json.file = loadJson(systemPath) as ModuleData;
+        json.name = "system.json";
     } else {
-        return null
-    }
-
-    return json
+        return null;
+    }
+    return json;
+};

 const createTransformer = (): TransformerFactory<any> => {
     /**
      * @param {typescript.Node} node
      */
     const shouldMutateModuleSpecifier = (node: Node): boolean => {
-        if (!isImportDeclaration(node) && !isExportDeclaration(node))
-            return false
-        if (node.moduleSpecifier === undefined)
-            return false
-        if (!isStringLiteral(node.moduleSpecifier))
-            return false
-        if (!node.moduleSpecifier.text.startsWith("./") && !node.moduleSpecifier.text.startsWith("../"))
-            return false
+        if (!isImportDeclaration(node) && !isExportDeclaration(node)) return false;
+        if (node.moduleSpecifier === undefined) return false;
+        if (!isStringLiteral(node.moduleSpecifier)) return false;
+        if (!node.moduleSpecifier.text.startsWith("./") && !node.moduleSpecifier.text.startsWith("../")) return false;

-        return path.extname(node.moduleSpecifier.text) === ""
-    }
+        return path.extname(node.moduleSpecifier.text) === "";
+    };

     return (context: TransformationContext): TSTransformer<any> => {
         return (node: Node) => {
             function visitor(node: Node): Node {
                 if (shouldMutateModuleSpecifier(node)) {
                     if (isImportDeclaration(node)) {
-                        const newModuleSpecifier = createLiteral(`${(node.moduleSpecifier as LiteralExpression).text}.js`)
-                        return factory.updateImportDeclaration(node, node.decorators, node.modifiers, node.importClause, newModuleSpecifier, undefined)
+                        const newModuleSpecifier = createLiteral(`${(node.moduleSpecifier as LiteralExpression).text}.js`);
+                        return factory.updateImportDeclaration(node, node.decorators, node.modifiers, node.importClause, newModuleSpecifier, undefined);
                     } else if (isExportDeclaration(node)) {
-                        const newModuleSpecifier = createLiteral(`${(node.moduleSpecifier as LiteralExpression).text}.js`)
-                        return factory.updateExportDeclaration(node, node.decorators, node.modifiers, false, node.exportClause, newModuleSpecifier, undefined)
+                        const newModuleSpecifier = createLiteral(`${(node.moduleSpecifier as LiteralExpression).text}.js`);
+                        return factory.updateExportDeclaration(node, node.decorators, node.modifiers, false, node.exportClause, newModuleSpecifier, undefined);
                     }
                 }
-                return visitEachChild(node, visitor, context)
+                return visitEachChild(node, visitor, context);
             }

-            return visitNode(node, visitor)
-        }
-    }
-}
+            return visitNode(node, visitor);
+        };
+    };
+};

 const tsConfig = ts.createProject("tsconfig.json", {
     getCustomTransformers: (_program: any) => ({
         after: [createTransformer()],
     }),
-})
+});

 function buildTS() {
-    const debug = process.env.npm_lifecycle_event !== "package"
-    let res = tsConfig.src()
-        .pipe(sourcemaps.init())
-        .pipe(tsConfig())
+    const debug = process.env.npm_lifecycle_event !== "package";
+    let res = tsConfig.src().pipe(sourcemaps.init()).pipe(tsConfig());

-    return res.js
-        .pipe(sourcemaps.write('', { debug: debug, includeContent: true, sourceRoot: './ts/source' }))
-        .pipe(gulp.dest(distPath))
+    return res.js.pipe(sourcemaps.write("", { debug: debug, includeContent: true, sourceRoot: "./ts/source" })).pipe(gulp.dest(distPath));
 }

 const bundleModule = () => {
-    const debug = argv.dbg || argv.debug
-    const bsfy = browserify(path.join(__dirname, "source/index.ts"), { debug: debug })
-    return bsfy.on('error', Logger.err)
+    const debug = argv.dbg || argv.debug;
+    const bsfy = browserify(path.join(__dirname, "source/index.ts"), { debug: debug });
+    return bsfy
+        .on("error", Logger.err)
         .plugin(tsify)
         .bundle()
         .pipe(source(path.join(distPath, "bundle.js")))
         .pipe(buffer())
         .pipe(sourcemaps.init({ loadMaps: true }))
         .pipe(uglify())
-        .pipe(sourcemaps.write('./'))
-        .pipe(gulp.dest('./'))
-}
+        .pipe(sourcemaps.write("./"))
+        .pipe(gulp.dest("./"));
+};

 const buildLess = () => {
-    return gulp.src("source/style/*.less")
-        .pipe(less())
-        .pipe(concat("bundle.css"))
-        .pipe(gulp.dest(distPath))
-}
+    return gulp.src("source/style/*.less").pipe(less()).pipe(concat("bundle.css")).pipe(gulp.dest(distPath));
+};

 interface Pack {
-    root: string,
-    type: string,
-    name: string
+    root: string;
+    type: string;
+    name: string;
 }

 const buildPack = (pack: Pack): NodeJS.ReadWriteStream => {
-    return gulp.src(pack.root + "/" + pack.type + "/" + pack.name + "/*.json")
+    return gulp
+        .src(pack.root + "/" + pack.type + "/" + pack.name + "/*.json")
         .pipe(jsonminify())
         .pipe(concat(pack.name + ".db"))
-        .pipe(gulp.dest(distPath + "/" + pack.root + "/" + pack.type))
-}
+        .pipe(gulp.dest(distPath + "/" + pack.root + "/" + pack.type));
+};

 const buildPacks = () => {
-    let packs: Pack[] = []
+    let packs: Pack[] = [];

-    const rootDir = "packs"
-    const packTypes = fs.readdirSync(rootDir).filter(p => fs.statSync(path.join(rootDir, p)).isDirectory())
-    packTypes.forEach(packType => {
-        const packDir = path.join(rootDir, packType)
-        const packNames = fs.readdirSync(packDir).filter(p => fs.statSync(path.join(packDir, p)).isDirectory())
-        packNames.forEach(packName => {
+    const rootDir = "packs";
+    const packTypes = fs.readdirSync(rootDir).filter((p) => fs.statSync(path.join(rootDir, p)).isDirectory());
+    packTypes.forEach((packType) => {
+        const packDir = path.join(rootDir, packType);
+        const packNames = fs.readdirSync(packDir).filter((p) => fs.statSync(path.join(packDir, p)).isDirectory());
+        packNames.forEach((packName) => {
             packs.push({
                 name: packName,
                 type: packType,
-                root: rootDir
-            })
-        })
-    })
+                root: rootDir,
+            });
+        });
+    });

-    return merge2(packs.map(p => buildPack(p)))
-}
+    return merge2(packs.map((p) => buildPack(p)));
+};

 const copyFiles = async () => {
     const recursiveFileSearch = (dir: string, callback: (err: NodeJS.ErrnoException | null, res: Array<string>) => void) => {
-        let results: Array<string> = []
+        let results: Array<string> = [];
         fs.readdir(dir, (err, list) => {
-            if (err)
-                return callback(err, results)
+            if (err) return callback(err, results);

-            let pending = list.length
-            if (!pending)
-                return callback(null, results)
+            let pending = list.length;
+            if (!pending) return callback(null, results);

             for (let file of list) {
-                file = path.resolve(dir, file)
+                file = path.resolve(dir, file);
                 fs.stat(file, (err, stat) => {
                     if (stat && stat.isDirectory()) {
                         recursiveFileSearch(file, (err, res) => {
-                            results = results.concat(res)
-                            if (!--pending)
-                                callback(null, results)
-                        })
+                            results = results.concat(res);
+                            if (!--pending) callback(null, results);
+                        });
+                    } else {
+                        results.push(file);
+                        if (!--pending) callback(null, results);
                     }
-                    else {
-                        results.push(file)
-                        if (!--pending)
-                            callback(null, results)
-                    }
-                })
-            }
-        })
+                });
+            }
+        });
+    };
     try {
+        const modulePath = path.join("source", "module.json");
+        if (fs.existsSync(modulePath)) await fs.copyFile(modulePath, path.join(distPath, "module.json"));

-        const modulePath = path.join("source", "module.json")
-        if (fs.existsSync(modulePath))
-            await fs.copyFile(modulePath, path.join(distPath, "module.json"))
+        const systemPath = path.join("source/system.json");
+        if (fs.existsSync(systemPath)) await fs.copyFile(systemPath, path.join(distPath, "system.json"));

-        const systemPath = path.join("source/system.json")
-        if (fs.existsSync(systemPath))
-            await fs.copyFile(systemPath, path.join(distPath, "system.json"))

-        if (!fs.existsSync(path.resolve(__dirname, "assets")))
-            return Promise.resolve()
+        if (!fs.existsSync(path.resolve(__dirname, "assets"))) return Promise.resolve();

         const filter = (src: string, dest: string): boolean => {
-            Logger.ok("Copying file: " + dest)
-            return true
-        }
+            Logger.ok("Copying file: " + dest);
+            return true;
+        };

-        await fs.copyFile(path.join("source", "template.json"), path.join(distPath, "template.json"))
+        await fs.copyFile(path.join("source", "template.json"), path.join(distPath, "template.json"));

-        fs.copySync(path.resolve(__dirname, "assets"), path.resolve(__dirname, distPath + "/assets"), { overwrite: true, filter })
-        fs.copySync(path.resolve(__dirname, "lang"), path.resolve(__dirname, distPath + "/lang"), { overwrite: true, filter })
+        fs.copySync(path.resolve(__dirname, "assets"), path.resolve(__dirname, distPath + "/assets"), { overwrite: true, filter });
+        fs.copySync(path.resolve(__dirname, "lang"), path.resolve(__dirname, distPath + "/lang"), { overwrite: true, filter });
         //fs.copySync(path.resolve(__dirname, "packs"), path.resolve(__dirname, distPath + "/packs"), { overwrite: true, filter })
-        fs.copySync(path.resolve(__dirname, "templates"), path.resolve(__dirname, distPath + "/templates"), { overwrite: true, filter })
-        return Promise.resolve()
+        fs.copySync(path.resolve(__dirname, "templates"), path.resolve(__dirname, distPath + "/templates"), { overwrite: true, filter });
+        return Promise.resolve();
     } catch (err) {
-        await Promise.reject(err)
-    }
+        await Promise.reject(err);
+    }
+};

 const cleanDist = async () => {
-    if (argv.dbg || argv.debug)
-        return
-    Logger.log("Cleaning dist file clutter")
+    if (argv.dbg || argv.debug) return;
+    Logger.log("Cleaning dist file clutter");

-    const files: string[] = []
+    const files: string[] = [];
     const getFiles = async (dir: string) => {
-        const arr = await fs.promises.readdir(dir)
-        for(const entry of arr)
-        {
-            const fullPath = path.join(dir, entry)
-            const stat = await fs.promises.stat(fullPath)
-            if (stat.isDirectory())
-                await getFiles(fullPath)
-            else
-                files.push(fullPath)
-        }
+        const arr = await fs.promises.readdir(dir);
+        for (const entry of arr) {
+            const fullPath = path.join(dir, entry);
+            const stat = await fs.promises.stat(fullPath);
+            if (stat.isDirectory()) await getFiles(fullPath);
+            else files.push(fullPath);
+        }
+    };

-    await getFiles(path.resolve(distPath))
+    await getFiles(path.resolve(distPath));
     for (const file of files) {
-        if (file.endsWith("bundle.js") || file.endsWith(".css") || file.endsWith("module.json"))
-            continue
+        if (file.endsWith("bundle.js") || file.endsWith(".css") || file.endsWith("module.json")) continue;

-        Logger.warn("Cleaning " + path.relative(process.cwd(), file))
-        await fs.promises.unlink(file)
-    }
+        Logger.warn("Cleaning " + path.relative(process.cwd(), file));
+        await fs.promises.unlink(file);
+    }
+};

 /**
 * Watch for changes for each build step
 */
 const buildWatch = () => {
-    gulp.watch("source/**/*.ts", { ignoreInitial: false }, gulp.series(buildTS, bundleModule))
-    gulp.watch("source/**/*.less", { ignoreInitial: false }, buildLess)
-    gulp.watch("packs", { ignoreInitial: false }, buildPacks)
-    gulp.watch(["assets", "lang", "templates", "source/*.json"], { ignoreInitial: false }, copyFiles)
-}
+    gulp.watch("source/**/*.ts", { ignoreInitial: false }, gulp.series(buildTS, bundleModule));
+    gulp.watch("source/**/*.less", { ignoreInitial: false }, buildLess);
+    gulp.watch("packs", { ignoreInitial: false }, buildPacks);
+    gulp.watch(["assets", "lang", "templates", "source/*.json"], { ignoreInitial: false }, copyFiles);
+};

 /********************/
 /* CLEAN */
@@ -316,67 +290,63 @@ const buildWatch = () => {
 * while ignoring source files
 */
 const clean = async () => {
-    if (!fs.existsSync(distPath))
-        fs.mkdirSync(distPath)
+    if (!fs.existsSync(distPath)) fs.mkdirSync(distPath);
     else {
         // Attempt to remove the files
         try {
-            fs.rmSync(distPath, { recursive: true, force: true })
-            fs.mkdirSync(distPath)
-            return Promise.resolve()
+            fs.rmSync(distPath, { recursive: true, force: true });
+            fs.mkdirSync(distPath);
+            return Promise.resolve();
         } catch (err) {
-            await Promise.reject(err)
-        }
+            await Promise.reject(err);
+        }
     }
+};

 const setTargetDir = async () => {
-    const dp = process.env.FOUNDRY_PATH
-    if (!dp)
-        throw Error("FOUNDRY_PATH not defined in environment")
+    const dp = process.env.FOUNDRY_PATH;
+    if (!dp) throw Error("FOUNDRY_PATH not defined in environment");

-    const name = getManifest()!.file.name ?? "midgard5"
-    distPath = path.join(dp, "Data", "systems", name)
-}
+    const name = getManifest()!.file.name ?? "midgard5";
+    distPath = path.join(dp, "Data", "systems", name);
+};

 const linkUserData = async () => {
-    const name = getManifest()!.file.name
+    const name = getManifest()!.file.name;

-    let destDir
+    let destDir;
     try {
         if (fs.existsSync(path.resolve(".", distPath, "module.json")) || fs.existsSync(path.resolve(".", "source", "module.json"))) {
-            destDir = "modules"
+            destDir = "modules";
         } else if (fs.existsSync(path.resolve(".", distPath, "system.json")) || fs.existsSync(path.resolve(".", "source", "system.json"))) {
-            destDir = "systems"
+            destDir = "systems";
         } else {
-            throw Error(`Could not find module.json or system.json`)
+            throw Error(`Could not find module.json or system.json`);
         }

-        let linkDir
-        const dataPath = process.env.FOUNDRY_PATH
+        let linkDir;
+        const dataPath = process.env.FOUNDRY_PATH;
         if (dataPath) {
-            if (!fs.existsSync(path.join(dataPath, "Data")))
-                throw Error("User Data path invalid, no Data directory found")
-
-            linkDir = path.join(dataPath, "Data", destDir, name as string)
+            if (!fs.existsSync(path.join(dataPath, "Data"))) throw Error("User Data path invalid, no Data directory found");
+            linkDir = path.join(dataPath, "Data", destDir, name as string);
         } else {
-            throw Error("FOUNDRY_PATH not defined in environment")
+            throw Error("FOUNDRY_PATH not defined in environment");
         }

         //if (argv.clean || argv.c) {
-        Logger.warn(`Removing build in ${linkDir}`)
-        fs.rmSync(linkDir, { recursive: true, force: true })
-        fs.mkdirSync(linkDir)
+        Logger.warn(`Removing build in ${linkDir}`);
+        fs.rmSync(linkDir, { recursive: true, force: true });
+        fs.mkdirSync(linkDir);
         //}

-        Logger.ok(`Copying build to ${linkDir}`)
-        fs.copySync(path.resolve(distPath), linkDir, { overwrite: true })
+        Logger.ok(`Copying build to ${linkDir}`);
+        fs.copySync(path.resolve(distPath), linkDir, { overwrite: true });

-        return Promise.resolve()
+        return Promise.resolve();
     } catch (err) {
-        await Promise.reject(err)
-    }
+        await Promise.reject(err);
+    }
+};

 /*********************/
 /* PACKAGE */
@@ -386,48 +356,51 @@ const linkUserData = async () => {
 * Package build
 */
 async function packageBuild() {
-    const manifest = getManifest()
+    const manifest = getManifest();
     if (manifest === null) {
-        Logger.err("Manifest file could not be loaded.")
-        throw Error()
+        Logger.err("Manifest file could not be loaded.");
+        throw Error();
     }

     return new Promise((resolve, reject) => {
         try {
             // Remove the package dir without doing anything else
             if (argv.clean || argv.c) {
-                Logger.warn("Removing all packaged files")
-                fs.rmSync(distPath, { force: true, recursive: true })
-                return
+                Logger.warn("Removing all packaged files");
+                fs.rmSync(distPath, { force: true, recursive: true });
+                return;
             }

             // Ensure there is a directory to hold all the packaged versions
-            if(!fs.existsSync(distPath))
-                fs.mkdirSync(distPath)
+            if (!fs.existsSync(distPath)) fs.mkdirSync(distPath);

             // Initialize the zip file
-            const zipName = `${manifest.file.name}-v${manifest.file.version}.zip`
-            const zipFile = fs.createWriteStream(path.join(distPath, zipName))
-            const zip = archiver("zip", { zlib: { level: 9 } })
-
+            const zipName = `${manifest.file.name}-v${manifest.file.version}.zip`;
+            const zipFile = fs.createWriteStream(zipName);
+            const zip = archiver("zip", { zlib: { level: 9 } });
             zipFile.on("close", () => {
-                Logger.ok(zip.pointer() + " total bytes")
-                Logger.ok(`Zip file ${zipName} has been written`)
-                return resolve(true)
-            })
+                Logger.ok(zip.pointer() + " total bytes");
+                Logger.ok(`Zip file ${zipName} has been written`);
+                return resolve(true);
+            });

+            zipFile.on("end", function () {
+                throw "Data has been drained";
+            });
+
             zip.on("error", (err) => {
-                throw err
-            })
+                throw err;
+            });

-            zip.pipe(zipFile)
+            zip.pipe(zipFile);

-            zip.directory(path.join(process.cwd(), distPath), false)
-            return zip.finalize()
+            zip.directory(distPath, manifest.file.name);
+
+            return zip.finalize();
         } catch (err) {
-            return reject(err)
+            return reject(err);
         }
-    })
+    });
 }

 /*********************/
@@ -438,116 +411,113 @@ async function packageBuild() {
 * Update version and URLs in the manifest JSON
 */
 const updateManifest = (cb: any) => {
-    const packageJson = loadJson("package.json")
+    const packageJson = loadJson("package.json");
     const config = getConfig(),
         manifest = getManifest(),
         rawURL = config.rawURL,
         repoURL = config.repository,
-        manifestRoot = manifest!.root
+        manifestRoot = manifest!.root;

-    if (!config)
-        cb(Error("foundryconfig.json not found"))
+    if (!config) cb(Error("foundryconfig.json not found"));
     if (manifest === null) {
-        cb(Error("Manifest JSON not found"))
-        return
+        cb(Error("Manifest JSON not found"));
+        return;
     }
-    if (!rawURL || !repoURL)
-        cb(Error("Repository URLs not configured in foundryconfig.json"))
+    if (!rawURL || !repoURL) cb(Error("Repository URLs not configured in foundryconfig.json"));

     try {
-        const version = argv.update || argv.u
+        const version = argv.update || argv.u;

         /* Update version */

-        const versionMatch = /^(\d{1,}).(\d{1,}).(\d{1,})$/
-        const currentVersion = manifest!.file.version
-        let targetVersion = ""
+        const versionMatch = /^(\d{1,}).(\d{1,}).(\d{1,})$/;
+        const currentVersion = manifest!.file.version;
+        let targetVersion = "";

         if (!version) {
-            cb(Error("Missing version number"))
+            cb(Error("Missing version number"));
         }

         if (versionMatch.test(version)) {
-            targetVersion = version
+            targetVersion = version;
         } else {
             targetVersion = currentVersion.replace(versionMatch, (substring: string, major: string, minor: string, patch: string) => {
-                console.log(substring, Number(major) + 1, Number(minor) + 1, Number(patch) + 1)
+                console.log(substring, Number(major) + 1, Number(minor) + 1, Number(patch) + 1);
                 if (version === "major") {
-                    return `${Number(major) + 1}.0.0`
+                    return `${Number(major) + 1}.0.0`;
                 } else if (version === "minor") {
-                    return `${major}.${Number(minor) + 1}.0`
+                    return `${major}.${Number(minor) + 1}.0`;
                 } else if (version === "patch") {
-                    return `${major}.${minor}.${Number(patch) + 1}`
+                    return `${major}.${minor}.${Number(patch) + 1}`;
                 } else {
-                    return ""
+                    return "";
                 }
-            })
+            });
         }

         if (targetVersion === "") {
-            return cb(Error("Error: Incorrect version arguments."))
+            return cb(Error("Error: Incorrect version arguments."));
         }

         if (targetVersion === currentVersion) {
-            return cb(Error("Error: Target version is identical to current version."))
+            return cb(Error("Error: Target version is identical to current version."));
         }

-        Logger.ok(`Updating version number to '${targetVersion}'`)
+        Logger.ok(`Updating version number to '${targetVersion}'`);

-        packageJson.version = targetVersion
-        manifest.file.version = targetVersion
+        packageJson.version = targetVersion;
+        manifest.file.version = targetVersion;

         /* Update URLs */

-        const result = `${rawURL}/v${manifest.file.version}/${distPath}/${manifest.file.name}-v${manifest.file.version}.zip`
+        const result = `${repoURL}/releases/download`;

-        manifest.file.url = repoURL
-        manifest.file.manifest = `${rawURL}/master/${manifestRoot}/${manifest.name}`
-        manifest.file.download = result
+        manifest.file.url = repoURL;
+        manifest.file.manifest = `${result}/v${manifest.file.version}/${manifest.name}`;
+        manifest.file.download = `${result}/v${manifest.file.version}/${manifest.file.name}-v${manifest.file.version}.zip`;

         const prettyProjectJson = stringify(manifest.file, {
             maxLength: 35,
             indent: "\t",
-        })
+        });

-        fs.writeFileSync("package.json", JSON.stringify(packageJson, null, '\t'))
-        fs.writeFileSync(path.join(manifest.root, manifest.name), prettyProjectJson, "utf8")
+        fs.writeFileSync("package.json", JSON.stringify(packageJson, null, "\t"));
+        fs.writeFileSync(path.join(manifest.root, manifest.name), prettyProjectJson, "utf8");

-        return cb()
+        return cb();
     } catch (err) {
-        return cb(err)
-    }
+        return cb(err);
+    }
+};

 const gitTaskManifest = (cb: gulp.TaskFunctionCallback) => {
-    const manifest = getManifest()
-    if (!manifest)
-        return cb(Error("could not load manifest."))
-
-    return gulp.src([`package.json`, `source/module.json`])
+    const manifest = getManifest();
+    if (!manifest) return cb(Error("could not load manifest."));
+    return gulp
+        .src([`package.json`, `source/system.json`])
         .pipe(git.add({ args: "--no-all -f" }))
-        .pipe(git.commit(`v${manifest.file.version}`, { args: "-a", disableAppendPaths: true }))
-}
+        .pipe(git.commit(`v${manifest.file.version}`, { args: "-a", disableAppendPaths: true }));
+};

 const gitTaskBuild = (cb: gulp.TaskFunctionCallback) => {
-    const manifest = getManifest()
-    if (!manifest)
-        return cb(Error("could not load manifest."))
+    const manifest = getManifest();
+    if (!manifest) return cb(Error("could not load manifest."));

-    return gulp.src(`${distPath}/${manifest.file.name}-v${manifest.file.version}.zip`)
-        .pipe(git.checkout(`v${manifest.file.version}`, { args: '-b' }))
+    return gulp
+        .src(`${manifest.file.name}-v${manifest.file.version}.zip`)
+        .pipe(git.checkout(`v${manifest.file.version}`, { args: "-b" }))
         .pipe(git.add({ args: "--no-all -f" }))
-        .pipe(git.commit(`v${manifest.file.version}`, { args: "-a", disableAppendPaths: true }))
-}
+        .pipe(git.commit(`v${manifest.file.version}`, { args: "-a", disableAppendPaths: true }));
+};

-const execBuild = gulp.parallel(buildTS, buildLess, buildPacks, copyFiles)
+const execBuild = gulp.parallel(buildTS, buildLess, buildPacks, copyFiles);

-exports.build = gulp.series(clean, execBuild, bundleModule)
-exports.buildTarget = gulp.series(setTargetDir, clean, execBuild, bundleModule)
-exports.watch = buildWatch
-exports.watchTarget = gulp.series(setTargetDir, buildWatch)
-exports.clean = clean
-exports.link = linkUserData
-exports.package = packageBuild
-exports.update = updateManifest
-//exports.publish = gulp.series(clean, updateManifest, execBuild, bundleModule, packageBuild, gitTaskManifest, gitTaskBuild)
+exports.build = gulp.series(clean, execBuild, bundleModule);
+exports.buildTarget = gulp.series(setTargetDir, clean, execBuild, bundleModule);
+exports.watch = buildWatch;
+exports.watchTarget = gulp.series(setTargetDir, buildWatch);
+exports.clean = clean;
+exports.link = linkUserData;
+exports.package = packageBuild;
+exports.update = updateManifest;
+exports.publish = gulp.series(clean, updateManifest, execBuild, bundleModule, packageBuild, gitTaskManifest, gitTaskBuild);