#!/usr/bin/env node

import path from 'path';
import fetch from "node-fetch";
import express from 'express';
import {create} from 'express-handlebars';
import fsExtra from 'fs-extra';
import browserSync from 'browser-sync';
import config from 'config';
import gulp from 'gulp';
import babel from "gulp-babel";
import uglify from "gulp-uglify";
import rename from "gulp-rename";
import dartSass from 'sass';
import gulpSass from 'gulp-sass';
import sourcemaps from "gulp-sourcemaps";
import fs from "fs/promises";
import open from "open";
import {sanitizeUrl} from "@braintree/sanitize-url";
import sanitizeHtml from 'sanitize-html';
import {escape} from "lodash-es";
import archiver from 'archiver';

/**
 * Constants
 */
const isDev = process.env.NODE_ENV === 'development';

// Check README file in case you get "missing files" error.
const blocksRegistry = isDev ? 'http://localhost:3020' : 'https://axe-web-blocks-registry.captain.devdevdev.life';
const modulesPath = isDev ? '' : 'node_modules/block-dev-tool';
const developmentBlockName = process.env.BLOCK_NAME;
const projectPath = isDev ? path.join('blocks', developmentBlockName) : '';
const sass = gulpSass(dartSass);
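
// Expected project layout (inferred from the paths used throughout this file; adjust as needed):
//
//   blocks/<BLOCK_NAME>/           (dev mode; outside dev mode the current working directory is the block)
//   ├── block.json                 block metadata POSTed to the blocks registry by /publish
//   ├── data/*.json                demo data files, selectable via the `?data=` query parameter
//   ├── design/preview/*           design reference images, served under /preview/
//   ├── src/                       `*.hbs` templates, `*.scss` styles and `*.js` / `*.mjs` scripts
//   └── vendor/*.js                scripts picked up by buildScriptFiles after the Babel step
//
// Environment variables: BLOCK_NAME (required in dev mode, fallback block name otherwise)
// and NODE_ENV=development to target the local registry and the blocks/ workspace.
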
buildStyleFiles();
buildScriptFiles();

/**
 * Init server
 */
// These variables are used in `*.hbs` and will be updated once BrowserSync is ready.
let port = 3000;
let previewFrameUrl = `http://localhost:${port}`;

const dataFiles = prepareListOfDataFiles(await fs.readdir(path.join(projectPath, 'data')));

const app = express();
const hbs = create({
    extname: '.hbs',
    defaultLayout: false,
    partialsDir: ['.'],
    helpers: {
        esc_attr(attr) {
            return escape(attr);
        },
        esc_url(url) {
            return sanitizeUrl(url);
        },
        esc_html(html) {
            // TODO: Check if we can remove this helper.
            return html;
        },
        safe_html(html) {
            return sanitizeHtml(html);
        }
    }
});

app.engine('.hbs', hbs.engine);
app.set('view engine', '.hbs');
app.set('views', path.join(modulesPath, 'layouts'));

//
// Routes
//
app.get('/', async (req, res) => {
    let jsonFileName = req.query.data ? req.query.data : 'default';
    const data = await getBlockConfigs(jsonFileName, {includeConfigs: true});

    if (data.error && data.errorMessage) {
        return res.send(data.errorMessage);
    }

    const baseView = config.has('baseView') ? config.get('baseView') : 'container';
    const baseViewUrl = `view/${baseView}`;

    data.helpers = {
        port,
        include_partial: (filesPath) => path.join(modulesPath, filesPath),
        baseView,
        previewFrameUrl: `${previewFrameUrl}/${baseViewUrl}`,
    };

    res.render('index', data);
});

app.get('/view/:baseView', async (req, res) => {
    let jsonFileName = req.query.data ? req.query.data : 'default';
    const data = await getBlockConfigs(jsonFileName, {includeConfigs: true});

    if (data.error && data.errorMessage) {
        return res.send(data.errorMessage);
    }

    const blockName = config.has('blockName') ? config.get('blockName') : developmentBlockName;

    data.helpers = {
        include_partial: (filesPath) => path.join(modulesPath, filesPath),
        include_block_template: () => path.join(projectPath, 'src', `${blockName}.template`),
        section_class: `${blockName}--${jsonFileName}`,
        base_url: '/'
    };

    const baseView = req.params.baseView ?? 'container';

    res.render(baseView, data);
});

app.get('/publish', async (req, res) => {
    const data = await readJSONFile(path.join(projectPath, `block.json`));
    let responseData;

    try {
        const response = await fetch(`${blocksRegistry}`, {
            method: 'POST',
            body: JSON.stringify(data),
            headers: {'Content-Type': 'application/json'}
        });
        responseData = await response.json();
    } catch (e) {
        res.json({success: false, message: 'Blocks Registry server is not available.'});
        return;
    }

    if (responseData.statusCode !== 200) {
        res.json({success: false, message: 'Error on registry level.'});
        return;
    }

    if (responseData.uploadUrl) {
        await zipProject();
        const body = await fs.readFile(path.join(projectPath, 'dist.zip'));
        const response = await fetch(`${responseData.uploadUrl}`, {
            method: 'PUT',
            body,
            headers: {'Content-Type': 'application/zip'}
        });

        if (response.status !== 200) {
            res.json({success: false, message: "Can't upload the archive, permissions error."});
            // TODO: Need to update the registry server.
            await fs.unlink(path.join(projectPath, 'dist.zip'));
            return;
        }
    }

    res.json({success: true});
    await fs.unlink(path.join(projectPath, 'dist.zip'));
});

app.get('/data', async (req, res) => {
    let jsonDataFileName = req.query.name ? req.query.name : 'default';
    const data = await getBlockConfigs(jsonDataFileName);
    const dataFiles = prepareListOfDataFiles(await fs.readdir(path.join(projectPath, 'data')));
    const designPreviewFiles = getListOfDesignPreviewFiles(jsonDataFileName, await fs.readdir(path.join(projectPath, 'design', 'preview')));

    return res.json({
        dataOptions: dataFiles,
        designPreview: designPreviewFiles,
        data,
    });
});

// Errors handler
app.use(handleSyntaxErrors);

// Static Files
app.use(express.static(path.join(projectPath, 'src')));
app.use(express.static(path.join(projectPath, 'design')));
app.use(express.static(path.join(modulesPath, 'layouts')));

// BrowserSync
const bsOptions = await startBrowserSync();
port = bsOptions.port;
previewFrameUrl = bsOptions.previewFrameUrl;
await open(bsOptions.devToolUrl);

//
// Functions
//
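
// Design preview files are expected to be named "<dataFile>.<width>.<extension>" based on how
// they are parsed below. For example, a (hypothetical) file "default.1440.png" for the
// "default" data file would be returned as:
//   { dataSource: 'default', widthDimension: 1440, url: '/preview/default.1440.png' }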
function getListOfDesignPreviewFiles(jsonDataFileName, previewFiles) {
    return previewFiles
        .filter(fileName => {
            return fileName.startsWith(jsonDataFileName + '.');
        })
        .map(fileName => {
            const fileData = fileName.split('.');
            const fileFormat = fileData.pop(); // file extension, not used
            const previewSize = fileData.pop();

            return {
                dataSource: jsonDataFileName,
                widthDimension: Number.parseInt(previewSize, 10),
                url: `/preview/${fileName}`,
            };
        });
}

function startBrowserSync() {
    return new Promise((resolve, reject) => {
        const listener = app.listen(0, async () => {
            const PORT = listener.address().port;
            console.log(`The web server has started on port ${PORT}`);

            const bs = browserSync.create();
            const files = getJSBundleFiles();

            gulp.watch(files, {delay: 400}, gulp.series([buildScriptFiles, function (cb) {
                browserSyncReload(bs, 'js', 'Script Files Change');
                return cb();
            }]));

            gulp.watch(path.join(projectPath, 'src/**/*.scss'), {delay: 400}, gulp.series([buildStyleFiles, function (cb) {
                browserSyncReload(bs, 'css', 'Style Files Change');
                return cb();
            }]));

            bs.watch(path.join(projectPath, 'src/**/*.hbs'), function (event, file) {
                browserSyncReload(bs, '', 'Template File Change: ' + file);
            });

            bs.init({
                proxy: `http://localhost:${PORT}`,
                open: false
            }, (err, bs) => {
                if (err) {
                    return reject(err);
                }

                const options = bs.getOptions().toJS();

                resolve({
                    devToolUrl: options.urls.external.replace(options.port, options.proxy.url.port),
                    previewFrameUrl: options.urls.external,
                    port: options.port
                });
            });
        });
    });
}

function browserSyncReload(bs, extension = '', message = '') {
    if (isDev) {
        console.log(message);
    }

    if (extension) {
        extension = "*." + extension;
    }

    bs.reload(extension);
}

function getJSBundleFiles() {
    return [
        path.join(projectPath, 'src/**/*.js'),
        path.join(projectPath, 'src/**/*.mjs'),
        '!' + path.join(projectPath, 'src/**/*.min.js')
    ];
}

function buildScriptFiles() {
    const files = getJSBundleFiles();

    return gulp.src(files)
        .pipe(sourcemaps.init({}))
        .pipe(babel())
        .pipe(gulp.src(path.join(projectPath, 'vendor/*.js'))) // append vendor scripts after the Babel step
        .pipe(uglify())
        .pipe(rename({extname: '.min.js'}))
        .pipe(sourcemaps.write('.'))
        .pipe(gulp.dest(path.join(projectPath, 'src/')));
}

function buildStyleFiles() {
    return gulp.src(path.join(projectPath, 'src/**/*.scss'))
        .pipe(sourcemaps.init({}))
        .pipe(sass.sync({outputStyle: 'compressed'}).on('error', sass.logError))
        .pipe(rename({extname: '.min.css'}))
        .pipe(sourcemaps.write('.', {}))
        .pipe(gulp.dest(path.join(projectPath, 'src')));
}

function prepareListOfDataFiles(dataFiles) {
    return dataFiles
        .filter((fileName) => fileName.split('.').pop() === 'json')
        .map((fileName) => {
            const splitName = fileName.split('.');
            splitName.pop();
            // Re-join with '.' so data file names that contain dots are kept intact.
            return splitName.join('.');
        })
        .sort();
}

async function readJSONFile(jsonFile) {
    let data = {};

    try {
        data = await fsExtra.readJson(jsonFile);
    } catch (e) {
        return {
            error: true,
            errorMessage: getErrorHtml('JSON syntax error. Please make sure the data file is valid.', e),
        };
    }

    return data;
}

async function getBlockConfigs(jsonFileName = 'default', {includeConfigs} = {}) {
    let data = await readJSONFile(path.join(projectPath, 'data', `${jsonFileName}.json`));

    if (data.error) {
        return data;
    }

    if (includeConfigs) {
        Object.assign(data, {
            config: Object.assign(
                JSON.parse(JSON.stringify(config)), // The entire config object.
                {
                    projectDir: modulesPath,
                    activeDataFile: jsonFileName,
                    dataFiles: dataFiles.map((name) => {
                        return {
                            name,
                            active: jsonFileName === name,
                        };
                    }),
                    remToPx: config.has('remToPx') ? config.get('remToPx') : 16,
                }
            )
        });
    }

    return data;
}

function getErrorHtml(message = '', errorMessage = '') {
    return `
${message}

${errorMessage}
`;
}
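
// zipProject archives the block's src/ directory into dist.zip inside the project folder;
// the /publish route then PUTs that archive to the uploadUrl returned by the blocks registry
// and deletes the zip once the upload attempt has finished.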
async function zipProject() {
    // Create a file to stream archive data to. The archive is written inside the project
    // folder so the /publish route can read and remove it from the same location.
    const output = fsExtra.createWriteStream(path.join(projectPath, 'dist.zip'));
    const archive = archiver('zip', {});

    // Listen for all archive data to be written.
    // The 'close' event is fired only when a file descriptor is involved.
    output.on('close', function () {
        console.log(archive.pointer() + ' total bytes');
        console.log('archiver has been finalized and the output file descriptor has closed.');
    });

    // This event is fired when the data source is drained, no matter what the data source was.
    // It is not part of this library but rather comes from the Node.js Stream API.
    // @see: https://nodejs.org/api/stream.html#stream_event_end
    output.on('end', function () {
        console.log('Data has been drained');
    });

    // Good practice to catch warnings (i.e. stat failures and other non-blocking errors).
    archive.on('warning', function (err) {
        if (err.code === 'ENOENT') {
            // log warning
        } else {
            throw err;
        }
    });

    // Good practice to catch this error explicitly.
    archive.on('error', function (err) {
        throw err;
    });

    // Pipe archive data to the file.
    archive.pipe(output);

    // Append files from a subdirectory, putting its contents at the root of the archive.
    archive.directory(path.join(projectPath, 'src', '/'), false);

    // Finalize the archive (i.e. we are done appending files, but the streams have to finish yet).
    // 'close', 'end' or 'finish' may be fired right after calling this method, so register to them beforehand.
    await archive.finalize();
}

function handleSyntaxErrors(err, req, res, next) {
    if (err) {
        return res.render('error', {
            helpers: {
                include_partial: (filesPath) => path.join(modulesPath, filesPath),
            },
            err
        });
    }

    next();
}
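
// Example local run (the block name "my-block" and the entry file name are illustrative):
//   NODE_ENV=development BLOCK_NAME=my-block node block-dev-tool.js
// Outside development mode, layouts and partials are resolved from node_modules/block-dev-tool
// and the current working directory is treated as the block project itself.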