
Provide option to rewrite project/module paths.

Branch: pull/6/head
Author: Roman Axelrod, 3 years ago
Commit: d9543bb22c
Changed files:
  1. helpers.js (50 lines changed)
  2. platforms/hubspot/hubspot-email-adapter.js (3 lines changed)
  3. platforms/hubspot/hubspot-page-adapter.js (3 lines changed)
  4. platforms/wordpress/wordpress-adapter.js (6 lines changed)
  5. server.js (52 lines changed)

helpers.js (50 lines changed)

@@ -4,6 +4,7 @@ import {fileURLToPath} from 'url';
import memFs from 'mem-fs';
import editor from 'mem-fs-editor';
import fsExtra from "fs-extra";
+import archiver from "archiver";
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
@@ -15,8 +16,8 @@ export function getConfigs() {
return {
isDev,
developmentBlockName,
-modulesPath: isDev ? '' : 'node_modules/block-dev-tool',
-projectPath: isDev ? path.join('blocks', developmentBlockName) : '',
+modulesPath: process.env.MODULE_PATH ?? (isDev ? '' : 'node_modules/block-dev-tool'),
+projectPath: process.env.PROJECT_PATH ?? (isDev ? path.join('blocks', developmentBlockName) : ''),
};
}
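With this change the resolved paths can be overridden from the environment. A minimal sketch of the behaviour, with hypothetical values (only MODULE_PATH, PROJECT_PATH and getConfigs come from the commit):

// Set the variables before getConfigs() is called.
process.env.MODULE_PATH = 'vendor/block-dev-tool';   // example value
process.env.PROJECT_PATH = 'blocks/hero-banner';     // example value

const {modulesPath, projectPath} = getConfigs();
// modulesPath === 'vendor/block-dev-tool'
// projectPath === 'blocks/hero-banner'
// With neither variable set, the isDev-based defaults above still apply,
// because ?? only falls back on null/undefined.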
@@ -106,3 +107,48 @@ export function capitalize(str) {
})
.join(' ');
}
+export async function zipProject(srcDir, outputFileName = 'dist.zip') {
+    // create a file to stream archive data to.
+    const output = await fsExtra.createWriteStream(outputFileName);
+    const archive = archiver('zip', {});
+
+    // listen for all archive data to be written
+    // 'close' event is fired only when a file descriptor is involved
+    output.on('close', function () {
+        console.log(archive.pointer() + ' total bytes');
+        console.log('archiver has been finalized and the output file descriptor has closed.');
+    });
+
+    // This event is fired when the data source is drained no matter what was the data source.
+    // It is not part of this library but rather from the NodeJS Stream API.
+    // @see: https://nodejs.org/api/stream.html#stream_event_end
+    output.on('end', function () {
+        console.log('Data has been drained');
+    });
+
+    // good practice to catch warnings (ie stat failures and other non-blocking errors)
+    archive.on('warning', function (err) {
+        if (err.code === 'ENOENT') {
+            // log warning
+        } else {
+            // throw error
+            throw err;
+        }
+    });
+
+    // good practice to catch this error explicitly
+    archive.on('error', function (err) {
+        throw err;
+    });
+
+    // pipe archive data to the file
+    archive.pipe(output);
+
+    // append files from a subdirectory, putting its contents at the root of archive
+    archive.directory(srcDir, false);
+
+    // finalize the archive (ie we are done appending files but streams have to finish yet)
+    // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand
+    await archive.finalize();
+}
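For reference, a minimal call site for the new helper; the directory names are illustrative:

import {zipProject} from './helpers.js';

// The archive is written relative to the current working directory.
await zipProject('blocks/hero-banner/src');            // -> ./dist.zip
await zipProject('blocks/hero-banner/src', 'out.zip'); // -> ./out.zip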

platforms/hubspot/hubspot-email-adapter.js (3 lines changed)

@@ -2,8 +2,6 @@ import path from "path";
import {readFile, writeFile, mkdir, copyFile} from "fs/promises";
import {capitalize, getConfigs} from "../../helpers.js";
-const {modulesPath, projectPath} = getConfigs();
export async function buildHubspotEmail(blockName) {
const distPath = await createDistFolder(blockName);
@@ -217,6 +215,7 @@ export function convertToHubspotField(field = {})
}
export async function buildHubspotJSONFiles(distPath, metaData) {
+const {modulesPath, projectPath} = getConfigs();
await writeFile(path.join(distPath, 'meta.json'), JSON.stringify(metaData, null, 4));
const blockJSON = await readFile(path.join(projectPath, 'block.json'), "utf8");
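The module-level destructuring above is removed in favour of a per-call lookup, presumably so that MODULE_PATH / PROJECT_PATH overrides applied after this module is imported are still honoured. A rough sketch of the difference (the override value is hypothetical):

// Module-level lookup: paths are captured once, at import time.
// const {projectPath} = getConfigs();

// Per-call lookup: each build re-reads the configuration.
process.env.PROJECT_PATH = 'blocks/pricing-table';   // hypothetical override
const {projectPath} = getConfigs();                   // picks up the new path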

platforms/hubspot/hubspot-page-adapter.js (3 lines changed)

@@ -3,9 +3,8 @@ import {copyFile, readFile, writeFile} from "fs/promises";
import {capitalize, getConfigs} from "../../helpers.js";
import {buildHubspotJSONFiles, createDistFolder, handlebarsToHubl,} from "./hubspot-email-adapter.js";
-const {modulesPath, projectPath} = getConfigs();
export async function buildHubspotPage(blockName) {
+const {modulesPath, projectPath} = getConfigs();
const distPath = await createDistFolder(blockName);
const srcPath = path.join(projectPath, 'src');

platforms/wordpress/wordpress-adapter.js (6 lines changed)

@@ -9,9 +9,9 @@ import execPhp from "exec-php";
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
export async function buildWordPress(blockName, isBlock = false, isElementor = false) {
const {modulesPath, projectPath} = getConfigs();
export async function buildWordPress(blockName, isBlock = false, isElementor = false) {
const distPath = path.join(projectPath, 'exports', 'wordpress');
// await mkdir(distPath, {recursive: true})
await mkdir(path.join(distPath, 'templates'), {recursive: true})
@@ -115,7 +115,7 @@ export async function buildWordPress(blockName, isBlock = false, isElementor = false) {
export function execCommand(cmd = '') {
return new Promise((resolve, reject) => {
-exec(cmd, function (error, stdout) {
+exec(cmd, function (error) {
if (error) {
console.log('Error:', error)
reject(error);
@@ -136,7 +136,7 @@ function execPHPFile(file = '', functionName = '', args = {}) {
return reject(err);
}
-php[functionName.toLowerCase()](args, (err, res, out, print) => {
+php[functionName.toLowerCase()](args, (err, res, out) => {
if (err) {
console.error(out);
return reject(err);

server.js (52 lines changed)

@@ -4,7 +4,6 @@ import path from 'path';
import fetch from "node-fetch";
import express from 'express';
import {create} from 'express-handlebars';
-import fsExtra from 'fs-extra';
import browserSync from 'browser-sync';
import config from 'config';
import gulp from 'gulp';
@@ -19,8 +18,7 @@ import open from "open";
import {sanitizeUrl} from "@braintree/sanitize-url";
import sanitizeHtml from 'sanitize-html';
import {escape} from "lodash-es";
-import archiver from 'archiver';
-import {getBlockConfigs, getConfigs, readJSONFile} from "./helpers.js";
+import {getBlockConfigs, getConfigs, readJSONFile, zipProject} from "./helpers.js";
import PluginError from 'plugin-error';
/**
@@ -130,7 +128,7 @@ app.get('/publish', async (req, res) => {
}
if (responseData.uploadUrl) {
-await zipProject();
+await zipProject(path.join(projectPath, 'src'));
const body = await fs.readFile(path.join(projectPath, 'dist.zip'));
const response = await fetch(`${responseData.uploadUrl}`, {
method: 'PUT',
@@ -339,52 +337,6 @@ function prepareListOfDataFiles(dataFiles) {
.sort();
}
-async function zipProject() {
-    // create a file to stream archive data to.
-    const output = await fsExtra.createWriteStream('dist.zip');
-    const archive = archiver('zip', {});
-
-    // listen for all archive data to be written
-    // 'close' event is fired only when a file descriptor is involved
-    output.on('close', function () {
-        console.log(archive.pointer() + ' total bytes');
-        console.log('archiver has been finalized and the output file descriptor has closed.');
-    });
-
-    // This event is fired when the data source is drained no matter what was the data source.
-    // It is not part of this library but rather from the NodeJS Stream API.
-    // @see: https://nodejs.org/api/stream.html#stream_event_end
-    output.on('end', function () {
-        console.log('Data has been drained');
-    });
-
-    // good practice to catch warnings (ie stat failures and other non-blocking errors)
-    archive.on('warning', function (err) {
-        if (err.code === 'ENOENT') {
-            // log warning
-        } else {
-            // throw error
-            throw err;
-        }
-    });
-
-    // good practice to catch this error explicitly
-    archive.on('error', function (err) {
-        throw err;
-    });
-
-    // pipe archive data to the file
-    archive.pipe(output);
-
-    // append files from a subdirectory, putting its contents at the root of archive
-    archive.directory(path.join(projectPath, 'src'), false);
-
-    // finalize the archive (ie we are done appending files but streams have to finish yet)
-    // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand
-    await archive.finalize();
-}
function handleSyntaxErrors(err, req, res, next) {
if (err) {
return res.render('error', {
