commit 374e79cfb4895a622e5df8a917296433501731dc
Author: Artur Gurgul
Date:   Thu Aug 15 19:52:42 2024 +0200

    Start project with the old POC

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..6a7d6d8
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,130 @@
+# Logs
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+lerna-debug.log*
+.pnpm-debug.log*
+
+# Diagnostic reports (https://nodejs.org/api/report.html)
+report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
+
+# Runtime data
+pids
+*.pid
+*.seed
+*.pid.lock
+
+# Directory for instrumented libs generated by jscoverage/JSCover
+lib-cov
+
+# Coverage directory used by tools like istanbul
+coverage
+*.lcov
+
+# nyc test coverage
+.nyc_output
+
+# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
+.grunt
+
+# Bower dependency directory (https://bower.io/)
+bower_components
+
+# node-waf configuration
+.lock-wscript
+
+# Compiled binary addons (https://nodejs.org/api/addons.html)
+build/Release
+
+# Dependency directories
+node_modules/
+jspm_packages/
+
+# Snowpack dependency directory (https://snowpack.dev/)
+web_modules/
+
+# TypeScript cache
+*.tsbuildinfo
+
+# Optional npm cache directory
+.npm
+
+# Optional eslint cache
+.eslintcache
+
+# Optional stylelint cache
+.stylelintcache
+
+# Microbundle cache
+.rpt2_cache/
+.rts2_cache_cjs/
+.rts2_cache_es/
+.rts2_cache_umd/
+
+# Optional REPL history
+.node_repl_history
+
+# Output of 'npm pack'
+*.tgz
+
+# Yarn Integrity file
+.yarn-integrity
+
+# dotenv environment variable files
+.env
+.env.development.local
+.env.test.local
+.env.production.local
+.env.local
+
+# parcel-bundler cache (https://parceljs.org/)
+.cache
+.parcel-cache
+
+# Next.js build output
+.next
+out
+
+# Nuxt.js build / generate output
+.nuxt
+dist
+
+# Gatsby files
+.cache/
+# Comment in the public line in if your project uses Gatsby and not Next.js
+# https://nextjs.org/blog/next-9-1#public-directory-support
+# public
+
+# vuepress build output
+.vuepress/dist
+
+# vuepress v2.x temp and cache directory
+.temp
+.cache
+
+# Docusaurus cache and generated files
+.docusaurus
+
+# Serverless directories
+.serverless/
+
+# FuseBox cache
+.fusebox/
+
+# DynamoDB Local files
+.dynamodb/
+
+# TernJS port file
+.tern-port
+
+# Stores VSCode versions used for testing VSCode extensions
+.vscode-test
+
+# yarn v2
+.yarn/cache
+.yarn/unplugged
+.yarn/build-state.yml
+.yarn/install-state.gz
+.pnp.*
\ No newline at end of file
diff --git a/.npmignore b/.npmignore
new file mode 100644
index 0000000..e69de29
diff --git a/bin/sajt b/bin/sajt
new file mode 100755
index 0000000..cd7f5d2
--- /dev/null
+++ b/bin/sajt
@@ -0,0 +1,3 @@
+#!/usr/bin/env node
+
+console.log("hello world update")
diff --git a/index.js b/index.js
new file mode 100644
index 0000000..0a69180
--- /dev/null
+++ b/index.js
@@ -0,0 +1,2 @@
+
+console.log("START")
diff --git a/package-lock.json b/package-lock.json
new file mode 100644
index 0000000..107e0b7
--- /dev/null
+++ b/package-lock.json
@@ -0,0 +1,16 @@
+{
+  "name": "sajt",
+  "version": "0.0.1",
+  "lockfileVersion": 3,
+  "requires": true,
+  "packages": {
+    "": {
+      "name": "sajt",
+      "version": "0.0.1",
+      "license": "ISC",
+      "bin": {
+        "sajt": "bin/sajt"
+      }
+    }
+  }
+}
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..6844601
--- /dev/null
+++ b/package.json
@@ -0,0 +1,30 @@
+{
+  "name": "sajt",
+  "version": "0.0.1",
+  "description": "Static site generator based on pug/html and yaml files",
+  "main": "index.js",
+  "scripts": {
+    "test": "echo \"Error: no test specified\" && exit 1"
\"Error: no test specified\" && exit 1" + }, + "bin": { + "sajt": "./bin/sajt" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/artur-gurgul-pro/sajt.git" + }, + "keywords": [ + "generator", + "utils", + "static", + "sites", + "ssh", + "ftp" + ], + "author": "Artur Gurgul", + "license": "ISC", + "bugs": { + "url": "https://github.com/artur-gurgul-pro/sajt/issues" + }, + "homepage": "https://github.com/artur-gurgul-pro/sajt#readme" +} diff --git a/site b/site new file mode 100755 index 0000000..681042c --- /dev/null +++ b/site @@ -0,0 +1,401 @@ +#!/usr/bin/env node + +const fs = require('fs') +const yaml = require('js-yaml') +const matter = require('gray-matter') +const { marked } = require('marked') +const pug = require('pug') +const hljs = require('highlight.js') +const { markedHighlight } = require('marked-highlight') +const path = require('path') + +marked.use(markedHighlight({ + langPrefix: 'hljs language-', + highlight: function(code, lang) { + const language = hljs.getLanguage(lang) ? lang : 'plaintext'; + return hljs.highlight(code, { language }).value; + } +})) + +function copyDirectory(source, destination) { + fs.mkdirSync(destination, { recursive: true }) + const items = fs.readdirSync(source) + items.forEach(item => { + const sourceItemPath = path.join(source, item) + const destinationItemPath = path.join(destination, item) + const stat = fs.statSync(sourceItemPath) + if (stat.isDirectory()) { + copyDirectory(sourceItemPath, destinationItemPath) + } else { + fs.copyFileSync(sourceItemPath, destinationItemPath) + } + }) +} + +function removeDirectorySync(directory) { + try { + fs.rmSync(directory, { recursive: true, force: true }) + console.log("Directory and its contents removed.") + } catch (err) { + console.error(`Error removing directory: ${err.message}`) + } +} + +function readConfig() { + const fileContents = fs.readFileSync('.site/config.yaml', 'utf8') + return yaml.load(fileContents) +} + + +function parseMD(file) { + + const fileContents = fs.readFileSync(path.join("./", file), 'utf8') + + const { data: metadata, content: markdownContent } = matter(fileContents) + + const htmlContent = marked(markdownContent) + + return { + meta: metadata, + content: htmlContent + } + + // Combine metadata with HTML content + const completeHtml = ` + + + + + + ${metadata.title} + + + + + + ${htmlContent} + + + `; + + // Save the HTML content to a file + fs.writeFileSync('.build/output.html', completeHtml); + + console.log('Markdown with metadata has been converted to HTML and saved to output.html'); + +} + +function compile(template, content, output) { + if (template == null) { + console.error("Template is not defined") + return + } + const compiledFunction = pug.compileFile(`.site/templates/${template}.pug`); + const data = { + title: 'Pug Example', + message: 'Hello, Puggi!', + sub: "test", + ...content, + site: {posts: []} + } + + //console.log(data) + + const dirname = path.dirname(output) + if (!fs.existsSync(dirname)) { + fs.mkdirSync(dirname, { recursive: true }) + } + + const html = compiledFunction(data) + fs.writeFileSync(output, html) + console.log(`HTML has been rendered and saved to ${output}`); +} + +function compileData(template, content, output) { + const compiledFunction = pug.compileFile(`.site/templates/${template}.pug`); + + const dirname = path.dirname(output) + if (!fs.existsSync(dirname)) { + fs.mkdirSync(dirname, { recursive: true }) + } + + const html = compiledFunction(content) + fs.writeFileSync(output, html) + console.log(`HTML has been 
+
+function getAllFilesWithExtension(directory, extension, excludes) {
+    let results = []
+    function readDirectory(directory) {
+        const items = fs.readdirSync(directory)
+
+        items.forEach(item => {
+            if(excludes.includes(item)) {
+                return
+            }
+            const itemPath = path.join(directory, item)
+            const stat = fs.statSync(itemPath)
+            if (stat.isDirectory()) {
+                readDirectory(itemPath)
+            } else if (path.extname(item) === extension) {
+                results.push(itemPath)
+            }
+        })
+    }
+    readDirectory(directory)
+    return results
+}
+
+const Client = require('ssh2-sftp-client')
+
+
+// Read the private key from the default location
+const privateKey = fs.readFileSync(path.resolve(process.env.HOME, '.ssh/id_rsa'))
+
+async function uploadDirectory(serverConfig, localDirPath) {
+    const sftp = new Client()
+    await sftp.connect(serverConfig)
+    try {
+        await upload(sftp, config, localDirPath, serverConfig.path)
+    } catch (err) {
+        console.error(`Error: ${err.message}`)
+    } finally {
+        await sftp.end()
+        console.log('Connection closed')
+    }
+}
+
+async function upload(sftp, config, localPath, remotePath) {
+
+    console.log('Connected to the server')
+
+    const files = fs.readdirSync(localPath)
+
+    for (const file of files) {
+        const localFilePath = path.join(localPath, file)
+        const remoteFilePath = `${remotePath}/${file}`
+
+        if (fs.statSync(localFilePath).isDirectory()) {
+            await sftp.mkdir(remoteFilePath, true)
+            await upload(sftp, config, localFilePath, remoteFilePath)
+        } else {
+            const fileContent = fs.readFileSync(localFilePath)
+            await sftp.put(Buffer.from(fileContent), remoteFilePath)
+            console.log(`File transferred successfully: ${localFilePath}`)
+//            await sftp.exec(`chown www-data:www-data ${remoteFilePath}`)
+//            console.log(`Changed owner to www-data for: ${remoteFilePath}`)
+        }
+    }
+
+}
+
+function pathToArray(filePath) {
+    // Normalize the file path to handle different OS path separators
+    const normalizedPath = path.normalize(filePath)
+    // Split the path into an array of directories
+    const pathArray = normalizedPath.split(path.sep)
+    return pathArray
+}
+
+
+function parseYML(file) {
+    const fileContents = fs.readFileSync(file, 'utf8')
+    return yaml.load(fileContents)
+}
+
+function readMetadata() {
+    let htmlExtension = "html"
+
+    let list = getAllFilesWithExtension('.',".md", [".build", ".site"])
+        .map(f => { return {
+            pathMD: f,
+            type: "md",
+            data: {},
+            md: parseMD(f)
+        }})
+    // sites needs to include data from header
+
+    list = list.concat(
+        getAllFilesWithExtension('.',".yml", [".build", ".site"])
+        .map(f => { return {
+            pathMD: f,
+            type: "yml",
+            data: parseYML(f),
+            md: {meta: {}}
+        }})
+    )
+
+    for(const site of list) {
+
+        //console.log(site.md.meta.path)
+        // TODO: data can set default path
+        if (site.md.meta?.path != null && site.md.meta?.path != undefined) {
+            site.path = path.join("/", site.md.meta.path)
+        } else {
+            const parsedPath = path.parse(site.pathMD)
+            const basePath = path.join("/", parsedPath.dir, parsedPath.name)
+            site.path = basePath
+        }
+
+
+
+
+        // add proper extension
+        const parsedPath = path.parse(site.path)
+        parsedPath.ext = htmlExtension.startsWith('.') ? htmlExtension : `.${htmlExtension}`
+        parsedPath.base = `${parsedPath.name}${parsedPath.ext}`
+        site.path = path.format(parsedPath)
+
+        // add dirs metadata
+        const dirArray = pathToArray(site.path)
+        site.fileName = dirArray.pop()
+        dirArray.shift()
+        site.dir = dirArray
+
+        site.meta = site.md.meta
+
+        // For tests
+        //delete site.md
+
+        site.hidden = site.data.hidden || false
+    }
+
+    return list
+}
+
+// Custom renderer to avoid unnecessary <p> tags
+const renderer = new marked.Renderer();
+renderer.paragraph = (text) => {
+
+    return text.text
+}
+
+
+function parseMarkdown(obj) {
+    for (let key in obj) {
+        if (typeof obj[key] === 'object' && obj[key] !== null) {
+            if (Array.isArray(obj[key])) {
+                for (let i = 0; i < obj[key].length; i++) {
+                    if (typeof obj[key][i] === 'object' && obj[key][i] !== null) {
+                        parseMarkdown(obj[key][i]);
+                    }
+                    else if (typeof obj[key][i] === 'string') {
+                        obj[key][i] = marked(obj[key][i], { renderer });
+                    }
+                }
+            } else {
+                parseMarkdown(obj[key]);
+            }
+        } else if (typeof obj[key] === 'string') {
+            obj[key] = marked(obj[key], { renderer });
+        }
+    }
+}
+
+
+const buildFolder = './.build'
+
+
+//loadTemplate()
+//parseMD()
+
+let config = readConfig()
+
+
+const serverConfig = {
+    host: config.remote.host,
+    port: 22,
+    username: config.remote.user,
+    privateKey: privateKey,
+    path: config.remote.path
+}
+
+console.log(serverConfig)
+
+
+
+removeDirectorySync(buildFolder)
+copyDirectory("./.site/static", path.join(buildFolder, "static"))
+
+let data = readMetadata()
+let pages = data.map(site => {
+    return {
+        title: site.meta.title,
+        url: site.path
+    }
+})
+
+for(const site of data) {
+    if (site.type == "md") {
+        compile(site.meta.layout,
+            {
+                content: site.md.content,
+                title: site.meta.title,
+                hidden: false,
+                pages
+            },
+            path.join(buildFolder, site.path))
+    } else if (site.type == "yml") {
+        let data = {...site.data}
+        delete data.layout
+        parseMarkdown(data)
+        compileData(site.data.layout,
+            {data, pages, hidden: data.hidden},
+            path.join(buildFolder, site.path))
+    }
+}
+
+//console.log(readMetadata())
+// sajt
+
+
+uploadDirectory(serverConfig, buildFolder)
+
+/*
+process.exit(0)
+
+
+
+// Add watchers
+
+const chokidar = require('chokidar')
+const directoryPath = "."
+
+// Initialize watcher
+const watcher = chokidar.watch(directoryPath, {
+    ignored: /(^|[\/\\])\../, // ignore dotfiles
+    persistent: true
+});
+
+
+
+// Add event listeners
+watcher
+    .on('add', path => console.log(`File ${path} has been added`))
+    .on('change', path => console.log(`File ${path} has been changed`))
+    .on('unlink', path => console.log(`File ${path} has been removed`))
+    .on('error', error => console.error(`Watcher error: ${error}`));
+
+console.log(`Watching for changes in ${directoryPath}`);
+
+
+
+
+
+*/
+
+// Start server
+
+const express = require('express')
+const app = express()
+const PORT = process.env.PORT || 3000
+
+app.use(express.static('./.build'))
+//console.log(path.join(__dirname, '.build'))
+
+app.listen(PORT, () => {
+    console.log(`Server is running on http://localhost:${PORT}`)
+});
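For context, the site script in this commit expects a .site/ directory next to the content it builds: .site/config.yaml (read by readConfig, with a remote section feeding serverConfig), .site/templates/*.pug (one template per layout name), and .site/static/ (copied into .build/static), plus Markdown (.md) and YAML (.yml) sources in the project root; the rendered output lands in ./.build, is uploaded over SFTP, and is served locally on port 3000. A minimal .site/config.yaml matching what the script reads might look like the sketch below; host, user, and path are placeholder values inferred from the code, not part of the commit:

    remote:
      host: example.com      # SFTP host the build is uploaded to
      user: deploy           # SSH user, authenticated with ~/.ssh/id_rsa
      path: /var/www/html    # remote directory that receives the contents of .build/

Similarly, readMetadata() only looks at the front-matter keys title, layout, and (optionally) path, so a Markdown source could be sketched roughly as follows; the file name and layout value are illustrative only:

    ---
    title: Hello
    layout: post     # rendered with .site/templates/post.pug
    path: /hello     # optional; overrides the path derived from the file location
    ---
    # Hello, world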