Mirror of https://github.com/seigler/awesome-dash-platform, synced 2025-07-27 06:36:08 +00:00
feat: clean up (a lot) (#247)
License: MIT
Signed-off-by: Henrique Dias <hacdias@gmail.com>
parent 6f15e77ed6
commit a89015da98
12 changed files with 141 additions and 237 deletions
@@ -1,126 +1,50 @@
 const lunr = require('lunr')
 const fs = require('fs-extra')
-const path = require('path')
-const { slugify, capitalize, sortAbc } = require('./utils')
+const { join } = require('path')
 
-const dataDir = path.join(__dirname, '../src/data')
-const contentDir = path.join(__dirname, '../src/content')
-const indexesDir = path.join(__dirname, '../src/layouts/partials/indexes')
+function getData () {
+  let data = require('./data')
 
-const processDataType = (data) => {
-  const content = data.content.map(info => {
-    const { website, ...more } = info
-
-    return {
-      website: website,
-      categories: [data.title.toLowerCase()],
-      ...more
-    }
+  data.push({
+    title: 'Awesome IPFS',
+    slug: '_index',
+    content: data
+      .reduce((arr, cat) => arr.concat(cat.content), [])
+      .map((el, i) => ({
+        ...el,
+        index: i
+      }))
   })
 
-  delete data.content
-
-  return {
-    info: { ...data },
-    content: content
-  }
+  data.forEach(makeIndex)
+  return data
 }
 
-const writeContentFile = (data) => {
-  const basename = slugify(data.title)
-  const filename = path.join(contentDir, `${basename}.md`)
-
-  fs.writeFileSync(filename, JSON.stringify(data))
-}
-
-const makeIndex = (data) => {
-  const indexes = { 'index': [] }
-
-  const checkField = (field, el) => {
-    if (Array.isArray(el[field])) {
-      el[field].forEach(t => {
-        const key = `${field}_${t}`
-
-        if (indexes[key]) {
-          indexes[key].push(el.index)
-        } else {
-          indexes[key] = [el.index]
-        }
-      })
-    }
-  }
-
-  data.forEach(el => {
-    indexes.index.push(el.index)
-    checkField('tags', el)
-    checkField('categories', el)
-  })
-
-  data = data.map(({index, title, description = '', tags = [], categories = []}) => ({
+function makeIndex (category) {
+  const data = category.content.map(({ index, title, description = '', tags = [], category = '' }) => ({
     ref: index,
-    data: `${title} ${description} ${tags.join(' ')} ${categories.join(' ')}`
+    data: `${title} ${description} ${tags.join(' ')} ${category}`
   }))
 
-  for (const index in indexes) {
-    const idx = lunr(function () {
-      this.ref('ref')
-      this.field('data')
-
-      indexes[index].map(i => data[i]).forEach(this.add.bind(this))
-    })
-
-    const file = path.join(indexesDir, index + '.html')
-    const json = JSON.stringify(idx).replace(`'`, `\\'`)
-
-    fs.writeFileSync(file, `<script>var idx = JSON.parse(\`${json}\`);</script>`)
-  }
+  category.index = lunr(function () {
+    this.ref('ref')
+    this.field('data')
+    data.forEach(this.add.bind(this))
+  })
 }
 
 const process = () => {
-  fs.ensureDirSync(dataDir)
-  fs.ensureDirSync(contentDir)
-  fs.ensureDirSync(indexesDir)
-  fs.emptyDirSync(dataDir)
-  fs.emptyDirSync(contentDir)
-  fs.emptyDirSync(indexesDir)
+  const dir = join(__dirname, '../src/content')
+  fs.ensureDirSync(dir)
+  fs.emptyDirSync(dir)
 
-  let data = []
-  let types = []
-  let typesObj = {}
+  const data = getData()
 
-  require('./data')
-    .map(processDataType)
-    .forEach(({info, content}) => {
-      types.push(info)
-      data.push(content)
-    })
-
-  data = data.reduce((a, v) => a.concat(v), [])
-    .sort((a, b) => sortAbc(a.title, b.title))
-    .map((v, i) => { v.index = i; return v })
-
-  data.forEach(writeContentFile)
-  makeIndex(data)
-
-  types = types.map(t => {
-    t.title = capitalize(t.title)
-    return t
-  }).sort((a, b) => {
-    if (a.weight < b.weight) {
-      return -1
-    }
-
-    if (a.weight > b.weight) {
-      return 1
-    }
-
-    return 0
-  }).forEach(type => {
-    typesObj[type.title.toLowerCase()] = type
-  })
-
-  const pt = path.join(dataDir, 'categories.json')
-  fs.writeFileSync(pt, JSON.stringify(typesObj))
+  for (const { index, slug, ...meta } of data) {
+    const filename = join(dir, slug + '.md')
+    fs.writeFileSync(filename, `${JSON.stringify(meta)}
+<script>var idx = JSON.parse(\`${JSON.stringify(index).replace(`'`, `\\'`)}\`);</script>`)
+  }
 }
 
 process()
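Note: after this change, each generated .md file embeds its serialized lunr index as a global idx variable inside a <script> tag. As a rough sketch (not part of the commit; it assumes lunr 2.x is loaded in the browser and the query string is only an example), a page script could consume that index like this:

// `idx` is the plain object produced by the JSON.parse call in the embedded <script> tag.
// lunr.Index.load rehydrates it into a searchable index (lunr 2.x API).
const searchIndex = lunr.Index.load(idx)

// Each hit's `ref` is the numeric `index` the build script assigned to an entry,
// so it can be mapped back to the corresponding item rendered on the page.
const results = searchIndex.search('ipfs')
results.forEach(({ ref, score }) => console.log(ref, score))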