Mirror of https://github.com/XRPLF/xrpl-dev-portal.git (synced 2025-11-04 20:05:50 +00:00)
chore(project): migrate to latest version of realm and adjust plugins to match new interface
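The change that repeats across every hunk below is the hook signature: plugins no longer receive a contentProvider object; each hook now gets actions first and a context object second, from which these plugins pull fs (for fs.scan()) and cache (for cache.load(path, kind)). A minimal sketch of a plugin written against that shape, using only calls that appear in this diff (the plugin name, route, shared-data key, and `title` frontmatter field are made up for illustration):

export function examplePlugin() {
  /** @type {import("@redocly/realm/dist/server/plugins/types").PluginInstance } */
  const instance = {
    // New interface: `actions` first, then a context object with `fs` and `cache`.
    processContent: async (actions, { fs, cache }) => {
      // fs.scan() yields file entries that carry a `relativePath` property.
      const files = await fs.scan();
      const markdownFiles = files.filter((file) => file.relativePath.endsWith('.md'));

      const titles = [];
      for (const { relativePath } of markdownFiles) {
        // cache.load(path, kind) resolves to { data }; this diff uses the
        // 'markdown-ast' and 'markdown-frontmatter' kinds.
        const { data } = await cache.load(relativePath, 'markdown-frontmatter');
        titles.push(data?.frontmatter?.title || relativePath); // `title` key is illustrative
      }

      // Publish shared data once, then attach it to the route(s) that should see it.
      const sharedDataId = await actions.createSharedData('example-titles', { titles });
      actions.addRouteSharedData('/example/', 'example-titles', sharedDataId);
    },
  };
  return instance; // the factories below presumably end the same way (the return falls outside the shown hunks)
}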
@@ -9,30 +9,32 @@ import moment from "moment";
 export function blogPosts() {
   /** @type {import("@redocly/realm/dist/server/plugins/types").PluginInstance } */
   const instance = {
-    processContent: async (contentProvider, actions) => {
+    processContent: async (actions, { fs, cache }) => {
       try {
         const posts = [];
-        const allBlogFiles = Array.from(contentProvider.fsFilesList.values());
+        const allFiles = await fs.scan()

-        const markdownFiles = allBlogFiles.filter(file => file.match(/blog[\/\\]([^\\\/]*)[\/\\].*\.md$/));
+        const markdownFiles = allFiles
+          .filter(file => file.relativePath
+          .match(/^blog[\/\\]([^\\\/]*)[\/\\].*\.md$/));

-        for (const relativePath of markdownFiles) {
-          const record = contentProvider.loadContent(relativePath, 'frontmatter');
-          const ast = markdoc.parse(record.content);
+        for (const { relativePath } of markdownFiles) {
+          const { data: { ast } } = await cache.load(relativePath, 'markdown-ast');
+          const { data: { frontmatter } } = await cache.load(relativePath, 'markdown-frontmatter');

           const dirPath = dirname(relativePath);
           const title = extractFirstHeading(ast) || '';
-          const category = extractCategory(record.parsed.data.labels);
+          const category = extractCategory(frontmatter.labels);
           const year = `${relativePath.split("/")[1]}`

           posts.push({
             path: dirPath,
-            author: record.parsed.data.author || "",
+            author: frontmatter.author || "",
             title: title || toTitleCase(dirname(dirPath)),
             description: getInnerText([ast.children[1]]).replace(title, '').trim(),
             year: year,
-            date: record.parsed.data.date
-              ? moment(record.parsed.data.date).format("YYYY-MM-DD")
+            date: frontmatter.date
+              ? moment(frontmatter.date).format("YYYY-MM-DD")
               : moment(year).format("YYYY-MM-DD"),
             category: category || "General",
             category_id: category ? category.toLowerCase().replace(/ /g, "_") : "general",
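Under the old interface this plugin loaded each file through contentProvider.loadContent and parsed it with markdoc itself; now the prebuilt AST and frontmatter come from the cache. To make the derived fields concrete, one entry pushed into posts would look roughly like this for a hypothetical post at blog/2024/example-post/index.md, assuming extractCategory (not shown in this diff) returns a label string such as "Development":

// Hypothetical frontmatter: author: "Jane Doe", date: "2024-05-01", labels: ["Development"]
const examplePost = {
  path: 'blog/2024/example-post',   // dirname(relativePath)
  author: 'Jane Doe',               // frontmatter.author
  title: 'Example Post',            // first heading in the AST, else toTitleCase(...)
  description: '…',                 // text of the node after the heading, minus the title
  year: '2024',                     // relativePath.split("/")[1]
  date: '2024-05-01',               // frontmatter.date formatted as YYYY-MM-DD
  category: 'Development',          // extractCategory(frontmatter.labels)
  category_id: 'development',       // lowercased, spaces replaced with underscores
};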
@@ -3,45 +3,45 @@
 import { getInnerText } from '@redocly/realm/dist/shared/markdoc.js';

 import { dirname, relative, join as joinPath } from 'path';
 import markdoc from '@markdoc/markdoc';

 export function codeSamples() {
   /** @type {import("@redocly/realm/dist/server/plugins/types").PluginInstance } */
   const instance = {
-    processContent: async (contentProvider, actions) => {
+    processContent: async (actions, { fs, cache }) => {
       try {
         const samples = [];
         const allLands = new Set();
-        const allCodeSampleFiles = Array.from(contentProvider.fsFilesList.values());
+        const allCodeSampleFiles = await fs.scan();

-        const readmes = allCodeSampleFiles.filter(file => file.match(/_code-samples[\/\\]([^\\\/]*)[\/\\]README\.md$/));
+        const readmes = allCodeSampleFiles.filter((file) => file.relativePath.match(/^_code-samples[\/\\]([^\\\/]*)[\/\\]README\.md$/));

-        for (const relativePath of readmes) {
-          const record = contentProvider.loadContent(relativePath, 'frontmatter');
+        for (const { relativePath } of readmes) {
+          const { data } = await cache.load(relativePath, 'markdown-ast');

-          const ast = markdoc.parse(record.content);

-          const dirPath = dirname(relativePath)
+          const dirPath = dirname(relativePath);
           const langs = unique(
             allCodeSampleFiles
-              .filter(file => file.startsWith(dirPath) && !file.endsWith('README.md'))
-              .map(file => relative(dirPath, file).split('/')[0])
+              .filter((file) => file.relativePath.startsWith(dirPath) && !file.relativePath.endsWith('README.md'))
+              .map((file) => relative(dirPath, file.relativePath).split('/')[0])
           );
-          const title = extractFirstHeading(ast) || '';
+          const title = extractFirstHeading(data.ast) || '';
           samples.push({
             path: dirPath,
             title: title || toTitleCase(dirname(dirPath)),
-            description: getInnerText([ast.children[1]]).replace(title, '').trim(),
+            description: getInnerText([data.ast.children[1]]).replace(title, '').trim(),
             href: joinPath('content', dirPath),
             langs,
           });

-          langs.forEach(l => allLands.add(l));
+          langs.forEach((l) => allLands.add(l));
         }

         const sortedSamples = samples.sort((a, b) => normalizeTitleForSort(a).localeCompare(normalizeTitleForSort(b)));

-        actions.createSharedData('code-samples', { codeSamples: sortedSamples, langs: Array.from(allLands) });
+        actions.createSharedData('code-samples', {
+          codeSamples: sortedSamples,
+          langs: Array.from(allLands),
+        });
         actions.addRouteSharedData('/resources/code-samples/', 'code-samples', 'code-samples');
         actions.addRouteSharedData('/ja/resources/code-samples/', 'code-samples', 'code-samples');
       } catch (e) {
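The shape of the change matches the blog-posts plugin: fs.scan() supplies entries with a relativePath, and the Markdown AST comes from cache.load(relativePath, 'markdown-ast') instead of markdoc.parse. For a hypothetical sample directory, the per-README derivation works out as follows (paths invented for illustration):

// Hypothetical listing:
//   _code-samples/checks/README.md
//   _code-samples/checks/js/create-check.js
//   _code-samples/checks/py/create_check.py
const dirPath = '_code-samples/checks';       // dirname of the README's relativePath
const langs = ['js', 'py'];                   // first path segment of each non-README file under dirPath
const href = 'content/_code-samples/checks';  // joinPath('content', dirPath)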
@@ -64,8 +64,8 @@ const WORDS_TO_CAPS = ['xrp'];
 function toTitleCase(s) {
   const words = s.split(/_|[^\w']/);
   return words
-    .filter(word => word)
-    .map(word => (WORDS_TO_CAPS.includes(word) ? word.toUpperCase() : word.charAt(0).toUpperCase() + word.slice(1)))
+    .filter((word) => word)
+    .map((word) => (WORDS_TO_CAPS.includes(word) ? word.toUpperCase() : word.charAt(0).toUpperCase() + word.slice(1)))
     .join(' ')
     .replace("'S", "'s")
     .replace(' A ', ' a ');
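Only the arrow-function parameters gain parentheses here; the behaviour of toTitleCase is unchanged. For reference, with WORDS_TO_CAPS = ['xrp'] the helper behaves like this (inputs invented for illustration):

toTitleCase('send-xrp');               // => 'Send XRP'  ('xrp' is force-uppercased)
toTitleCase('build_a_desktop_wallet'); // => 'Build a Desktop Wallet'  (' A ' is lowered to ' a ')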
@@ -78,7 +78,7 @@ function unique(array) {
 function extractFirstHeading(ast) {
   let heading;

-  visit(ast, node => {
+  visit(ast, (node) => {
     if (!isNode(node)) {
       return;
     }
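The hunk shows only the start of extractFirstHeading, which walks the Markdoc AST with visit and isNode. As a rough, self-contained illustration of the same idea, not the body of this function, a hand-rolled walk that returns the first heading's text could look like this (it assumes Markdoc-style text nodes that keep their string in attributes.content):

// Illustrative stand-in, not the file's implementation: depth-first walk that
// returns the concatenated text of the first heading node encountered.
function firstHeadingText(node) {
  if (!node || typeof node !== 'object') return undefined;
  if (node.type === 'heading') return collectText(node);
  for (const child of node.children || []) {
    const text = firstHeadingText(child);
    if (text !== undefined) return text;
  }
  return undefined;
}

function collectText(node) {
  if (node.type === 'text') return String(node.attributes?.content ?? '');
  return (node.children || []).map(collectText).join('');
}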
@@ -6,9 +6,11 @@ export function indexPages() {
   /** @type {import("@redocly/realm/dist/server/plugins/types").PluginInstance } */
   const instance = {
     // hook that gets executed after all routes were created
-    async afterRoutesCreated(contentProvider, actions) {
+    async afterRoutesCreated(actions, { cache }) {
       // get all the routes that are ind pages
-      const indexRoutes = actions.getAllRoutes().filter(route => route.metadata?.indexPage);
+      const indexRoutes = actions
+        .getAllRoutes()
+        .filter((route) => route.metadata?.indexPage);

       for (const route of indexRoutes) {
         // @ts-ignore this uses some internals, we will expose them in nicer way in the future releases
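afterRoutesCreated now also takes actions first, with cache pulled from the context object; the route scan itself only changes formatting. Whether a route counts as an index page is driven by route.metadata?.indexPage, which Realm derives from the page's own metadata; with hypothetical routes the filter selects entries like this:

const routes = [
  { slug: '/docs/', metadata: { indexPage: true } },    // selected
  { slug: '/docs/tutorials/example/', metadata: {} },   // skipped
];
const indexRoutes = routes.filter((route) => route.metadata?.indexPage);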
@@ -22,27 +24,38 @@ export function indexPages() {
         }

         const item = findItemDeep(sidebar.items, route.fsPath);
-        const childrenPaths = (item.items || []).map(item => item.fsPath).filter(Boolean);
+        const childrenPaths = (item.items || [])
+          .map((item) => item.fsPath)
+          .filter(Boolean);

-        const childRoutes = childrenPaths.map(fsPath => actions.getRouteByFsPath(fsPath));
+        const childRoutes = childrenPaths.map((fsPath) =>
+          actions.getRouteByFsPath(fsPath),
+        );
         const childRoutesData = await Promise.all(
-          childRoutes.map(async route => {
-            const { parsed } = contentProvider.loadContent(route.fsPath, 'frontmatter');
+          childRoutes.map(async (route) => {
+            const { data } = await cache.load(
+              route.fsPath,
+              'markdown-frontmatter',
+            );
             const slug = route.slug;
             const title = await route.getNavText();
             return {
-              ...parsed?.data,
+              ...data?.frontmatter,
               slug,
               title,
             };
-          })
+          }),
         );

         const sharedDataId = await actions.createSharedData(
           route.slug + '_' + INDEX_PAGE_INFO_DATA_KEY,
-          childRoutesData
+          childRoutesData,
         );
-        actions.addRouteSharedData(route.slug, INDEX_PAGE_INFO_DATA_KEY, sharedDataId);
+        actions.addRouteSharedData(
+          route.slug,
+          INDEX_PAGE_INFO_DATA_KEY,
+          sharedDataId,
+        );
       }
     },
   };
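Each index route therefore ends up with one shared-data record per child page: the child's frontmatter spread into an object together with its slug and navigation title, stored under the route's own slug plus INDEX_PAGE_INFO_DATA_KEY. With invented values, one stored array would look like:

// Shape of the data stored for one index route, based on the loop above.
// The concrete slug, title, and frontmatter field are hypothetical.
const exampleIndexPageInfo = [
  {
    description: 'Hypothetical child-page description.', // from ...data?.frontmatter
    slug: '/docs/tutorials/example-child/',
    title: 'Example Child Page',
  },
];
// created under the key: route.slug + '_' + INDEX_PAGE_INFO_DATA_KEY
// attached with: actions.addRouteSharedData(route.slug, INDEX_PAGE_INFO_DATA_KEY, sharedDataId)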