refactor: Migrate documentation content, rebuild UI components, and update core architecture.
This commit is contained in:
118
lib/markdown.ts
118
lib/markdown.ts
@@ -6,7 +6,7 @@ import rehypePrism from "rehype-prism-plus";
|
||||
import rehypeAutolinkHeadings from "rehype-autolink-headings";
|
||||
import rehypeSlug from "rehype-slug";
|
||||
import rehypeCodeTitles from "rehype-code-titles";
|
||||
import { page_routes, ROUTES } from "./routes-config";
|
||||
import { page_routes, ROUTES } from "./routes";
|
||||
import { visit } from "unist-util-visit";
|
||||
import type { Node, Parent } from "unist";
|
||||
import matter from "gray-matter";
|
||||
@@ -31,6 +31,7 @@ interface TextNode extends Node {
|
||||
|
||||
// custom components imports
|
||||
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
|
||||
import { Table, TableHeader, TableBody, TableFooter, TableHead, TableRow, TableCell } from "@/components/ui/table";
|
||||
import Pre from "@/components/markdown/PreMdx";
|
||||
import Note from "@/components/markdown/NoteMdx";
|
||||
import { Stepper, StepperItem } from "@/components/markdown/StepperMdx";
|
||||
@@ -54,20 +55,22 @@ const components = {
|
||||
TabsContent,
|
||||
TabsList,
|
||||
TabsTrigger,
|
||||
pre: Pre,
|
||||
Note,
|
||||
Stepper,
|
||||
StepperItem,
|
||||
img: Image,
|
||||
a: Link,
|
||||
Outlet,
|
||||
Youtube,
|
||||
Tooltip,
|
||||
Card,
|
||||
Button,
|
||||
Accordion,
|
||||
AccordionGroup,
|
||||
CardGroup,
|
||||
Kbd,
|
||||
// Table Components
|
||||
table: Table,
|
||||
thead: TableHeader,
|
||||
tbody: TableBody,
|
||||
tfoot: TableFooter,
|
||||
tr: TableRow,
|
||||
th: TableHead,
|
||||
td: TableCell,
|
||||
// Release Note Components
|
||||
Release,
|
||||
Changes,
|
||||
@@ -75,7 +78,13 @@ const components = {
|
||||
File,
|
||||
Files,
|
||||
Folder,
|
||||
AccordionGroup
|
||||
pre: Pre,
|
||||
Note,
|
||||
Stepper,
|
||||
StepperItem,
|
||||
img: Image,
|
||||
a: Link,
|
||||
Outlet,
|
||||
};
|
||||
|
||||
// helper function to handle rehype code titles, since by default we can't inject into the className of rehype-code-titles
|
||||
@@ -154,38 +163,57 @@ export type BaseMdxFrontmatter = {
|
||||
|
||||
export async function getDocsForSlug(slug: string) {
|
||||
try {
|
||||
const contentPath = getDocsContentPath(slug);
|
||||
const rawMdx = await fs.readFile(contentPath, "utf-8");
|
||||
return await parseMdx<BaseMdxFrontmatter>(rawMdx);
|
||||
const { content, filePath } = await getRawMdx(slug);
|
||||
const mdx = await parseMdx<BaseMdxFrontmatter>(content);
|
||||
return {
|
||||
...mdx,
|
||||
filePath,
|
||||
};
|
||||
} catch (err) {
|
||||
console.log(err);
|
||||
}
|
||||
}
|
||||
|
||||
export async function getDocsTocs(slug: string) {
|
||||
const contentPath = getDocsContentPath(slug);
|
||||
const rawMdx = await fs.readFile(contentPath, "utf-8");
|
||||
// captures between ## - #### can modify accordingly
|
||||
const headingsRegex = /^(#{2,4})\s(.+)$/gm;
|
||||
const { content } = await getRawMdx(slug);
|
||||
const rawMdx = content;
|
||||
|
||||
// Regex to match code blocks (```...```), standard markdown headings (##), and <Release> tags
|
||||
const combinedRegex = /(```[\s\S]*?```)|^(#{2,4})\s(.+)$|<Release[^>]*version="([^"]+)"/gm;
|
||||
|
||||
let match;
|
||||
const extractedHeadings = [];
|
||||
while ((match = headingsRegex.exec(rawMdx)) !== null) {
|
||||
const headingLevel = match[1].length;
|
||||
const headingText = match[2].trim();
|
||||
const slug = sluggify(headingText);
|
||||
extractedHeadings.push({
|
||||
level: headingLevel,
|
||||
text: headingText,
|
||||
href: `#${slug}`,
|
||||
});
|
||||
|
||||
while ((match = combinedRegex.exec(rawMdx)) !== null) {
|
||||
// match[1] -> Code block content (ignore)
|
||||
if (match[1]) continue;
|
||||
|
||||
// match[2] & match[3] -> Markdown headings
|
||||
if (match[2]) {
|
||||
const headingLevel = match[2].length;
|
||||
const headingText = match[3].trim();
|
||||
const slug = sluggify(headingText);
|
||||
extractedHeadings.push({
|
||||
level: headingLevel,
|
||||
text: headingText,
|
||||
href: `#${slug}`,
|
||||
});
|
||||
}
|
||||
// match[4] -> Release component version
|
||||
else if (match[4]) {
|
||||
const version = match[4];
|
||||
extractedHeadings.push({
|
||||
level: 2,
|
||||
text: `v${version}`,
|
||||
href: `#${version}`,
|
||||
});
|
||||
}
|
||||
}
|
||||
return extractedHeadings;
|
||||
}
|
||||
|
||||
export function getPreviousNext(path: string) {
|
||||
// path comes in as "getting-started/introduction" but page_routes has "/docs/getting-started/introduction"
|
||||
const fullPath = path ? `/docs/${path}` : "/docs";
|
||||
const index = page_routes.findIndex(({ href }) => href === fullPath);
|
||||
const index = page_routes.findIndex(({ href }) => href == `/${path}`);
|
||||
return {
|
||||
prev: page_routes[index - 1],
|
||||
next: page_routes[index + 1],
|
||||
@@ -197,8 +225,26 @@ function sluggify(text: string) {
|
||||
return slug.replace(/[^a-z0-9-]/g, "");
|
||||
}
|
||||
|
||||
function getDocsContentPath(slug: string) {
|
||||
return path.join(process.cwd(), "/contents/docs/", `${slug}/index.mdx`);
|
||||
async function getRawMdx(slug: string) {
|
||||
const commonPath = path.join(process.cwd(), "/docs/");
|
||||
const paths = [
|
||||
path.join(commonPath, `${slug}.mdx`),
|
||||
path.join(commonPath, slug, "index.mdx"),
|
||||
];
|
||||
|
||||
for (const p of paths) {
|
||||
try {
|
||||
const content = await fs.readFile(p, "utf-8");
|
||||
return {
|
||||
content,
|
||||
filePath: `docs/${path.relative(commonPath, p)}`,
|
||||
};
|
||||
} catch {
|
||||
// ignore and try next
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error(`Could not find mdx file for slug: ${slug}`);
|
||||
}
|
||||
|
||||
function justGetFrontmatterFromMD<Frontmatter>(rawMd: string): Frontmatter {
|
||||
@@ -220,16 +266,10 @@ export async function getAllChilds(pathString: string) {
|
||||
|
||||
return await Promise.all(
|
||||
page_routes_copy.map(async (it) => {
|
||||
const totalPath = path.join(
|
||||
process.cwd(),
|
||||
"/contents/docs/",
|
||||
prevHref,
|
||||
it.href,
|
||||
"index.mdx"
|
||||
);
|
||||
const raw = await fs.readFile(totalPath, "utf-8");
|
||||
const slug = path.join(prevHref, it.href);
|
||||
const { content } = await getRawMdx(slug);
|
||||
return {
|
||||
...justGetFrontmatterFromMD<BaseMdxFrontmatter>(raw),
|
||||
...justGetFrontmatterFromMD<BaseMdxFrontmatter>(content),
|
||||
href: `/docs${prevHref}${it.href}`,
|
||||
};
|
||||
})
|
||||
|
||||
5
lib/search/algolia.ts
Normal file
5
lib/search/algolia.ts
Normal file
@@ -0,0 +1,5 @@
|
||||
// Algolia DocSearch credentials, read from NEXT_PUBLIC_* env vars.
// Values are undefined when the variables are not configured.
// NOTE(review): Next.js inlines public env vars only for literal
// `process.env.NEXT_PUBLIC_*` member accesses — keep these as-is.
export const algoliaConfig = {
  appId: process.env.NEXT_PUBLIC_ALGOLIA_DOCSEARCH_APP_ID,
  apiKey: process.env.NEXT_PUBLIC_ALGOLIA_DOCSEARCH_API_KEY,
  indexName: process.env.NEXT_PUBLIC_ALGOLIA_DOCSEARCH_INDEX_NAME,
}
|
||||
43
lib/search/built-in.ts
Normal file
43
lib/search/built-in.ts
Normal file
@@ -0,0 +1,43 @@
|
||||
import { ROUTES, type EachRoute } from "../routes"
|
||||
|
||||
// A single flattened hit returned by the built-in search: a route's
// title and fully-resolved href. `items` is pinned to undefined so a
// hit never carries its subtree; `score` is optional ranking metadata.
export type SearchResult = {
  title: string
  href: string
  noLink?: boolean
  items?: undefined
  score?: number
}
|
||||
|
||||
function helperSearch(
|
||||
query: string,
|
||||
node: EachRoute,
|
||||
prefix: string,
|
||||
currenLevel: number,
|
||||
maxLevel?: number
|
||||
) {
|
||||
const res: EachRoute[] = []
|
||||
let parentHas = false
|
||||
|
||||
const nextLink = `${prefix}${node.href}`
|
||||
if (!node.noLink && node.title.toLowerCase().includes(query.toLowerCase())) {
|
||||
res.push({ ...node, items: undefined, href: nextLink })
|
||||
parentHas = true
|
||||
}
|
||||
const goNext = maxLevel ? currenLevel < maxLevel : true
|
||||
if (goNext)
|
||||
node.items?.forEach((item) => {
|
||||
const innerRes = helperSearch(query, item, nextLink, currenLevel + 1, maxLevel)
|
||||
if (!!innerRes.length && !parentHas && !node.noLink) {
|
||||
res.push({ ...node, items: undefined, href: nextLink })
|
||||
parentHas = true
|
||||
}
|
||||
res.push(...innerRes)
|
||||
})
|
||||
return res
|
||||
}
|
||||
|
||||
export function advanceSearch(query: string) {
|
||||
return ROUTES.map((node) =>
|
||||
helperSearch(query, node, "", 1, query.length == 0 ? 2 : undefined)
|
||||
).flat()
|
||||
}
|
||||
7
lib/search/config.ts
Normal file
7
lib/search/config.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
import docuConfig from "@/docu.json"
|
||||
|
||||
export type SearchType = "default" | "algolia"
|
||||
|
||||
export const searchConfig = {
|
||||
type: (docuConfig.search?.type as SearchType) ?? "default",
|
||||
}
|
||||
@@ -6,4 +6,5 @@ export interface TocItem {
|
||||
|
||||
// Props for the mobile table-of-contents component: the list of TOC
// entries plus an optional heading shown above them.
export interface MobTocProps {
  tocs: TocItem[];
  title?: string;
}
|
||||
|
||||
67
lib/utils.ts
67
lib/utils.ts
@@ -1,80 +1,39 @@
|
||||
import { type ClassValue, clsx } from "clsx";
|
||||
import { twMerge } from "tailwind-merge";
|
||||
import { EachRoute, ROUTES } from "./routes-config";
|
||||
import { type ClassValue, clsx } from "clsx"
|
||||
import { twMerge } from "tailwind-merge"
|
||||
|
||||
// Merge class names: clsx resolves conditional/array class values,
// twMerge de-duplicates conflicting Tailwind utilities (last wins).
export function cn(...inputs: ClassValue[]) {
  return twMerge(clsx(inputs));
}
|
||||
|
||||
// Recursively collects routes whose title contains `query`
// (case-insensitive), resolving each hit's href against the
// accumulated `prefix`.
// NOTE(review): an identical implementation exists in
// lib/search/built-in.ts — confirm which copy is canonical.
export function helperSearch(
  query: string,
  node: EachRoute,
  prefix: string,
  currenLevel: number,
  maxLevel?: number
) {
  const res: EachRoute[] = [];
  let parentHas = false;

  // Full href of this node: parent prefix + own segment.
  const nextLink = `${prefix}${node.href}`;
  if (!node.noLink && node.title.toLowerCase().includes(query.toLowerCase())) {
    // Drop `items` so a hit never carries its whole subtree.
    res.push({ ...node, items: undefined, href: nextLink });
    parentHas = true;
  }
  // maxLevel (when truthy) caps recursion depth; otherwise search everything.
  const goNext = maxLevel ? currenLevel < maxLevel : true;
  if (goNext)
    node.items?.forEach((item) => {
      const innerRes = helperSearch(
        query,
        item,
        nextLink,
        currenLevel + 1,
        maxLevel
      );
      // Include the parent once if any descendant matched and the
      // parent is linkable but didn't match directly.
      if (!!innerRes.length && !parentHas && !node.noLink) {
        res.push({ ...node, items: undefined, href: nextLink });
        parentHas = true;
      }
      res.push(...innerRes);
    });
  return res;
}
|
||||
|
||||
export function advanceSearch(query: string) {
|
||||
return ROUTES.map((node) =>
|
||||
helperSearch(query, node, "", 1, query.length == 0 ? 2 : undefined)
|
||||
).flat();
|
||||
return twMerge(clsx(inputs))
|
||||
}
|
||||
|
||||
// Thursday, May 23, 2024
|
||||
export function formatDate(dateStr: string): string {
|
||||
const [day, month, year] = dateStr.split("-").map(Number);
|
||||
const date = new Date(year, month - 1, day);
|
||||
const [day, month, year] = dateStr.split("-").map(Number)
|
||||
const date = new Date(year, month - 1, day)
|
||||
|
||||
const options: Intl.DateTimeFormatOptions = {
|
||||
weekday: "long",
|
||||
year: "numeric",
|
||||
month: "long",
|
||||
day: "numeric",
|
||||
};
|
||||
}
|
||||
|
||||
return date.toLocaleDateString("en-US", options);
|
||||
return date.toLocaleDateString("en-US", options)
|
||||
}
|
||||
|
||||
// May 23, 2024
|
||||
export function formatDate2(dateStr: string): string {
|
||||
const [day, month, year] = dateStr.split("-").map(Number);
|
||||
const date = new Date(year, month - 1, day);
|
||||
const [day, month, year] = dateStr.split("-").map(Number)
|
||||
const date = new Date(year, month - 1, day)
|
||||
|
||||
const options: Intl.DateTimeFormatOptions = {
|
||||
month: "short",
|
||||
day: "numeric",
|
||||
year: "numeric",
|
||||
};
|
||||
return date.toLocaleDateString("en-US", options);
|
||||
}
|
||||
return date.toLocaleDateString("en-US", options)
|
||||
}
|
||||
|
||||
export function stringToDate(date: string) {
|
||||
const [day, month, year] = date.split("-").map(Number);
|
||||
return new Date(year, month - 1, day);
|
||||
const [day, month, year] = date.split("-").map(Number)
|
||||
return new Date(year, month - 1, day)
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user