Merge pull request #681 from datopian/ref/old-mdx

Revert to old MDX parsing
Rufus Pollock 2022-03-15 10:20:54 +01:00 committed by GitHub
commit 727fd8deff
12 changed files with 3242 additions and 45298 deletions
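
In practical terms, this revert moves MDX handling out of `@next/mdx` (compile-time, via the webpack loader in `next.config.js`) and back to `next-mdx-remote` (serialization in `getStaticProps`, hydration with `MDXRemote`). A minimal sketch of the restored flow — the content string and page name below are illustrative, not part of this diff:

```
import { serialize } from 'next-mdx-remote/serialize'
import { MDXRemote } from 'next-mdx-remote'

// Build time: turn raw MDX text into a plain, serializable object
export async function getStaticProps() {
  const source = await serialize('# Hello\n\nSome **MDX** content')
  return { props: { source } }
}

// Render time: hydrate the serialized source, passing any custom components in scope
export default function Page({ source }) {
  return <MDXRemote {...source} components={{}} />
}
```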

View File

@@ -1,11 +1,10 @@
import Layout from './Layout'
import Layout from '../components/Layout'
import { MDXRemote } from 'next-mdx-remote'
import dynamic from 'next/dynamic'
import Head from 'next/head'
import Excel from './Excel'
import Table from './Table'
import TableGrid from './TableGrid'
import LineChart from './LineChart'
import MetaData from './Metadata'
import { MDXProvider } from '@mdx-js/react'
import Link from 'next/link'
import { Vega, VegaLite } from 'react-vega'
// Custom components/renderers to pass to MDX.
@@ -13,29 +12,34 @@ import { Vega, VegaLite } from 'react-vega'
// to handle import statements. Instead, you must include components in scope
// here.
const components = {
Table,
Excel,
Vega,
VegaLite,
LineChart,
Table: dynamic(() => import('../components/Table')),
Excel: dynamic(() => import('../components/Excel')),
// TODO: try and make these dynamic ...
Vega: Vega,
VegaLite: VegaLite,
LineChart: dynamic(() => import('../components/LineChart')),
Head,
TableGrid,
MetaData,
}
export default function DataLiterate({ children }) {
const { Component, pageProps } = children
export default function DataLiterate({ children, source, frontMatter }) {
return (
<Layout>
<main>
<MDXProvider components={components}>
<div className="prose mx-auto">
<Component {...pageProps} />
<Layout title={frontMatter.title}>
<div className="prose mx-auto">
<header>
<div className="mb-6">
<h1>{frontMatter.title}</h1>
{frontMatter.author && (
<div className="-mt-6"><p className="opacity-60 pl-1">{frontMatter.author}</p></div>
)}
{frontMatter.description && (
<p className="description">{frontMatter.description}</p>
)}
</div>
</MDXProvider>
</main>
</header>
<main>
<MDXRemote {...source} components={components} />
</main>
</div>
</Layout>
)
}
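
A note on the `dynamic(...)` wrappers above: `next/dynamic` code-splits the heavier components (Table, Excel, LineChart) so their bundles are only loaded on pages that actually render them. A minimal sketch, assuming a hypothetical `components/Heavy` module:

```
import dynamic from 'next/dynamic'

// Loaded in its own chunk; `ssr: false` and `loading` are optional extras
// for browser-only libraries and a fallback while the chunk downloads.
const Heavy = dynamic(() => import('../components/Heavy'), {
  ssr: false,
  loading: () => <p>Loading…</p>,
})
```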

View File

@@ -1,18 +0,0 @@
export default function MetaData({ title, author, description }) {
return (
<header>
<div className="mb-6">
<h1>{title}</h1>
{author && (
<div className="-mt-6"><p className="opacity-60 pl-1">{author}</p></div>
)}
{description && (
<p className="description">{description}</p>
)}
</div>
</header>
)
}

View File

@@ -1,81 +0,0 @@
import axios from 'axios'
import React, { useEffect } from 'react'
import { Table } from 'portal'
const papa = require("papaparse")
/*
Portaljs Table Grid
usage: <TableGrid url="" data={data} cols={cols} csv="" />
*/
export default function TableGrid({ data = [], cols = [], csv = '', url = '' }) {
if (csv) {
const out = parseCsv(csv)
data = prepareRowsForPortalJsTable(out.rows)
cols = out.fields
}
if (cols) {
cols = prepareColsForPortalJsTable(cols)
}
const [ourdata, setData] = React.useState(data)
const [ourcols, setCols] = React.useState(cols)
useEffect(() => {
if (url) {
loadUrl(url)
}
}, [url])
function loadUrl(path) {
// HACK: duplicate of Excel code - maybe refactor
// if url is external may have CORS issue so we proxy it ...
if (url.startsWith('http')) {
const PROXY_URL = window.location.origin + '/api/proxy'
url = PROXY_URL + '?url=' + encodeURIComponent(url)
}
axios.get(url).then((res) => {
const { rows, fields } = parseCsv(res.data)
setData(rows)
setCols(prepareColsForPortalJsTable(fields))
})
}
return (
<div>
<Table columns={ourcols} data={ourdata} height={"400px"} />
</div>
)
}
function prepareColsForPortalJsTable(cols) {
return cols.map((col) => {
return {
field: col.key,
headerName: col.name,
flex: true
}
})
}
function prepareRowsForPortalJsTable(rows) {
return rows.map((r) => {
return {
...r,
id: r.id || r.key
}
})
}
function parseCsv(csv) {
csv = csv.trim()
const rawdata = papa.parse(csv, { header: true })
const cols = rawdata.meta.fields.map((r, i) => {
return { key: r, name: r }
})
return {
rows: rawdata.data,
fields: cols
}
}
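
For reference, `papa.parse` with `header: true` returns row objects keyed by column name plus the detected field names in `meta.fields`, which is exactly what `parseCsv` repackages. A small sketch (values are illustrative, and come back as strings since `dynamicTyping` is not enabled):

```
const papa = require('papaparse')

const out = papa.parse('Year,Temp Anomaly\n1850,-0.418', { header: true })
// out.data        -> [{ 'Year': '1850', 'Temp Anomaly': '-0.418' }]
// out.meta.fields -> ['Year', 'Temp Anomaly']
```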

View File

@@ -1,11 +1,8 @@
---
title: Demo
author: Rufus Pollock
description: This demos and documents Data Literate features live
---
<MetaData title={title} author={author} description={description} />
This demos and documents Data Literate features live.
You can see the raw source of this page here: https://raw.githubusercontent.com/datopian/data-literate/main/content/demo.mdx
@@ -68,7 +65,7 @@ You can create a table of contents by having a markdown heading named `Table of
## A Table
You can create a simple table ...
You can create tables ...
```
<Table cols={[
@@ -129,71 +126,6 @@ Year,Temp Anomaly,
```
<Table url='/_files/HadCRUT.5.0.1.0.analysis.summary_series.global.annual.csv' />
```
___
You can also create a Table Grid, with more advanced features
```
<TableGrid cols={[
{ key: 'id', name: 'ID' },
{ key: 'firstName', name: 'First name' },
{ key: 'lastName', name: 'Last name' },
{ key: 'age', name: 'Age' }
]} data={[
{ id: 1, lastName: 'Snow', firstName: 'Jon', age: 35 },
{ id: 2, lastName: 'Lannister', firstName: 'Cersei', age: 42 },
{ id: 3, lastName: 'Lannister', firstName: 'Jaime', age: 45 },
{ id: 4, lastName: 'Stark', firstName: 'Arya', age: 16 },
{ id: 7, lastName: 'Clifford', firstName: 'Ferrara', age: 44 },
{ id: 8, lastName: 'Frances', firstName: 'Rossini', age: 36 },
{ id: 9, lastName: 'Roxie', firstName: 'Harvey', age: 65 },
]}
/>
```
<TableGrid cols={[
{ key: 'id', name: 'ID' },
{ key: 'firstName', name: 'First name' },
{ key: 'lastName', name: 'Last name' },
{ key: 'age', name: 'Age' }
]} data={[
{ id: 1, lastName: 'Snow', firstName: 'Jon', age: 35 },
{ id: 2, lastName: 'Lannister', firstName: 'Cersei', age: 42 },
{ id: 3, lastName: 'Lannister', firstName: 'Jaime', age: 45 },
{ id: 4, lastName: 'Stark', firstName: 'Arya', age: 16 },
{ id: 7, lastName: 'Clifford', firstName: 'Ferrara', age: 44 },
{ id: 8, lastName: 'Frances', firstName: 'Rossini', age: 36 },
{ id: 9, lastName: 'Roxie', firstName: 'Harvey', age: 65 },
]}
/>
### Table Grid from Raw CSV
You can also pass raw CSV as the content ...
```
<TableGrid csv={`
Year,Temp Anomaly
1850,-0.418
2020,0.923
`} />
```
<TableGrid csv={`
Year,Temp Anomaly,
1850,-0.418
2020,0.923
`} />
### Table Grid from a URL
```
<TableGrid url='/_files/HadCRUT.5.0.1.0.analysis.summary_series.global.annual.csv' />
```
<TableGrid url='/_files/HadCRUT.5.0.1.0.analysis.summary_series.global.annual.csv' />
## Charts

View File

@@ -0,0 +1,33 @@
import matter from 'gray-matter'
import toc from 'remark-toc'
import slug from 'remark-slug'
import gfm from 'remark-gfm'
import footnotes from 'remark-footnotes'
import { serialize } from 'next-mdx-remote/serialize'
/**
* Parse a markdown or MDX file to an MDX source form + front matter data
*
 * @param source the contents of a markdown or mdx file
* @returns: { mdxSource: mdxSource, frontMatter: ...}
*/
const parse = async function(source) {
const { content, data } = matter(source)
const mdxSource = await serialize(content, {
// Optionally pass remark/rehype plugins
mdxOptions: {
remarkPlugins: [gfm, toc, slug, footnotes],
rehypePlugins: [],
},
scope: data,
})
return {
mdxSource: mdxSource,
frontMatter: data
}
}
export default parse
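
A minimal usage sketch for `parse`, assuming a file at `content/demo.mdx`; the catch-all page later in this diff does essentially this and feeds the result to `DataLiterate`:

```
import fs from 'fs'
import parse from '../lib/markdown.js'

export const getStaticProps = async () => {
  const raw = fs.readFileSync('content/demo.mdx', 'utf-8')
  const { mdxSource, frontMatter } = await parse(raw)
  return { props: { source: mdxSource, frontMatter } }
}
```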

View File

@@ -0,0 +1,23 @@
import fs from 'fs'
import glob from 'glob'
import path from 'path'
// POSTS_PATH is useful when you want to get the path to a specific file
export const POSTS_PATH = path.join(process.cwd(), 'content')
const walkSync = (dir, filelist = []) => {
fs.readdirSync(dir).forEach(file => {
filelist = fs.statSync(path.join(dir, file)).isDirectory()
? walkSync(path.join(dir, file), filelist)
: filelist.concat(path.join(dir, file))
})
return filelist
}
// postFilePaths is the list of all mdx files inside the POSTS_PATH directory
export const postFilePaths = walkSync(POSTS_PATH)
.map((file) => { return file.slice(POSTS_PATH.length) })
// Only include md(x) files
.filter((path) => /\.mdx?$/.test(path))
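
For illustration, with an assumed content tree the helper yields paths relative to `POSTS_PATH` (leading separator included), which the catch-all page below turns into slugs:

```
// Assuming content/demo.mdx and content/guides/intro.mdx exist:
console.log(postFilePaths)
// -> ['/demo.mdx', '/guides/intro.mdx']  (POSTS_PATH prefix sliced off)
```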

View File

@@ -1,20 +0,0 @@
import gfm from 'remark-gfm'
import toc from 'remark-toc'
import slug from 'remark-slug'
import remarkFrontmatter from 'remark-frontmatter'
import { remarkMdxFrontmatter } from 'remark-mdx-frontmatter'
import withMDXImp from '@next/mdx'
const withMDX = withMDXImp({
extension: /\.mdx?$/,
options: {
remarkPlugins: [remarkFrontmatter, remarkMdxFrontmatter, gfm, toc, slug],
rehypePlugins: [],
// If you use `MDXProvider`, uncomment the following line.
providerImportSource: "@mdx-js/react",
},
})
export default withMDX({
// Append the default value with md extensions
pageExtensions: ['ts', 'tsx', 'js', 'jsx', 'md', 'mdx'],
})

File diff suppressed because it is too large

View File

@@ -8,34 +8,29 @@
"export": "next export",
"start": "next start"
},
"engines": {
"node": ">12"
},
"dependencies": {
"@headlessui/react": "^1.3.0",
"@heroicons/react": "^1.0.3",
"@mdx-js/loader": "^2.0.0",
"@mdx-js/react": "^2.0.0",
"@next/mdx": "^12.1.0",
"@tailwindcss/typography": "^0.5.2",
"autoprefixer": "^10.4.2",
"@mdx-js/loader": "^1.6.22",
"@tailwindcss/typography": "^0.4.0",
"autoprefixer": "^10.0.4",
"frictionless.js": "^0.13.4",
"gray-matter": "^4.0.3",
"next": "12.1.0",
"next-mdx-remote": "^3.0.4",
"papaparse": "^5.3.1",
"portal": "https://github.com/datopian/portal.js.git",
"postcss": "^8.4.7",
"postcss": "^8.2.10",
"prop-types": "^15.7.2",
"react": "17.0.1",
"react-dom": "17.0.1",
"react": "17.0.2",
"react-dom": "17.0.2",
"react-vega": "^7.4.4",
"remark": "^13.0.0",
"remark-footnotes": "^3.0.0",
"remark-frontmatter": "^4.0.1",
"remark-gfm": "^1.0.0",
"remark-mdx-frontmatter": "^1.1.1",
"remark-html": "^13.0.2",
"remark-slug": "^6.1.0",
"remark-toc": "^7.2.0",
"tailwindcss": "^3.0.23",
"tailwindcss": "^2.2.16",
"vega": "^5.20.2",
"vega-lite": "^5.1.0",
"xlsx": "^0.17.0"

View File

@@ -0,0 +1,48 @@
import fs from 'fs'
import path from 'path'
import parse from '../lib/markdown.js'
import DataLiterate from '../components/DataLiterate'
import { postFilePaths, POSTS_PATH } from '../lib/mdxUtils'
export default function PostPage({ source, frontMatter }) {
return (
<DataLiterate source={source} frontMatter={frontMatter} />
)
}
export const getStaticProps = async ({ params }) => {
const mdxPath = path.join(POSTS_PATH, `${params.slug.join('/')}.mdx`)
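// fall back to the same path with a .md extension if no .mdx file exists (slice drops the trailing 'x')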
const postFilePath = fs.existsSync(mdxPath) ? mdxPath : mdxPath.slice(0, -1)
const source = fs.readFileSync(postFilePath)
const { mdxSource, frontMatter } = await parse(source)
return {
props: {
source: mdxSource,
frontMatter: frontMatter,
},
}
}
export const getStaticPaths = async () => {
var paths = postFilePaths
// Remove file extensions for page paths
.map((path) => path.replace(/\.mdx?$/, ''))
// Map the path into the static paths object required by Next.js
paths = paths.map((slug) => {
// /demo => [demo]
const parts = slug.slice(1).split('/')
return { params: { slug: parts } }
})
return {
paths,
fallback: false,
}
}
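
A worked example of the path-to-slug round trip, with an assumed `content/guides/intro.mdx`:

```
// getStaticPaths: '/guides/intro.mdx' -> strip extension, drop leading '/', split ->
//   { params: { slug: ['guides', 'intro'] } }
// getStaticProps: params.slug = ['guides', 'intro'] ->
//   path.join(POSTS_PATH, 'guides/intro.mdx'), falling back to 'guides/intro.md' if missing
```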

View File

@@ -1,12 +1,8 @@
import '../styles/globals.css'
import '../styles/tailwind.css'
import DataLiterate from '../components/DataLiterate'
function MyApp({ Component, pageProps }) {
return (
<DataLiterate children={{ Component, pageProps }}/>
)
return <Component {...pageProps} />
}
export default MyApp

View File

@@ -1,8 +1,9 @@
const defaultTheme = require("tailwindcss/defaultTheme");
module.exports = {
mode: 'jit',
// purge: ['./pages/**/*.{js,ts,jsx,tsx}', './components/**/*.{js,ts,jsx,tsx}'],
content: [
purge: [
"./pages/**/*.js",
"./pages/**/*.ts",
"./pages/**/*.jsx",
@@ -23,6 +24,9 @@ module.exports = {
}
},
},
variants: {
extend: {},
},
plugins: [
require('@tailwindcss/typography'),
],