[Data literate][m]: Copy and refactor data-literate into minimal template folder

60  examples/data-literate-template/README.md  Normal file
@@ -0,0 +1,60 @@
This example renders markdown + CSV into an elegant web page. This type of data setup is what we term [data literate][].

[data literate]: https://portaljs.org/data-literate

## How to use

```bash
npx create-next-app -e https://github.com/datopian/portal.js/tree/main/examples/data-literate
# choose a name for your portal when prompted e.g. your-portal or go with the default my-app

# then run it
cd your-portal
yarn     # install packages
yarn dev # start app in dev mode
```

You should see the demo portal running with the example dataset provided at `http://localhost:3000/demo`.

For the moment there is no root path; each markdown file gets its own path (route) in the generated HTML.

TODO

### Use your own dataset

You can try it out with your own data literate setups.

In the directory of your portal do:

```bash
export PORTAL_DATASET_PATH=/path/to/my/dataset
```

Then restart the dev server:

```
yarn dev
```

Check the portal page and it should have updated, e.g. like:

TODO

### Static Export

Build the export:

```
yarn build && yarn export
```

Results will be in the `out/` subfolder.

To test it you will need to run a local webserver in that folder (just opening the relevant file in your browser won't work).

Here we use another (non-Node.js) server to show that the static site works. Python 3 has a really useful simple HTTP server that you can use here:

```
cd out
python3 -m http.server
```

41  examples/data-literate-template/components/DataLiterate.js  Normal file
@@ -0,0 +1,41 @@

import Layout from './Layout'
import Head from 'next/head'
import Excel from './Excel'
import Table from './Table'
import TableGrid from './TableGrid'
import LineChart from './LineChart'
import MetaData from './Metadata'
import { MDXProvider } from '@mdx-js/react'
import { Vega, VegaLite } from 'react-vega'

// Custom components/renderers to pass to MDX.
// Since the MDX files aren't loaded by webpack, they have no knowledge of how
// to handle import statements. Instead, you must include components in scope
// here.
const components = {
  Table,
  Excel,
  Vega,
  VegaLite,
  LineChart,
  Head,
  TableGrid,
  MetaData,
}

export default function DataLiterate({ children }) {
  const { Component, pageProps } = children

  return (
    <Layout>
      <main>
        <MDXProvider components={components}>
          <div className="prose mx-auto">
            <Component {...pageProps} />
          </div>
        </MDXProvider>
      </main>
    </Layout>
  )
}

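The `components` map above is what lets `pages/demo.mdx` use `<Table />`, `<LineChart />` and friends without importing them. A minimal sketch of extending that scope, assuming a hypothetical extra `BarChart` component (not part of this commit):

```js
// Sketch only: BarChart is a hypothetical extra component, shown to illustrate
// how the MDX scope is extended; the other imports mirror DataLiterate.js above.
import Table from './Table'
import Excel from './Excel'
import BarChart from './BarChart' // hypothetical

const components = {
  Table,
  Excel,
  BarChart, // now usable as <BarChart /> in any .mdx page without an import there
}
```
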
74  examples/data-literate-template/components/Excel.js  Normal file
@@ -0,0 +1,74 @@

import axios from 'axios'
import XLSX from 'xlsx'
import React, { useEffect, useState } from 'react'

import Table from './Table'

export default function Excel({ src = '' }) {
  const [data, setData] = React.useState([])
  const [cols, setCols] = React.useState([])
  const [workbook, setWorkbook] = React.useState(null)
  const [error, setError] = React.useState('')
  const [hasMounted, setHasMounted] = React.useState(0)

  // this effect runs in the browser after mount so the file is loaded client-side,
  // not just when the page is built statically by nextjs
  useEffect(() => {
    if (hasMounted == 0) {
      handleUrl(src)
    }
    setHasMounted(1)
  })

  function handleUrl(url) {
    // if url is external may have CORS issue so we proxy it ...
    if (url.startsWith('http')) {
      const PROXY_URL = window.location.origin + '/api/proxy'
      url = PROXY_URL + '?url=' + encodeURIComponent(url)
    }
    axios.get(url, {
      responseType: 'arraybuffer'
    }).then((res) => {
      let out = new Uint8Array(res.data)
      let workbook = XLSX.read(out, {type: "array"})
      // Get first worksheet
      const wsname = workbook.SheetNames[0]
      const ws = workbook.Sheets[wsname]
      // Convert to array of arrays
      const datatmp = XLSX.utils.sheet_to_json(ws, {header: 1})
      const colstmp = make_cols(ws['!ref'])
      setData(datatmp)
      setCols(colstmp)
      setWorkbook(workbook)
    }).catch((e) => {
      setError(e.message)
    })
  }

  return (
    <>
      {error &&
        <div>
          There was an error loading the excel file at {src}:
          <p>{error}</p>
        </div>
      }
      {workbook &&
        <ul>
          {workbook.SheetNames.map((value, index) => {
            return <li key={index}>{value}</li>
          })}
        </ul>
      }
      <Table data={data} cols={cols} />
    </>
  )
}

/* generate an array of column objects */
const make_cols = refstr => {
  let o = [], C = XLSX.utils.decode_range(refstr).e.c + 1
  for (var i = 0; i < C; ++i) o[i] = { name: XLSX.utils.encode_col(i), key: i }
  return o
}

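As a quick illustration of what `make_cols` produces, here is a standalone sketch run against an assumed worksheet range of `'A1:C10'` (the range string is illustrative):

```js
// Sketch: make_cols('A1:C10') under the implementation above.
// decode_range('A1:C10').e.c === 2, so three spreadsheet-letter columns come back.
const XLSX = require('xlsx')

const make_cols = refstr => {
  let o = [], C = XLSX.utils.decode_range(refstr).e.c + 1
  for (var i = 0; i < C; ++i) o[i] = { name: XLSX.utils.encode_col(i), key: i }
  return o
}

console.log(make_cols('A1:C10'))
// [ { name: 'A', key: 0 }, { name: 'B', key: 1 }, { name: 'C', key: 2 } ]
```
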
29  examples/data-literate-template/components/Layout.js  Normal file
@@ -0,0 +1,29 @@

import Link from 'next/link'
import Head from 'next/head'

export default function Layout({ children, title = 'Home' }) {
  return (
    <>
      <Head>
        <title>Portal.JS - {title}</title>
        <link rel="icon" href="/favicon.ico" />
        <meta charSet="utf-8" />
        <meta name="viewport" content="initial-scale=1.0, width=device-width" />
      </Head>
      <div className="mx-auto p-6">
        {children}
      </div>
      <footer className="flex items-center justify-center w-full h-24 border-t">
        <a
          className="flex items-center justify-center"
          href="https://datopian.com/"
          target="_blank"
          rel="noopener noreferrer"
        >
          Built by{' '}
          <img src="/datopian-logo.png" alt="Datopian Logo" className="h-6 ml-2" />
        </a>
      </footer>
    </>
  )
}

33  examples/data-literate-template/components/LineChart.js  Normal file
@@ -0,0 +1,33 @@

import { Vega, VegaLite } from 'react-vega'

export default function LineChart({ data = [] }) {
  var tmp = data
  if (Array.isArray(data)) {
    tmp = data.map((r, i) => {
      return { x: r[0], y: r[1] }
    })
  }
  const vegaData = { "table": tmp }
  const spec = {
    "$schema": "https://vega.github.io/schema/vega-lite/v5.json",
    "mark": "line",
    "data": {
      "name": "table"
    },
    "encoding": {
      "x": {
        "field": "x",
        "timeUnit": "year",
        "type": "temporal"
      },
      "y": {
        "field": "y",
        "type": "quantitative"
      }
    }
  }

  return (
    <VegaLite data={ vegaData } spec={ spec } />
  )
}

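The only data munging `LineChart` does is reshape array-of-array rows into the `{x, y}` records that the Vega-Lite spec's `table` dataset expects; a minimal sketch using the first two rows from the demo page in this commit:

```js
// Sketch of the row-to-point mapping performed inside LineChart above.
const data = [["1850", -0.41765878], ["1851", -0.2333498]]
const points = data.map((r) => ({ x: r[0], y: r[1] }))
console.log(points)
// [ { x: '1850', y: -0.41765878 }, { x: '1851', y: -0.2333498 } ]
```
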
18  examples/data-literate-template/components/Metadata.js  Normal file
@@ -0,0 +1,18 @@

export default function MetaData({ title, author, description }) {
  return (
    <header>
      <div className="mb-6">
        <h1>{title}</h1>
        {author && (
          <div className="-mt-6"><p className="opacity-60 pl-1">{author}</p></div>
        )}
        {description && (
          <p className="description">{description}</p>
        )}
      </div>
    </header>
  )
}

83  examples/data-literate-template/components/Table.js  Normal file
@@ -0,0 +1,83 @@

import axios from 'axios'
import React, { useEffect, useState } from 'react'

const papa = require("papaparse")

/*
Simple HTML table driven by data/cols, raw CSV or a URL
  usage: <Table data={data} cols={cols} csv="..." url="..." />
  data: Array<Array<any>>
  cols: Array<{name: string, key: number|string}>
*/
export default function Table({ data = [], cols = [], csv = '', url = '' }) {
  if (csv) {
    const out = parseCsv(csv)
    data = out.rows
    cols = out.fields
  }

  const [ourdata, setData] = React.useState(data)
  const [ourcols, setCols] = React.useState(cols)
  const [error, setError] = React.useState('')

  useEffect(() => {
    if (url) {
      loadUrl(url)
    }
  }, [url])

  function loadUrl(path) {
    // HACK: duplicate of Excel code - maybe refactor
    // if url is external may have CORS issue so we proxy it ...
    if (url.startsWith('http')) {
      const PROXY_URL = window.location.origin + '/api/proxy'
      url = PROXY_URL + '?url=' + encodeURIComponent(url)
    }
    axios.get(url).then((res) => {
      const { rows, fields } = parseCsv(res.data)
      setData(rows)
      setCols(fields)
    })
  }

  return (
    <>
      <SimpleTable data={ourdata} cols={ourcols} />
    </>
  )
}

/*
Simple HTML Table
  usage: <SimpleTable data={data} cols={cols} />
  data: Array<Array<any>>
  cols: Array<{name: string, key: number|string}>
*/
function SimpleTable({ data = [], cols = [] }) {
  return (
    <div className="table-responsive">
      <table className="table table-striped">
        <thead>
          <tr>{cols.map((c) => <th key={c.key}>{c.name}</th>)}</tr>
        </thead>
        <tbody>
          {data.map((r, i) => <tr key={i}>
            {cols.map(c => <td key={c.key}>{r[c.key]}</td>)}
          </tr>)}
        </tbody>
      </table>
    </div>
  )
}

function parseCsv(csv) {
  csv = csv.trim()
  const rawdata = papa.parse(csv, { header: true })
  const cols = rawdata.meta.fields.map((r, i) => {
    return { key: r, name: r }
  })
  return {
    rows: rawdata.data,
    fields: cols
  }
}

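A small sketch of the shape `parseCsv` builds on top of papaparse's header mode, using the two-column CSV that the demo page passes to `<Table csv={...} />` (values stay strings because `dynamicTyping` is not enabled):

```js
// Sketch of the raw papaparse output that parseCsv wraps into { rows, fields }.
const papa = require('papaparse')

const csv = 'Year,Temp Anomaly\n1850,-0.418\n2020,0.923'
const rawdata = papa.parse(csv, { header: true })

console.log(rawdata.meta.fields)
// [ 'Year', 'Temp Anomaly' ]
console.log(rawdata.data)
// [ { Year: '1850', 'Temp Anomaly': '-0.418' },
//   { Year: '2020', 'Temp Anomaly': '0.923' } ]
```
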
81  examples/data-literate-template/components/TableGrid.js  Normal file
@@ -0,0 +1,81 @@

import axios from 'axios'
import React, { useEffect } from 'react'
import { Table } from 'portal'

const papa = require("papaparse")

/*
Portaljs Table Grid
usage: <TableGrid url="" data={data} cols={cols} csv="" />
*/
export default function TableGrid({ data = [], cols = [], csv = '', url = '' }) {

  if (csv) {
    const out = parseCsv(csv)
    data = prepareRowsForPortalJsTable(out.rows)
    cols = out.fields
  }

  if (cols) {
    cols = prepareColsForPortalJsTable(cols)
  }

  const [ourdata, setData] = React.useState(data)
  const [ourcols, setCols] = React.useState(cols)

  useEffect(() => {
    if (url) {
      loadUrl(url)
    }
  }, [url])

  function loadUrl(path) {
    // HACK: duplicate of Excel code - maybe refactor
    // if url is external may have CORS issue so we proxy it ...
    if (url.startsWith('http')) {
      const PROXY_URL = window.location.origin + '/api/proxy'
      url = PROXY_URL + '?url=' + encodeURIComponent(url)
    }
    axios.get(url).then((res) => {
      const { rows, fields } = parseCsv(res.data)
      setData(rows)
      setCols(prepareColsForPortalJsTable(fields))
    })
  }

  return (
    <div>
      <Table columns={ourcols} data={ourdata} height={"400px"} />
    </div>
  )
}

function prepareColsForPortalJsTable(cols) {
  return cols.map((col) => {
    return {
      field: col.key,
      headerName: col.name,
      flex: true
    }
  })
}

function prepareRowsForPortalJsTable(rows) {
  return rows.map((r) => {
    return {
      ...r,
      id: r.id || r.key
    }
  })
}

function parseCsv(csv) {
  csv = csv.trim()
  const rawdata = papa.parse(csv, { header: true })
  const cols = rawdata.meta.fields.map((r, i) => {
    return { key: r, name: r }
  })
  return {
    rows: rawdata.data,
    fields: cols
  }
}

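For reference, a small sketch of how `prepareColsForPortalJsTable` adapts parsed CSV columns into the `{field, headerName}` shape handed to the portal `Table` (the column names are taken from the CSV examples used in the demo page):

```js
// Sketch of the column adaptation performed above.
const cols = [
  { key: 'Year', name: 'Year' },
  { key: 'Temp Anomaly', name: 'Temp Anomaly' },
]
const gridCols = cols.map((col) => ({ field: col.key, headerName: col.name, flex: true }))
console.log(gridCols)
// [ { field: 'Year', headerName: 'Year', flex: true },
//   { field: 'Temp Anomaly', headerName: 'Temp Anomaly', flex: true } ]
```
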
20  examples/data-literate-template/next.config.mjs  Normal file
@@ -0,0 +1,20 @@

import gfm from 'remark-gfm'
import toc from 'remark-toc'
import slug from 'remark-slug'
import remarkFrontmatter from 'remark-frontmatter'
import { remarkMdxFrontmatter } from 'remark-mdx-frontmatter'
import withMDXImp from '@next/mdx'

const withMDX = withMDXImp({
  extension: /\.mdx?$/,
  options: {
    remarkPlugins: [remarkFrontmatter, remarkMdxFrontmatter, gfm, toc, slug],
    rehypePlugins: [],
    // Needed because pages are rendered through `MDXProvider` (see components/DataLiterate.js).
    providerImportSource: "@mdx-js/react",
  },
})
export default withMDX({
  // Append md/mdx to the default page extensions
  pageExtensions: ['ts', 'tsx', 'js', 'jsx', 'md', 'mdx'],
})

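With `remark-frontmatter` plus `remark-mdx-frontmatter` configured as above (default options for `remark-mdx-frontmatter@^1`), each top-level frontmatter key appears to become a named export of the compiled MDX module, which is how `pages/demo.mdx` can reference `{title}` and `{author}` directly. A sketch under that assumption; the relative import path and usage are illustrative, not part of the commit:

```js
// Sketch, assuming remark-mdx-frontmatter's per-key named exports (its v1 default);
// the import path below is illustrative only.
import Demo, { title, author, description } from '../pages/demo.mdx'

console.log(title)  // "Demo"
console.log(author) // "Rufus Pollock"
```
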
50978  examples/data-literate-template/package-lock.json  generated  Normal file
File diff suppressed because it is too large.
43  examples/data-literate-template/package.json  Normal file
@@ -0,0 +1,43 @@

{
  "name": "docs",
  "version": "0.1.0",
  "private": true,
  "scripts": {
    "dev": "next dev",
    "build": "next build",
    "export": "next export",
    "start": "next start"
  },
  "engines": {
    "node": ">12"
  },
  "dependencies": {
    "@headlessui/react": "^1.3.0",
    "@heroicons/react": "^1.0.3",
    "@mdx-js/loader": "^2.0.0",
    "@mdx-js/react": "^2.0.0",
    "@next/mdx": "^12.1.0",
    "@tailwindcss/typography": "^0.5.2",
    "autoprefixer": "^10.4.2",
    "frictionless.js": "^0.13.4",
    "next": "12.1.0",
    "papaparse": "^5.3.1",
    "portal": "https://github.com/datopian/portal.js.git",
    "postcss": "^8.4.7",
    "prop-types": "^15.7.2",
    "react": "17.0.1",
    "react-dom": "17.0.1",
    "react-vega": "^7.4.4",
    "remark": "^13.0.0",
    "remark-footnotes": "^3.0.0",
    "remark-frontmatter": "^4.0.1",
    "remark-gfm": "^1.0.0",
    "remark-mdx-frontmatter": "^1.1.1",
    "remark-slug": "^6.1.0",
    "remark-toc": "^7.2.0",
    "tailwindcss": "^3.0.23",
    "vega": "^5.20.2",
    "vega-lite": "^5.1.0",
    "xlsx": "^0.17.0"
  }
}

12  examples/data-literate-template/pages/_app.js  Normal file
@@ -0,0 +1,12 @@

import '../styles/globals.css'
import '../styles/tailwind.css'
import DataLiterate from '../components/DataLiterate'


function MyApp({ Component, pageProps }) {
  return (
    <DataLiterate children={{ Component, pageProps }}/>
  )
}

export default MyApp

26  examples/data-literate-template/pages/api/proxy.js  Normal file
@@ -0,0 +1,26 @@

import axios from 'axios'

export default function handler(req, res) {
  if (!req.query.url) {
    res.status(200).send({
      error: true,
      info: 'No url to proxy in query string i.e. ?url=...'
    })
    return
  }
  axios({
    method: 'get',
    url: req.query.url,
    responseType: 'stream'
  })
    .then(resp => {
      resp.data.pipe(res)
    })
    .catch(err => {
      res.status(400).send({
        error: true,
        info: err.message,
        detailed: err
      })
    })
}

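The Excel, Table and TableGrid components above route any external `src`/`url` through this endpoint; a minimal client-side sketch of the same call (the remote URL is illustrative):

```js
// Sketch: fetching an external CSV through the proxy to sidestep CORS.
import axios from 'axios'

const remote = 'https://example.com/data.csv' // illustrative external URL
const proxied = '/api/proxy?url=' + encodeURIComponent(remote)

axios.get(proxied).then((res) => {
  console.log(res.data) // the remote file's body, streamed back by the proxy
})
```
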
336  examples/data-literate-template/pages/demo.mdx  Normal file
@@ -0,0 +1,336 @@

---
title: Demo
author: Rufus Pollock
description: A live demo and documentation of Data Literate features
---

<MetaData title={title} author={author} description={description} />

You can see the raw source of this page here: https://raw.githubusercontent.com/datopian/data-literate/main/content/demo.mdx

## Table of Contents

## GFM

We can have GitHub-flavored markdown including tables, autolinked URLs and task lists:

```
https://github.com/datopian/portal.js

| a | b |
|---|---|
| 1 | 2 |

* [x] one thing to do
* [ ] a second thing to do
```

https://github.com/datopian/portal.js

| a | b |
|---|---|
| 1 | 2 |

* [x] one thing to do
* [ ] a second thing to do

## Footnotes

```
here is a footnote reference[^1]

[^1]: a very interesting footnote.
```

here is a footnote reference[^1]

[^1]: a very interesting footnote.

## Frontmatter

Posts can have frontmatter like:

```
---
title: Hello World
author: Rufus Pollock
---
```

The frontmatter is parsed by `remark-frontmatter` and exposed to the page by `remark-mdx-frontmatter`; that is how the `title`, `author` and `description` used in the `<MetaData />` at the top of this page become available.

## A Table of Contents

You can create a table of contents by having a markdown heading named `Table of Contents`. You can see an example at the start of this post.

## A Table

You can create a simple table ...

```
<Table cols={[
  { key: 'id', name: 'ID' },
  { key: 'firstName', name: 'First name' },
  { key: 'lastName', name: 'Last name' },
  { key: 'age', name: 'Age' }
]} data={[
  { id: 1, lastName: 'Snow', firstName: 'Jon', age: 35 },
  { id: 2, lastName: 'Lannister', firstName: 'Cersei', age: 42 },
  { id: 3, lastName: 'Lannister', firstName: 'Jaime', age: 45 },
  { id: 4, lastName: 'Stark', firstName: 'Arya', age: 16 },
  { id: 7, lastName: 'Clifford', firstName: 'Ferrara', age: 44 },
  { id: 8, lastName: 'Frances', firstName: 'Rossini', age: 36 },
  { id: 9, lastName: 'Roxie', firstName: 'Harvey', age: 65 },
]}
/>
```

<Table cols={[
  { key: 'id', name: 'ID' },
  { key: 'firstName', name: 'First name' },
  { key: 'lastName', name: 'Last name' },
  { key: 'age', name: 'Age' }
]} data={[
  { id: 1, lastName: 'Snow', firstName: 'Jon', age: 35 },
  { id: 2, lastName: 'Lannister', firstName: 'Cersei', age: 42 },
  { id: 3, lastName: 'Lannister', firstName: 'Jaime', age: 45 },
  { id: 4, lastName: 'Stark', firstName: 'Arya', age: 16 },
  { id: 7, lastName: 'Clifford', firstName: 'Ferrara', age: 44 },
  { id: 8, lastName: 'Frances', firstName: 'Rossini', age: 36 },
  { id: 9, lastName: 'Roxie', firstName: 'Harvey', age: 65 },
]}
/>

### Table from Raw CSV

You can also pass raw CSV as the content ...

```
<Table csv={`
Year,Temp Anomaly
1850,-0.418
2020,0.923
`} />
```

<Table csv={`
Year,Temp Anomaly
1850,-0.418
2020,0.923
`} />

### Table from a URL

<Table url='/_files/HadCRUT.5.0.1.0.analysis.summary_series.global.annual.csv' />

```
<Table url='/_files/HadCRUT.5.0.1.0.analysis.summary_series.global.annual.csv' />
```
___

You can also create a Table Grid, with more advanced features:

```
<TableGrid cols={[
  { key: 'id', name: 'ID' },
  { key: 'firstName', name: 'First name' },
  { key: 'lastName', name: 'Last name' },
  { key: 'age', name: 'Age' }
]} data={[
  { id: 1, lastName: 'Snow', firstName: 'Jon', age: 35 },
  { id: 2, lastName: 'Lannister', firstName: 'Cersei', age: 42 },
  { id: 3, lastName: 'Lannister', firstName: 'Jaime', age: 45 },
  { id: 4, lastName: 'Stark', firstName: 'Arya', age: 16 },
  { id: 7, lastName: 'Clifford', firstName: 'Ferrara', age: 44 },
  { id: 8, lastName: 'Frances', firstName: 'Rossini', age: 36 },
  { id: 9, lastName: 'Roxie', firstName: 'Harvey', age: 65 },
]}
/>
```

<TableGrid cols={[
  { key: 'id', name: 'ID' },
  { key: 'firstName', name: 'First name' },
  { key: 'lastName', name: 'Last name' },
  { key: 'age', name: 'Age' }
]} data={[
  { id: 1, lastName: 'Snow', firstName: 'Jon', age: 35 },
  { id: 2, lastName: 'Lannister', firstName: 'Cersei', age: 42 },
  { id: 3, lastName: 'Lannister', firstName: 'Jaime', age: 45 },
  { id: 4, lastName: 'Stark', firstName: 'Arya', age: 16 },
  { id: 7, lastName: 'Clifford', firstName: 'Ferrara', age: 44 },
  { id: 8, lastName: 'Frances', firstName: 'Rossini', age: 36 },
  { id: 9, lastName: 'Roxie', firstName: 'Harvey', age: 65 },
]}
/>

### Table Grid from Raw CSV

You can also pass raw CSV as the content ...

```
<TableGrid csv={`
Year,Temp Anomaly
1850,-0.418
2020,0.923
`} />
```

<TableGrid csv={`
Year,Temp Anomaly
1850,-0.418
2020,0.923
`} />

### Table Grid from a URL

```
<TableGrid url='/_files/HadCRUT.5.0.1.0.analysis.summary_series.global.annual.csv' />
```

<TableGrid url='/_files/HadCRUT.5.0.1.0.analysis.summary_series.global.annual.csv' />

## Charts

You can create charts using a simple syntax.

### Line Chart

<LineChart data={
  [
    ["1850",-0.41765878],
    ["1851",-0.2333498],
    ["1852",-0.22939907],
    ["1853",-0.27035445],
    ["1854",-0.29163003]
  ]
}
/>

```
<LineChart data={
  [
    ["1850",-0.41765878],
    ["1851",-0.2333498],
    ["1852",-0.22939907],
    ["1853",-0.27035445],
    ["1854",-0.29163003]
  ]
}
/>
```

NB: the years are quoted so that they are interpreted as dates rather than as integers.

### Vega and Vega-Lite

You can use Vega or Vega-Lite. Here's an example using Vega-Lite:

<VegaLite data={ { "table": [
  {
    "y": -0.418,
    "x": 1850
  },
  {
    "y": 0.923,
    "x": 2020
  }
] } } spec={
  {
    "$schema": "https://vega.github.io/schema/vega-lite/v4.json",
    "mark": "bar",
    "data": {
      "name": "table"
    },
    "encoding": {
      "x": {
        "field": "x",
        "type": "ordinal"
      },
      "y": {
        "field": "y",
        "type": "quantitative"
      }
    }
  }
} />

```jsx
<VegaLite data={ { "table": [
  {
    "y": -0.418,
    "x": 1850
  },
  {
    "y": 0.923,
    "x": 2020
  }
] } } spec={
  {
    "$schema": "https://vega.github.io/schema/vega-lite/v4.json",
    "mark": "bar",
    "data": {
      "name": "table"
    },
    "encoding": {
      "x": {
        "field": "x",
        "type": "ordinal"
      },
      "y": {
        "field": "y",
        "type": "quantitative"
      }
    }
  }
} />
```

#### Line Chart from URL with Tooltip

https://vega.github.io/vega-lite/examples/interactive_multi_line_pivot_tooltip.html

<VegaLite spec={
  {
    "$schema": "https://vega.github.io/schema/vega-lite/v5.json",
    "data": {"url": "/_files/HadCRUT.5.0.1.0.analysis.summary_series.global.annual.csv"},
    "width": 600,
    "height": 250,
    "mark": "line",
    "encoding": {
      "x": {"field": "Time", "type": "temporal"},
      "y": {"field": "Anomaly (deg C)", "type": "quantitative"},
      "tooltip": {"field": "Anomaly (deg C)", "type": "quantitative"}
    }
  }
} />

## Display Excel Files

Local file ...

```
<Excel src='/_files/eight-centuries-of-global-real-interest-rates-r-g-and-the-suprasecular-decline-1311-2018-data.xlsx' />
```

<Excel src='/_files/eight-centuries-of-global-real-interest-rates-r-g-and-the-suprasecular-decline-1311-2018-data.xlsx' />

Remote files work too (even without CORS) thanks to proxying:

```
<Excel src='https://github.com/datasets/awesome-data/files/6604635/eight-centuries-of-global-real-interest-rates-r-g-and-the-suprasecular-decline-1311-2018-data.xlsx' />
```

<Excel src='https://github.com/datasets/awesome-data/files/6604635/eight-centuries-of-global-real-interest-rates-r-g-and-the-suprasecular-decline-1311-2018-data.xlsx' />

8  examples/data-literate-template/postcss.config.js  Normal file
@@ -0,0 +1,8 @@

// If you want to use other PostCSS plugins, see the following:
// https://tailwindcss.com/docs/using-with-preprocessors
module.exports = {
  plugins: {
    tailwindcss: {},
    autoprefixer: {},
  },
}

BIN  examples/data-literate-template/public/datopian-logo.png  Normal file  (binary file not shown; size 33 KiB)
BIN  examples/data-literate-template/public/favicon.ico  Normal file  (binary file not shown; size 15 KiB)
16  examples/data-literate-template/styles/globals.css  Normal file
@@ -0,0 +1,16 @@

html,
body {
  padding: 0;
  margin: 0;
  font-family: -apple-system, BlinkMacSystemFont, Segoe UI, Roboto, Oxygen,
    Ubuntu, Cantarell, Fira Sans, Droid Sans, Helvetica Neue, sans-serif;
}

a {
  color: inherit;
  text-decoration: none;
}

* {
  box-sizing: border-box;
}

3  examples/data-literate-template/styles/tailwind.css  Normal file
@@ -0,0 +1,3 @@

@tailwind base;
@tailwind components;
@tailwind utilities;

29  examples/data-literate-template/tailwind.config.js  Normal file
@@ -0,0 +1,29 @@

const defaultTheme = require("tailwindcss/defaultTheme");

module.exports = {
  // purge: ['./pages/**/*.{js,ts,jsx,tsx}', './components/**/*.{js,ts,jsx,tsx}'],
  content: [
    "./pages/**/*.js",
    "./pages/**/*.ts",
    "./pages/**/*.jsx",
    "./pages/**/*.tsx",
    "./components/**/*.js",
    "./components/**/*.ts",
    "./components/**/*.jsx",
    "./components/**/*.tsx"
  ],
  darkMode: false, // or 'media' or 'class'
  theme: {
    container: {
      center: true,
    },
    extend: {
      fontFamily: {
        mono: ["Inconsolata", ...defaultTheme.fontFamily.mono]
      }
    },
  },
  plugins: [
    require('@tailwindcss/typography'),
  ],
}