Compare commits

22 commits — comparing @portaljs/… to @portaljs/…

| Author | SHA1 | Date |
|---|---|---|
|  | `3aac4dabf9` |  |
|  | `a044f56e3c` |  |
|  | `1b58c311eb` |  |
|  | `ed9ac2c263` |  |
|  | `42c72e5afd` |  |
|  | `9e1a324fa1` |  |
|  | `90178af8f2` |  |
|  | `00e61e104c` |  |
|  | `f7f03fddca` |  |
|  | `0891dfde2d` |  |
|  | `c904e3731b` |  |
|  | `86a2945ee6` |  |
|  | `09daa98b28` |  |
|  | `b511c9f71b` |  |
|  | `464cda6db8` |  |
|  | `2bbf313489` |  |
|  | `c26b76368d` |  |
|  | `af11f0cfd5` |  |
|  | `9ae2b31113` |  |
|  | `2bffd130c8` |  |
|  | `058d23678a` |  |
|  | `540a08934c` |  |
FiveThirtyEight example README

```diff
@@ -1,3 +1,9 @@
+# PortalJS Demo replicating the FiveThirtyEight data portal
+
+## 👉 https://fivethirtyeight.portaljs.org 👈
+
+Here's a blog post we wrote about it: https://www.datopian.com/blog/fivethirtyeight-replica
+
 This is a replica of the awesome data.fivethirtyeight.com using PortalJS.
 
 You might be asking why we did that, there are three main reasons:
```
package-lock.json — generated, 3518 lines changed; file diff suppressed because it is too large.
`@portaljs/components` CHANGELOG

```diff
@@ -1,5 +1,29 @@
 # @portaljs/components
 
+## 0.6.0
+
+### Minor Changes
+
+- [`a044f56e`](https://github.com/datopian/portaljs/commit/a044f56e3cbe0519ddf9d24d78b0bb7eac917e1c) Thanks [@luccasmmg](https://github.com/luccasmmg)! - Added plotly components
+
+## 0.5.10
+
+### Patch Changes
+
+- [#1083](https://github.com/datopian/portaljs/pull/1083) [`86a2945e`](https://github.com/datopian/portaljs/commit/86a2945ee68dfcea0299984ca9cc9070d68fe1c2) Thanks [@Gutts-n](https://github.com/Gutts-n)! - Created integration with datastore api for table component
+
+## 0.5.9
+
+### Patch Changes
+
+- [#1081](https://github.com/datopian/portaljs/pull/1081) [`2bbf3134`](https://github.com/datopian/portaljs/commit/2bbf3134896df3ecc66560bdf95bece143614c7b) Thanks [@Gutts-n](https://github.com/Gutts-n)! - Fixed error to remove anchor from document
+
+## 0.5.8
+
+### Patch Changes
+
+- [#1079](https://github.com/datopian/portaljs/pull/1079) [`058d2367`](https://github.com/datopian/portaljs/commit/058d23678a024890f8a6d909ded9fc8fc11cf145) Thanks [@Gutts-n](https://github.com/Gutts-n)! - Changed the download behaviour of the bucket viewer component and removed loading component while downloading
+
 ## 0.5.7
 
 ### Patch Changes
```
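The 0.6.0 minor release corresponds to the new Plotly components added in this comparison. As a rough sketch of the consumer-facing surface (the named exports come from the component index diff further down; the import path is an assumption based on the package name in package.json):

```tsx
// Sketch only: these named exports are what 0.6.0 adds on top of 0.5.x.
// "@portaljs/components" is the package name from package.json; the exact
// published entry point is assumed here.
import { Plotly, PlotlyBarChart, PlotlyLineChart } from "@portaljs/components";
```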
`@portaljs/components` package.json

```diff
@@ -1,6 +1,6 @@
 {
   "name": "@portaljs/components",
-  "version": "0.5.7",
+  "version": "0.6.0",
   "type": "module",
   "description": "https://portaljs.org",
   "keywords": [
@@ -40,11 +40,13 @@
     "ol": "^7.4.0",
     "papaparse": "^5.4.1",
     "pdfjs-dist": "2.15.349",
+    "plotly.js": "^2.30.1",
     "postcss-url": "^10.1.3",
     "react": "^18.2.0",
     "react-dom": "^18.2.0",
     "react-hook-form": "^7.43.9",
     "react-leaflet": "^4.2.1",
+    "react-plotly.js": "^2.6.0",
     "react-query": "^3.39.3",
     "react-vega": "^7.6.0",
     "vega": "5.25.0",
```
BucketViewer component

```diff
@@ -10,7 +10,6 @@ export interface BucketViewerProps {
   onLoadTotalNumberOfItems?: (total: number) => void;
   domain: string;
   downloadConfig?: {
-    downloadingMessageComponent?: ReactNode;
     hoverOfTheFileComponent?: ReactNode;
   };
   suffix?: string;
@@ -47,14 +46,10 @@ export function BucketViewer({
 }: BucketViewerProps) {
   suffix = suffix ?? '/';
 
-  const { downloadingMessageComponent, hoverOfTheFileComponent } =
-    downloadConfig ?? {};
+  const { hoverOfTheFileComponent } = downloadConfig ?? {};
   const [isLoading, setIsLoading] = useState<boolean>(false);
   const [showDownloadComponentOnLine, setShowDownloadComponentOnLine] =
     useState(-1);
-  const [showDownloadLoadingOnFile, setShowDownloadLoadingOnFile] = useState(
-    new Map<string, boolean>()
-  );
   const [currentPage, setCurrentPage] = useState<number>(0);
   const [lastPage, setLastPage] = useState<number>(0);
   const [bucketFiles, setBucketFiles] = useState<BucketViewerData[]>([]);
@@ -136,31 +131,13 @@ export function BucketViewer({
         (data, i) => (
           <ul
             onClick={() => {
-              const anchorId = `download_anchor_${data.fileName} `;
-              const a: HTMLAnchorElement =
-                (document.getElementById(
-                  anchorId
-                ) as HTMLAnchorElement | null) ?? document.createElement('a');
-              a.id = anchorId;
-              if (a.download) a.click();
-              else {
-                setShowDownloadLoadingOnFile((lastState) => {
-                  lastState.set(data.fileName, true);
-                  return new Map(lastState);
-                });
-                fetch(data.downloadFileUri)
-                  .then((res) => res.blob())
-                  .then((res) => {
-                    setShowDownloadLoadingOnFile((lastState) => {
-                      lastState.set(data.fileName, false);
-                      return new Map(lastState);
-                    });
-                    a.href = URL.createObjectURL(res);
-                    a.download = res.name ?? data.fileName;
-                    document.body.appendChild(a);
-                    a.click();
-                  });
-              }
+              const a: HTMLAnchorElement = document.createElement('a');
+              a.href = data.downloadFileUri;
+              a.target = `_blank`;
+              a.download = data.fileName;
+              document.body.appendChild(a);
+              a.click();
+              document.body.removeChild(a);
             }}
             key={i}
             onMouseEnter={() => setShowDownloadComponentOnLine(i)}
@@ -184,13 +161,6 @@ export function BucketViewer({
               <></>
             )}
           </div>
-          {showDownloadLoadingOnFile.get(data.fileName) ? (
-            downloadingMessageComponent ?? (
-              <label>Downloading file...</label>
-            )
-          ) : (
-            <></>
-          )}
         </div>
       </ul>
     )
```
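With the downloading-state machinery removed, clicking a file now just creates an anchor pointing at `downloadFileUri` and clicks it. A minimal usage sketch, assuming props as they appear in this diff and in the existing BucketViewer stories (the `dataMapperFn` shape is copied from the story removed further down; treat it as illustrative, not canonical):

```tsx
import { BucketViewer } from "@portaljs/components"; // import path assumed

export const Example = () => (
  <BucketViewer
    domain="https://ssen-smart-meter.datopian.workers.dev" // example domain from the stories
    suffix="/"
    // downloadingMessageComponent is gone after this change; only the hover component remains
    downloadConfig={{ hoverOfTheFileComponent: "Click to download" }}
    dataMapperFn={async (rawData: Response) => {
      const result = await rawData.json();
      return result.objects.map((e: any) => ({
        downloadFileUri: e.downloadLink,
        fileName: e.key.replace(/^(\w+\/)/g, ""),
      }));
    }}
  />
);
```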
packages/components/src/components/Plotly.tsx (new file, 9 lines)

```tsx
import Plot, { PlotParams } from "react-plotly.js";

export const Plotly: React.FC<PlotParams> = (props) => {
  return (
    <div>
      <Plot {...props} />
    </div>
  );
};
```
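The wrapper simply forwards `react-plotly.js` props. A minimal usage sketch, with args mirroring the `Plotly` story added later in this comparison (import path assumed):

```tsx
import { Plotly } from "@portaljs/components"; // assumed entry point

export const Example = () => (
  <Plotly
    // Any PlotParams accepted by react-plotly.js are passed straight through.
    data={[{ x: [1, 2, 3], y: [2, 6, 3], type: "scatter", mode: "lines+markers" }]}
    layout={{ title: "Chart built with Plotly" }}
  />
);
```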
packages/components/src/components/PlotlyBarChart.tsx (new file, 157 lines)

```tsx
import { QueryClient, QueryClientProvider, useQuery } from "react-query";
import { Plotly } from "./Plotly";
import Papa, { ParseConfig } from "papaparse";
import LoadingSpinner from "./LoadingSpinner";

const queryClient = new QueryClient();

async function getCsv(url: string, bytes: number) {
  const response = await fetch(url, {
    headers: {
      Range: `bytes=0-${bytes}`,
    },
  });
  const data = await response.text();
  return data;
}

async function parseCsv(
  file: string,
  parsingConfig: ParseConfig,
): Promise<any> {
  return new Promise((resolve, reject) => {
    Papa.parse(file, {
      ...parsingConfig,
      header: true,
      dynamicTyping: true,
      skipEmptyLines: true,
      transform: (value: string): string => {
        return value.trim();
      },
      complete: (results: any) => {
        return resolve(results);
      },
      error: (error: any) => {
        return reject(error);
      },
    });
  });
}

export interface PlotlyBarChartProps {
  url?: string;
  data?: { [key: string]: number | string }[];
  rawCsv?: string;
  randomId?: number;
  bytes?: number;
  parsingConfig?: ParseConfig;
  xAxis: string;
  yAxis: string;
  lineLabel?: string;
  title?: string;
}

export const PlotlyBarChart: React.FC<PlotlyBarChartProps> = ({
  url,
  data,
  rawCsv,
  bytes = 5132288,
  parsingConfig = {},
  xAxis,
  yAxis,
  lineLabel,
  title = "",
}) => {
  const randomId = Math.random();
  return (
    // Provide the client to your App
    <QueryClientProvider client={queryClient}>
      <PlotlyBarChartInner
        url={url}
        data={data}
        rawCsv={rawCsv}
        randomId={randomId}
        bytes={bytes}
        parsingConfig={parsingConfig}
        xAxis={xAxis}
        yAxis={yAxis}
        lineLabel={lineLabel ?? yAxis}
        title={title}
      />
    </QueryClientProvider>
  );
};

const PlotlyBarChartInner: React.FC<PlotlyBarChartProps> = ({
  url,
  data,
  rawCsv,
  randomId,
  bytes,
  parsingConfig,
  xAxis,
  yAxis,
  lineLabel,
  title,
}) => {
  if (data) {
    return (
      <div className="w-full" style={{ height: "500px" }}>
        <Plotly
          layout={{
            title,
          }}
          data={[
            {
              x: data.map((d) => d[xAxis]),
              y: data.map((d) => d[yAxis]),
              type: "bar",
              name: lineLabel,
            },
          ]}
        />
      </div>
    );
  }
  const { data: csvString, isLoading: isDownloadingCSV } = useQuery(
    ["dataCsv", url, randomId],
    () => getCsv(url as string, bytes ?? 5132288),
    { enabled: !!url },
  );
  const { data: parsedData, isLoading: isParsing } = useQuery(
    ["dataPreview", csvString, randomId],
    () =>
      parseCsv(
        rawCsv ? (rawCsv as string) : (csvString as string),
        parsingConfig ?? {},
      ),
    { enabled: rawCsv ? true : !!csvString },
  );
  if (isParsing || isDownloadingCSV)
    <div className="w-full flex justify-center items-center h-[500px]">
      <LoadingSpinner />
    </div>;
  if (parsedData)
    return (
      <div className="w-full" style={{ height: "500px" }}>
        <Plotly
          layout={{
            title,
          }}
          data={[
            {
              x: parsedData.data.map((d: any) => d[xAxis]),
              y: parsedData.data.map((d: any) => d[yAxis]),
              type: "bar",
              name: lineLabel,
            },
          ]}
        />
      </div>
    );
  return (
    <div className="w-full flex justify-center items-center h-[500px]">
      <LoadingSpinner />
    </div>
  );
};
```
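A short usage sketch; the args are taken from the `PlotlyBarChart` Storybook stories added later in this comparison (import path assumed):

```tsx
import { PlotlyBarChart } from "@portaljs/components"; // assumed entry point

export const Example = () => (
  <PlotlyBarChart
    // Inline data skips the CSV fetch/parse path entirely.
    data={[
      { year: "1850", temperature: -0.41765878 },
      { year: "1851", temperature: -0.2333498 },
    ]}
    xAxis="year"
    yAxis="temperature"
  />
);
```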
packages/components/src/components/PlotlyLineChart.tsx (new file, 157 lines)

```tsx
import { QueryClient, QueryClientProvider, useQuery } from "react-query";
import { Plotly } from "./Plotly";
import Papa, { ParseConfig } from "papaparse";
import LoadingSpinner from "./LoadingSpinner";

const queryClient = new QueryClient();

async function getCsv(url: string, bytes: number) {
  const response = await fetch(url, {
    headers: {
      Range: `bytes=0-${bytes}`,
    },
  });
  const data = await response.text();
  return data;
}

async function parseCsv(
  file: string,
  parsingConfig: ParseConfig,
): Promise<any> {
  return new Promise((resolve, reject) => {
    Papa.parse(file, {
      ...parsingConfig,
      header: true,
      dynamicTyping: true,
      skipEmptyLines: true,
      transform: (value: string): string => {
        return value.trim();
      },
      complete: (results: any) => {
        return resolve(results);
      },
      error: (error: any) => {
        return reject(error);
      },
    });
  });
}

export interface PlotlyLineChartProps {
  url?: string;
  data?: { [key: string]: number | string }[];
  rawCsv?: string;
  randomId?: number;
  bytes?: number;
  parsingConfig?: ParseConfig;
  xAxis: string;
  yAxis: string;
  lineLabel?: string;
  title?: string;
}

export const PlotlyLineChart: React.FC<PlotlyLineChartProps> = ({
  url,
  data,
  rawCsv,
  bytes = 5132288,
  parsingConfig = {},
  xAxis,
  yAxis,
  lineLabel,
  title = "",
}) => {
  const randomId = Math.random();
  return (
    // Provide the client to your App
    <QueryClientProvider client={queryClient}>
      <LineChartInner
        url={url}
        data={data}
        rawCsv={rawCsv}
        randomId={randomId}
        bytes={bytes}
        parsingConfig={parsingConfig}
        xAxis={xAxis}
        yAxis={yAxis}
        lineLabel={lineLabel ?? yAxis}
        title={title}
      />
    </QueryClientProvider>
  );
};

const LineChartInner: React.FC<PlotlyLineChartProps> = ({
  url,
  data,
  rawCsv,
  randomId,
  bytes,
  parsingConfig,
  xAxis,
  yAxis,
  lineLabel,
  title,
}) => {
  if (data) {
    return (
      <div className="w-full" style={{ height: "500px" }}>
        <Plotly
          layout={{
            title,
          }}
          data={[
            {
              x: data.map((d) => d[xAxis]),
              y: data.map((d) => d[yAxis]),
              mode: "lines",
              name: lineLabel,
            },
          ]}
        />
      </div>
    );
  }
  const { data: csvString, isLoading: isDownloadingCSV } = useQuery(
    ["dataCsv", url, randomId],
    () => getCsv(url as string, bytes ?? 5132288),
    { enabled: !!url },
  );
  const { data: parsedData, isLoading: isParsing } = useQuery(
    ["dataPreview", csvString, randomId],
    () =>
      parseCsv(
        rawCsv ? (rawCsv as string) : (csvString as string),
        parsingConfig ?? {},
      ),
    { enabled: rawCsv ? true : !!csvString },
  );
  if (isParsing || isDownloadingCSV)
    <div className="w-full flex justify-center items-center h-[500px]">
      <LoadingSpinner />
    </div>;
  if (parsedData)
    return (
      <div className="w-full" style={{ height: "500px" }}>
        <Plotly
          layout={{
            title,
          }}
          data={[
            {
              x: parsedData.data.map((d: any) => d[xAxis]),
              y: parsedData.data.map((d: any) => d[yAxis]),
              mode: "lines",
              name: lineLabel,
            },
          ]}
        />
      </div>
    );
  return (
    <div className="w-full flex justify-center items-center h-[500px]">
      <LoadingSpinner />
    </div>
  );
};
```
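Both chart components resolve their input the same way: inline `data` is rendered directly, `rawCsv` is parsed with papaparse, and `url` is fetched with a `Range: bytes=0-…` header (the first `bytes` bytes, 5132288 by default) and then parsed. A URL-based sketch, using the same args as the `PlotlyLineChart` story added later (import path assumed):

```tsx
import { PlotlyLineChart } from "@portaljs/components"; // assumed entry point

export const Example = () => (
  <PlotlyLineChart
    // Only the first `bytes` bytes of the CSV are downloaded and parsed.
    title="Oil Price x Year"
    url="https://raw.githubusercontent.com/datasets/oil-prices/main/data/wti-year.csv"
    xAxis="Date"
    yAxis="Price"
  />
);
```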
Table component

```diff
@@ -6,6 +6,8 @@ import {
   getFilteredRowModel,
   getPaginationRowModel,
   getSortedRowModel,
+  PaginationState,
+  Table as ReactTable,
   useReactTable,
 } from '@tanstack/react-table';
 
@@ -25,12 +27,19 @@ import DebouncedInput from './DebouncedInput';
 import loadData from '../lib/loadData';
 import LoadingSpinner from './LoadingSpinner';
 
+export type TableData = { cols: {key: string, name: string}[]; data: any[]; total: number };
+
 export type TableProps = {
   data?: Array<{ [key: string]: number | string }>;
   cols?: Array<{ [key: string]: string }>;
   csv?: string;
   url?: string;
   fullWidth?: boolean;
+  datastoreConfig?: {
+    dataStoreURI: string;
+    rowsPerPage?: number;
+    dataMapperFn: (data) => Promise<TableData> | TableData;
+  };
 };
 
 export const Table = ({
@@ -39,8 +48,28 @@ export const Table = ({
   csv = '',
   url = '',
   fullWidth = false,
+  datastoreConfig,
 }: TableProps) => {
   const [isLoading, setIsLoading] = useState<boolean>(false);
+  const [pageMap, setPageMap] = useState(new Map<number, boolean>());
+  const {
+    dataMapperFn,
+    dataStoreURI,
+    rowsPerPage = 10,
+  } = datastoreConfig ?? {};
+
+  const [globalFilter, setGlobalFilter] = useState('');
+  const [isLoadingPage, setIsLoadingPage] = useState<boolean>(false);
+  const [totalOfRows, setTotalOfRows] = useState<number>(0);
+
+  const [{ pageIndex, pageSize }, setPagination] = useState<PaginationState>({
+    pageIndex: 0,
+    pageSize: rowsPerPage,
+  });
+
+  const [lastIndex, setLastIndex] = useState(pageSize);
+  const [startIndex, setStartIndex] = useState(0);
+  const [hasSorted, setHasSorted] = useState(false);
+
   if (csv) {
     const out = parseCsv(csv);
@@ -62,21 +91,56 @@ export const Table = ({
     );
   }, [data, cols]);
 
-  const [globalFilter, setGlobalFilter] = useState('');
-
-  const table = useReactTable({
-    data,
-    columns: tableCols,
-    getCoreRowModel: getCoreRowModel(),
-    state: {
-      globalFilter,
-    },
-    globalFilterFn: globalFilterFn,
-    onGlobalFilterChange: setGlobalFilter,
-    getFilteredRowModel: getFilteredRowModel(),
-    getPaginationRowModel: getPaginationRowModel(),
-    getSortedRowModel: getSortedRowModel(),
-  });
+  let table: ReactTable<unknown>;
+
+  if (datastoreConfig) {
+    useEffect(() => {
+      setIsLoading(true);
+      fetch(`${dataStoreURI}&limit=${rowsPerPage}&offset=0`)
+        .then((res) => res.json())
+        .then(async (res) => {
+          const { data, cols, total } = await dataMapperFn(res);
+          setData(data);
+          setCols(cols);
+          setTotalOfRows(Math.ceil(total / rowsPerPage));
+          pageMap.set(0, true);
+        })
+        .finally(() => setIsLoading(false));
+    }, [dataStoreURI]);
+
+    table = useReactTable({
+      data,
+      pageCount: totalOfRows,
+      columns: tableCols,
+      getCoreRowModel: getCoreRowModel(),
+      state: {
+        pagination: { pageIndex, pageSize },
+      },
+      getFilteredRowModel: getFilteredRowModel(),
+      manualPagination: true,
+      onPaginationChange: setPagination,
+      getSortedRowModel: getSortedRowModel(),
+    });
+
+    useEffect(() => {
+      if (!hasSorted) return;
+      queryDataByText(globalFilter);
+    }, [table.getState().sorting]);
+  } else {
+    table = useReactTable({
+      data,
+      columns: tableCols,
+      getCoreRowModel: getCoreRowModel(),
+      state: {
+        globalFilter,
+      },
+      globalFilterFn: globalFilterFn,
+      onGlobalFilterChange: setGlobalFilter,
+      getFilteredRowModel: getFilteredRowModel(),
+      getPaginationRowModel: getPaginationRowModel(),
+      getSortedRowModel: getSortedRowModel(),
+    });
+  }
 
   useEffect(() => {
     if (url) {
@@ -91,6 +155,70 @@ export const Table = ({
     }
   }, [url]);
 
+  const queryDataByText = (filter) => {
+    setIsLoadingPage(true);
+    const sortedParam = getSortParam();
+    fetch(
+      `${dataStoreURI}&limit=${rowsPerPage}&offset=0&q=${filter}${sortedParam}`
+    )
+      .then((res) => res.json())
+      .then(async (res) => {
+        const { data, total = 0 } = await dataMapperFn(res);
+        setTotalOfRows(Math.ceil(total / rowsPerPage));
+        setData(data);
+        const newMap = new Map();
+        newMap.set(0, true);
+        setPageMap(newMap);
+        table.setPageIndex(0);
+        setStartIndex(0);
+        setLastIndex(pageSize);
+      })
+      .finally(() => setIsLoadingPage(false));
+  };
+
+  const getSortParam = () => {
+    const sort = table.getState().sorting;
+    return sort.length == 0
+      ? ``
+      : '&sort=' +
+          sort
+            .map(
+              (x, i) =>
+                `${x.id}${
+                  i === sort.length - 1 ? (x.desc ? ` desc` : ` asc`) : `,`
+                }`
+            )
+            .reduce((x1, x2) => x1 + x2);
+  };
+
+  const queryPaginatedData = (newPageIndex) => {
+    let newStartIndex = newPageIndex * pageSize;
+    setStartIndex(newStartIndex);
+    setLastIndex(newStartIndex + pageSize);
+
+    if (!pageMap.get(newPageIndex)) pageMap.set(newPageIndex, true);
+    else return;
+
+    const sortedParam = getSortParam();
+
+    setIsLoadingPage(true);
+    fetch(
+      `${dataStoreURI}&limit=${rowsPerPage}&offset=${
+        newStartIndex + pageSize
+      }&q=${globalFilter}${sortedParam}`
+    )
+      .then((res) => res.json())
+      .then(async (res) => {
+        const { data: responseData } = await dataMapperFn(res);
+        responseData.forEach((e) => {
+          data[newStartIndex] = e;
+          newStartIndex++;
+        });
+        setData([...data]);
+      })
+      .finally(() => setIsLoadingPage(false));
+  };
+
   return isLoading ? (
     <div className="w-full h-full min-h-[500px] flex items-center justify-center">
       <LoadingSpinner />
@@ -99,7 +227,10 @@ export const Table = ({
     <div className={`${fullWidth ? 'w-[90vw] ml-[calc(50%-45vw)]' : 'w-full'}`}>
       <DebouncedInput
         value={globalFilter ?? ''}
-        onChange={(value: any) => setGlobalFilter(String(value))}
+        onChange={(value: any) => {
+          if (datastoreConfig) queryDataByText(String(value));
+          setGlobalFilter(String(value));
+        }}
         className="p-2 text-sm shadow border border-block"
         placeholder="Search all columns..."
       />
@@ -114,7 +245,10 @@ export const Table = ({
                     className: h.column.getCanSort()
                       ? 'cursor-pointer select-none'
                       : '',
-                    onClick: h.column.getToggleSortingHandler(),
+                    onClick: (v) => {
+                      setHasSorted(true);
+                      h.column.getToggleSortingHandler()(v);
+                    },
                   }}
                 >
                   {flexRender(h.column.columnDef.header, h.getContext())}
@@ -135,15 +269,28 @@ export const Table = ({
           ))}
         </thead>
         <tbody>
-          {table.getRowModel().rows.map((r) => (
-            <tr key={r.id} className="border-b border-b-slate-200">
-              {r.getVisibleCells().map((c) => (
-                <td key={c.id} className="py-2">
-                  {flexRender(c.column.columnDef.cell, c.getContext())}
-                </td>
-              ))}
-            </tr>
-          ))}
+          {datastoreConfig && isLoadingPage ? (
+            <tr>
+              <td colSpan={cols.length} rowSpan={cols.length}>
+                <div className="w-full h-full flex items-center justify-center pt-6">
+                  <LoadingSpinner />
+                </div>
+              </td>
+            </tr>
+          ) : (
+            (datastoreConfig
+              ? table.getRowModel().rows.slice(startIndex, lastIndex)
+              : table.getRowModel().rows
+            ).map((r) => (
+              <tr key={r.id} className="border-b border-b-slate-200">
+                {r.getVisibleCells().map((c) => (
+                  <td key={c.id} className="py-2">
+                    {flexRender(c.column.columnDef.cell, c.getContext())}
+                  </td>
+                ))}
+              </tr>
+            ))
+          )}
         </tbody>
       </table>
       <div className="flex gap-2 items-center justify-center mt-10">
@@ -151,7 +298,10 @@ export const Table = ({
           className={`w-6 h-6 ${
             !table.getCanPreviousPage() ? 'opacity-25' : 'opacity-100'
           }`}
-          onClick={() => table.setPageIndex(0)}
+          onClick={() => {
+            if (datastoreConfig) queryPaginatedData(0);
+            table.setPageIndex(0);
+          }}
           disabled={!table.getCanPreviousPage()}
         >
           <ChevronDoubleLeftIcon />
@@ -160,7 +310,12 @@ export const Table = ({
           className={`w-6 h-6 ${
             !table.getCanPreviousPage() ? 'opacity-25' : 'opacity-100'
          }`}
-          onClick={() => table.previousPage()}
+          onClick={() => {
+            if (datastoreConfig) {
+              queryPaginatedData(table.getState().pagination.pageIndex - 1);
+            }
+            table.previousPage();
+          }}
           disabled={!table.getCanPreviousPage()}
         >
           <ChevronLeftIcon />
@@ -176,7 +331,11 @@ export const Table = ({
           className={`w-6 h-6 ${
             !table.getCanNextPage() ? 'opacity-25' : 'opacity-100'
           }`}
-          onClick={() => table.nextPage()}
+          onClick={() => {
+            if (datastoreConfig)
+              queryPaginatedData(table.getState().pagination.pageIndex + 1);
+            table.nextPage();
+          }}
           disabled={!table.getCanNextPage()}
         >
           <ChevronRightIcon />
@@ -185,7 +344,11 @@ export const Table = ({
           className={`w-6 h-6 ${
             !table.getCanNextPage() ? 'opacity-25' : 'opacity-100'
           }`}
-          onClick={() => table.setPageIndex(table.getPageCount() - 1)}
+          onClick={() => {
+            const pageIndexToNavigate = table.getPageCount() - 1;
+            if (datastoreConfig) queryPaginatedData(pageIndexToNavigate);
+            table.setPageIndex(pageIndexToNavigate);
+          }}
          disabled={!table.getCanNextPage()}
         >
           <ChevronDoubleRightIcon />
```
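In datastore mode the component switches to manual pagination: the first page is fetched on mount, later pages via `queryPaginatedData`, and search/sort via `queryDataByText`, all against `dataStoreURI` with `limit`, `offset`, `q`, and `sort` query parameters. A usage sketch with the same shape as the `WithDataStoreIntegration` story added later in this comparison (the CKAN `datastore_search` response is mapped into `{ data, cols, total }` by `dataMapperFn`; import path assumed):

```tsx
import { Table } from "@portaljs/components"; // assumed entry point

export const Example = () => (
  <Table
    datastoreConfig={{
      dataStoreURI:
        "https://www.civicdata.com/api/action/datastore_search?resource_id=46ec0807-31ff-497f-bfa0-f31c796cdee8",
      rowsPerPage: 10,
      // Maps the raw datastore_search response into the TableData shape
      // ({ data, cols, total }) that the component expects.
      dataMapperFn: ({ result }: any) => ({
        data: result.records,
        cols: result.fields.map((f: any) => ({ key: f.id, name: f.id })),
        total: result.total,
      }),
    }}
  />
);
```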
Component index exports

```diff
@@ -10,3 +10,6 @@ export * from './components/PdfViewer';
 export * from "./components/Excel";
 export * from "./components/BucketViewer";
 export * from "./components/Iframe";
+export * from "./components/Plotly";
+export * from "./components/PlotlyLineChart";
+export * from "./components/PlotlyBarChart";
```
packages/components/stories/BarChartPlotly.stories.ts (new file, 74 lines)

```ts
import type { Meta, StoryObj } from '@storybook/react';

import { PlotlyBarChart, PlotlyBarChartProps } from '../src/components/PlotlyBarChart';

// More on how to set up stories at: https://storybook.js.org/docs/react/writing-stories/introduction
const meta: Meta = {
  title: 'Components/PlotlyBarChart',
  component: PlotlyBarChart,
  tags: ['autodocs'],
  argTypes: {
    url: {
      description:
        'CSV Url to be parsed and used as data source',
    },
    data: {
      description:
        'Data to be displayed. as an array of key value pairs \n\n E.g.: [{ year: 1850, temperature: -0.41765878 }, { year: 1851, temperature: -0.2333498 }, ...]',
    },
    rawCsv: {
      description:
        'Raw csv data to be parsed and used as data source',
    },
    bytes: {
      description:
        'How many bytes to read from the url',
    },
    parsingConfig: {
      description: 'If using url or rawCsv, this parsing config will be used to parse the data. Optional, check https://www.papaparse.com/ for more info',
    },
    title: {
      description: 'Title to display on the chart. Optional.',
    },
    lineLabel: {
      description: 'Label to display on the line, Optional, will use yAxis if not provided',
    },
    xAxis: {
      description:
        'Name of the X axis on the data. Required when the "data" parameter is an URL.',
    },
    yAxis: {
      description:
        'Name of the Y axis on the data. Required when the "data" parameter is an URL.',
    },
  },
};

export default meta;

type Story = StoryObj<PlotlyBarChartProps>;

export const FromDataPoints: Story = {
  name: 'Line chart from array of data points',
  args: {
    data: [
      {year: '1850', temperature: -0.41765878},
      {year: '1851', temperature: -0.2333498},
      {year: '1852', temperature: -0.22939907},
      {year: '1853', temperature: -0.27035445},
      {year: '1854', temperature: -0.29163003},
    ],
    xAxis: 'year',
    yAxis: 'temperature',
  },
};

export const FromURL: Story = {
  name: 'Line chart from URL',
  args: {
    title: 'Apple Stock Prices',
    url: 'https://raw.githubusercontent.com/plotly/datasets/master/finance-charts-apple.csv',
    xAxis: 'Date',
    yAxis: 'AAPL.Open',
  },
};
```
BucketViewer stories

```diff
@@ -95,23 +95,3 @@ export const WithComponentOnHoverOfEachBucketFile: Story = {
     },
   },
 };
-
-export const WithLoadingComponentWhileDownloadTheBucketFile: Story = {
-  name: 'With loading component while download the bucket file',
-  args: {
-    domain: 'https://ssen-smart-meter.datopian.workers.dev',
-    suffix: '/',
-    downloadConfig: { downloadingMessageComponent: 'COMPONENT....' },
-    dataMapperFn: async (rawData: Response) => {
-      const result = await rawData.json();
-      return result.objects.map((e) => ({
-        downloadFileUri: e.downloadLink,
-        fileName: e.key.replace(/^(\w+\/)/g, ''),
-        dateProps: {
-          date: new Date(e.uploaded),
-          dateFormatter: (date) => date.toLocaleDateString(),
-        },
-      }));
-    },
-  },
-};
```
packages/components/stories/LineChartPlotly.stories.ts (new file, 74 lines)

```ts
import type { Meta, StoryObj } from '@storybook/react';

import { PlotlyLineChart, PlotlyLineChartProps } from '../src/components/PlotlyLineChart';

// More on how to set up stories at: https://storybook.js.org/docs/react/writing-stories/introduction
const meta: Meta = {
  title: 'Components/PlotlyLineChart',
  component: PlotlyLineChart,
  tags: ['autodocs'],
  argTypes: {
    url: {
      description:
        'CSV Url to be parsed and used as data source',
    },
    data: {
      description:
        'Data to be displayed. as an array of key value pairs \n\n E.g.: [{ year: 1850, temperature: -0.41765878 }, { year: 1851, temperature: -0.2333498 }, ...]',
    },
    rawCsv: {
      description:
        'Raw csv data to be parsed and used as data source',
    },
    bytes: {
      description:
        'How many bytes to read from the url',
    },
    parsingConfig: {
      description: 'If using url or rawCsv, this parsing config will be used to parse the data. Optional, check https://www.papaparse.com/ for more info',
    },
    title: {
      description: 'Title to display on the chart. Optional.',
    },
    lineLabel: {
      description: 'Label to display on the line, Optional, will use yAxis if not provided',
    },
    xAxis: {
      description:
        'Name of the X axis on the data. Required when the "data" parameter is an URL.',
    },
    yAxis: {
      description:
        'Name of the Y axis on the data. Required when the "data" parameter is an URL.',
    },
  },
};

export default meta;

type Story = StoryObj<PlotlyLineChartProps>;

export const FromDataPoints: Story = {
  name: 'Line chart from array of data points',
  args: {
    data: [
      {year: '1850', temperature: -0.41765878},
      {year: '1851', temperature: -0.2333498},
      {year: '1852', temperature: -0.22939907},
      {year: '1853', temperature: -0.27035445},
      {year: '1854', temperature: -0.29163003},
    ],
    xAxis: 'year',
    yAxis: 'temperature',
  },
};

export const FromURL: Story = {
  name: 'Line chart from URL',
  args: {
    title: 'Oil Price x Year',
    url: 'https://raw.githubusercontent.com/datasets/oil-prices/main/data/wti-year.csv',
    xAxis: 'Date',
    yAxis: 'Price',
  },
};
```
packages/components/stories/Plotly.stories.ts (new file, 39 lines)

```ts
import type { Meta, StoryObj } from '@storybook/react';

import { Plotly } from '../src/components/Plotly';

// More on how to set up stories at: https://storybook.js.org/docs/react/writing-stories/introduction
const meta: Meta = {
  title: 'Components/Plotly',
  component: Plotly,
  tags: ['autodocs'],
};

export default meta;

type Story = StoryObj<any>;

// More on writing stories with args: https://storybook.js.org/docs/react/writing-stories/args
export const Primary: Story = {
  name: 'Chart built with Plotly',
  args: {
    data: [
      {
        x: [1, 2, 3],
        y: [2, 6, 3],
        type: 'scatter',
        mode: 'lines+markers',
        marker: { color: 'red' },
      },
    ],
    layout: {
      title: 'Chart built with Plotly',
      xaxis: {
        title: 'x Axis',
      },
      yaxis: {
        title: 'y Axis',
      },
    },
  },
};
```
Table stories

```diff
@@ -9,17 +9,22 @@ const meta: Meta = {
   tags: ['autodocs'],
   argTypes: {
     data: {
-      description: "Data to be displayed in the table, must also set \"cols\" to work."
+      description:
+        'Data to be displayed in the table, must also set "cols" to work.',
     },
     cols: {
-      description: "Columns to be displayed in the table, must also set \"data\" to work."
+      description:
+        'Columns to be displayed in the table, must also set "data" to work.',
     },
     csv: {
-      description: "CSV data as string.",
+      description: 'CSV data as string.',
     },
     url: {
-      description: "Fetch the data from a CSV file remotely."
-    }
+      description: 'Fetch the data from a CSV file remotely.',
+    },
+    datastoreConfig: {
+      description: `Configuration to use CKAN's datastore API extension integrated with the component`,
+    },
   },
 };
 
@@ -29,7 +34,7 @@ type Story = StoryObj<TableProps>;
 
 // More on writing stories with args: https://storybook.js.org/docs/react/writing-stories/args
 export const FromColumnsAndData: Story = {
-  name: "Table from columns and data",
+  name: 'Table from columns and data',
   args: {
     data: [
       { id: 1, lastName: 'Snow', firstName: 'Jon', age: 35 },
@@ -49,21 +54,40 @@ export const FromColumnsAndData: Story = {
   },
 };
 
+export const WithDataStoreIntegration: Story = {
+  name: 'Table with datastore integration',
+  args: {
+    datastoreConfig: {
+      dataStoreURI: `https://www.civicdata.com/api/action/datastore_search?resource_id=46ec0807-31ff-497f-bfa0-f31c796cdee8`,
+      dataMapperFn: ({
+        result,
+      }: {
+        result: { fields: { id }[]; records: []; total: number };
+      }) => {
+        return {
+          data: result.records,
+          cols: result.fields.map((x) => ({ key: x.id, name: x.id })),
+          total: result.total,
+        };
+      },
+    },
+  },
+};
+
 export const FromRawCSV: Story = {
-  name: "Table from raw CSV",
+  name: 'Table from raw CSV',
   args: {
     csv: `
 Year,Temp Anomaly
 1850,-0.418
 2020,0.923
-`
-  }
+`,
+  },
 };
 
 export const FromURL: Story = {
-  name: "Table from URL",
+  name: 'Table from URL',
   args: {
-    url: "https://raw.githubusercontent.com/datasets/finance-vix/main/data/vix-daily.csv"
-  }
+    url: 'https://raw.githubusercontent.com/datasets/finance-vix/main/data/vix-daily.csv',
+  },
 };
```
Site config (Discord invite updated)

```diff
@@ -69,7 +69,7 @@ const config = {
     },
   },
   github: 'https://github.com/datopian/portaljs',
-  discord: 'https://discord.gg/EeyfGrGu4U',
+  discord: 'https://discord.gg/xfFDMPU9dC',
   tableOfContents: true,
   analytics: 'G-96GWZHMH57',
   // editLinkShow: true,
```
Deleted tools tsconfig (12 lines)

```diff
@@ -1,12 +0,0 @@
-{
-  "extends": "../tsconfig.base.json",
-  "compilerOptions": {
-    "outDir": "../dist/out-tsc/tools",
-    "rootDir": ".",
-    "module": "commonjs",
-    "target": "es5",
-    "types": ["node"],
-    "importHelpers": false
-  },
-  "include": ["**/*.ts"]
-}
```