fix(v2): move metadata export after compiling MDX to avoid weird MDX parsing error. (#2105)

* fix(v2): move metadata export to mdx-loader to prevent any weird mdx parsing

* refactor

* nits

* nits

* nits
endiliey authored Dec 11, 2019
1 parent 1f0eb37 commit ace93c5
Showing 7 changed files with 64 additions and 65 deletions.
packages/docusaurus-mdx-loader/README.md (3 additions, 0 deletions)
@@ -40,3 +40,6 @@ Array of rehype plugins to manipulate the MDXHAST
 ### `remarkPlugins`
 
 Array of remark plugins to manipulate the MDXAST
+
+### `metadataPath`
+A function to provide the metadataPath depending on current loaded MDX path that will be exported as the MDX metadata.
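
For illustration, a minimal sketch of how a consumer of the loader might supply this option. The contract is taken from the loader diff below: the function receives the absolute path of the MDX file being loaded and returns the path of a JSON file whose contents are exported as `metadata` (returning a falsy value skips the export). All concrete paths and the `mdxLoaderOptions` name here are hypothetical.

```ts
import path from 'path';

// Hypothetical options object passed to @docusaurus/mdx-loader in a webpack rule.
const mdxLoaderOptions = {
  // Map each loaded MDX file to the JSON file holding its metadata.
  // Returning a falsy value skips the `export const metadata = ...` entirely.
  metadataPath: (mdxPath: string) =>
    path.join(
      '/my-site/.docusaurus/my-plugin', // placeholder data directory
      `${path.basename(mdxPath, path.extname(mdxPath))}.json`, // placeholder naming scheme
    ),
};

export default mdxLoaderOptions;
```
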
packages/docusaurus-mdx-loader/package.json (1 addition, 0 deletions)
@@ -13,6 +13,7 @@
     "@mdx-js/mdx": "^1.5.1",
     "@mdx-js/react": "^1.5.1",
     "escape-html": "^1.0.3",
+    "fs-extra": "^8.1.0",
     "github-slugger": "^1.2.1",
     "gray-matter": "^4.0.2",
     "loader-utils": "^1.2.3",
packages/docusaurus-mdx-loader/src/index.js (16 additions, 1 deletion)
@@ -6,6 +6,7 @@
  */
 
 const {getOptions} = require('loader-utils');
+const {readFile} = require('fs-extra');
 const mdx = require('@mdx-js/mdx');
 const emoji = require('remark-emoji');
 const slug = require('remark-slug');
@@ -43,11 +44,25 @@ module.exports = async function(fileString) {
     return callback(err);
   }
 
+  let exportStr = `export const frontMatter = ${stringifyObject(data)};`;
+
+  // Read metadata for this MDX and export it
+  if (options.metadataPath && typeof options.metadataPath === 'function') {
+    const metadataPath = options.metadataPath(this.resourcePath);
+
+    if (metadataPath) {
+      // Add as dependency of this loader result so that we can recompile if metadata is changed
+      this.addDependency(metadataPath);
+      const metadata = await readFile(metadataPath, 'utf8');
+      exportStr += `\nexport const metadata = ${metadata};`;
+    }
+  }
+
   const code = `
   import React from 'react';
   import { mdx } from '@mdx-js/react';
-  export const frontMatter = ${stringifyObject(data)};
+  ${exportStr}
   ${result}
   `;
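
The important detail in this hunk is that both exports are now appended to the already-compiled MDX output rather than to the raw markdown, so the MDX parser never sees the metadata object. A rough, hypothetical sketch of the module string the loader emits afterwards; the front-matter and metadata values are invented, and the compiled component body stands in for whatever `@mdx-js/mdx` actually produced.

```ts
import React from 'react';
import {mdx} from '@mdx-js/react';

// Appended by the loader from the document's front matter (via gray-matter):
export const frontMatter = {title: 'Hello world'};
// Appended from the JSON file returned by options.metadataPath, if provided:
export const metadata = {permalink: '/blog/hello-world', title: 'Hello world'};

// ...followed by the component that @mdx-js/mdx compiled from the document body:
export default function MDXContent() {
  return mdx('p', null, 'Hello world');
}
```
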
packages/docusaurus-plugin-content-blog/src/index.ts (13 additions, 6 deletions)
@@ -8,7 +8,7 @@
 import fs from 'fs-extra';
 import _ from 'lodash';
 import path from 'path';
-import {normalizeUrl, docuHash} from '@docusaurus/utils';
+import {normalizeUrl, docuHash, aliasedSitePath} from '@docusaurus/utils';
 
 import {
   PluginOptions,
@@ -69,9 +69,10 @@ export default function pluginContentBlog(
   opts: Partial<PluginOptions>,
 ): Plugin<BlogContent | null> {
   const options: PluginOptions = {...DEFAULT_OPTIONS, ...opts};
-  const contentPath = path.resolve(context.siteDir, options.path);
+  const {siteDir, generatedFilesDir} = context;
+  const contentPath = path.resolve(siteDir, options.path);
   const dataDir = path.join(
-    context.generatedFilesDir,
+    generatedFilesDir,
     'docusaurus-plugin-content-blog',
   );
 
@@ -231,7 +232,7 @@ export default function pluginContentBlog(
       blogPosts.map(async blogPost => {
         const {id, metadata} = blogPost;
         await createData(
-          // Note that this created data path must be in sync with markdownLoader.ts metadataPath
+          // Note that this created data path must be in sync with metadataPath provided to mdx-loader
           `${docuHash(metadata.source)}.json`,
           JSON.stringify(metadata, null, 2),
         );
@@ -373,13 +374,19 @@ export default function pluginContentBlog(
              options: {
                remarkPlugins,
                rehypePlugins,
+               // Note that metadataPath must be the same/ in-sync as the path from createData for each MDX
+               metadataPath: (mdxPath: string) => {
+                 const aliasedSource = aliasedSitePath(mdxPath, siteDir);
+                 return path.join(
+                   dataDir,
+                   `${docuHash(aliasedSource)}.json`,
+                 );
+               },
              },
            },
            {
              loader: path.resolve(__dirname, './markdownLoader.js'),
              options: {
-               dataDir,
-               siteDir: context.siteDir,
                truncateMarker,
              },
            },
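
One thing the hunk above relies on but does not spell out: webpack applies the entries of a `use` array from last to first, so `markdownLoader.js` still receives the raw markdown (and now only truncates it), and `@docusaurus/mdx-loader` then compiles the result and appends the metadata export. Below is a self-contained sketch of the resulting rule; the placeholder values stand in for the plugin's real `siteDir`, `dataDir` and truncate marker, and the rule is trimmed to the parts relevant here.

```ts
import path from 'path';
import {docuHash, aliasedSitePath} from '@docusaurus/utils';

// Placeholder values; the real ones come from the plugin context and options.
const siteDir = '/my-site';
const dataDir = '/my-site/.docusaurus/docusaurus-plugin-content-blog';
const truncateMarker = /<!--\s*(truncate)\s*-->/;

export const blogMdxRule = {
  test: /(\.mdx?)$/,
  use: [
    {
      loader: '@docusaurus/mdx-loader',
      options: {
        // Runs second: compiles the (possibly truncated) MDX, then appends
        // `export const metadata = ...` read from the generated JSON file.
        metadataPath: (mdxPath: string) =>
          path.join(dataDir, `${docuHash(aliasedSitePath(mdxPath, siteDir))}.json`),
      },
    },
    {
      // Runs first: only truncates the raw markdown at the truncate marker.
      loader: path.resolve(__dirname, './markdownLoader.js'),
      options: {truncateMarker},
    },
  ],
};
```
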
packages/docusaurus-plugin-content-blog/src/markdownLoader.ts (2 additions, 20 deletions)
@@ -8,14 +8,11 @@
 const {parseQuery, getOptions} = require('loader-utils');
 import {loader} from 'webpack';
 import {truncate} from './blogUtils';
-import path from 'path';
-import {readFile} from 'fs-extra';
-import {aliasedSitePath, docuHash} from '@docusaurus/utils';
 
 export = function(fileString: string) {
   const callback = this.async();
 
-  const {truncateMarker, siteDir, dataDir} = getOptions(this);
+  const {truncateMarker}: {truncateMarker: RegExp | string} = getOptions(this);
 
   let finalContent = fileString;
 
@@ -24,20 +21,5 @@ export = function(fileString: string) {
   if (truncated) {
     finalContent = truncate(fileString, truncateMarker);
   }
-
-  // Read metadata & then embed it to this markdown content
-  // Note that metadataPath must be the same/ in-sync as the path from createData
-  const aliasedSource = aliasedSitePath(this.resourcePath, siteDir);
-  const metadataPath = path.join(dataDir, `${docuHash(aliasedSource)}.json`);
-
-  // Add metadataPath as dependency of this loader result so that we can recompile if metadata is changed
-  this.addDependency(metadataPath);
-
-  readFile(metadataPath, 'utf8', function(err, metadata) {
-    if (err) return callback && callback(err);
-
-    const metadataStr = `export const metadata = ${metadata};`;
-    // We need to add two lines break so that mdx won't mistake it as part of previous paragraph
-    callback && callback(null, finalContent + '\n\n' + metadataStr);
-  });
+  return callback && callback(null, finalContent);
 } as loader.Loader;
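
With the metadata handling gone, this loader boils down to an optional truncation pass. A small, self-contained illustration of the remaining behaviour; the marker regex and sample post are illustrative, and the split approximates what `blogUtils.truncate` does.

```ts
// Illustrative truncate marker and sample post body.
const truncateMarker = /<!--\s*(truncate)\s*-->/;
const fileString = [
  'Intro paragraph shown in the blog list.',
  '',
  '<!--truncate-->',
  '',
  'Full body shown only on the post page.',
].join('\n');

// `truncated` comes from the `?truncated=true` resource query parsed by the loader.
const truncated = true;

// Approximation of blogUtils.truncate: keep everything before the marker.
const finalContent = truncated ? fileString.split(truncateMarker)[0] : fileString;

console.log(finalContent); // "Intro paragraph shown in the blog list.\n\n"
```
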
packages/docusaurus-plugin-content-docs/src/index.ts (15 additions, 3 deletions)
@@ -9,7 +9,12 @@ import _ from 'lodash';
 import globby from 'globby';
 import fs from 'fs-extra';
 import path from 'path';
-import {normalizeUrl, docuHash, objectWithKeySorted} from '@docusaurus/utils';
+import {
+  normalizeUrl,
+  docuHash,
+  objectWithKeySorted,
+  aliasedSitePath,
+} from '@docusaurus/utils';
 import {LoadContext, Plugin, RouteConfig} from '@docusaurus/types';
 
 import createOrder from './order';
@@ -285,7 +290,7 @@ export default function pluginContentDocs(
     const routes = await Promise.all(
       metadataItems.map(async metadataItem => {
         await createData(
-          // Note that this created data path must be in sync with markdown/index.ts metadataPath
+          // Note that this created data path must be in sync with metadataPath provided to mdx-loader
          `${docuHash(metadataItem.source)}.json`,
          JSON.stringify(metadataItem, null, 2),
        );
@@ -404,13 +409,20 @@ export default function pluginContentDocs(
              options: {
                remarkPlugins,
                rehypePlugins,
+               metadataPath: (mdxPath: string) => {
+                 // Note that metadataPath must be the same/ in-sync as the path from createData for each MDX
+                 const aliasedSource = aliasedSitePath(mdxPath, siteDir);
+                 return path.join(
+                   dataDir,
+                   `${docuHash(aliasedSource)}.json`,
+                 );
+               },
              },
            },
            {
              loader: path.resolve(__dirname, './markdown/index.js'),
              options: {
                siteDir,
-               dataDir,
                docsDir,
                sourceToPermalink: sourceToPermalink,
                versionedDir,
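
Both comments in this file's hunks point at the same invariant: the file name that `createData` writes must be exactly the file name that `metadataPath` later computes for the same document. A self-contained sketch of that round trip, using the same helpers as the diff; the sample paths are invented.

```ts
import path from 'path';
import {docuHash, aliasedSitePath} from '@docusaurus/utils';

// Invented sample values.
const siteDir = '/my-site';
const dataDir = '/my-site/.docusaurus/docusaurus-plugin-content-docs';
const mdxPath = '/my-site/docs/getting-started.md';

// What loadContent stores as metadataItem.source and hands to createData:
const source = aliasedSitePath(mdxPath, siteDir); // e.g. '@site/docs/getting-started.md'
const writtenFileName = `${docuHash(source)}.json`; // file created under dataDir

// What mdx-loader later asks for via options.metadataPath for the same file:
const readPath = path.join(
  dataDir,
  `${docuHash(aliasedSitePath(mdxPath, siteDir))}.json`,
);

// Same hash of the same aliased source, so both sides point at one file:
console.log(readPath === path.join(dataDir, writtenFileName)); // true
```
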
packages/docusaurus-plugin-content-docs/src/markdown/index.ts (14 additions, 35 deletions)
@@ -5,46 +5,25 @@
  * LICENSE file in the root directory of this source tree.
  */
 
-import path from 'path';
-import {readFile} from 'fs-extra';
 import {getOptions} from 'loader-utils';
 import {loader} from 'webpack';
 import linkify from './linkify';
-import {docuHash, aliasedSitePath} from '@docusaurus/utils';
 
 export = function(fileString: string) {
   const callback = this.async();
-  const {
-    dataDir,
-    docsDir,
-    siteDir,
-    versionedDir,
-    sourceToPermalink,
-  } = getOptions(this);
-
-  // Replace all markdown linking to correct url
-  const linkifiedStr = linkify(
-    fileString,
-    this.resourcePath,
-    docsDir,
-    siteDir,
-    sourceToPermalink,
-    versionedDir,
+  const {docsDir, siteDir, versionedDir, sourceToPermalink} = getOptions(this);
+  return (
+    callback &&
+    callback(
+      null,
+      linkify(
+        fileString,
+        this.resourcePath,
+        docsDir,
+        siteDir,
+        sourceToPermalink,
+        versionedDir,
+      ),
+    )
   );
-
-  // Read metadata & then embed it to this markdown content
-  // Note that metadataPath must be the same/ in-sync as the path from createData
-  const aliasedSource = aliasedSitePath(this.resourcePath, siteDir);
-  const metadataPath = path.join(dataDir, `${docuHash(aliasedSource)}.json`);
-
-  // Add metadataPath as dependency of this loader result so that we can recompile if metadata is changed
-  this.addDependency(metadataPath);
-
-  readFile(metadataPath, 'utf8', function(err, metadata) {
-    if (err) return callback && callback(err);
-
-    const metadataStr = `export const metadata = ${metadata}`;
-    // We need to add two lines break so that mdx won't mistake it as part of previous paragraph
-    callback && callback(null, linkifiedStr + '\n\n' + metadataStr);
-  });
 } as loader.Loader;
