Initial check-in of new React docs and website

Co-authored-by: Dan Abramov <dan.abramov@me.com>
Co-authored-by: Sylwia Vargas <sylwia.vargas@gmail.com>
Co-authored-by: Dan Lebowitz <dan.lebo@me.com>
Co-authored-by: Razvan Gradinar <grazvan@fb.com>
Co-authored-by: Jared Palmer <jared@palmer.net>
Co-authored-by: Dane Grant <danecando@gmail.com>
Co-authored-by: Dustin Goodman <dustin.s.goodman@gmail.com>
Co-authored-by: Rick Hanlon <rickhanlonii@gmail.com>
Co-authored-by: Maggie Appleton <maggie.fm.appleton@gmail.com>
Co-authored-by: Alex Moldovan <alex.n.moldovan@gmail.com>
Co-authored-by: Ives van Hoorne <ives.v.h@gmail.com>
Co-authored-by: Brian Vaughn <bvaughn@fb.com>
This commit is contained in:
Rachel Nabors
2021-10-21 22:57:15 +01:00
committed by Dan Abramov
parent b62e4475d6
commit 5f404d978c
904 changed files with 53180 additions and 49731 deletions

View File

@@ -0,0 +1,69 @@
const fs = require('fs-extra');
const path = require('path');
const fm = require('gray-matter');
const globby = require('globby');
const parseISO = require('date-fns/parseISO');
const readingTime = require('reading-time');
const {markdownToHtml} = require('../plugins/markdownToHtml');
/**
* This looks at the ./src/pages/blog directory and creates a route manifest that can be used
* in the sidebar and footers, and (in theory) category and author pages.
*
* For now, the blog manifest is a big array in reverse chronological order.
*/
// Build the blog manifest and the "recent posts" sidebar JSON.
// Top-level async work is wrapped in a promise chain so any failure
// is reported via the trailing .catch instead of an unhandled rejection.
Promise.resolve()
  .then(async () => {
    const routes = [];
    const blogPosts = await globby('src/pages/blog/**/*.md');
    for (let postpath of blogPosts) {
      // Posts live at src/pages/blog/YYYY/MM/DD/title.md, so the
      // relative path segments encode the publication date.
      const [year, month, day, title] = postpath
        .replace('src/pages/blog/', '')
        .split('/');
      const rawStr = await fs.readFile(postpath, 'utf8');
      const {data, excerpt, content} = fm(rawStr, {
        // Custom excerpt: the first two lines of the body, joined.
        excerpt: function firstLine(file, options) {
          file.excerpt = file.content.split('\n').slice(0, 2).join(' ');
        },
      });
      // .trim() already removes leading whitespace, so the previous
      // .trimLeft().trim() chain was redundant.
      const rendered = await markdownToHtml(excerpt.trim());
      routes.unshift({
        path: postpath.replace('src/pages', ''),
        date: [year, month, day].join('-'),
        title: data.title,
        author: data.author,
        excerpt: rendered,
        readingTime: readingTime(content).text,
      });
    }
    // Reverse chronological order: newest post first.
    const sorted = routes.sort((post1, post2) =>
      parseISO(post1.date) > parseISO(post2.date) ? -1 : 1
    );
    const blogManifest = {
      routes: sorted,
    };
    // Sidebar only shows the 25 most recent posts.
    const blogRecentSidebar = {
      routes: [
        {
          title: 'Recent Posts',
          path: '/blog',
          heading: true,
          routes: sorted.slice(0, 25),
        },
      ],
    };
    await fs.writeFile(
      path.resolve('./src/blogIndex.json'),
      JSON.stringify(blogManifest, null, 2)
    );
    await fs.writeFile(
      path.resolve('./src/blogIndexRecent.json'),
      JSON.stringify(blogRecentSidebar, null, 2)
    );
  })
  .catch(console.error);

View File

@@ -8,7 +8,7 @@ const GithubSlugger = require('github-slugger');
function walk(dir) {
let results = [];
const list = fs.readdirSync(dir);
list.forEach(function(file) {
list.forEach(function (file) {
file = dir + '/' + file;
const stat = fs.statSync(file);
if (stat && stat.isDirectory()) {
@@ -38,7 +38,7 @@ function addHeaderID(line, slugger) {
const headingText = line.slice(line.indexOf(' ')).trim();
const headingLevel = line.slice(0, line.indexOf(' '));
return `${headingLevel} ${headingText} {#${slugger.slug(
stripLinks(headingText),
stripLinks(headingText)
)}}`;
}
@@ -47,7 +47,7 @@ function addHeaderIDs(lines) {
const slugger = new GithubSlugger();
let inCode = false;
const results = [];
lines.forEach(line => {
lines.forEach((line) => {
// Ignore code blocks
if (line.startsWith('```')) {
inCode = !inCode;
@@ -67,8 +67,8 @@ function addHeaderIDs(lines) {
const [path] = process.argv.slice(2);
const files = walk(path);
files.forEach(file => {
if (!file.endsWith('.md')) {
files.forEach((file) => {
if (!(file.endsWith('.md') || file.endsWith('.mdx'))) {
return;
}

46
scripts/generateRSS.js Normal file
View File

@@ -0,0 +1,46 @@
const RSS = require('rss');
const fs = require('fs-extra');
const authorsJson = require('../src/authors.json');
const blogIndexJson = require('../src/blogIndex.json');
const parse = require('date-fns/parse');
/**
 * Strip everything from the final occurrence of `key` onward.
 * Returns `path` unchanged when `key` does not appear in it.
 */
function removeFromLast(path, key) {
  const idx = path.lastIndexOf(key);
  if (idx === -1) {
    return path;
  }
  return path.substring(0, idx);
}
const SITE_URL = 'https://reactjs.org';
/**
 * Build the blog RSS feed from the prebuilt blog index and write it
 * to the static output directory as feed.xml.
 */
function generate() {
  const feed = new RSS({
    title: 'React.js Blog',
    site_url: SITE_URL,
    feed_url: SITE_URL + '/feed.xml',
  });
  // Plain loop rather than .map(): we only want the side effect of
  // adding feed items, not a discarded array of undefineds.
  for (const meta of blogIndexJson.routes) {
    feed.item({
      title: meta.title,
      // Strip the file extension so guid/url match the page routes.
      guid: removeFromLast(meta.path, '.'),
      url: SITE_URL + removeFromLast(meta.path, '.'),
      date: parse(meta.date, 'yyyy-MM-dd', new Date()),
      description: meta.description,
      // One <author> element per post author, resolved via authors.json.
      custom_elements: [].concat(
        meta.author.map((author) => ({
          author: [{ name: authorsJson[author].name }],
        }))
      ),
    });
  }
  const rss = feed.xml({ indent: true });
  fs.writeFileSync('./.next/static/feed.xml', rss);
}
// Run at build time; rethrow so a broken feed fails the build loudly
// instead of silently shipping without feed.xml.
try {
  generate();
} catch (error) {
  console.error('Error generating rss feed');
  throw error;
}

View File

@@ -0,0 +1,50 @@
const fs = require('fs-extra');
const path = require('path');
const fm = require('gray-matter');
const globby = require('globby');
const parse = require('date-fns/parse');
/**
* This script takes the gatsby blog posts directory and migrates it.
*
* In gatsby, blog posts were put in markdown files title YYYY-MM-DD-post-title.md.
* This script looks at that directory and then moves posts into folders paths
 * that match the end URL structure of /blog/YYYY/MM/DD/post-title.md
*
* This allows us to use MDX in blog posts.
*/
// I dropped them into src/pages/oldblog
// @todo remove after migration
// I am not proud of this. Also, the blog posts needed to be cleaned up for MDX, don't run this again.
// One-off migration: move gatsby-style YYYY-MM-DD-title.md posts into
// the nested /blog/YYYY/MM/DD/title.md layout used by the Next.js site.
Promise.resolve()
  .then(async () => {
    const blogManifest = {};
    const blogPosts = await globby('src/pages/oldblog/*.md');
    for (let postpath of blogPosts.sort()) {
      const rawStr = await fs.readFile(postpath, 'utf8');
      const {data, content} = fm(rawStr);
      const cleanPath = postpath.replace('src/pages/oldblog/', '');
      const dateStr = cleanPath.substr(0, 10); // e.g. 2013-06-02
      const postFileName = cleanPath.substr(11); // everything after the date
      // 2013-06-02 -> 2013/06/02 directory nesting
      const datePath = dateStr.split('-').join('/');
      const newPath = './src/pages/blog/' + datePath + '/' + postFileName;
      await fs.ensureFile(path.resolve(newPath));
      await fs.writeFile(
        path.resolve(newPath),
        // Self-close void tags for MDX compatibility. Use global
        // regexes: string patterns to .replace() only substitute the
        // FIRST occurrence, but posts can contain many <br>/<hr> tags.
        // The frontmatter keys appear once, so single replace is fine.
        rawStr
          .replace(/<br>/g, '<br/>')
          .replace(/<hr>/g, '<hr/>')
          .replace('layout: post', '')
          .replace('\nauthor', '\nlayout: Post\nauthor')
      );
    }
  })
  .catch(console.error);

View File

@@ -0,0 +1,35 @@
const fs = require('fs-extra');
const path = require('path');
const fm = require('gray-matter');
const globby = require('globby');
/**
* This script ensures that every file in the docs folder is named corresponding
* to its respective frontmatter permalink. In the old site, the path of the page was set by
 * the `permalink` in markdown frontmatter, and not the name of the file itself or its id.
* In the new Next.js site, with its filesystem router, the name of the file must
* match exactly to its `permalink`.
*/
// Rename every docs page so its filesystem path matches its frontmatter
// `permalink`, which is what the Next.js filesystem router requires.
Promise.resolve()
  .then(async () => {
    const pages = await globby('src/pages/docs/**/*.{md,mdx}');
    for (let sourcePath of pages.sort()) {
      const rawStr = await fs.readFile(sourcePath, 'utf8');
      const {data} = fm(rawStr);
      // Strip only the trailing extension with an anchored regex.
      // A plain .replace('.md', '') mangles .mdx files ("foo.mdx" ->
      // "foox") and could hit ".md" mid-path, so paths never matched
      // their permalinks and every .mdx file was needlessly moved.
      const currentPath = sourcePath
        .replace('src/pages/', '')
        .replace(/\.mdx?$/, '');
      const permalink = data.permalink.replace('.html', '');
      if (permalink !== currentPath) {
        // Preserve the original extension so MDX pages stay MDX.
        const ext = sourcePath.endsWith('.mdx') ? '.mdx' : '.md';
        const destPath = 'src/pages/' + permalink + ext;
        try {
          await fs.move(sourcePath, destPath);
          console.log(`MOVED: ${sourcePath} --> ${destPath}`);
        } catch (error) {
          console.error(`ERROR: ${sourcePath} --> ${destPath}`);
          console.error(error);
        }
      }
    }
  })
  .catch(console.error);

View File

@@ -0,0 +1,117 @@
const fs = require('fs-extra');
const path = require('path');
const fm = require('gray-matter');
const globby = require('globby');
/**
* This script takes a look at all the redirect frontmatter and converts it
* into a Next.js compatible redirects list. It also merges it with netlify's
* _redirects, which we moved by hand below.
*
* @remarks
* In the old gatsby site, redirects were specified in docs and blog post
* frontmatter that looks like:
*
* ---
* redirect_from:
* - /docs/old-path.html#maybe-an-anchor
* ---
*/
// Redirects carried over by hand from netlify's _redirects file,
// expressed in Next.js `redirects` config shape.
const netlifyRedirects = [
  {
    source: '/html-jsx.html',
    destination: 'https://magic.reactjs.net/htmltojsx.htm',
    permanent: true,
  },
  {
    source: '/tips/controlled-input-null-value.html',
    destination: '/docs/forms.html#controlled-input-null-value',
    permanent: false, // @todo why were these not permanent on netlify?
  },
  {
    source: '/concurrent',
    destination: '/docs/concurrent-mode-intro.html',
    permanent: false,
  },
  {
    source: '/hooks',
    destination: '/docs/hooks-intro.html',
    permanent: false,
  },
  {
    source: '/tutorial',
    destination: '/tutorial/tutorial.html',
    permanent: false,
  },
  // Off-site vanity URLs.
  {
    source: '/your-story',
    destination: 'https://www.surveymonkey.co.uk/r/MVQV2R9',
    permanent: true,
  },
  {
    source: '/stories',
    destination: 'https://medium.com/react-community-stories',
    permanent: true,
  },
];
// Collect `redirect_from` frontmatter from every markdown page and write
// a combined redirects.json (content redirects + hand-migrated netlify ones).
Promise.resolve()
  .then(async () => {
    let contentRedirects = [];
    let redirectPageCount = 0;
    // Get all markdown pages
    const pages = await globby('src/pages/**/*.{md,mdx}');
    for (let filepath of pages) {
      // Read file as string
      const rawStr = await fs.readFile(filepath, 'utf8');
      // Extract frontmatter
      const {data} = fm(rawStr);
      // Look for redirect yaml
      if (data.redirect_from) {
        redirectPageCount++;
        // Strip only the trailing extension with an anchored regex:
        // the glob matches .mdx too, and a plain .replace('.md', '')
        // would turn "foo.mdx" into "foox" in the destination.
        let destinationPath = filepath
          .replace('src/pages', '')
          .replace(/\.mdx?$/, '');
        // Fix /docs/index -> /docs
        if (destinationPath === '/docs/index') {
          destinationPath = '/docs';
        }
        if (destinationPath === '/index') {
          destinationPath = '/';
        }
        for (let sourcePath of data.redirect_from) {
          contentRedirects.push({
            source: '/' + sourcePath, // add slash
            destination: destinationPath,
            permanent: true,
          });
        }
      }
    }
    console.log(
      `Found ${redirectPageCount} pages with \`redirect_from\` frontmatter`
    );
    console.log(
      `Writing ${contentRedirects.length} redirects to redirects.json`
    );
    await fs.writeFile(
      path.resolve('./src/redirects.json'),
      JSON.stringify(
        {
          redirects: [...contentRedirects, ...netlifyRedirects],
        },
        null,
        2
      )
    );
    console.log('✅ Done writing redirects');
  })
  .catch(console.error);