refactor(docs): consolidate logic (#540)
* refactor(docs): consolidate logic

* chore: slightly more debugging info

* test: more coverage

In the process of writing more test coverage, I realized we weren't wrapping one of our errors properly; it was just returning the raw JSON response. Yay tests!
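
For context on that fix: in the per-command createDoc() helpers deleted below, any lookup failure other than DOC_NOTFOUND was rejected with the raw parsed JSON body rather than a proper error object. A minimal sketch of the likely change in the new shared helper, assuming it wraps the body in the same APIError class the new test asserts against (the helper's source isn't rendered on this page):

// Sketch only; not the actual source of the consolidated helper.
function createDoc(err) {
  // Before: `return Promise.reject(err);` leaked the raw JSON body to callers.
  // After (assumed): wrap the body so the CLI surfaces a formatted API error.
  if (err.error !== 'DOC_NOTFOUND') return Promise.reject(new APIError(err));
  // ...otherwise fall through to the POST /api/v1/docs create request...
}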
kanadgupta authored Jul 21, 2022
1 parent c7e53a2 commit 9d9e40c
Showing 5 changed files with 158 additions and 199 deletions.
35 changes: 35 additions & 0 deletions __tests__/cmds/docs.test.js
@@ -682,6 +682,41 @@ describe('rdme docs:single', () => {
postMock.done();
versionMock.done();
});

it('should fail if some other error when retrieving page slug', async () => {
const slug = 'fail-doc';

const errorObject = {
error: 'INTERNAL_ERROR',
message: 'Unknown error (yikes)',
suggestion: '...a suggestion to resolve the issue...',
help: 'If you need help, email [email protected] and mention log "fake-metrics-uuid".',
};

const getMock = getNockWithVersionHeader(version)
.get(`/api/v1/docs/${slug}`)
.basicAuth({ user: key })
.reply(500, errorObject);

const versionMock = getApiNock()
.get(`/api/v1/version/${version}`)
.basicAuth({ user: key })
.reply(200, { version });

const filepath = './__tests__/__fixtures__/failure-docs/fail-doc.md';

const formattedErrorObject = {
...errorObject,
message: `Error uploading ${chalk.underline(`${filepath}`)}:\n\n${errorObject.message}`,
};

await expect(docsSingle.run({ filepath: `${filepath}`, key, version })).rejects.toStrictEqual(
new APIError(formattedErrorObject)
);

getMock.done();
versionMock.done();
});
});

describe('slug metadata', () => {
102 changes: 4 additions & 98 deletions src/cmds/docs/index.js
@@ -1,16 +1,11 @@
const chalk = require('chalk');
const config = require('config');
const fs = require('fs');
const path = require('path');
const config = require('config');
const crypto = require('crypto');
const frontMatter = require('gray-matter');
const { promisify } = require('util');

const { getProjectVersion } = require('../../lib/versionSelect');
const fetch = require('../../lib/fetch');
const { cleanHeaders, handleRes } = require('../../lib/fetch');
const { debug } = require('../../lib/logger');

const readFile = promisify(fs.readFile);
const pushDoc = require('../../lib/pushDoc');

module.exports = class DocsCommand {
constructor() {
@@ -88,98 +83,9 @@ module.exports = class DocsCommand {
return Promise.reject(new Error(`We were unable to locate Markdown files in ${folder}.`));
}

function createDoc(slug, file, filename, hash, err) {
if (err.error !== 'DOC_NOTFOUND') return Promise.reject(err);

if (dryRun) {
return `🎭 dry run! This will create '${slug}' with contents from ${filename} with the following metadata: ${JSON.stringify(
file.data
)}`;
}

return fetch(`${config.get('host')}/api/v1/docs`, {
method: 'post',
headers: cleanHeaders(key, {
'x-readme-version': selectedVersion,
'Content-Type': 'application/json',
}),
body: JSON.stringify({
slug,
body: file.content,
...file.data,
lastUpdatedHash: hash,
}),
})
.then(res => handleRes(res))
.then(res => `🌱 successfully created '${res.slug}' with contents from ${filename}`);
}

function updateDoc(slug, file, filename, hash, existingDoc) {
if (hash === existingDoc.lastUpdatedHash) {
return `${dryRun ? '🎭 dry run! ' : ''}\`${slug}\` ${
dryRun ? 'will not be' : 'was not'
} updated because there were no changes.`;
}

if (dryRun) {
return `🎭 dry run! This will update '${slug}' with contents from ${filename} with the following metadata: ${JSON.stringify(
file.data
)}`;
}

return fetch(`${config.get('host')}/api/v1/docs/${slug}`, {
method: 'put',
headers: cleanHeaders(key, {
'x-readme-version': selectedVersion,
'Content-Type': 'application/json',
}),
body: JSON.stringify(
Object.assign(existingDoc, {
body: file.content,
...file.data,
lastUpdatedHash: hash,
})
),
})
.then(res => handleRes(res))
.then(res => `✏️ successfully updated '${res.slug}' with contents from ${filename}`);
}

const updatedDocs = await Promise.all(
files.map(async filename => {
debug(`reading file ${filename}`);
const file = await readFile(filename, 'utf8');
const matter = frontMatter(file);
debug(`frontmatter for ${filename}: ${JSON.stringify(matter)}`);

// Stripping the subdirectories and markdown extension from the filename and lowercasing to get the default slug.
const slug = matter.data.slug || path.basename(filename).replace(path.extname(filename), '').toLowerCase();
const hash = crypto.createHash('sha1').update(file).digest('hex');

debug(`fetching data for ${slug}`);

return fetch(`${config.get('host')}/api/v1/docs/${slug}`, {
method: 'get',
headers: cleanHeaders(key, {
'x-readme-version': selectedVersion,
Accept: 'application/json',
}),
})
.then(res => res.json())
.then(res => {
debug(`GET /docs/:slug API response for ${slug}: ${JSON.stringify(res)}`);
if (res.error) {
debug(`error retrieving data for ${slug}, creating doc`);
return createDoc(slug, matter, filename, hash, res);
}
debug(`data received for ${slug}, updating doc`);
return updateDoc(slug, matter, filename, hash, res);
})
.catch(err => {
// eslint-disable-next-line no-param-reassign
err.message = `Error uploading ${chalk.underline(filename)}:\n\n${err.message}`;
throw err;
});
return pushDoc(key, selectedVersion, dryRun, filename);
})
);

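Both commands now delegate to a shared pushDoc(key, selectedVersion, dryRun, filepath) helper required from ../../lib/pushDoc. That new file is among the five changed files but its diff isn't rendered on this page, so the following is only a sketch of what it presumably contains, reconstructed from the logic deleted above; the internal structure, the APIError require path, and the helper names are assumptions:

// src/lib/pushDoc.js (sketch; reconstructed from the code removed in this commit)
const chalk = require('chalk');
const config = require('config');
const crypto = require('crypto');
const fs = require('fs');
const path = require('path');
const frontMatter = require('gray-matter');
const { promisify } = require('util');

const fetch = require('./fetch');
const { cleanHeaders, handleRes } = require('./fetch');
const APIError = require('./apiError'); // assumed path for the error class used in the new test
const { debug } = require('./logger');

const readFile = promisify(fs.readFile);

// Reads a single Markdown file and creates or updates the corresponding doc in ReadMe.
// Resolves to a human-readable status message; the commands wrap it in chalk.green().
module.exports = async function pushDoc(key, selectedVersion, dryRun, filepath) {
  debug(`reading file ${filepath}`);
  const file = await readFile(filepath, 'utf8');
  const matter = frontMatter(file);
  debug(`frontmatter for ${filepath}: ${JSON.stringify(matter)}`);

  // Strip subdirectories and the Markdown extension, then lowercase, to get the default slug.
  const slug = matter.data.slug || path.basename(filepath).replace(path.extname(filepath), '').toLowerCase();
  const hash = crypto.createHash('sha1').update(file).digest('hex');

  function createDoc(err) {
    // Anything other than a missing doc is a real API failure; wrap it instead of
    // rejecting with the raw JSON body (the bug the new test above covers).
    if (err.error !== 'DOC_NOTFOUND') return Promise.reject(new APIError(err));

    if (dryRun) {
      return `🎭 dry run! This will create '${slug}' with contents from ${filepath} with the following metadata: ${JSON.stringify(matter.data)}`;
    }

    return fetch(`${config.get('host')}/api/v1/docs`, {
      method: 'post',
      headers: cleanHeaders(key, { 'x-readme-version': selectedVersion, 'Content-Type': 'application/json' }),
      body: JSON.stringify({ slug, body: matter.content, ...matter.data, lastUpdatedHash: hash }),
    })
      .then(res => handleRes(res))
      .then(res => `🌱 successfully created '${res.slug}' with contents from ${filepath}`);
  }

  function updateDoc(existingDoc) {
    if (hash === existingDoc.lastUpdatedHash) {
      return `${dryRun ? '🎭 dry run! ' : ''}\`${slug}\` ${dryRun ? 'will not be' : 'was not'} updated because there were no changes.`;
    }

    if (dryRun) {
      return `🎭 dry run! This will update '${slug}' with contents from ${filepath} with the following metadata: ${JSON.stringify(matter.data)}`;
    }

    return fetch(`${config.get('host')}/api/v1/docs/${slug}`, {
      method: 'put',
      headers: cleanHeaders(key, { 'x-readme-version': selectedVersion, 'Content-Type': 'application/json' }),
      body: JSON.stringify(Object.assign(existingDoc, { body: matter.content, ...matter.data, lastUpdatedHash: hash })),
    })
      .then(res => handleRes(res))
      .then(res => `✏️ successfully updated '${res.slug}' with contents from ${filepath}`);
  }

  debug(`fetching data for ${slug}`);

  return fetch(`${config.get('host')}/api/v1/docs/${slug}`, {
    method: 'get',
    headers: cleanHeaders(key, { 'x-readme-version': selectedVersion, Accept: 'application/json' }),
  })
    .then(res => res.json())
    .then(res => {
      debug(`GET /docs/:slug API response for ${slug}: ${JSON.stringify(res)}`);
      if (res.error) {
        debug(`error retrieving data for ${slug}, creating doc`);
        return createDoc(res);
      }
      debug(`data received for ${slug}, updating doc`);
      return updateDoc(res);
    })
    .catch(err => {
      // eslint-disable-next-line no-param-reassign
      err.message = `Error uploading ${chalk.underline(filepath)}:\n\n${err.message}`;
      throw err;
    });
};

The call sites in both commands then reduce to a single awaited pushDoc(key, selectedVersion, dryRun, filepath) call followed by chalk.green() on the returned message, which matches the one-line replacements shown in the two command diffs.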
103 changes: 4 additions & 99 deletions src/cmds/docs/single.js
@@ -1,16 +1,9 @@
const chalk = require('chalk');
const fs = require('fs');
const path = require('path');
const config = require('config');
const crypto = require('crypto');
const frontMatter = require('gray-matter');
const { promisify } = require('util');
const { getProjectVersion } = require('../../lib/versionSelect');
const fetch = require('../../lib/fetch');
const { cleanHeaders, handleRes } = require('../../lib/fetch');
const { debug } = require('../../lib/logger');

const readFile = promisify(fs.readFile);
const { debug } = require('../../lib/logger');
const { getProjectVersion } = require('../../lib/versionSelect');
const pushDoc = require('../../lib/pushDoc');

module.exports = class SingleDocCommand {
constructor() {
@@ -69,95 +62,7 @@ module.exports = class SingleDocCommand {

debug(`selectedVersion: ${selectedVersion}`);

debug(`reading file ${filepath}`);
const file = await readFile(filepath, 'utf8');
const matter = frontMatter(file);
debug(`frontmatter for ${filepath}: ${JSON.stringify(matter)}`);

// Stripping the subdirectories and markdown extension from the filename and lowercasing to get the default slug.
const slug = matter.data.slug || path.basename(filepath).replace(path.extname(filepath), '').toLowerCase();
const hash = crypto.createHash('sha1').update(file).digest('hex');

function createDoc(err) {
if (err.error !== 'DOC_NOTFOUND') return Promise.reject(err);

if (dryRun) {
return `🎭 dry run! This will create '${slug}' with contents from ${filepath} with the following metadata: ${JSON.stringify(
matter.data
)}`;
}

return fetch(`${config.get('host')}/api/v1/docs`, {
method: 'post',
headers: cleanHeaders(key, {
'x-readme-version': selectedVersion,
'Content-Type': 'application/json',
}),
body: JSON.stringify({
slug,
body: matter.content,
...matter.data,
lastUpdatedHash: hash,
}),
})
.then(res => handleRes(res))
.then(res => `🌱 successfully created '${res.slug}' with contents from ${filepath}`);
}

function updateDoc(existingDoc) {
if (hash === existingDoc.lastUpdatedHash) {
return `${dryRun ? '🎭 dry run! ' : ''}\`${slug}\` ${
dryRun ? 'will not be' : 'was not'
} updated because there were no changes.`;
}

if (dryRun) {
return `🎭 dry run! This will update '${slug}' with contents from ${filepath} with the following metadata: ${JSON.stringify(
matter.data
)}`;
}

return fetch(`${config.get('host')}/api/v1/docs/${slug}`, {
method: 'put',
headers: cleanHeaders(key, {
'x-readme-version': selectedVersion,
'Content-Type': 'application/json',
}),
body: JSON.stringify(
Object.assign(existingDoc, {
body: matter.content,
...matter.data,
lastUpdatedHash: hash,
})
),
})
.then(res => handleRes(res))
.then(res => `✏️ successfully updated '${res.slug}' with contents from ${filepath}`);
}

debug(`creating doc for ${slug}`);
const createdDoc = await fetch(`${config.get('host')}/api/v1/docs/${slug}`, {
method: 'get',
headers: cleanHeaders(key, {
'x-readme-version': selectedVersion,
Accept: 'application/json',
}),
})
.then(res => res.json())
.then(res => {
debug(`GET /docs/:slug API response for ${slug}: ${JSON.stringify(res)}`);
if (res.error) {
debug(`error retrieving data for ${slug}, creating doc`);
return createDoc(res);
}
debug(`data received for ${slug}, updating doc`);
return updateDoc(res);
})
.catch(err => {
// eslint-disable-next-line no-param-reassign
err.message = `Error uploading ${chalk.underline(filepath)}:\n\n${err.message}`;
throw err;
});
const createdDoc = await pushDoc(key, selectedVersion, dryRun, filepath);

return chalk.green(createdDoc);
}
4 changes: 2 additions & 2 deletions src/lib/fetch.js
@@ -55,7 +55,7 @@ module.exports.handleRes = async function handleRes(res) {
const extension = mime.extension(contentType);
if (extension === 'json') {
const body = await res.json();
debug(`received status code ${res.status} with JSON response: ${JSON.stringify(body)}`);
debug(`received status code ${res.status} from ${res.url} with JSON response: ${JSON.stringify(body)}`);
if (body.error) {
return Promise.reject(new APIError(body));
}
@@ -64,7 +64,7 @@
// If we receive a non-JSON response, it's likely an error.
// Let's debug the raw response body and throw it.
const body = await res.text();
debug(`received status code ${res.status} with non-JSON response: ${body}`);
debug(`received status code ${res.status} from ${res.url} with non-JSON response: ${body}`);
return Promise.reject(body);
};

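The fetch.js change only adds the request URL to the two existing debug lines. As an illustration (the URL, key, and sample log text below are made up, not taken from this commit), this is roughly how a command-level request flows through handleRes and what the enhanced debug output would look like:

// Sketch: a GET whose JSON error body becomes an APIError rejection via handleRes.
const fetch = require('./src/lib/fetch');
const { cleanHeaders, handleRes } = require('./src/lib/fetch');

fetch('https://dash.readme.com/api/v1/docs/fail-doc', {
  method: 'get',
  headers: cleanHeaders('rdme-api-key', { Accept: 'application/json' }),
})
  .then(handleRes) // rejects with an APIError whenever the JSON body contains `error`
  .catch(err => console.error(err.message));

// With debug logging enabled, the updated line now identifies the endpoint, e.g.:
//   received status code 500 from https://dash.readme.com/api/v1/docs/fail-doc with JSON response: {"error":"INTERNAL_ERROR",...}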
