diff --git a/src/github-handler/comment-handler/get-hunk-scope-handler/index.ts b/src/github-handler/comment-handler/get-hunk-scope-handler/index.ts deleted file mode 100644 index ee7ed517..00000000 --- a/src/github-handler/comment-handler/get-hunk-scope-handler/index.ts +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -export {getPullRequestScope} from './remote-patch-ranges-handler'; diff --git a/src/github-handler/comment-handler/get-hunk-scope-handler/remote-patch-ranges-handler.ts b/src/github-handler/comment-handler/get-hunk-scope-handler/remote-patch-ranges-handler.ts index cfb7589a..afb40d1d 100644 --- a/src/github-handler/comment-handler/get-hunk-scope-handler/remote-patch-ranges-handler.ts +++ b/src/github-handler/comment-handler/get-hunk-scope-handler/remote-patch-ranges-handler.ts @@ -13,7 +13,7 @@ // limitations under the License. import {Octokit} from '@octokit/rest'; -import {Range, RepoDomain} from '../../../types'; +import {RepoDomain, Hunk} from '../../../types'; import {logger} from '../../../logger'; import {parseHunks} from '../../diff-utils'; @@ -32,7 +32,7 @@ export async function getCurrentPullRequestPatches( pullNumber: number, pageSize: number ): Promise<{patches: Map; filesMissingPatch: string[]}> { - // TODO support pagination + // TODO: support pagination const filesMissingPatch: string[] = []; const files = ( await octokit.pulls.listFiles({ @@ -78,62 +78,50 @@ index cac8fbc..87f387c 100644 `; /** - * Given the patch text (for a whole file) for each file, - * get each file's hunk's (part of a file's) range - * @param {Map} validPatches patch text from the remote github file - * @returns {Map} the range of the remote patch - */ -export function patchTextToRanges( - validPatches: Map -): Map { - const allValidLineRanges: Map = new Map(); - validPatches.forEach((patch, filename) => { - // get each hunk range in the patch string - try { - const validHunks = parseHunks(_DIFF_HEADER + patch); - // TODO: consider returning hunks instead - const validLineRanges = validHunks.map(hunk => { - return {start: hunk.newStart, end: hunk.newEnd}; - }); - allValidLineRanges.set(filename, validLineRanges); - } catch (err) { - logger.info( - `Failed to parse the patch of file ${filename}. Resuming parsing patches...` - ); - } - }); - return allValidLineRanges; -} - -/** - * For a pull request, get each remote file's current patch range to identify the scope of each patch as a Map, - * as well as a list of files that cannot have suggestions applied to it within the Pull Request. - * The list of files are a subset of the total out-of-scope files. + * For a pull request, get each remote file's current patch range to identify the scope of each patch as a Map. 
* @param {Octokit} octokit the authenticated octokit instance * @param {RepoDomain} remote the remote repository domain information * @param {number} pullNumber the pull request number * @param {number} pageSize the number of files to return per pull request list files query - * @returns {Promise<Object<Map<string, Range[]>, string[]>>} the scope of each file in the pull request and the list of files whose patch data could not be resolved + * @returns {Promise<Map<string, Hunk[]>>} the scope of each file in the pull request */ -export async function getPullRequestScope( +export async function getPullRequestHunks( octokit: Octokit, remote: RepoDomain, pullNumber: number, pageSize: number -): Promise<{validFileLines: Map<string, Range[]>; invalidFiles: string[]}> { - try { - const {patches, filesMissingPatch} = await getCurrentPullRequestPatches( - octokit, - remote, - pullNumber, - pageSize - ); - const validFileLines = patchTextToRanges(patches); - return {validFileLines, invalidFiles: filesMissingPatch}; - } catch (err) { +): Promise<Map<string, Hunk[]>> { + const files = ( + await octokit.pulls.listFiles({ + owner: remote.owner, + repo: remote.repo, + pull_number: pullNumber, + per_page: pageSize, + }) + ).data; + const pullRequestHunks: Map<string, Hunk[]> = new Map(); + if (files.length === 0) { logger.error( - 'Could not convert the remote pull request file patch text to ranges' + `0 file results have returned from list files query for Pull Request #${pullNumber}. Cannot make suggestions on an empty Pull Request` + ); + throw Error('Empty Pull Request'); + } + files.forEach(file => { + if (file.patch === undefined) { + // files whose patch is too large do not return the patch text by default + // TODO handle file patches that are too large + logger.warn( + `File ${file.filename} may have a patch that is too large to display patch object.` + ); + } else { + const hunks = parseHunks(_DIFF_HEADER + file.patch); + pullRequestHunks.set(file.filename, hunks); + } + }); + if (pullRequestHunks.size === 0) { + logger.warn( + '0 patches have been returned. This could be because the patch results were too large to return.' ); - throw err; } + return pullRequestHunks; } diff --git a/src/github-handler/comment-handler/get-hunk-scope-handler/scope-handler.ts b/src/github-handler/comment-handler/get-hunk-scope-handler/scope-handler.ts new file mode 100644 index 00000000..6c6a9b90 --- /dev/null +++ b/src/github-handler/comment-handler/get-hunk-scope-handler/scope-handler.ts @@ -0,0 +1,90 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
+ +import {Hunk} from '../../../types'; + +interface PartitionedHunks { + validHunks: Map<string, Hunk[]>; + invalidHunks: Map<string, Hunk[]>; +} + +interface PartitionedFileHunks { + validFileHunks: Hunk[]; + invalidFileHunks: Hunk[]; +} + +function partitionFileHunks( + pullRequestHunks: Hunk[], + suggestedHunks: Hunk[] +): PartitionedFileHunks { + // check ranges: the entirety of the old range of the suggested + // hunk must fit inside the new range of the valid Hunks + let i = 0; + let candidateHunk = pullRequestHunks[i]; + const validFileHunks: Hunk[] = []; + const invalidFileHunks: Hunk[] = []; + suggestedHunks.forEach(suggestedHunk => { + while (candidateHunk && suggestedHunk.oldStart > candidateHunk.newEnd) { + i++; + candidateHunk = pullRequestHunks[i]; + } + if (!candidateHunk) { + return; + } + if ( + suggestedHunk.oldStart >= candidateHunk.newStart && + suggestedHunk.oldEnd <= candidateHunk.newEnd + ) { + validFileHunks.push(suggestedHunk); + } else { + invalidFileHunks.push(suggestedHunk); + } + }); + return {validFileHunks, invalidFileHunks}; +} + +/** + * Split suggested hunks into commentable and non-commentable hunks. Compares the new line ranges + * from pullRequestHunks against the old line ranges from allSuggestedHunks. + * @param pullRequestHunks {Map<string, Hunk[]>} The hunks parsed from the pull request, which represent the valid lines to comment on. + * @param allSuggestedHunks {Map<string, Hunk[]>} The hunks that represent suggested changes. + * @returns {PartitionedHunks} split hunks + */ +export function partitionSuggestedHunksByScope( + pullRequestHunks: Map<string, Hunk[]>, + allSuggestedHunks: Map<string, Hunk[]> +): PartitionedHunks { + const validHunks: Map<string, Hunk[]> = new Map(); + const invalidHunks: Map<string, Hunk[]> = new Map(); + allSuggestedHunks.forEach((suggestedHunks, filename) => { + const pullRequestFileHunks = pullRequestHunks.get(filename); + if (!pullRequestFileHunks) { + // file is not in the original PR + invalidHunks.set(filename, suggestedHunks); + return; + } + + const {validFileHunks, invalidFileHunks} = partitionFileHunks( + pullRequestFileHunks, + suggestedHunks + ); + if (validFileHunks.length > 0) { + validHunks.set(filename, validFileHunks); + } + if (invalidFileHunks.length > 0) { + invalidHunks.set(filename, invalidFileHunks); + } + }); + return {validHunks, invalidHunks}; +} diff --git a/src/github-handler/comment-handler/index.ts b/src/github-handler/comment-handler/index.ts index 70ece1f0..1df5a77e 100644 --- a/src/github-handler/comment-handler/index.ts +++ b/src/github-handler/comment-handler/index.ts @@ -13,11 +13,12 @@ // limitations under the License.
import {FileDiffContent, RepoDomain} from '../../types'; -import {getPullRequestScope} from './get-hunk-scope-handler/'; import {Octokit} from '@octokit/rest'; -import {getSuggestionPatches} from './raw-patch-handler'; import {makeInlineSuggestions} from './make-review-handler'; import {logger} from '../../logger'; +import {getRawSuggestionHunks} from './raw-patch-handler/raw-hunk-handler'; +import {getPullRequestHunks} from './get-hunk-scope-handler/remote-patch-ranges-handler'; +import {partitionSuggestedHunksByScope} from './get-hunk-scope-handler/scope-handler'; /** * Comment on a Pull Request @@ -36,21 +37,28 @@ export async function reviewPullRequest( diffContents: Map<string, FileDiffContent> ): Promise<void> { try { - const {invalidFiles, validFileLines} = await getPullRequestScope( + // get the hunks from the pull request + const pullRequestHunks = await getPullRequestHunks( octokit, remote, pullNumber, pageSize ); - const {filePatches, outOfScopeSuggestions} = getSuggestionPatches( - diffContents, - invalidFiles, - validFileLines + + // get the hunks from the suggested change + const allSuggestedHunks = getRawSuggestionHunks(diffContents); + + // split hunks by commentable and uncommentable + const {validHunks, invalidHunks} = partitionSuggestedHunksByScope( + pullRequestHunks, + allSuggestedHunks ); + + // create pull request review const reviewNumber = await makeInlineSuggestions( octokit, - filePatches, - outOfScopeSuggestions, + validHunks, + invalidHunks, remote, pullNumber ); diff --git a/src/github-handler/comment-handler/make-review-handler/upload-comments-handler.ts b/src/github-handler/comment-handler/make-review-handler/upload-comments-handler.ts index 4d1316fa..5f0cb392 100644 --- a/src/github-handler/comment-handler/make-review-handler/upload-comments-handler.ts +++ b/src/github-handler/comment-handler/make-review-handler/upload-comments-handler.ts @@ -13,7 +13,7 @@ // limitations under the License.
import {Octokit} from '@octokit/rest'; -import {Hunk, Patch, RepoDomain} from '../../../types'; +import {Hunk, RepoDomain} from '../../../types'; import {logger} from '../../../logger'; import {buildSummaryComment} from './message-handler'; @@ -61,25 +61,26 @@ export type PullsCreateReviewParamsComments = { * @param suggestions */ export function buildReviewComments( - suggestions: Map<string, Patch[]> + suggestions: Map<string, Hunk[]> ): Comment[] { const fileComments: Comment[] = []; - suggestions.forEach((patches: Patch[], fileName: string) => { - patches.forEach(patch => { - if (patch.start === patch.end) { + suggestions.forEach((hunks: Hunk[], fileName: string) => { + hunks.forEach(hunk => { + const newContent = hunk.newContent.join('\n'); + if (hunk.newStart === hunk.newEnd) { const singleComment: SingleLineComment = { path: fileName, - body: `\`\`\`suggestion\n${patch.newContent}\n\`\`\``, - line: patch.end, + body: `\`\`\`suggestion\n${newContent}\n\`\`\``, + line: hunk.newEnd, side: 'RIGHT', }; fileComments.push(singleComment); } else { const comment: MultilineComment = { path: fileName, - body: `\`\`\`suggestion\n${patch.newContent}\n\`\`\``, - start_line: patch.start, - line: patch.end, + body: `\`\`\`suggestion\n${newContent}\n\`\`\``, + start_line: hunk.newStart, + line: hunk.newEnd, side: 'RIGHT', start_side: 'RIGHT', }; @@ -99,7 +100,7 @@ export function buildReviewComments( */ export async function makeInlineSuggestions( octokit: Octokit, - suggestions: Map<string, Patch[]>, + suggestions: Map<string, Hunk[]>, outOfScopeSuggestions: Map<string, Hunk[]>, remote: RepoDomain, pullNumber: number diff --git a/src/github-handler/comment-handler/raw-patch-handler/hunk-to-patch-handler.ts b/src/github-handler/comment-handler/raw-patch-handler/hunk-to-patch-handler.ts deleted file mode 100644 index 5f61e900..00000000 --- a/src/github-handler/comment-handler/raw-patch-handler/hunk-to-patch-handler.ts +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -import {Hunk, Patch, FileDiffContent} from '../../../types'; - -/** - * From the each file's hunk, generate each file's hunk's old version range - * and the new content (from the user's) to update that range with.
- * @param filesHunks a list of hunks for each file where the lines are at least 1 - * @param diffContents - * @returns patches to upload to octokit - */ -export function generatePatches( - filesHunks: Map, - diffContents: Map -): Map { - const filesPatches: Map = new Map(); - filesHunks.forEach((hunks, fileName) => { - const patches: Patch[] = []; - // we expect all hunk lists to not be empty - hunks.forEach(hunk => { - // returns a copy of the hashmap value, then creates a new list with new substrings - const lines = diffContents.get(fileName)!.newContent.split('\n'); - // creates a new shallow-copied subarray - // we assume the hunk new start and new end to be within the domain of the lines length - if ( - hunk.newStart < 1 || - hunk.newEnd < 1 || - hunk.oldStart < 1 || - hunk.oldEnd < 1 - ) { - throw new RangeError('The file line value should be at least 1'); - } - const newContent = lines.slice(hunk.newStart - 1, hunk.newEnd - 1); - const patch: Patch = { - newContent: newContent.join('\n'), - start: hunk.oldStart, - end: hunk.oldEnd, - }; - patches.push(patch); - }); - filesPatches.set(fileName, patches); - }); - return filesPatches; -} diff --git a/src/github-handler/comment-handler/raw-patch-handler/in-scope-hunks-handler.ts b/src/github-handler/comment-handler/raw-patch-handler/in-scope-hunks-handler.ts deleted file mode 100644 index 55623dd2..00000000 --- a/src/github-handler/comment-handler/raw-patch-handler/in-scope-hunks-handler.ts +++ /dev/null @@ -1,196 +0,0 @@ -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -import {Hunk, Range, FileDiffContent} from '../../../types'; -import {getRawSuggestionHunks} from './raw-hunk-handler'; - -/** - * Given a value and a list of ranges, find the index of the range domain - * that the value lies within. - * Return -1 otherwise. - * We expect the ranges to be ascending, and disjoint. - * @param {Range[]} ranges a list of range objects with a start value and end value - * @param {number} value the value to search for in the range list - * @param {number} from the lower bound of the search indices. (optional) - */ -export function findRange(ranges: Range[], value: number, from = 0): number { - function binarySearch(lo: number, hi: number): number { - const mid = Math.floor((hi + lo) / 2); - if (lo > hi) { - return -1; - } else if (ranges[mid].start > value) { - return binarySearch(lo, mid - 1); - } else if (ranges[mid].end < value) { - return binarySearch(mid + 1, hi); - } else { - return mid; - } - } - return binarySearch(from, ranges.length - 1); -} - -/** - * Get a subset of the hunks who exactly fit within a valid hunk scope. - * Also get the converse of the set of hunks that are invalid. 
- * @param {Range[]} scope the list of valid ranges that a hunk can fit between - * @param {Hunk[]} suggestedHunks the hunks for the file to suggest - */ -function getScopeSuggestionHunks( - scope: Range[], - suggestedHunks: Hunk[] -): {inScopeHunks: Hunk[]; outOfScopeHunks: Hunk[]} { - const inScopeHunks: Hunk[] = []; - const outOfScopeHunks: Hunk[] = []; - let lastIndex = 0; - suggestedHunks.forEach(suggestedHunk => { - const startIndex = findRange(scope, suggestedHunk.oldStart, lastIndex); - const endIndex = findRange(scope, suggestedHunk.oldEnd, lastIndex); - lastIndex = endIndex; - // if the suggested range is not a subset of a single valid range, then skip - if (startIndex < 0 || endIndex < 0 || endIndex !== startIndex) { - outOfScopeHunks.push(suggestedHunk); - } else { - inScopeHunks.push(suggestedHunk); - } - }); - return {inScopeHunks, outOfScopeHunks}; -} - -/** - * Return all the files hunks whose scope lies inside a valid scope/ranges, and - * return files that have a non-empty hunk scope - * also return invalid hunks - * @param {Map} changeScope a map of each file's valid scope/ranges - * @param {Map} suggestions the hunks for each file to suggest - */ -export function getInScopeHunks( - changeScope: Map, - suggestions: Map -): { - inScopeFilesHunks: Map; - outOfScopeFilesHunks: Map; -} { - const inScopeFilesHunks: Map = new Map(); - const outOfScopeFilesHunks: Map = new Map(); - suggestions.forEach((suggestedHunks: Hunk[], fileName: string) => { - const scope = changeScope.get(fileName); - if (!scope) { - outOfScopeFilesHunks.set(fileName, suggestedHunks); - } else { - const {inScopeHunks, outOfScopeHunks} = getScopeSuggestionHunks( - scope, - suggestedHunks - ); - if (inScopeHunks.length) { - inScopeFilesHunks.set(fileName, inScopeHunks); - } - if (outOfScopeHunks.length) { - outOfScopeFilesHunks.set(fileName, outOfScopeHunks); - } - } - }); - return {inScopeFilesHunks, outOfScopeFilesHunks}; -} - -/** - * Return the suggestions whose filename is not in the list of invalid files to suggest - * @param {string[]} invalidFiles a list of file names - * @param {Map} suggestions the hunks for each file to suggest - */ -export function getInScopeByFileName( - invalidFiles: string[], - suggestions: Map -): Map { - // copy original suggestions to our valid suggestions map - const inScopeFiles: Map = new Map(suggestions); - // filter invalid suggestions - invalidFiles.forEach(file => { - if (inScopeFiles.has(file)) { - inScopeFiles.delete(file); - } - }); - return inScopeFiles; -} - -/** - * Get the suggestions who are out of scope because their file name has - * been recorded in the invalid file list. 
The suggestions should be the original - * unfiltered suggestions, otherwise the method will return a subset of the possible - * out of scope file names (and their respective hunks) - * @param {string} invalidFiles - * @param {Map} suggestions the unfiltered suggestions - */ -export function getOutOfScopeByFileName( - invalidFiles: string[], - suggestions: Map -) { - const invalidFileSuggestions: Map = new Map(); - invalidFiles.forEach(file => { - if (suggestions.has(file)) { - invalidFileSuggestions.set(file, suggestions.get(file) as Hunk[]); - } - }); - return invalidFileSuggestions; -} - -/** - * Get all the out of scope suggestions - * @param {Map} outOfScopeFileNameSuggestions - * @param {Map} outOfScopeHunkSuggestions - */ -export function mergeOutOfScopeSuggestions( - outOfScopeFileNameSuggestions: Map, - outOfScopeHunkSuggestions: Map -): Map { - const invalidSuggestions: Map = new Map( - outOfScopeHunkSuggestions - ); - outOfScopeFileNameSuggestions.forEach((hunks: Hunk[], fileName: string) => { - // invalid file name suggestions should be a superset of invalid hunk suggestions - invalidSuggestions.set(fileName, hunks); - }); - return invalidSuggestions; -} - -/** - * Get the in scope (of the corresponding pull request's) hunks and files - * @param {Map} diffContents the old text content and new text content of a file - * @param {string[]} invalidFiles list of invalid files - * @param {Map} validFileLines a map of each file's in scope lines for a Pull Request - */ -export function getValidSuggestionHunks( - diffContents: Map, - invalidFiles: string[], - validFileLines: Map -): { - inScopeSuggestions: Map; - outOfScopeSuggestions: Map; -} { - const totalfileHunks = getRawSuggestionHunks(diffContents); - const outofScopeByFilename = getOutOfScopeByFileName( - invalidFiles, - totalfileHunks - ); - const inScopeByFileName = getInScopeByFileName(invalidFiles, totalfileHunks); - - const scopifiedHunks = getInScopeHunks(validFileLines, inScopeByFileName); - const outOfScopeSuggestions = mergeOutOfScopeSuggestions( - outofScopeByFilename, - scopifiedHunks.outOfScopeFilesHunks - ); - return { - inScopeSuggestions: scopifiedHunks.inScopeFilesHunks, - outOfScopeSuggestions, - }; -} diff --git a/src/github-handler/comment-handler/raw-patch-handler/index.ts b/src/github-handler/comment-handler/raw-patch-handler/index.ts deleted file mode 100644 index 10955df4..00000000 --- a/src/github-handler/comment-handler/raw-patch-handler/index.ts +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -import {generatePatches} from './hunk-to-patch-handler'; -import {getValidSuggestionHunks} from './in-scope-hunks-handler'; -import {Hunk, FileDiffContent, Range, Patch} from '../../../types'; - -interface SuggestionPatches { - filePatches: Map; - outOfScopeSuggestions: Map; -} - -/** - * Get the range of the old version of every file and the corresponding new text for that range - * whose old and new contents differ, under the constraints that the file - * is in scope for Pull Request, as well as its lines. - * @param {Map} diffContents the old text content and new text content of a file - * @param {string[]} invalidFiles list of invalid files - * @param {Map} validFileLines a map of each file's in scope lines for a Pull Request - */ -export function getSuggestionPatches( - diffContents: Map, - invalidFiles: string[], - validFileLines: Map -): SuggestionPatches { - const {inScopeSuggestions, outOfScopeSuggestions} = getValidSuggestionHunks( - diffContents, - invalidFiles, - validFileLines - ); - const filePatches: Map = generatePatches( - inScopeSuggestions, - diffContents - ); - return {filePatches, outOfScopeSuggestions}; -} diff --git a/src/github-handler/diff-utils.ts b/src/github-handler/diff-utils.ts index 7cfaa09e..a463fcae 100644 --- a/src/github-handler/diff-utils.ts +++ b/src/github-handler/diff-utils.ts @@ -29,20 +29,21 @@ export function parseHunks(diff: string): Hunk[] { let newStart = chunk.newStart; let normalLines = 0; let changeSeen = false; + const newLines: string[] = []; chunk.changes.forEach(change => { - switch (change.type) { - case 'add': - case 'del': - if (!changeSeen) { - oldStart += normalLines; - newStart += normalLines; - changeSeen = true; - } - break; - case 'normal': - normalLines++; - break; + if (change.type === 'normal') { + normalLines++; + } else { + if (change.type === 'add') { + // strip off leading '+' and trailing carriage return + newLines.push(change.content.substring(1).replace(/[\n\r]+$/g, '')); + } + if (!changeSeen) { + oldStart += normalLines; + newStart += normalLines; + changeSeen = true; + } } }); const newEnd = newStart + chunk.newLines - normalLines - 1; @@ -52,6 +53,7 @@ export function parseHunks(diff: string): Hunk[] { oldEnd: oldEnd, newStart: newStart, newEnd: newEnd, + newContent: newLines, }; }); } diff --git a/src/types/index.ts b/src/types/index.ts index 68ce4dba..0200b844 100644 --- a/src/types/index.ts +++ b/src/types/index.ts @@ -162,24 +162,10 @@ export interface FileDiffContent { readonly newContent: string; } -/** - * A range object defined by lower boundary as 'start' and upper boundary as 'end' - */ -export interface Range { - readonly start: number; - readonly end: number; -} - export interface Hunk { readonly oldStart: number; readonly oldEnd: number; readonly newStart: number; readonly newEnd: number; -} - -/** - * The range of a patch along with the raw text file content - */ -export interface Patch extends Range { - readonly newContent: string; + readonly newContent: string[]; } diff --git a/test/diff-utils.ts b/test/diff-utils.ts index 8dbd811e..c3969f10 100644 --- a/test/diff-utils.ts +++ b/test/diff-utils.ts @@ -31,28 +31,60 @@ describe('parseHunks', () => { resolve(fixturePath, 'one-line-to-one.diff') ).toString(); const hunks = parseHunks(diff); - expect(hunks).to.eql([{oldStart: 5, oldEnd: 5, newStart: 5, newEnd: 5}]); + expect(hunks).to.eql([ + { + oldStart: 5, + oldEnd: 5, + newStart: 5, + newEnd: 5, + newContent: [" args: ['sleep', '301']"], + }, + ]); }); it('parses one-to-many hunks', () => { const 
diff = readFileSync( resolve(fixturePath, 'one-line-to-many.diff') ).toString(); const hunks = parseHunks(diff); - expect(hunks).to.eql([{oldStart: 7, oldEnd: 7, newStart: 7, newEnd: 8}]); + expect(hunks).to.eql([ + { + oldStart: 7, + oldEnd: 7, + newStart: 7, + newEnd: 8, + newContent: [" args: ['foobar']", ' id: asdf'], + }, + ]); }); it('parses one-to-many hunks with a newline added', () => { const diff = readFileSync( resolve(fixturePath, 'one-line-to-many-newline.diff') ).toString(); const hunks = parseHunks(diff); - expect(hunks).to.eql([{oldStart: 5, oldEnd: 5, newStart: 5, newEnd: 6}]); + expect(hunks).to.eql([ + { + oldStart: 5, + oldEnd: 5, + newStart: 5, + newEnd: 6, + newContent: [" args: ['sleep', '30']", " id: 'foobar'"], + }, + ]); }); it('parses many-to-many-hunks', () => { const diff = readFileSync( resolve(fixturePath, 'many-to-many.diff') ).toString(); const hunks = parseHunks(diff); - expect(hunks).to.eql([{oldStart: 2, oldEnd: 5, newStart: 2, newEnd: 3}]); + expect(hunks).to.eql([ + { + oldStart: 2, + oldEnd: 5, + newStart: 2, + newEnd: 3, + newContent: ["- name: 'foo'", " args: ['sleep 1']"], + }, + ]); }); it('parses many-to-one-hunks', () => { @@ -60,11 +92,21 @@ describe('parseHunks', () => { resolve(fixturePath, 'many-to-one.diff') ).toString(); const hunks = parseHunks(diff); - expect(hunks).to.eql([{oldStart: 2, oldEnd: 5, newStart: 2, newEnd: 2}]); + expect(hunks).to.eql([ + { + oldStart: 2, + oldEnd: 5, + newStart: 2, + newEnd: 2, + newContent: ["- name: 'foo'"], + }, + ]); }); it('parses deletions', () => { const diff = readFileSync(resolve(fixturePath, 'deletion.diff')).toString(); const hunks = parseHunks(diff); - expect(hunks).to.eql([{oldStart: 4, oldEnd: 5, newStart: 4, newEnd: 3}]); + expect(hunks).to.eql([ + {oldStart: 4, oldEnd: 5, newStart: 4, newEnd: 3, newContent: []}, + ]); }); }); diff --git a/test/fixtures/diffs/one-line-to-many-newline.diff b/test/fixtures/diffs/one-line-to-many-newline.diff index b7a142ce..24322b09 100644 --- a/test/fixtures/diffs/one-line-to-many-newline.diff +++ b/test/fixtures/diffs/one-line-to-many-newline.diff @@ -9,4 +9,4 @@ index 07e5def..ca56e25 100644 - args: ['sleep', '30'] \ No newline at end of file + args: ['sleep', '30'] -+ id: 'foobar \ No newline at end of file ++ id: 'foobar' \ No newline at end of file diff --git a/test/helper-review-pull-request.ts b/test/helper-review-pull-request.ts index 6e62637b..a223efd1 100644 --- a/test/helper-review-pull-request.ts +++ b/test/helper-review-pull-request.ts @@ -15,7 +15,7 @@ import {assert, expect} from 'chai'; import {describe, it, before} from 'mocha'; import {octokit, setup} from './util'; -import {RepoDomain, FileDiffContent, Patch, Hunk} from '../src/types'; +import {RepoDomain, FileDiffContent, Hunk} from '../src/types'; import {Octokit} from '@octokit/rest'; import * as proxyquire from 'proxyquire'; before(() => { @@ -36,19 +36,14 @@ describe('reviewPullRequest', () => { it('Succeeds when all values are passed as expected', async () => { let numMockedHelpersCalled = 0; - const validFileLines = new Map(); - const filePatches = new Map(); - const outOfScopeSuggestions = new Map(); - const invalidFiles = [ - 'invalid-file1.txt', - 'invalid-file2.txt', - 'invalid-file3.txt', - ]; + const validFileHunks = new Map(); + const invalidFileHunks = new Map(); + const suggestionHunks = new Map(); const stubMakePr = proxyquire.noCallThru()( '../src/github-handler/comment-handler', { - './get-hunk-scope-handler': { - getPullRequestScope: ( + 
'./get-hunk-scope-handler/remote-patch-ranges-handler': { + getPullRequestHunks: ( testOctokit: Octokit, testRemote: RepoDomain, testPullNumber: number, @@ -61,88 +56,41 @@ describe('reviewPullRequest', () => { expect(testPullNumber).equals(pullNumber); expect(testPageSize).equals(pageSize); numMockedHelpersCalled += 1; - return {invalidFiles, validFileLines}; + return validFileHunks; }, }, - './raw-patch-handler': { - getSuggestionPatches: ( - testDiffContents: Map, - testInvalidFiles: string[], - testValidFileLines: Map + './raw-patch-handler/raw-hunk-handler': { + getRawSuggestionHunks: ( + testDiffContents: Map ) => { expect(testDiffContents).equals(diffContents); - expect(testInvalidFiles).equals(invalidFiles); - expect(testValidFileLines).equals(validFileLines); numMockedHelpersCalled += 1; - return {filePatches, outOfScopeSuggestions}; + return suggestionHunks; }, }, - './make-review-handler': { - makeInlineSuggestions: ( - testOctokit: Octokit, - testFilePatches: Map, - testoutOfScopeSuggestions: Map, - testRemote: RepoDomain, - testPullNumber: number + './get-hunk-scope-handler/scope-handler': { + partitionSuggestedHunksByScope: ( + testPullRequestHunks: Map, + testSuggestedHunks: Map ) => { - expect(testOctokit).equals(octokit); - expect(testFilePatches).equals(filePatches); - expect(testRemote).equals(remote); - expect(testoutOfScopeSuggestions).equals(outOfScopeSuggestions); - expect(testPullNumber).equals(pullNumber); + expect(testPullRequestHunks).equals(validFileHunks); + expect(testSuggestedHunks).equals(suggestionHunks); numMockedHelpersCalled += 1; - }, - }, - } - ); - await stubMakePr.reviewPullRequest( - octokit, - remote, - pullNumber, - pageSize, - diffContents - ); - expect(numMockedHelpersCalled).equals(3); - }); - - it('Executes patch handler without error when valid patch is passed', async () => { - let numMockedHelpersCalled = 0; - const validFileLines = new Map(); - const invalidFiles = [ - 'invalid-file1.txt', - 'invalid-file2.txt', - 'invalid-file3.txt', - ]; - const stubMakePr = proxyquire.noCallThru()( - '../src/github-handler/comment-handler', - { - './get-hunk-scope-handler': { - getPullRequestScope: ( - testOctokit: Octokit, - testRemote: RepoDomain, - testPullNumber: number, - testPageSize: number - ) => { - expect(testOctokit).equals(octokit); - expect(testRemote.owner).equals(owner); - expect(testOctokit).equals(octokit); - expect(testRemote.repo).equals(repo); - expect(testPullNumber).equals(pullNumber); - expect(testPageSize).equals(pageSize); - numMockedHelpersCalled += 1; - return {invalidFiles, validFileLines}; + return {validHunks: validFileHunks, invalidHunks: invalidFileHunks}; }, }, './make-review-handler': { makeInlineSuggestions: ( testOctokit: Octokit, - testFilePatches: Map, - testoutOfScopeSuggestions: Map, + testValidHunks: Map, + testInvalidHunks: Map, testRemote: RepoDomain, testPullNumber: number ) => { expect(testOctokit).equals(octokit); + expect(testValidHunks).equals(validFileHunks); expect(testRemote).equals(remote); + expect(testInvalidHunks).equals(invalidFileHunks); expect(testPullNumber).equals(pullNumber); numMockedHelpersCalled += 1; }, @@ -156,16 +104,16 @@ describe('reviewPullRequest', () => { pageSize, diffContents ); - expect(numMockedHelpersCalled).equals(2); + expect(numMockedHelpersCalled).equals(4); }); - it('Passes up the error message when getPullRequestScope helper fails', async () => { + it('Passes up the error message when getPullRequestHunks helper fails', async () => { let numMockedHelpersCalled = 0; const 
stubMakePr = proxyquire.noCallThru()( '../src/github-handler/comment-handler', { - './get-hunk-scope-handler': { - getPullRequestScope: ( + './get-hunk-scope-handler/remote-patch-ranges-handler': { + getPullRequestHunks: ( testOctokit: Octokit, testRemote: RepoDomain, testPullNumber: number, @@ -178,7 +126,7 @@ describe('reviewPullRequest', () => { expect(testPullNumber).equals(pullNumber); expect(testPageSize).equals(pageSize); numMockedHelpersCalled += 1; - throw new Error('getPullRequestScope failed'); + throw new Error('getPullRequestHunks failed'); }, }, } @@ -194,142 +142,7 @@ describe('reviewPullRequest', () => { assert.ok(false); } catch (err) { expect(numMockedHelpersCalled).equals(1); - expect(err.message).equals('getPullRequestScope failed'); - } - }); - - it('Passes up the error message when getSuggestionPatches helper fails', async () => { - let numMockedHelpersCalled = 0; - const validFileLines = new Map(); - const invalidFiles = [ - 'invalid-file1.txt', - 'invalid-file2.txt', - 'invalid-file3.txt', - ]; - const stubMakePr = proxyquire.noCallThru()( - '../src/github-handler/comment-handler', - { - './get-hunk-scope-handler': { - getPullRequestScope: ( - testOctokit: Octokit, - testRemote: RepoDomain, - testPullNumber: number, - testPageSize: number - ) => { - expect(testOctokit).equals(octokit); - expect(testRemote.owner).equals(owner); - expect(testOctokit).equals(octokit); - expect(testRemote.repo).equals(repo); - expect(testPullNumber).equals(pullNumber); - expect(testPageSize).equals(pageSize); - numMockedHelpersCalled += 1; - return {invalidFiles, validFileLines}; - }, - }, - './raw-patch-handler': { - getSuggestionPatches: ( - testDiffContents: Map, - testInvalidFiles: string[], - testValidFileLines: Map - ) => { - expect(testDiffContents).equals(diffContents); - expect(testInvalidFiles).equals(invalidFiles); - expect(testValidFileLines).equals(validFileLines); - numMockedHelpersCalled += 1; - throw new Error('getSuggestionPatches failed'); - }, - }, - } - ); - try { - await stubMakePr.reviewPullRequest( - octokit, - remote, - pullNumber, - pageSize, - diffContents - ); - assert.ok(false); - } catch (err) { - expect(numMockedHelpersCalled).equals(2); - expect(err.message).equals('getSuggestionPatches failed'); - } - }); - - it('Passes up the error message when get makeInlineSuggestions helper fails', async () => { - let numMockedHelpersCalled = 0; - const validFileLines = new Map(); - const filePatches = new Map(); - const outOfScopeSuggestions = new Map(); - const invalidFiles = [ - 'invalid-file1.txt', - 'invalid-file2.txt', - 'invalid-file3.txt', - ]; - const stubMakePr = proxyquire.noCallThru()( - '../src/github-handler/comment-handler', - { - './get-hunk-scope-handler': { - getPullRequestScope: ( - testOctokit: Octokit, - testRemote: RepoDomain, - testPullNumber: number, - testPageSize: number - ) => { - expect(testOctokit).equals(octokit); - expect(testRemote.owner).equals(owner); - expect(testOctokit).equals(octokit); - expect(testRemote.repo).equals(repo); - expect(testPullNumber).equals(pullNumber); - expect(testPageSize).equals(pageSize); - numMockedHelpersCalled += 1; - return {invalidFiles, validFileLines}; - }, - }, - './raw-patch-handler': { - getSuggestionPatches: ( - testDiffContents: Map, - testInvalidFiles: string[], - testValidFileLines: Map - ) => { - expect(testDiffContents).equals(diffContents); - expect(testInvalidFiles).equals(invalidFiles); - expect(testValidFileLines).equals(validFileLines); - numMockedHelpersCalled += 1; - return {filePatches, 
outOfScopeSuggestions}; - }, - }, - './make-review-handler': { - makeInlineSuggestions: ( - testOctokit: Octokit, - testFilePatches: Map, - testoutOfScopeSuggestions: Map, - testRemote: RepoDomain, - testPullNumber: number - ) => { - expect(testOctokit).equals(octokit); - expect(testFilePatches).equals(filePatches); - expect(testRemote).equals(remote); - expect(testoutOfScopeSuggestions).equals(outOfScopeSuggestions); - expect(testPullNumber).equals(pullNumber); - numMockedHelpersCalled += 1; - throw new Error('makeInlineSuggestions failed'); - }, - }, - } - ); - try { - await stubMakePr.reviewPullRequest( - octokit, - remote, - pullNumber, - pageSize, - diffContents - ); - assert.ok(false); - } catch (err) { - expect(numMockedHelpersCalled).equals(3); - expect(err.message).equals('makeInlineSuggestions failed'); + expect(err.message).equals('getPullRequestHunks failed'); } }); }); diff --git a/test/hunk-to-patch.ts b/test/hunk-to-patch.ts deleted file mode 100644 index 3342f530..00000000 --- a/test/hunk-to-patch.ts +++ /dev/null @@ -1,240 +0,0 @@ -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -import {expect} from 'chai'; -import {describe, it, before, beforeEach} from 'mocha'; -import {setup} from './util'; -import {generatePatches} from '../src/github-handler/comment-handler/raw-patch-handler/hunk-to-patch-handler'; -import {Hunk, FileDiffContent} from '../src/types'; - -before(() => { - setup(); -}); - -describe('generatePatches', () => { - const diffContents: Map = new Map(); - const filesHunks: Map = new Map(); - - const fileName1Addition = 'file-1.txt'; - const fileName2Addition = 'file-2.txt'; - const fileNameDelete = 'file-3.txt'; - const fileNameSpecialChar1 = 'file-4.txt'; - const fileNameEmtpy = 'file-5.txt'; - const fileNameNonEmtpy = 'file-6.txt'; - - beforeEach(() => { - diffContents.clear(); - filesHunks.clear(); - }); - - it('Gets the correct substrings when there is 1 addition', () => { - diffContents.set(fileName1Addition, { - oldContent: - 'line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\n', - newContent: - 'addition+line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\n', - }); - filesHunks.set(fileName1Addition, [ - {oldStart: 1, oldEnd: 6, newStart: 1, newEnd: 7}, - ]); - const filePatches = generatePatches(filesHunks, diffContents); - expect(filePatches.get(fileName1Addition)!).deep.equals([ - { - start: 1, - end: 6, - newContent: 'addition+line1\nline2\nline3\nline4\nline5\nline6', - }, - ]); - }); - - it('Gets the correct substrings when there is 2 additions', () => { - diffContents.set(fileName2Addition, { - oldContent: - 'line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\n', - newContent: - 'line0\nline1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10+another addition\n', - }); - filesHunks.set(fileName2Addition, [ - {oldStart: 1, oldEnd: 6, newStart: 1, newEnd: 7}, - {oldStart: 9, oldEnd: 12, newStart: 10, newEnd: 13}, - ]); - const filePatches = 
generatePatches(filesHunks, diffContents); - expect(filePatches.get(fileName2Addition)!).deep.equals([ - { - start: 1, - end: 6, - newContent: 'line0\nline1\nline2\nline3\nline4\nline5', - }, - { - start: 9, - end: 12, - newContent: 'line9\nline10+another addition\n', - }, - ]); - }); - - it('Gets the correct substrings when there is 1 deletion', () => { - diffContents.set(fileNameDelete, { - oldContent: - 'line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\n', - newContent: - 'line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\n', - }); - filesHunks.set(fileNameDelete, [ - {oldStart: 9, oldEnd: 12, newStart: 9, newEnd: 11}, - ]); - const filePatches = generatePatches(filesHunks, diffContents); - expect(filePatches.get(fileNameDelete)!).deep.equals([ - { - start: 9, - end: 12, - newContent: 'line9\n', - }, - ]); - }); - - it('Gets the correct substrings when there is a special patch char prepending the text', () => { - diffContents.set(fileNameSpecialChar1, { - oldContent: - 'line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\n', - newContent: - '+line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\n', - }); - - filesHunks.set(fileNameSpecialChar1, [ - {oldStart: 1, oldEnd: 2, newStart: 1, newEnd: 2}, - ]); - const filePatches = generatePatches(filesHunks, diffContents); - expect(filePatches.get(fileNameSpecialChar1)!).deep.equals([ - { - start: 1, - end: 2, - newContent: '+line1', - }, - ]); - }); - - it('Gets the correct substrings when the file is now an empty string', () => { - diffContents.set(fileNameEmtpy, { - oldContent: 'line1', - newContent: '', - }); - - filesHunks.set(fileNameEmtpy, [ - {oldStart: 1, oldEnd: 2, newStart: 1, newEnd: 1}, - ]); - const filePatches = generatePatches(filesHunks, diffContents); - expect(filePatches.get(fileNameEmtpy)!).deep.equals([ - { - start: 1, - end: 2, - newContent: '', - }, - ]); - }); - - it('Gets the correct substrings when the empty string file is now has text', () => { - diffContents.set(fileNameNonEmtpy, { - oldContent: '', - newContent: - 'line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\n', - }); - - filesHunks.set(fileNameNonEmtpy, [ - {oldStart: 1, oldEnd: 1, newStart: 1, newEnd: 12}, - ]); - const filePatches = generatePatches(filesHunks, diffContents); - expect(filePatches.get(fileNameNonEmtpy)!).deep.equals([ - { - start: 1, - end: 1, - newContent: - 'line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\n', - }, - ]); - }); - - it('Throws an error when the new start hunk line is 0', () => { - diffContents.set(fileNameNonEmtpy, { - oldContent: '', - newContent: - 'line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\n', - }); - filesHunks.set(fileNameNonEmtpy, [ - {oldStart: 1, oldEnd: 1, newStart: 0, newEnd: 12}, - ]); - try { - generatePatches(filesHunks, diffContents); - expect.fail( - 'Should have errored because the new start line is < 1. Value should be >= 1' - ); - } catch (err) { - expect(err instanceof RangeError).equals(true); - } - }); - it('Throws an error when the new end hunk line is 0', () => { - diffContents.set(fileNameNonEmtpy, { - oldContent: '', - newContent: - 'line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\n', - }); - filesHunks.set(fileNameNonEmtpy, [ - {oldStart: 2, oldEnd: 1, newStart: 1, newEnd: 0}, - ]); - try { - generatePatches(filesHunks, diffContents); - expect.fail( - 'Should have errored because the new end line is < 1. 
Value should be >= 1' - ); - } catch (err) { - expect(err instanceof RangeError).equals(true); - } - }); - it('Throws an error when the old start hunk line is 0', () => { - diffContents.set(fileNameNonEmtpy, { - oldContent: '', - newContent: - 'line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\n', - }); - filesHunks.set(fileNameNonEmtpy, [ - {oldStart: 0, oldEnd: 1, newStart: 1, newEnd: 12}, - ]); - try { - generatePatches(filesHunks, diffContents); - expect.fail( - 'Should have errored because the old start line is < 1. Value should be >= 1' - ); - } catch (err) { - expect(err instanceof RangeError).equals(true); - } - }); - it('Throws an error when theold end hunk line is 0', () => { - diffContents.set(fileNameNonEmtpy, { - oldContent: '', - newContent: - 'line1\nline2\nline3\nline4\nline5\nline6\nline7\nline8\nline9\nline10\n', - }); - filesHunks.set(fileNameNonEmtpy, [ - {oldStart: 2, oldEnd: 0, newStart: 1, newEnd: 2}, - ]); - try { - generatePatches(filesHunks, diffContents); - expect.fail( - 'Should have errored because the old end line is < 1. Value should be >= 1' - ); - } catch (err) { - expect(err instanceof RangeError).equals(true); - } - }); -}); diff --git a/test/inline-suggest.ts b/test/inline-suggest.ts index ee6ce505..971733f5 100644 --- a/test/inline-suggest.ts +++ b/test/inline-suggest.ts @@ -21,7 +21,7 @@ import { buildReviewComments, PullsCreateReviewParamsComments, } from '../src/github-handler/comment-handler/make-review-handler/upload-comments-handler'; -import {Patch, Hunk} from '../src/types'; +import {Hunk} from '../src/types'; before(() => { setup(); @@ -29,14 +29,16 @@ before(() => { describe('buildFileComments', () => { it('Maps patches to GitHub comment object types', () => { - const suggestions: Map = new Map(); + const suggestions: Map = new Map(); const fileName1 = 'foo.txt'; - const patch1: Patch = { - start: 1, - end: 2, - newContent: 'Foo', + const hunk1: Hunk = { + oldStart: 1, + oldEnd: 2, + newStart: 1, + newEnd: 2, + newContent: ['Foo'], }; - suggestions.set(fileName1, [patch1]); + suggestions.set(fileName1, [hunk1]); const comments = buildReviewComments(suggestions); expect(comments).deep.equals([ { @@ -50,21 +52,25 @@ describe('buildFileComments', () => { ]); }); it('Maps multiple patches to GitHub comment object types', () => { - const suggestions: Map = new Map(); + const suggestions: Map = new Map(); const fileName1 = 'foo.txt'; const fileName2 = 'bar.txt'; - const patch1: Patch = { - start: 1, - end: 2, - newContent: 'Foo', + const hunk1: Hunk = { + oldStart: 1, + oldEnd: 2, + newStart: 1, + newEnd: 2, + newContent: ['Foo'], }; - const patch2: Patch = { - start: 3, - end: 4, - newContent: 'Bar', + const hunk2: Hunk = { + oldStart: 3, + oldEnd: 4, + newStart: 3, + newEnd: 4, + newContent: ['Bar'], }; - suggestions.set(fileName2, [patch1]); - suggestions.set(fileName1, [patch1, patch2]); + suggestions.set(fileName2, [hunk1]); + suggestions.set(fileName1, [hunk1, hunk2]); const comments = buildReviewComments(suggestions); expect(comments).deep.equals([ { @@ -94,19 +100,21 @@ describe('buildFileComments', () => { ]); }); it('Maps empty suggestion to empty list', () => { - const suggestions: Map = new Map(); + const suggestions: Map = new Map(); const comments = buildReviewComments(suggestions); expect(comments.length).deep.equals(0); }); it('Builds single line comments', () => { - const suggestions: Map = new Map(); + const suggestions: Map = new Map(); const fileName1 = 'foo.txt'; - const patch1: Patch = { - start: 1, - end: 
1, - newContent: 'Foo', + const hunk1: Hunk = { + oldStart: 1, + oldEnd: 1, + newStart: 1, + newEnd: 1, + newContent: ['Foo'], }; - suggestions.set(fileName1, [patch1]); + suggestions.set(fileName1, [hunk1]); const comments = buildReviewComments(suggestions); expect(comments).deep.equals([ { @@ -121,22 +129,24 @@ describe('buildFileComments', () => { describe('makeInlineSuggestions', () => { const sandbox = sinon.createSandbox(); - const suggestions: Map = new Map(); + const suggestions: Map = new Map(); const outOfScopeSuggestions: Map = new Map(); afterEach(() => { sandbox.restore(); suggestions.clear(); }); const fileName1 = 'foo.txt'; - const patch1: Patch = { - start: 1, - end: 2, - newContent: 'Foo', + const hunk1: Hunk = { + oldStart: 1, + oldEnd: 2, + newStart: 1, + newEnd: 2, + newContent: ['Foo'], }; const remote = {owner: 'upstream-owner', repo: 'upstream-repo'}; const pullNumber = 711; it("Calls Octokit with the correct values and returns the successfully created review's number", async () => { - suggestions.set(fileName1, [patch1]); + suggestions.set(fileName1, [hunk1]); const responseData = await import( './fixtures/get-pull-request-response.json' ); @@ -302,7 +312,7 @@ describe('makeInlineSuggestions', () => { it('Throws and does not continue when get pull request fails', async () => { // setup - suggestions.set(fileName1, [patch1]); + suggestions.set(fileName1, [hunk1]); const stubGetPulls = sandbox .stub(octokit.pulls, 'get') .rejects(new Error()); @@ -325,7 +335,7 @@ describe('makeInlineSuggestions', () => { }); it('Throws when create review comments fails', async () => { // setup - suggestions.set(fileName1, [patch1]); + suggestions.set(fileName1, [hunk1]); const responseData = await import( './fixtures/get-pull-request-response.json' ); diff --git a/test/pull-request-hunks.ts b/test/pull-request-hunks.ts new file mode 100644 index 00000000..137cec3c --- /dev/null +++ b/test/pull-request-hunks.ts @@ -0,0 +1,106 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import {expect} from 'chai'; +import {describe, it, before, afterEach} from 'mocha'; +import {setup} from './util'; +import * as sinon from 'sinon'; +import {Octokit} from '@octokit/rest'; +import {getPullRequestHunks} from '../src/github-handler/comment-handler/get-hunk-scope-handler/remote-patch-ranges-handler'; + +before(() => { + setup(); +}); + +describe('getPullRequestHunks', () => { + const upstream = {owner: 'upstream-owner', repo: 'upstream-repo'}; + const pullNumber = 10; + const pageSize = 80; + const octokit = new Octokit({}); + const sandbox = sinon.createSandbox(); + afterEach(() => { + sandbox.restore(); + }); + + it('Returns the correct values when octokit and patch text parsing function execute properly', async () => { + // setup + const patch = `@@ -1,2 +1,5 @@ + Hello world +-! 
++Goodbye World ++gOodBYE world ++ ++Goodbye World`; + const listFilesOfPRResult = { + headers: {}, + status: 200, + url: 'http://fake-url.com', + data: [ + { + sha: 'a1d470fa4d7b04450715e3e02d240a34517cd988', + filename: 'Readme.md', + status: 'modified', + additions: 4, + deletions: 1, + changes: 5, + blob_url: + 'https://github.com/TomKristie/HelloWorld/blob/eb53f3871f56e8dd6321e44621fe6ac2da1bc120/Readme.md', + raw_url: + 'https://github.com/TomKristie/HelloWorld/raw/eb53f3871f56e8dd6321e44621fe6ac2da1bc120/Readme.md', + contents_url: + 'https://api.github.com/repos/TomKristie/HelloWorld/contents/Readme.md?ref=eb53f3871f56e8dd6321e44621fe6ac2da1bc120', + patch: patch, + }, + ], + }; + const stub = sandbox + .stub(octokit.pulls, 'listFiles') + .resolves(listFilesOfPRResult); + + // tests + const pullRequestHunks = await getPullRequestHunks( + octokit, + upstream, + pullNumber, + pageSize + ); + sandbox.assert.calledOnceWithExactly(stub, { + owner: upstream.owner, + repo: upstream.repo, + pull_number: pullNumber, + per_page: pageSize, + }); + const hunks = pullRequestHunks.get('Readme.md'); + expect(hunks).not.equals(null); + expect(hunks!.length).equals(1); + expect(hunks![0].newStart).equals(2); + expect(hunks![0].newEnd).equals(5); + }); + + it('Passes up the error when a sub-method fails', async () => { + // setup + const errorMsg = 'Test error for list files'; + sandbox.stub(octokit.pulls, 'listFiles').rejects(new Error(errorMsg)); + + // tests + try { + await getPullRequestHunks(octokit, upstream, pullNumber, pageSize); + expect.fail( + 'The getPullRequestHunks function should have failed because Octokit failed.' + ); + } catch (err) { + expect(err.message).equals(errorMsg); + } + }); +}); diff --git a/test/remote-github-patch-text.ts b/test/remote-github-patch-text.ts index a445d838..b2124130 100644 --- a/test/remote-github-patch-text.ts +++ b/test/remote-github-patch-text.ts @@ -16,11 +16,7 @@ import {expect} from 'chai'; import {describe, it, before, afterEach} from 'mocha'; import {setup} from './util'; import * as sinon from 'sinon'; -import { - patchTextToRanges, - getCurrentPullRequestPatches, - getPullRequestScope, -} from '../src/github-handler/comment-handler/get-hunk-scope-handler/remote-patch-ranges-handler'; +import {getCurrentPullRequestPatches} from '../src/github-handler/comment-handler/get-hunk-scope-handler/remote-patch-ranges-handler'; import {Octokit} from '@octokit/rest'; import {logger} from '../src/logger'; @@ -240,218 +236,3 @@ describe('getCurrentPullRequestPatches', () => { expect(filesMissingPatch[0]).equals('Readme.md'); }); }); - -describe('patchTextToRanges', () => { - it('Returns an empty range file record when there is no patch text', () => { - const patchText = new Map(); - const ranges = patchTextToRanges(patchText); - expect(ranges.size).equals(0); - }); - it('Does not throw an error when the patch text is an empty string and does not add the patch to the valid range map', () => { - const patchText = new Map(); - patchText.set('invalid-patch.txt', ''); - const ranges = patchTextToRanges(patchText); - expect(ranges.get('invalid-patch.txt')).to.eql([]); - }); - it('Does not throw an error when the patch text is a string that does not contain patch data add the patch to the valid range map', () => { - const patchText = new Map(); - patchText.set('invalid-patch.txt', '@@ this is some invalid patch data @@'); - const ranges = patchTextToRanges(patchText); - expect(ranges.get('invalid-patch.txt')).to.eql([]); - }); - it('Calculates ranges with an 
inclusive lower bound', () => { - const multilinePatch = `@@ -1,2 +1,5 @@ - Hello world --! -+Goodbye World -+gOodBYE world -+ -+Goodbye World`; - const multilineFileName = 'multi-line-patch.txt'; - const patchText = new Map(); - patchText.set(multilineFileName, multilinePatch); - const ranges = patchTextToRanges(patchText).get(multilineFileName); - expect(ranges).not.equals(null); - expect(ranges!.length).equals(1); - expect(ranges![0].start).equals(2); - expect(ranges![0].end).equals(5); - }); - it('Calculates ranges with an exclusive upper bound', () => { - const firstLinePatch = `@@ -0,0 +1 @@ -+console.log('Hello World!');`; - const multiToSingleLineFileName = 'multi-to-single-line-patch.txt'; - const patchText = new Map(); - patchText.set(multiToSingleLineFileName, firstLinePatch); - const ranges = patchTextToRanges(patchText).get(multiToSingleLineFileName); - expect(ranges).not.equals(null); - expect(ranges!.length).equals(1); - expect(ranges![0].start).equals(1); - expect(ranges![0].end).equals(1); - }); - it('Returns a single range file record when there is a single multiline patch hunk', () => { - const multilinePatch = `@@ -1,2 +1,5 @@ - Hello world --! -+Goodbye World -+gOodBYE world -+ -+Goodbye World`; - const multilineFileName = 'multi-line-patch.txt'; - const patchText = new Map(); - patchText.set(multilineFileName, multilinePatch); - const ranges = patchTextToRanges(patchText).get(multilineFileName); - expect(ranges).not.equals(null); - expect(ranges!.length).equals(1); - expect(ranges![0].start).equals(2); - expect(ranges![0].end).equals(5); - }); - it('Returns a single range file record when there is a single 1 line patch hunk', () => { - const firstLinePatch = `@@ -1 +1 @@ --Hello foo -+`; - const singleLineFileName = 'single-line-patch.txt'; - const patchText = new Map(); - patchText.set(singleLineFileName, firstLinePatch); - const ranges = patchTextToRanges(patchText).get(singleLineFileName); - expect(ranges).not.equals(null); - expect(ranges!.length).equals(1); - expect(ranges![0].start).equals(1); - expect(ranges![0].end).equals(1); - }); - it('Returns a single range file record when there is a single 1 line to multiline line patch hunk', () => { - const singleToMultilineFormat = `@@ -1 +0,0 @@ --hello world`; - const singleToMultilineFileName = 'single-line-to-multiline-patch.txt'; - const patchText = new Map(); - patchText.set(singleToMultilineFileName, singleToMultilineFormat); - const ranges = patchTextToRanges(patchText).get(singleToMultilineFileName); - expect(ranges).not.equals(null); - expect(ranges!.length).equals(1); - expect(ranges![0].start).equals(0); - // FIXME: See #127 - expect(ranges![0].end).equals(-1); - }); - it('Returns a single range file record when there is a single multiline to 1 line patch hunk', () => { - const firstLinePatch = `@@ -0,0 +1 @@ -+console.log('Hello World!');`; - const multiToSingleLineFileName = 'multi-to-single-line-patch.txt'; - const patchText = new Map(); - patchText.set(multiToSingleLineFileName, firstLinePatch); - const ranges = patchTextToRanges(patchText).get(multiToSingleLineFileName); - expect(ranges).not.equals(null); - expect(ranges!.length).equals(1); - expect(ranges![0].start).equals(1); - expect(ranges![0].end).equals(1); - }); - it('Returns a single range file record when there is a single multiline to 1 line patch hunk', () => { - const multiplePatches = `@@ -356,6 +356,7 @@ - Hello - Hello - Hello -+Bye - Hello - Hello - Hello -@@ -6576,8 +6577,7 @@ - Hello - Hello - Hello --Hello --Hello -+Bye - Hello - 
Hello - Hello`; - const multiplePatchesFileName = 'multiple-patches.txt'; - const patchText = new Map(); - patchText.set(multiplePatchesFileName, multiplePatches); - const ranges = patchTextToRanges(patchText).get(multiplePatchesFileName); - expect(ranges).not.equals(null); - expect(ranges!.length).equals(2); - expect(ranges![0].start).equals(359); - expect(ranges![0].end).equals(359); - expect(ranges![1].start).equals(6580); - expect(ranges![1].end).equals(6580); - }); -}); - -describe('getPullRequestScope', () => { - const upstream = {owner: 'upstream-owner', repo: 'upstream-repo'}; - const pullNumber = 10; - const pageSize = 80; - const octokit = new Octokit({}); - const sandbox = sinon.createSandbox(); - afterEach(() => { - sandbox.restore(); - }); - - it('Returns the correct values when octokit and patch text parsing function execute properly', async () => { - // setup - const patch = `@@ -1,2 +1,5 @@ - Hello world --! -+Goodbye World -+gOodBYE world -+ -+Goodbye World`; - const listFilesOfPRResult = { - headers: {}, - status: 200, - url: 'http://fake-url.com', - data: [ - { - sha: 'a1d470fa4d7b04450715e3e02d240a34517cd988', - filename: 'Readme.md', - status: 'modified', - additions: 4, - deletions: 1, - changes: 5, - blob_url: - 'https://github.com/TomKristie/HelloWorld/blob/eb53f3871f56e8dd6321e44621fe6ac2da1bc120/Readme.md', - raw_url: - 'https://github.com/TomKristie/HelloWorld/raw/eb53f3871f56e8dd6321e44621fe6ac2da1bc120/Readme.md', - contents_url: - 'https://api.github.com/repos/TomKristie/HelloWorld/contents/Readme.md?ref=eb53f3871f56e8dd6321e44621fe6ac2da1bc120', - patch: patch, - }, - ], - }; - const stub = sandbox - .stub(octokit.pulls, 'listFiles') - .resolves(listFilesOfPRResult); - - // tests - const { - validFileLines, - invalidFiles: filesMissingPatch, - } = await getPullRequestScope(octokit, upstream, pullNumber, pageSize); - sandbox.assert.calledOnceWithExactly(stub, { - owner: upstream.owner, - repo: upstream.repo, - pull_number: pullNumber, - per_page: pageSize, - }); - expect(validFileLines.size).equals(1); - expect(validFileLines.get('Readme.md')).not.equals(null); - expect(validFileLines.get('Readme.md')?.length).equals(1); - expect(validFileLines.get('Readme.md')![0].start).equals(2); - expect(validFileLines.get('Readme.md')![0].end).equals(5); - expect(filesMissingPatch.length).equals(0); - }); - - it('Passes up the error when a sub-method fails', async () => { - // setup - const errorMsg = 'Test error for list files'; - sandbox.stub(octokit.pulls, 'listFiles').rejects(new Error(errorMsg)); - - // tests - try { - await getPullRequestScope(octokit, upstream, pullNumber, pageSize); - expect.fail( - 'The getPullRequestScope function should have failed because Octokit failed.' - ); - } catch (err) { - expect(err.message).equals(errorMsg); - } - }); -}); diff --git a/test/scope-handler.ts b/test/scope-handler.ts new file mode 100644 index 00000000..9cb03986 --- /dev/null +++ b/test/scope-handler.ts @@ -0,0 +1,159 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +import {describe, it, before} from 'mocha'; +import {setup} from './util'; +import {partitionSuggestedHunksByScope} from '../src/github-handler/comment-handler/get-hunk-scope-handler/scope-handler'; +import {expect} from 'chai'; +import {Hunk} from '../src/types'; + +before(() => { + setup(); +}); + +describe('partitionSuggestedHunksByScope', () => { + it('allows multiple suggestions in a single hunk', () => { + const prHunk = { + oldStart: 10, + oldEnd: 20, + newStart: 10, + newEnd: 20, + newContent: [], + }; + const pullRequestHunks: Map = new Map(); + pullRequestHunks.set('file1.txt', [prHunk]); + + const hunk1 = { + oldStart: 10, + oldEnd: 12, + newStart: 10, + newEnd: 12, + newContent: [], + }; + const hunk2 = { + oldStart: 15, + oldEnd: 15, + newStart: 20, + newEnd: 20, + newContent: [], + }; + const suggestedHunks: Map = new Map(); + suggestedHunks.set('file1.txt', [hunk1, hunk2]); + + const {validHunks, invalidHunks} = partitionSuggestedHunksByScope( + pullRequestHunks, + suggestedHunks + ); + expect(validHunks.get('file1.txt')!.length).to.equal(2); + expect(validHunks.get('file1.txt')).to.eql([hunk1, hunk2]); + expect(invalidHunks.size).to.equal(0); + }); + + it('allows multiple suggestions in a second hunk', () => { + const prHunk1 = { + oldStart: 10, + oldEnd: 20, + newStart: 10, + newEnd: 20, + newContent: [], + }; + const prHunk2 = { + oldStart: 30, + oldEnd: 40, + newStart: 30, + newEnd: 40, + newContent: [], + }; + const pullRequestHunks: Map = new Map(); + pullRequestHunks.set('file1.txt', [prHunk1, prHunk2]); + + const hunk1 = { + oldStart: 32, + oldEnd: 35, + newStart: 32, + newEnd: 35, + newContent: [], + }; + const suggestedHunks: Map = new Map(); + suggestedHunks.set('file1.txt', [hunk1]); + + const {validHunks, invalidHunks} = partitionSuggestedHunksByScope( + pullRequestHunks, + suggestedHunks + ); + expect(validHunks.get('file1.txt')!.length).to.equal(1); + expect(validHunks.get('file1.txt')).to.eql([hunk1]); + expect(invalidHunks.size).to.equal(0); + }); + + it('disallows hunk not included in range', () => { + const prHunk = { + oldStart: 10, + oldEnd: 20, + newStart: 10, + newEnd: 20, + newContent: ['original'], + }; + const pullRequestHunks: Map = new Map(); + pullRequestHunks.set('file1.txt', [prHunk]); + + const hunk1 = { + oldStart: 5, + oldEnd: 12, + newStart: 5, + newEnd: 12, + newContent: ['hunk1'], + }; + const hunk2 = { + oldStart: 15, + oldEnd: 25, + newStart: 15, + newEnd: 25, + newContent: ['hunk2'], + }; + const suggestedHunks: Map = new Map(); + suggestedHunks.set('file1.txt', [hunk1, hunk2]); + + const {validHunks, invalidHunks} = partitionSuggestedHunksByScope( + pullRequestHunks, + suggestedHunks + ); + expect(validHunks.size).to.equal(0); + expect(invalidHunks.get('file1.txt')!.length).to.equal(2); + expect(invalidHunks.get('file1.txt')).to.eql([hunk1, hunk2]); + }); + + it('disallows files not included in valid files', () => { + const pullRequestHunks: Map = new Map(); + pullRequestHunks.set('file1.txt', []); + + const hunk1 = { + oldStart: 1, + oldEnd: 1, + newStart: 1, + newEnd: 1, + newContent: [], + }; + const suggestedHunks: Map = new Map(); + suggestedHunks.set('file2.txt', [hunk1]); + + const {validHunks, invalidHunks} = partitionSuggestedHunksByScope( + pullRequestHunks, + suggestedHunks + ); + expect(validHunks.size).to.equal(0); + expect(invalidHunks.get('file2.txt')!.length).to.equal(1); + 
expect(invalidHunks.get('file2.txt')).to.eql([hunk1]); + }); +}); diff --git a/test/scope-suggestion-hunks.ts b/test/scope-suggestion-hunks.ts deleted file mode 100644 index 1525a199..00000000 --- a/test/scope-suggestion-hunks.ts +++ /dev/null @@ -1,294 +0,0 @@ -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -import {expect} from 'chai'; -import {describe, it, before, beforeEach} from 'mocha'; -import {setup} from './util'; -import { - findRange, - getInScopeHunks, - getInScopeByFileName, - getOutOfScopeByFileName, - mergeOutOfScopeSuggestions, -} from '../src/github-handler/comment-handler/raw-patch-handler/in-scope-hunks-handler'; -import {Range, Hunk} from '../src/types'; - -before(() => { - setup(); -}); - -describe('findRange', () => { - const ranges: Range[] = [ - {start: 1, end: 2}, - {start: 5, end: 8}, - {start: 10, end: 12}, - {start: 14, end: 15}, - {start: 20, end: 25}, - {start: 100, end: 125}, - ]; - it('Returns the index of the interval that contains the numeric input', () => { - expect(findRange(ranges, 1)).equals(0); - expect(findRange(ranges, 2)).equals(0); - expect(findRange(ranges, 100)).equals(5); - expect(findRange(ranges, 125)).equals(5); - expect(findRange(ranges, 126)).equals(-1); - expect(findRange(ranges, 0)).equals(-1); - expect(findRange(ranges, 3)).equals(-1); - }); - it('Returns -1 if there is no interval domain in which the numeric input satisfies', () => { - expect(findRange(ranges, 126)).equals(-1); - expect(findRange(ranges, 0)).equals(-1); - expect(findRange(ranges, 3)).equals(-1); - }); -}); - -describe('getInScopeHunks', () => { - const scope: Map = new Map(); - const suggestions: Map = new Map(); - const fileName1 = 'foo.txt'; - const fileName2 = 'bar.txt'; - const fileName3 = 'baz.txt'; - scope.set(fileName1, [ - {start: 1, end: 20}, - {start: 50, end: 79}, - ]); - scope.set(fileName2, [{start: 15, end: 54}]); - scope.set(fileName3, [{start: 1, end: 2}]); - - beforeEach(() => { - suggestions.clear(); - }); - - it('Calculates file hunks that are in scope for the pull request', () => { - suggestions.set(fileName1, [ - {oldStart: 1, oldEnd: 20, newStart: 1, newEnd: 30}, - ]); - suggestions.set(fileName2, [ - {oldStart: 50, oldEnd: 54, newStart: 50, newEnd: 53}, - ]); - suggestions.set(fileName3, [ - {oldStart: 1, oldEnd: 2, newStart: 1, newEnd: 2}, - ]); - const {inScopeFilesHunks} = getInScopeHunks(scope, suggestions); - expect(inScopeFilesHunks.size).equals(3); - expect(inScopeFilesHunks.get(fileName1)!).deep.equals([ - {oldStart: 1, oldEnd: 20, newStart: 1, newEnd: 30}, - ]); - expect(inScopeFilesHunks.get(fileName2)!).deep.equals([ - {oldStart: 50, oldEnd: 54, newStart: 50, newEnd: 53}, - ]); - expect(inScopeFilesHunks.get(fileName3)!).deep.equals([ - {oldStart: 1, oldEnd: 2, newStart: 1, newEnd: 2}, - ]); - }); - - it('Calculates file hunks that are in scope for the pull request even if the suggestions are a subset of the total PR scope', () => { - suggestions.set(fileName1, [ - {oldStart: 1, oldEnd: 20, 
newStart: 1, newEnd: 30}, - ]); - const {inScopeFilesHunks} = getInScopeHunks(scope, suggestions); - expect(inScopeFilesHunks.size).equals(1); - expect(inScopeFilesHunks.get(fileName1)!).deep.equals([ - {oldStart: 1, oldEnd: 20, newStart: 1, newEnd: 30}, - ]); - }); - - it('Calculates file hunks that are out of scope for the pull request even if the suggestions are a subset of the total PR scope', () => { - suggestions.set(fileName1, [ - {oldStart: 1, oldEnd: 20, newStart: 1, newEnd: 30}, - ]); - const {outOfScopeFilesHunks} = getInScopeHunks(scope, suggestions); - expect(outOfScopeFilesHunks.size).equals(0); - }); - - it('Calculates files hunks that are out of scope for the pull request because the lines are out of scope', () => { - suggestions.set(fileName1, [ - {oldStart: 20, oldEnd: 49, newStart: 20, newEnd: 49}, - ]); - suggestions.set(fileName2, [ - {oldStart: 14, oldEnd: 55, newStart: 15, newEnd: 54}, - ]); - const {inScopeFilesHunks, outOfScopeFilesHunks} = getInScopeHunks( - scope, - suggestions - ); - expect(inScopeFilesHunks.size).equals(0); - expect(outOfScopeFilesHunks.size).equals(2); - expect(outOfScopeFilesHunks.get(fileName1)).deep.equals([ - {oldStart: 20, oldEnd: 49, newStart: 20, newEnd: 49}, - ]); - }); - - it('Calculates files hunks that are out of scope for the pull request because the file is not in scope', () => { - suggestions.set('non-existant-file.txt', [ - {oldStart: 1, oldEnd: 2, newStart: 1, newEnd: 2}, - ]); - const {inScopeFilesHunks, outOfScopeFilesHunks} = getInScopeHunks( - scope, - suggestions - ); - expect(inScopeFilesHunks.size).equals(0); - expect(outOfScopeFilesHunks.size).equals(1); - expect(outOfScopeFilesHunks.get('non-existant-file.txt')).deep.equals([ - {oldStart: 1, oldEnd: 2, newStart: 1, newEnd: 2}, - ]); - }); -}); - -describe('getInScopeByFileName', () => { - const fileName1 = 'foo.txt'; - const fileName2 = 'bar.txt'; - const fileName3 = 'baz.txt'; - const invalidFiles = [ - fileName3, - 'BAZ.txt', - 'baz-1.txt', - 'bbaz.txt', - 'foo/bar/baz.txt', - ]; - const suggestions: Map = new Map(); - - beforeEach(() => { - suggestions.clear(); - }); - - it('Calculates files that are valid', () => { - suggestions.set(fileName1, [ - {oldStart: 1, oldEnd: 20, newStart: 1, newEnd: 30}, - ]); - suggestions.set(fileName2, [ - {oldStart: 50, oldEnd: 54, newStart: 50, newEnd: 53}, - ]); - const inScopeFiles = getInScopeByFileName(invalidFiles, suggestions); - expect(inScopeFiles.size).equals(2); - expect(inScopeFiles.get(fileName1)).deep.equals([ - {oldStart: 1, oldEnd: 20, newStart: 1, newEnd: 30}, - ]); - expect(inScopeFiles.get(fileName2)).deep.equals([ - {oldStart: 50, oldEnd: 54, newStart: 50, newEnd: 53}, - ]); - }); - - it('Does not include files that do not appear in the invalid list', () => { - suggestions.set(fileName3, [ - {oldStart: 1, oldEnd: 20, newStart: 1, newEnd: 30}, - ]); - const inScopeFiles = getInScopeByFileName(invalidFiles, suggestions); - expect(inScopeFiles.size).equals(0); - }); -}); - -describe('getOutOfScopeByFileName', () => { - const fileName1 = 'foo.txt'; - const fileName2 = 'bar.txt'; - const fileName3 = 'baz.txt'; - const invalidFiles = [fileName1, fileName2, fileName3]; - const suggestions: Map = new Map(); - - beforeEach(() => { - suggestions.clear(); - }); - - it('Calculates files that are invalid and gets their hunks', () => { - suggestions.set(fileName1, [ - {oldStart: 1, oldEnd: 20, newStart: 1, newEnd: 30}, - ]); - suggestions.set(fileName2, [ - {oldStart: 50, oldEnd: 54, newStart: 50, newEnd: 53}, - ]); - 
suggestions.set('valid-file.txt', [ - {oldStart: 50, oldEnd: 54, newStart: 50, newEnd: 53}, - ]); - const outOfScopeFiles = getOutOfScopeByFileName(invalidFiles, suggestions); - expect(outOfScopeFiles.size).equals(2); - expect(outOfScopeFiles.get(fileName1)).deep.equals([ - {oldStart: 1, oldEnd: 20, newStart: 1, newEnd: 30}, - ]); - expect(outOfScopeFiles.get(fileName2)).deep.equals([ - {oldStart: 50, oldEnd: 54, newStart: 50, newEnd: 53}, - ]); - expect(outOfScopeFiles.has('valid-file.txt')).equals(false); - }); - - it('Does not include files if they are all valid', () => { - suggestions.set('valid-file-1.txt', [ - {oldStart: 50, oldEnd: 54, newStart: 50, newEnd: 53}, - ]); - suggestions.set('valid-file-2.txt', [ - {oldStart: 50, oldEnd: 54, newStart: 50, newEnd: 53}, - ]); - const outofScopeFiles = getOutOfScopeByFileName(invalidFiles, suggestions); - expect(outofScopeFiles.size).equals(0); - }); -}); - -describe('mergeOutOfScopeSuggestions', () => { - const fileName1 = 'foo.txt'; - const fileName2 = 'bar.txt'; - const fileName3 = 'baz.txt'; - const invalidFileNameSuggestions: Map = new Map(); - const invalidHunkSuggestions: Map = new Map(); - - beforeEach(() => { - invalidFileNameSuggestions.clear(); - invalidHunkSuggestions.clear(); - }); - - it('Gets all the files whose file name is out of scope and/or has out of scope hunks', () => { - invalidFileNameSuggestions.set(fileName1, [ - {oldStart: 1, oldEnd: 20, newStart: 1, newEnd: 30}, - ]); - invalidFileNameSuggestions.set(fileName2, [ - {oldStart: 50, oldEnd: 54, newStart: 50, newEnd: 53}, - ]); - invalidHunkSuggestions.set(fileName3, [ - {oldStart: 2, oldEnd: 54, newStart: 2, newEnd: 10}, - ]); - const outOfScope = mergeOutOfScopeSuggestions( - invalidFileNameSuggestions, - invalidHunkSuggestions - ); - expect(outOfScope.size).equals(3); - expect(outOfScope.get(fileName1)).deep.equals([ - {oldStart: 1, oldEnd: 20, newStart: 1, newEnd: 30}, - ]); - expect(outOfScope.get(fileName2)).deep.equals([ - {oldStart: 50, oldEnd: 54, newStart: 50, newEnd: 53}, - ]); - expect(outOfScope.get(fileName3)).deep.equals([ - {oldStart: 2, oldEnd: 54, newStart: 2, newEnd: 10}, - ]); - }); - - it('Takes the invalid files hunk data over the invalid hunk data because invalid file hunks are a superset', () => { - invalidFileNameSuggestions.set(fileName1, [ - {oldStart: 1, oldEnd: 20, newStart: 1, newEnd: 30}, - {oldStart: 100, oldEnd: 120, newStart: 100, newEnd: 130}, - {oldStart: 1000, oldEnd: 1200, newStart: 1000, newEnd: 1300}, - ]); - invalidHunkSuggestions.set(fileName1, [ - {oldStart: 1, oldEnd: 20, newStart: 1, newEnd: 30}, - ]); - const outOfScope = mergeOutOfScopeSuggestions( - invalidFileNameSuggestions, - invalidHunkSuggestions - ); - expect(outOfScope.size).equals(1); - expect(outOfScope.get(fileName1)).deep.equals([ - {oldStart: 1, oldEnd: 20, newStart: 1, newEnd: 30}, - {oldStart: 100, oldEnd: 120, newStart: 100, newEnd: 130}, - {oldStart: 1000, oldEnd: 1200, newStart: 1000, newEnd: 1300}, - ]); - }); -}); diff --git a/test/suggestion-patch.ts b/test/suggestion-patch.ts deleted file mode 100644 index 341b06cc..00000000 --- a/test/suggestion-patch.ts +++ /dev/null @@ -1,150 +0,0 @@ -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -import {expect} from 'chai'; -import {describe, it, before, beforeEach} from 'mocha'; -import {setup} from './util'; -import {getValidSuggestionHunks} from '../src/github-handler/comment-handler/raw-patch-handler/in-scope-hunks-handler'; -import {Range, FileDiffContent} from '../src/types'; - -before(() => { - setup(); -}); - -describe('getValidSuggestionHunks', () => { - const scope: Map = new Map(); - const fileName1 = 'foo.txt'; - const fileName2 = 'bar.txt'; - const invalidFile = 'baz.txt'; - const invalidFiles = [invalidFile]; - scope.set(fileName1, [{start: 1, end: 2}]); - scope.set(fileName2, [{start: 2, end: 11}]); - const diffContents: Map = new Map(); - - beforeEach(() => { - diffContents.clear(); - }); - - it('Finds file hunks that are in scope for the pull request', () => { - diffContents.set(fileName2, { - oldContent: - 'bar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar', - newContent: - 'bar\nbar\nbar\nbar\nbar\nbar\nfoo\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar', - }); - const suggestions = getValidSuggestionHunks( - diffContents, - invalidFiles, - scope - ); - expect(suggestions.inScopeSuggestions.size).equals(1); - }); - - it('Excludes file hunks that are not in scope for the pull request', () => { - diffContents.set(fileName1, { - oldContent: - 'bar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nfoo', - newContent: - 'bar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nfoo', - }); - const suggestions = getValidSuggestionHunks( - diffContents, - invalidFiles, - scope - ); - expect(suggestions.inScopeSuggestions.size).equals(0); - }); - - it('Excludes suggestion files that are not in scope because the file is not in scope', () => { - diffContents.set('non-existant-file-that-is-not-invalid.txt', { - oldContent: 'foo', - newContent: 'bar', - }); - const suggestions = getValidSuggestionHunks( - diffContents, - invalidFiles, - scope - ); - expect(suggestions.inScopeSuggestions.size).equals(0); - }); - - it('Excludes suggestion files that are not in scope because the file is invalid', () => { - diffContents.set(invalidFile, {oldContent: 'foo', newContent: 'bar'}); - const suggestions = getValidSuggestionHunks( - diffContents, - invalidFiles, - scope - ); - expect(suggestions.inScopeSuggestions.size).equals(0); - }); - - it('Does not include suggestion files that have no change', () => { - diffContents.set(fileName1, {oldContent: 'foo', newContent: 'foo'}); - const suggestions = getValidSuggestionHunks( - diffContents, - invalidFiles, - scope - ); - expect(suggestions.inScopeSuggestions.size).equals(0); - }); - - it('Calculates in scope and out of scope files that are mutually exclusive', () => { - // in scope hunk - diffContents.set(fileName2, { - oldContent: - 'bar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar', - newContent: - 'bar\nbar\nbar\nbar\nbar\nbar\nfoo\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar', - }); - // out of scope 
hunks - diffContents.set(fileName1, { - oldContent: - 'bar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nfoo', - newContent: - 'bar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar', - }); - // same before and after - diffContents.set('same-before-and-after.text', { - oldContent: - 'bar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nfoo', - newContent: - 'bar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nbar\nfoo', - }); - // out of scope file name - diffContents.set('non-existant-file-that-is-not-invalid.txt', { - oldContent: 'foo', - newContent: 'bar', - }); - // out of scope file name - diffContents.set(invalidFile, { - oldContent: 'foo', - newContent: 'bar', - }); - const suggestions = getValidSuggestionHunks( - diffContents, - invalidFiles, - scope - ); - expect(suggestions.inScopeSuggestions.size).equals(1); - expect(suggestions.inScopeSuggestions.has(fileName2)).equals(true); - expect(suggestions.outOfScopeSuggestions.size).equals(3); - expect( - suggestions.outOfScopeSuggestions.has( - 'non-existant-file-that-is-not-invalid.txt' - ) - ).equals(true); - expect(suggestions.outOfScopeSuggestions.has(invalidFile)).equals(true); - expect(suggestions.outOfScopeSuggestions.has(fileName1)).equals(true); - }); -});
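
For orientation, here is a minimal sketch of how the two functions exercised by the new tests in this diff, `getPullRequestHunks` and `partitionSuggestedHunksByScope`, might be wired together. The exported names, the `Hunk` field shape, and the import paths follow the test files above; the `demo` wrapper, the page size of 100, and the `Readme.md` suggestion are illustrative assumptions, not code from this PR.

```ts
// Rough usage sketch only; `demo`, the page size, and the Readme.md suggestion
// below are hypothetical. Signatures and the Hunk shape follow this diff.
import {Octokit} from '@octokit/rest';
import {Hunk, RepoDomain} from '../src/types';
import {getPullRequestHunks} from '../src/github-handler/comment-handler/get-hunk-scope-handler/remote-patch-ranges-handler';
import {partitionSuggestedHunksByScope} from '../src/github-handler/comment-handler/get-hunk-scope-handler/scope-handler';

async function demo(octokit: Octokit, remote: RepoDomain, pullNumber: number) {
  // Hunks currently on the pull request, keyed by file name.
  const pullRequestHunks = await getPullRequestHunks(
    octokit,
    remote,
    pullNumber,
    100 // assumed page size for listFiles
  );

  // A hypothetical suggested edit to Readme.md, expressed as a Hunk.
  const suggestedHunks: Map<string, Hunk[]> = new Map();
  suggestedHunks.set('Readme.md', [
    {oldStart: 2, oldEnd: 5, newStart: 2, newEnd: 2, newContent: ['Goodbye World']},
  ]);

  // Keep only suggestions that fall inside the PR's own hunks; the rest are
  // returned separately so callers can surface out-of-scope suggestions.
  const {validHunks, invalidHunks} = partitionSuggestedHunksByScope(
    pullRequestHunks,
    suggestedHunks
  );
  return {validHunks, invalidHunks};
}
```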