import { Diff } from "./diff";
import { ConvenientPatch as NodeGitPatch } from "nodegit";
type Hunk = {
new_start: number,
new_lines_cnt: number,
old_start: number,
old_lines_cnt: number,
new_lines: number[],
deleted_lines: number[],
hunk: string
}
type Hunks = {
prev: null | number,
hunks: Hunk[]
}
type PatchBounds = {
start: number,
end: number
}
interface HunkLines {
new_lines: number[],
deleted_lines: number[]
}
interface ProcessedHunk extends HunkLines {
hunk: string
}
/**
* Prepares a hunk's lines for further use: strips the leading "+"/"-" markers
* and records the indices of the added and deleted lines
*
* @param hunk - The lines of the hunk to process
* @returns A processed hunk with the cleaned-up text and the added/deleted line indices
*/
function sliceAndCountHunk(hunk: string[]): ProcessedHunk {
const lines = hunk.reduce((result: HunkLines, line, index) => {
if(line.charAt(0) === "+") {
hunk[index] = line.slice(1);
result.new_lines.push(index);
}
else if(line.charAt(0) === "-") {
hunk[index] = line.slice(1);
result.deleted_lines.push(index);
}
return result;
}, { new_lines: [], deleted_lines: [] });
return { ...lines, hunk: hunk.join("\n") };
}
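// Illustrative example (not part of the original module): for the hunk body
// ["context line", "+added line", "-removed line"], sliceAndCountHunk() returns
// { new_lines: [1], deleted_lines: [2], hunk: "context line\nadded line\nremoved line" };
// the "+"/"-" markers are stripped and the indices are relative to the hunk body.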
/**
* A representation of a patch
*/
export class Patch {
private _ng_patch: NodeGitPatch;
private _diff: Diff;
private _index: number;
public from: string;
public to: string;
public additions: number;
public deletions: number;
/**
* @param diff - The commit diff that contains the patch
* @param patch - An instance of a NodeGit patch
* @param index - The index of the patch within the diff
*/
constructor(diff: Diff, patch: NodeGitPatch, index: number) {
this._ng_patch = patch;
this._diff = diff;
this._index = index;
this.from = patch.oldFile().path();
this.to = patch.newFile().path();
this.additions = patch.lineStats()["total_additions"];
this.deletions = patch.lineStats()["total_deletions"];
}
/**
* Returns the patch's bounds
*
* @remarks
*
* These bounds are line indices into the raw text of its whole diff
*
* @returns A patch bounds object with a start and an end property
*/
private async _bounds(): Promise<PatchBounds> {
const raw_patches = (await this._diff.rawPatches()).split("\n");
const patch_header_data = await this._diff.patchHeaderData();
return {
start: patch_header_data.indexes[this._index] + patch_header_data.lengths[this._index],
end: (typeof patch_header_data.indexes[this._index + 1] === "undefined") ? raw_patches.length - 1 : patch_header_data.indexes[this._index + 1]
};
}
/**
* Returns the patch's content
*/
private async _content(): Promise<string> {
const raw_patches = (await this._diff.rawPatches()).split("\n");
const bounds = await this._bounds();
return raw_patches.slice(bounds.start, bounds.end).join("\n");
}
/**
* Returns whether the patch is too large
*
* @returns Whether or not the patch is too large
*/
public async isTooLarge(): Promise<boolean> {
const content = (await this._content()).split("\n");
const total_length = content.reduce((result, line) => result + line.length, 0);
return content.length > 5000 || total_length > 5000;
}
/**
* Returns the patch's hunks
*
* @returns An array of hunk instances, or null if the patch has no hunks
*/
public async getHunks(): Promise<Hunk[] | null> {
const content = (await this._content()).split("\n");
const hunks = await this._ng_patch.hunks();
if(hunks.length === 0) {
return null;
}
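// A hunk's body runs from its header line up to (but not including) the next hunk's
// header, so locate each header in the raw content and slice between consecutive headers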
const hunks_data = hunks.reduce((result: Hunks, hunk, hunk_index) => {
const hunk_header = hunk.header();
const hunk_header_index = content.indexOf(hunk_header.replace(/\n/gu, ""));
if(result.prev !== null) {
const prev_hunk = hunks[hunk_index - 1];
result.hunks.push({
new_start: prev_hunk.newStart(),
new_lines_cnt: prev_hunk.newLines(),
old_start: prev_hunk.oldStart(),
old_lines_cnt: prev_hunk.oldLines(),
...sliceAndCountHunk(content.slice(result.prev, hunk_header_index))
});
}
result.prev = hunk_header_index;
return result;
}, { prev: null, hunks: [] });
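// The final hunk has no following header, so it runs from its header line to the end of the patch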
const prev_hunk = hunks[hunks.length - 1];
hunks_data.hunks.push({
new_start: prev_hunk.newStart(),
new_lines_cnt: prev_hunk.newLines(),
old_start: prev_hunk.oldStart(),
old_lines_cnt: prev_hunk.oldLines(),
...sliceAndCountHunk(content.slice(<number>hunks_data.prev))
});
return hunks_data.hunks;
}
}
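// Usage sketch (illustrative, not part of this module): assumes an existing `Diff`
// wrapper `diff` and the underlying NodeGit diff `ng_diff` it was created from;
// the NodeGit patches come from NodeGit's `Diff#patches()`.
//
// const ng_patches = await ng_diff.patches();
// const patch = new Patch(diff, ng_patches[0], 0);
// if(!(await patch.isTooLarge())) {
// const hunks = await patch.getHunks();
// }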