-
-
Notifications
You must be signed in to change notification settings - Fork 5
Expand file tree
/
Copy pathgenerateCommitMessage.js
More file actions
61 lines (51 loc) · 1.5 KB
/
generateCommitMessage.js
File metadata and controls
61 lines (51 loc) · 1.5 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
import openAiModel from '../models/openai.js';
import ollamaModel from '../models/ollama.js';
import config from './config.json';
import {gitDiff, gitStatus, initGit} from './commit.js';
// Build a conventional commit message (category + summary) from the current git diff.
/**
 * Generate a commit message for the currently staged/working changes.
 *
 * @param {object} flags - CLI flags forwarded to the model wrapper.
 * @param {string} model - Model identifier (e.g. 'gpt-4o-mini', 'llama3.1:8b').
 * @returns {Promise<string|false>} Formatted commit message, or `false` when
 *   there is nothing to commit or the model produced no usable response.
 */
async function generateCommitMessage(flags, model) {
  const maxDiffSize = config.maxDiffSize;
  await initGit();
  const status = await gitStatus();
  // Nothing to commit — bail out before doing any diff/model work.
  if (status === false) {
    return false;
  }
  const gitDiffContent = await gitDiff();
  // FIX: the original referenced an undefined `firstFilePath` (ReferenceError
  // on every successful path). Derive a best-effort path from the status
  // output. NOTE(review): assumes gitStatus() yields text whose first line
  // names a changed file — confirm against commit.js.
  const statusFirstLine = String(status).split('\n')[0] ?? '';
  const firstFilePath = statusFirstLine.trim().split(/\s+/).pop() || 'files';
  // FIX: check the size cap BEFORE calling the model — the original only
  // checked after the request, so the "skip" never saved a request.
  if (gitDiffContent.length > maxDiffSize) {
    console.log('Diff content is too large. Skipping OpenAI request.');
    return `✨ tweak (${firstFilePath}): update ${firstFilePath}`;
  }
  const response = await getModelResponse(model, flags, gitDiffContent);
  // getModelResponse returns undefined on failure; the original destructured
  // it unconditionally and crashed with a TypeError.
  if (!response) {
    return false;
  }
  const {category, message} = response;
  return `${category} (${firstFilePath}): ${message}`;
}
/**
 * Dispatch the diff to the selected model and extract the commit fields.
 *
 * @param {string} model - Model identifier; only 'gpt-4o-mini' and
 *   'llama3.1:8b' are supported, anything else is rejected.
 * @param {object} flags - CLI flags forwarded to the model wrapper.
 * @param {string} gitDiffContent - Raw `git diff` output to summarize.
 * @returns {Promise<{category: string, message: string}|undefined>}
 *   The model's `{category, message}` pair, or `undefined` when the model is
 *   unsupported, the call fails, or the response is missing either field.
 */
async function getModelResponse(model, flags, gitDiffContent) {
  try {
    let response;
    switch (model) {
      case 'gpt-4o-mini':
        response = await openAiModel(model, flags, gitDiffContent);
        break;
      case 'llama3.1:8b':
        response = await ollamaModel(model, flags, gitDiffContent);
        break;
      default:
        throw new Error('Unsupported model selected');
    }
    console.log('response', response);
    if (response?.category && response?.message) {
      const {category, message} = response;
      return {category, message};
    }
    // FIX: the original threw `new Error(response.message)`, which itself
    // threw a TypeError when `response` was null/undefined.
    throw new Error(response?.message ?? 'Model returned an incomplete response');
  } catch (error) {
    // Deliberate best-effort: log and fall through to `undefined` so the
    // caller can decide whether to abort (matches original contract).
    console.error(error.message);
  }
}
export default generateCommitMessage;