Skip to content

Commit 2191057

Browse files
committed
fix: ollama custom client
1 parent d22eff4 commit 2191057

File tree

8 files changed: +19 −27 lines changed

8 files changed: +19 −27 lines changed

manifest.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
{
22
"id": "bmo-chatbot",
33
"name": "BMO Chatbot",
4-
"version": "2.1.0",
4+
"version": "2.1.1",
55
"minAppVersion": "1.0.0",
66
"description": "Generate and brainstorm ideas while creating your notes using Large Language Models (LLMs) from Ollama, LM Studio, Anthropic, OpenAI, Mistral AI, and more for Obsidian.",
77
"author": "Longy2k",

package-lock.json

Lines changed: 2 additions & 2 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "bmo-chatbot",
3-
"version": "2.1.0",
3+
"version": "2.1.1",
44
"description": "Generate and brainstorm ideas while creating your notes using Large Language Models (LLMs) from Ollama, LM Studio, Anthropic, OpenAI, Mistral AI, and more for Obsidian.",
55
"main": "main.js",
66
"scripts": {

src/components/FetchModelEditor.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import { requestUrl } from 'obsidian';
22
import { BMOSettings } from 'src/main';
3-
import ollama from 'ollama';
3+
import { Ollama } from 'ollama';
44
import OpenAI from 'openai';
55

66
// Request response from Ollama
@@ -35,7 +35,7 @@ export async function fetchOllamaResponseEditor(settings: BMOSettings, selection
3535
}
3636

3737
try {
38-
38+
const ollama = new Ollama({host: ollamaRESTAPIURL});
3939
const response = await ollama.generate({
4040
model: settings.general.model,
4141
system: settings.editor.prompt_select_generate_system_role,

src/components/FetchModelList.ts

Lines changed: 2 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,26 +1,15 @@
11
import { requestUrl } from 'obsidian';
2-
import ollama from 'ollama'
2+
import { Ollama } from 'ollama'
33
import OpenAI from 'openai';
44
import BMOGPT from 'src/main';
55
import { OPENAI_MODELS } from 'src/view';
66

77
export async function fetchOllamaModels(plugin: BMOGPT) {
88
const ollamaRESTAPIURL = plugin.settings.OllamaConnection.RESTAPIURL;
99

10-
// Check if the URL is functional
11-
try {
12-
const response = await fetch(ollamaRESTAPIURL);
13-
if (!response.ok) {
14-
console.error('OLLAMA URL is not responding:', ollamaRESTAPIURL);
15-
return;
16-
}
17-
} catch (error) {
18-
console.error('Error reaching OLLAMA URL:', error);
19-
return;
20-
}
21-
2210
// Log the list of models using ollama.list()
2311
try {
12+
const ollama = new Ollama({host: ollamaRESTAPIURL});
2413
const modelsList = await ollama.list();
2514
// console.log('Ollama Models:', modelsList);
2615

src/components/FetchModelResponse.ts

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ import { ChatCompletionMessageParam } from 'openai/resources/chat';
55
import { addMessage, addParagraphBreaks, updateUnresolvedInternalLinks } from './chat/Message';
66
import { displayErrorBotMessage, displayLoadingBotMessage } from './chat/BotMessage';
77
import { getActiveFileContent, getCurrentNoteContent } from './editor/ReferenceCurrentNote';
8-
import ollama from 'ollama';
8+
import { Ollama } from 'ollama';
99
import OpenAI from 'openai';
1010
import { getPrompt } from './chat/Prompt';
1111

@@ -37,6 +37,7 @@ export async function fetchOllamaResponse(plugin: BMOGPT, settings: BMOSettings,
3737
const referenceCurrentNoteContent = getCurrentNoteContent();
3838

3939
try {
40+
const ollama = new Ollama({ host: ollamaRESTAPIURL});
4041
const response = await ollama.chat({
4142
model: settings.general.model,
4243
messages: [
@@ -145,6 +146,7 @@ export async function fetchOllamaResponseStream(plugin: BMOGPT, settings: BMOSet
145146
});
146147

147148
try {
149+
const ollama = new Ollama({ host: ollamaRESTAPIURL});
148150
const response = await ollama.chat({
149151
model: settings.general.model,
150152
messages: [

src/components/chat/Commands.ts

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -110,7 +110,7 @@ export function commandHelp(plugin: BMOGPT, settings: BMOSettings) {
110110

111111
const generalCommandHeader = document.createElement('h4');
112112
generalCommandHeader.textContent = 'General Commands';
113-
generalCommandHeader.style.textAlign = 'center';
113+
generalCommandHeader.style.textAlign = 'left';
114114
displayCommandBotMessageDiv.appendChild(generalCommandHeader);
115115

116116
const commandClearP = document.createElement('p');
@@ -135,7 +135,7 @@ export function commandHelp(plugin: BMOGPT, settings: BMOSettings) {
135135

136136
const profileCommandHeader = document.createElement('h4');
137137
profileCommandHeader.textContent = 'Profile Commands';
138-
profileCommandHeader.style.textAlign = 'center';
138+
profileCommandHeader.style.textAlign = 'left';
139139
displayCommandBotMessageDiv.appendChild(profileCommandHeader);
140140

141141
const commandProfileListP = document.createElement('p');
@@ -148,7 +148,7 @@ export function commandHelp(plugin: BMOGPT, settings: BMOSettings) {
148148

149149
const modelCommandHeader = document.createElement('h4');
150150
modelCommandHeader.textContent = 'Model Commands';
151-
modelCommandHeader.style.textAlign = 'center';
151+
modelCommandHeader.style.textAlign = 'left';
152152
displayCommandBotMessageDiv.appendChild(modelCommandHeader);
153153

154154
const commandModelListP = document.createElement('p');
@@ -161,7 +161,7 @@ export function commandHelp(plugin: BMOGPT, settings: BMOSettings) {
161161

162162
const promptCommandHeader = document.createElement('h4');
163163
promptCommandHeader.textContent = 'Prompt Commands';
164-
promptCommandHeader.style.textAlign = 'center';
164+
promptCommandHeader.style.textAlign = 'left';
165165
displayCommandBotMessageDiv.appendChild(promptCommandHeader);
166166

167167
const commandPromptListP = document.createElement('p');
@@ -178,7 +178,7 @@ export function commandHelp(plugin: BMOGPT, settings: BMOSettings) {
178178

179179
const editorCommandHeader = document.createElement('h4');
180180
editorCommandHeader.textContent = 'Editor Commands';
181-
editorCommandHeader.style.textAlign = 'center';
181+
editorCommandHeader.style.textAlign = 'left';
182182
displayCommandBotMessageDiv.appendChild(editorCommandHeader);
183183

184184
const commandAppendP = document.createElement('p');
@@ -191,7 +191,7 @@ export function commandHelp(plugin: BMOGPT, settings: BMOSettings) {
191191

192192
const streamCommandHeader = document.createElement('h4');
193193
streamCommandHeader.textContent = 'Stream Commands';
194-
streamCommandHeader.style.textAlign = 'center';
194+
streamCommandHeader.style.textAlign = 'left';
195195
displayCommandBotMessageDiv.appendChild(streamCommandHeader);
196196

197197
const commandStopP = document.createElement('p');

src/components/editor/FetchRenameNoteTitle.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
import { Notice, requestUrl } from 'obsidian';
2-
import ollama from 'ollama';
2+
import { Ollama } from 'ollama';
33
import OpenAI from 'openai';
44
import { BMOSettings } from 'src/main';
55
import { ANTHROPIC_MODELS, OPENAI_MODELS } from 'src/view';
@@ -14,6 +14,7 @@ export async function fetchModelRenameTitle(settings: BMOSettings, referenceCurr
1414
try {
1515
if (settings.OllamaConnection.RESTAPIURL && settings.OllamaConnection.ollamaModels.includes(settings.general.model)) {
1616
try {
17+
const ollama = new Ollama({ host: settings.OllamaConnection.RESTAPIURL});
1718
const response = await ollama.generate({
1819
model: settings.general.model,
1920
system: prompt,

0 commit comments

Comments (0)