added ollama vision backup

Travis James 2024-10-07 23:19:47 -05:00
parent 5c0c952a27
commit 0834fd3107
4 changed files with 40 additions and 2 deletions

View File

@@ -78,3 +78,7 @@ RESEND_API_KEY=
# TRACE - For logging more detailed information than the DEBUG level.
# Set LOGGING_LEVEL to one of the above options to control logging output.
LOGGING_LEVEL=INFO
# OLLAMA_URL and OLLAMA_MODEL configure the Ollama vision fallback
OLLAMA_URL=http://localhost:11434
OLLAMA_MODEL=llava
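
A quick way to confirm these values point at a reachable Ollama server with the vision model pulled is a small startup check. The sketch below is only illustrative: checkOllama is a hypothetical helper name, and it assumes the same OLLAMA_URL/OLLAMA_MODEL defaults used elsewhere in this commit; ollama.list() is the client call that reports locally available models.

import { Ollama } from "ollama";

const OLLAMA_URL = process.env.OLLAMA_URL || "http://localhost:11434";
const OLLAMA_MODEL = process.env.OLLAMA_MODEL || "llava";

// Hypothetical helper: verify the Ollama server answers and the configured model is pulled.
async function checkOllama(): Promise<void> {
  const ollama = new Ollama({ host: OLLAMA_URL });
  const { models } = await ollama.list(); // lists models pulled on the server
  if (!models.some((m) => m.name.startsWith(OLLAMA_MODEL))) {
    throw new Error(`Model "${OLLAMA_MODEL}" not found on ${OLLAMA_URL}; run "ollama pull ${OLLAMA_MODEL}"`);
  }
}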

View File

@@ -96,6 +96,7 @@
"moment": "^2.29.4",
"mongoose": "^8.4.4",
"natural": "^7.0.7",
"ollama": "^0.5.9",
"openai": "^4.57.0",
"pdf-parse": "^1.1.1",
"pos": "^0.4.2",

View File

@@ -152,6 +152,9 @@ importers:
natural:
specifier: ^7.0.7
version: 7.0.7(socks@2.8.3)
ollama:
specifier: ^0.5.9
version: 0.5.9
openai:
specifier: ^4.57.0
version: 4.57.0(zod@3.23.8)
@@ -3729,6 +3732,9 @@ packages:
object-inspect@1.13.1:
resolution: {integrity: sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==}
ollama@0.5.9:
resolution: {integrity: sha512-F/KZuDRC+ZsVCuMvcOYuQ6zj42/idzCkkuknGyyGVmNStMZ/sU3jQpvhnl4SyC0+zBzLiKNZJnJeuPFuieWZvQ==}
on-finished@2.4.1:
resolution: {integrity: sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==}
engines: {node: '>= 0.8'}
@@ -8929,6 +8935,10 @@ snapshots:
object-inspect@1.13.1: {}
ollama@0.5.9:
dependencies:
whatwg-fetch: 3.6.20
on-finished@2.4.1:
dependencies:
ee-first: 1.1.1

View File

@@ -1,6 +1,10 @@
import Anthropic from '@anthropic-ai/sdk';
import axios from 'axios';
import { Logger } from '../../../lib/logger';
import { Ollama } from 'ollama';
const OLLAMA_URL = process.env.OLLAMA_URL || "http://localhost:11434";
const OLLAMA_MODEL = process.env.OLLAMA_MODEL || "llava";
export async function getImageDescription(
imageUrl: string,
@@ -53,9 +57,28 @@ export async function getImageDescription(
}
default: {
if (!process.env.OPENAI_API_KEY) {
throw new Error("No OpenAI API key provided");
if (!process.env.OLLAMA_URL) {
throw new Error("No OpenAI API key or OLLAMA_URL provided");
} else {
const ollama = new Ollama({
host: OLLAMA_URL,
});
const response = await ollama.chat({
model: OLLAMA_MODEL,
messages: [
{
role: "user",
content: prompt,
},
{
role: "user",
content: imageUrl,
},
],
});
return response.message.content;
}
}
const { OpenAI } = require("openai");
const openai = new OpenAI();
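
One caveat with the fallback above: the image URL is sent as plain text in a second user message, so the vision model never receives the actual image bytes. Ollama's chat API expects base64-encoded image data in a message's images field. The following is a minimal sketch of that approach, not the committed implementation; describeWithOllama is a hypothetical name, and it reuses axios (already imported in this file) plus Node's Buffer to fetch and encode the image.

import axios from "axios";
import { Ollama } from "ollama";

// Hypothetical sketch: download the image and pass its bytes to the vision model.
async function describeWithOllama(imageUrl: string, prompt: string): Promise<string> {
  const ollama = new Ollama({ host: process.env.OLLAMA_URL || "http://localhost:11434" });

  // Ollama expects base64-encoded image data (not a URL) in the images array.
  const imageResponse = await axios.get(imageUrl, { responseType: "arraybuffer" });
  const imageBase64 = Buffer.from(imageResponse.data).toString("base64");

  const response = await ollama.chat({
    model: process.env.OLLAMA_MODEL || "llava",
    messages: [
      {
        role: "user",
        content: prompt,
        images: [imageBase64], // the ollama client accepts base64 strings here
      },
    ],
  });
  return response.message.content;
}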