bolt.diy / app/lib/modules/llm/base-provider.ts
import type { LanguageModelV1 } from 'ai';
import type { ProviderInfo, ProviderConfig, ModelInfo } from './types';
import type { IProviderSetting } from '~/types/model';
import { createOpenAI } from '@ai-sdk/openai';
import { LLMManager } from './manager';
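
/**
 * Shared base class for LLM providers. It resolves a provider's base URL and
 * API key from user-supplied API keys and settings, the server environment,
 * process.env and the LLMManager environment, and caches dynamically
 * discovered model lists per configuration.
 */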
export abstract class BaseProvider implements ProviderInfo {
  abstract name: string;
  abstract staticModels: ModelInfo[];
  abstract config: ProviderConfig;

  cachedDynamicModels?: {
    cacheId: string;
    models: ModelInfo[];
  };

  getApiKeyLink?: string;
  labelForGetApiKey?: string;
  icon?: string;
  getProviderBaseUrlAndKey(options: {
    apiKeys?: Record<string, string>;
    providerSettings?: IProviderSetting;
    serverEnv?: Record<string, string>;
    defaultBaseUrlKey: string;
    defaultApiTokenKey: string;
  }) {
    const { apiKeys, providerSettings, serverEnv, defaultBaseUrlKey, defaultApiTokenKey } = options;
    let settingsBaseUrl = providerSettings?.baseUrl;
    const manager = LLMManager.getInstance();

    if (settingsBaseUrl && settingsBaseUrl.length === 0) {
      settingsBaseUrl = undefined;
    }

    const baseUrlKey = this.config.baseUrlKey || defaultBaseUrlKey;
    let baseUrl =
      settingsBaseUrl ||
      serverEnv?.[baseUrlKey] ||
      process?.env?.[baseUrlKey] ||
      manager.env?.[baseUrlKey] ||
      this.config.baseUrl;

    if (baseUrl && baseUrl.endsWith('/')) {
      baseUrl = baseUrl.slice(0, -1);
    }

    const apiTokenKey = this.config.apiTokenKey || defaultApiTokenKey;
    const apiKey =
      apiKeys?.[this.name] || serverEnv?.[apiTokenKey] || process?.env?.[apiTokenKey] || manager.env?.[apiTokenKey];

    return {
      baseUrl,
      apiKey,
    };
  }
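
  /**
   * Returns cached dynamic models if the cache key derived from the current
   * options still matches; otherwise drops the stale cache and returns null.
   */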
  getModelsFromCache(options: {
    apiKeys?: Record<string, string>;
    providerSettings?: Record<string, IProviderSetting>;
    serverEnv?: Record<string, string>;
  }): ModelInfo[] | null {
    if (!this.cachedDynamicModels) {
      // console.log('no dynamic models', this.name);
      return null;
    }

    const cacheKey = this.cachedDynamicModels.cacheId;
    const generatedCacheKey = this.getDynamicModelsCacheKey(options);

    if (cacheKey !== generatedCacheKey) {
      // console.log('cache key mismatch', this.name, cacheKey, generatedCacheKey);
      this.cachedDynamicModels = undefined;
      return null;
    }

    return this.cachedDynamicModels.models;
  }
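
  /**
   * Builds the cache key from this provider's API key, its settings and the
   * server env, so any change to that configuration invalidates the cache.
   */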
  getDynamicModelsCacheKey(options: {
    apiKeys?: Record<string, string>;
    providerSettings?: Record<string, IProviderSetting>;
    serverEnv?: Record<string, string>;
  }) {
    return JSON.stringify({
      apiKeys: options.apiKeys?.[this.name],
      providerSettings: options.providerSettings?.[this.name],
      serverEnv: options.serverEnv,
    });
  }
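
  /**
   * Caches a freshly fetched dynamic model list under the cache key for the
   * given options.
   */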
  storeDynamicModels(
    options: {
      apiKeys?: Record<string, string>;
      providerSettings?: Record<string, IProviderSetting>;
      serverEnv?: Record<string, string>;
    },
    models: ModelInfo[],
  ) {
    const cacheId = this.getDynamicModelsCacheKey(options);

    // console.log('caching dynamic models', this.name, cacheId);
    this.cachedDynamicModels = {
      cacheId,
      models,
    };
  }

  // Declare the optional getDynamicModels method
  getDynamicModels?(
    apiKeys?: Record<string, string>,
    settings?: IProviderSetting,
    serverEnv?: Record<string, string>,
  ): Promise<ModelInfo[]>;
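
  // Each concrete provider constructs its LanguageModelV1 instance here.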
  abstract getModelInstance(options: {
    model: string;
    serverEnv: Env;
    apiKeys?: Record<string, string>;
    providerSettings?: Record<string, IProviderSetting>;
  }): LanguageModelV1;
}

type OptionalApiKey = string | undefined;
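
// Wraps @ai-sdk/openai's createOpenAI with a custom baseURL so
// OpenAI-compatible providers can obtain a model instance for a given model id.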
export function getOpenAILikeModel(baseURL: string, apiKey: OptionalApiKey, model: string) {
  const openai = createOpenAI({
    baseURL,
    apiKey,
  });

  return openai(model);
}
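
// ---------------------------------------------------------------------------
// Usage sketch: a minimal OpenAI-compatible provider built on BaseProvider and
// getOpenAILikeModel. The provider name and the env key names below are
// illustrative placeholders, not values defined elsewhere in bolt.diy.
// ---------------------------------------------------------------------------
export class ExampleOpenAILikeProvider extends BaseProvider {
  name = 'ExampleOpenAILike'; // hypothetical provider name
  staticModels: ModelInfo[] = [];

  config = {
    baseUrlKey: 'EXAMPLE_API_BASE_URL', // hypothetical env var for the base URL
    apiTokenKey: 'EXAMPLE_API_KEY', // hypothetical env var for the API key
  } as ProviderConfig;

  getModelInstance(options: {
    model: string;
    serverEnv: Env;
    apiKeys?: Record<string, string>;
    providerSettings?: Record<string, IProviderSetting>;
  }): LanguageModelV1 {
    const { model, serverEnv, apiKeys, providerSettings } = options;

    // Resolve configuration through the shared fallback chain defined above.
    const { baseUrl, apiKey } = this.getProviderBaseUrlAndKey({
      apiKeys,
      providerSettings: providerSettings?.[this.name],
      serverEnv: serverEnv as any,
      defaultBaseUrlKey: 'EXAMPLE_API_BASE_URL',
      defaultApiTokenKey: 'EXAMPLE_API_KEY',
    });

    if (!baseUrl || !apiKey) {
      throw new Error(`Missing base URL or API key for provider: ${this.name}`);
    }

    return getOpenAILikeModel(baseUrl, apiKey, model);
  }
}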