Skip to content

Commit

Permalink
chore: remove @lmstudio/sdk
Browse files Browse the repository at this point in the history
Because the SDK setup caused too many issues and bugs, it is better to use the OpenAI endpoint instead.
  • Loading branch information
mrdjohnson committed Feb 4, 2025
1 parent 0d8e954 commit 5a8143f
Show file tree
Hide file tree
Showing 14 changed files with 25 additions and 326 deletions.
1 change: 0 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,6 @@
"@langchain/core": "^0.3.20",
"@langchain/ollama": "^0.1.2",
"@langchain/openai": "^0.3.14",
"@lmstudio/sdk": "^0.4.7",
"@paralleldrive/cuid2": "^2.2.2",
"axios": "^1.6.8",
"camelcase-keys": "^9.1.3",
Expand Down
10 changes: 0 additions & 10 deletions src/core/LanguageModel.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,8 @@ import {
A1111LanguageModel,
ConnectionTypes,
IA1111Model,
ILmsModel,
IOllamaModel,
IOpenAiModel,
LmsLanguageModel,
OllamaLanguageModel,
OpenAiLanguageModel,
GeminiLanguageModel,
Expand Down Expand Up @@ -36,14 +34,6 @@ class LanguageModel {
}
}

static fromILmsModel(model: ILmsModel, connectionId: string): LmsLanguageModel {
return LanguageModel.toSharedLanguageModel(connectionId, model, {
type: 'LMS',
label: model.name,
modelName: model.path,
})
}

static fromIA1111Model(model: IA1111Model, connectionId: string): A1111LanguageModel {
return LanguageModel.toSharedLanguageModel(connectionId, model, {
type: 'A1111',
Expand Down
129 changes: 0 additions & 129 deletions src/core/connection/api/LmsApi.ts

This file was deleted.

4 changes: 2 additions & 2 deletions src/core/connection/api/getApiByType.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import BaseApi from '~/core/connection/api/BaseApi'

const _apiByConnectionViewModelType: Record<ConnectionTypes, Promise<{ baseApi: BaseApi }>> = {
A1111: import('~/core/connection/api/A1111Api'),
LMS: import('~/core/connection/api/LmsApi'),
LMS: import('~/core/connection/api/OpenAiApi'),
Ollama: import('~/core/connection/api/OllamaApi'),
OpenAi: import('~/core/connection/api/OpenAiApi'),
Gemini: import('~/core/connection/api/GeminiApi'),
Expand All @@ -14,4 +14,4 @@ export const getApiByType = async (type: ConnectionTypes) => {
}

//todo: regenerating a message that failed immediately (without creating a new variation) does not wipe out the error message
// also, it would be nice to manually remove the error message maybe
// also, it would be nice to manually remove the error message maybe
5 changes: 0 additions & 5 deletions src/core/connection/types.ts
Original file line number Diff line number Diff line change
@@ -1,12 +1,9 @@
import { toOllamaModel } from '~/core/transformers/toOllamaModel'
import { toLmsModel } from '~/core/transformers/toLmsModel'
import { LanguageModelType } from '~/core/LanguageModel'
import { ConnectionModel } from '~/core/connection/ConnectionModel'

export type IGeminiModel = { name: string }

export type ILmsModel = ReturnType<typeof toLmsModel>

export type IA1111Model = {
title: string
modelName: string
Expand All @@ -22,14 +19,12 @@ export type IOpenAiModel = {
ownedBy: string
}

export type LmsLanguageModel = LanguageModelType<ILmsModel>
export type A1111LanguageModel = LanguageModelType<IA1111Model>
export type OllamaLanguageModel = LanguageModelType<IOllamaModel>
export type OpenAiLanguageModel = LanguageModelType<IOpenAiModel>
export type GeminiLanguageModel = LanguageModelType<IGeminiModel>

export type LanguageModelTypes =
| LmsLanguageModel
| A1111LanguageModel
| OllamaLanguageModel
| OpenAiLanguageModel
Expand Down
59 changes: 15 additions & 44 deletions src/core/connection/viewModels/LmsConnectionViewModel.ts
Original file line number Diff line number Diff line change
@@ -1,40 +1,37 @@
import _ from 'lodash'

import { type SortType as SelectionPanelSortType } from '~/components/SelectionTablePanel'
import { toLmsModel } from '~/core/transformers/toLmsModel'
import LanguageModel from '~/core/LanguageModel'

import { ILmsModel, LmsLanguageModel } from '~/core/connection/types'
import { BaseConnectionViewModel } from '~/core/connection/viewModels/BaseConnectionViewModel'
import { ConnectionModel } from '~/core/connection/ConnectionModel'
import { connectionTable } from '~/core/connection/ConnectionTable'
import OpenAiConnectionViewModel from '~/core/connection/viewModels/OpenAiConnectionViewModel'

const DefaultHost = 'ws://127.0.0.1:1234'
const DefaultHost = 'http://127.0.0.1:1234/v1'

class LmsConnectionViewModel extends BaseConnectionViewModel<ILmsModel> {
class LmsConnectionViewModel extends OpenAiConnectionViewModel {
DefaultHost: string = DefaultHost

modelTableHeaders: Array<SelectionPanelSortType<LmsLanguageModel>> = [
{ label: 'Name', value: 'name' },
{ label: 'Size', value: 'sizeBytes' },
{ label: 'Type', value: 'architecture' },
{ label: 'Folder', value: 'folder' },
]

primaryHeader = this.modelTableHeaders[0].value

type = 'LMS' as const

readonly hostLabel = 'LM Studio Host:'
readonly enabledLabel = 'Text generation through LM Studio:'

constructor(
public source: ConnectionModel,
{ autoFetch = true } = {},
) {
if (source.host?.startsWith('ws')) {
source.host = source.host.replace('ws', 'http') + '/v1'
}

super(source, { autoFetch })
}

static toViewModel(connection: ConnectionModel, { autoFetch = true } = {}) {
return new this(connection, { autoFetch })
}

static readonly getSnapshot = (): ConnectionModel =>
connectionTable.parse({
label: 'LM Studio',
label: 'LM Studio (open ai)',
type: 'LMS',

host: DefaultHost,
Expand All @@ -50,32 +47,6 @@ class LmsConnectionViewModel extends BaseConnectionViewModel<ILmsModel> {
},
],
})

validateHost(host?: string) {
if (!host) return true

if (!host.startsWith('ws')) return 'Host needs to start with ws:// or wss://'

return true
}

async _fetchLmModels(host: string): Promise<LmsLanguageModel[]> {
const sdk = await import('@lmstudio/sdk')
const { LMStudioClient } = sdk

const client = new LMStudioClient({ baseUrl: host })

// if this fails it breaks downstream, try catches do not work yet
const response = await client.system.listDownloadedModels()

const lmsModels: ILmsModel[] = _.filter(response, { type: 'llm' }).map(toLmsModel)

const models: LmsLanguageModel[] = lmsModels.map(lmsModel =>
LanguageModel.fromILmsModel(lmsModel, this.id),
)

return models
}
}

export default LmsConnectionViewModel
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ class OpenAiConnectionViewModel extends BaseConnectionViewModel<IOpenAiModel> {

primaryHeader = this.modelTableHeaders[0].value

type = 'OpenAi' as const
type: 'OpenAi' | 'LMS' = 'OpenAi'

readonly hostLabel: string = 'Open AI Host:'
readonly enabledLabel: string = 'Text generation through LM Studio:'
Expand Down
4 changes: 1 addition & 3 deletions src/core/connection/viewModels/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,6 @@ export const connectionViewModelByType = {
Ollama: () => OllamaConnectionViewModel,
OpenAi: () => OpenAiConnectionViewModel,
Gemini: () => GeminiConnectionViewModel,

// if we try to connect to LMS and fail, the try catch does not catch the error
LMS: () => LmsConnectionViewModel,
}

Expand All @@ -26,5 +24,5 @@ export const connectionModelLabelByType = {
Ollama: 'Ollama',
OpenAi: 'Open AI',
Gemini: 'Gemini nano',
LMS: 'LM Studio',
LMS: 'LM Studio (open ai)',
}
26 changes: 0 additions & 26 deletions src/core/transformers/toLmsModel.ts

This file was deleted.

6 changes: 1 addition & 5 deletions src/core/types.ts
Original file line number Diff line number Diff line change
@@ -1,10 +1,7 @@
import { toOllamaModel } from '~/core/transformers/toOllamaModel'
import { toLmsModel } from '~/core/transformers/toLmsModel'
import { LanguageModelType } from '~/core/LanguageModel'
import { ConnectionModel } from '~/core/connection/ConnectionModel'

export type ILmsModel = ReturnType<typeof toLmsModel>

export type IA1111Model = {
title: string
modelName: string
Expand All @@ -20,9 +17,8 @@ export type IOpenAiModel = {
ownedBy: string
}

export type SharedLanguageModelTypes = ILmsModel | IA1111Model | IOllamaModel
export type SharedLanguageModelTypes = IA1111Model | IOllamaModel

export type LmsLanguageModel = LanguageModelType<ILmsModel>
export type A1111LanguageModel = LanguageModelType<IA1111Model>
export type OllamaLanguageModel = LanguageModelType<IOllamaModel>
export type OpenAiLanguageModel = LanguageModelType<IOpenAiModel>
Expand Down
4 changes: 4 additions & 0 deletions src/features/settings/panels/help/LmsHelpMarkdown.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,4 +9,8 @@ LM Studio makes working with models easy! Use this and get going:
\`${LMS_CODE}\`
Find models to download at their [Hugging Face](https://huggingface.co/lmstudio-community?sort_models=likes#models)
--
Please note: The LM Studio SDK is no longer used in this project; the URL will now be redirected to the OpenAI-compatible endpoint. If this causes problems, please contact the developer via a [GitHub issue](https://github.com/mrdjohnson/llm-x/issues).
`
Loading

0 comments on commit 5a8143f

Please sign in to comment.