chore: remove icon_large from models (#30466)

Co-authored-by: zhsama <torvalds@linux.do>
非法操作 2026-01-03 01:35:17 +08:00 committed by GitHub
parent 8f2aabf7bd
commit c1bb310183
13 changed files with 3 additions and 59 deletions

View File

@@ -30,7 +30,6 @@ class SimpleModelProviderEntity(BaseModel):
label: I18nObject
icon_small: I18nObject | None = None
icon_small_dark: I18nObject | None = None
icon_large: I18nObject | None = None
supported_model_types: list[ModelType]
def __init__(self, provider_entity: ProviderEntity):
@@ -44,7 +43,6 @@ class SimpleModelProviderEntity(BaseModel):
label=provider_entity.label,
icon_small=provider_entity.icon_small,
icon_small_dark=provider_entity.icon_small_dark,
icon_large=provider_entity.icon_large,
supported_model_types=provider_entity.supported_model_types,
)
@@ -94,7 +92,6 @@ class DefaultModelProviderEntity(BaseModel):
provider: str
label: I18nObject
icon_small: I18nObject | None = None
icon_large: I18nObject | None = None
supported_model_types: Sequence[ModelType] = []

View File

@@ -100,7 +100,6 @@ class SimpleProviderEntity(BaseModel):
label: I18nObject
icon_small: I18nObject | None = None
icon_small_dark: I18nObject | None = None
icon_large: I18nObject | None = None
supported_model_types: Sequence[ModelType]
models: list[AIModelEntity] = []
@@ -123,7 +122,6 @@ class ProviderEntity(BaseModel):
label: I18nObject
description: I18nObject | None = None
icon_small: I18nObject | None = None
icon_large: I18nObject | None = None
icon_small_dark: I18nObject | None = None
background: str | None = None
help: ProviderHelpEntity | None = None
@@ -157,7 +155,6 @@ class ProviderEntity(BaseModel):
provider=self.provider,
label=self.label,
icon_small=self.icon_small,
icon_large=self.icon_large,
supported_model_types=self.supported_model_types,
models=self.models,
)

View File

@@ -285,7 +285,7 @@ class ModelProviderFactory:
"""
Get provider icon
:param provider: provider name
:param icon_type: icon type (icon_small or icon_large)
:param icon_type: icon type (icon_small or icon_small_dark)
:param lang: language (zh_Hans or en_US)
:return: provider icon
"""
@@ -309,13 +309,7 @@ class ModelProviderFactory:
else:
file_name = provider_schema.icon_small_dark.en_US
else:
if not provider_schema.icon_large:
raise ValueError(f"Provider {provider} does not have large icon.")
if lang.lower() == "zh_hans":
file_name = provider_schema.icon_large.zh_Hans
else:
file_name = provider_schema.icon_large.en_US
raise ValueError(f"Unsupported icon type: {icon_type}.")
if not file_name:
raise ValueError(f"Provider {provider} does not have icon.")

View File

@@ -331,7 +331,6 @@ class ProviderManager:
provider=provider_schema.provider,
label=provider_schema.label,
icon_small=provider_schema.icon_small,
icon_large=provider_schema.icon_large,
supported_model_types=provider_schema.supported_model_types,
),
)

View File

@@ -70,7 +70,6 @@ class ProviderResponse(BaseModel):
description: I18nObject | None = None
icon_small: I18nObject | None = None
icon_small_dark: I18nObject | None = None
icon_large: I18nObject | None = None
background: str | None = None
help: ProviderHelpEntity | None = None
supported_model_types: Sequence[ModelType]
@@ -98,11 +97,6 @@ class ProviderResponse(BaseModel):
en_US=f"{url_prefix}/icon_small_dark/en_US",
zh_Hans=f"{url_prefix}/icon_small_dark/zh_Hans",
)
if self.icon_large is not None:
self.icon_large = I18nObject(
en_US=f"{url_prefix}/icon_large/en_US", zh_Hans=f"{url_prefix}/icon_large/zh_Hans"
)
return self
@@ -116,7 +110,6 @@ class ProviderWithModelsResponse(BaseModel):
label: I18nObject
icon_small: I18nObject | None = None
icon_small_dark: I18nObject | None = None
icon_large: I18nObject | None = None
status: CustomConfigurationStatus
models: list[ProviderModelWithStatusEntity]
@@ -134,11 +127,6 @@ class ProviderWithModelsResponse(BaseModel):
self.icon_small_dark = I18nObject(
en_US=f"{url_prefix}/icon_small_dark/en_US", zh_Hans=f"{url_prefix}/icon_small_dark/zh_Hans"
)
if self.icon_large is not None:
self.icon_large = I18nObject(
en_US=f"{url_prefix}/icon_large/en_US", zh_Hans=f"{url_prefix}/icon_large/zh_Hans"
)
return self
@@ -163,11 +151,6 @@ class SimpleProviderEntityResponse(SimpleProviderEntity):
self.icon_small_dark = I18nObject(
en_US=f"{url_prefix}/icon_small_dark/en_US", zh_Hans=f"{url_prefix}/icon_small_dark/zh_Hans"
)
if self.icon_large is not None:
self.icon_large = I18nObject(
en_US=f"{url_prefix}/icon_large/en_US", zh_Hans=f"{url_prefix}/icon_large/zh_Hans"
)
return self

View File

@@ -99,7 +99,6 @@ class ModelProviderService:
description=provider_configuration.provider.description,
icon_small=provider_configuration.provider.icon_small,
icon_small_dark=provider_configuration.provider.icon_small_dark,
icon_large=provider_configuration.provider.icon_large,
background=provider_configuration.provider.background,
help=provider_configuration.provider.help,
supported_model_types=provider_configuration.provider.supported_model_types,
@@ -423,7 +422,6 @@ class ModelProviderService:
label=first_model.provider.label,
icon_small=first_model.provider.icon_small,
icon_small_dark=first_model.provider.icon_small_dark,
icon_large=first_model.provider.icon_large,
status=CustomConfigurationStatus.ACTIVE,
models=[
ProviderModelWithStatusEntity(
@@ -488,7 +486,6 @@ class ModelProviderService:
provider=result.provider.provider,
label=result.provider.label,
icon_small=result.provider.icon_small,
icon_large=result.provider.icon_large,
supported_model_types=result.provider.supported_model_types,
),
)
@@ -522,7 +519,7 @@ class ModelProviderService:
:param tenant_id: workspace id
:param provider: provider name
:param icon_type: icon type (icon_small or icon_large)
:param icon_type: icon type (icon_small or icon_small_dark)
:param lang: language (zh_Hans or en_US)
:return:
"""

View File

@@ -48,10 +48,6 @@ class MockModelClass(PluginModelClient):
en_US="https://example.com/icon_small.png",
zh_Hans="https://example.com/icon_small.png",
),
icon_large=I18nObject(
en_US="https://example.com/icon_large.png",
zh_Hans="https://example.com/icon_large.png",
),
supported_model_types=[ModelType.LLM],
configurate_methods=[ConfigurateMethod.PREDEFINED_MODEL],
models=[

View File

@@ -228,7 +228,6 @@ class TestModelProviderService:
mock_provider_entity.description = {"en_US": "OpenAI provider", "zh_Hans": "OpenAI 提供商"}
mock_provider_entity.icon_small = {"en_US": "icon_small.png", "zh_Hans": "icon_small.png"}
mock_provider_entity.icon_small_dark = None
mock_provider_entity.icon_large = {"en_US": "icon_large.png", "zh_Hans": "icon_large.png"}
mock_provider_entity.background = "#FF6B6B"
mock_provider_entity.help = None
mock_provider_entity.supported_model_types = [ModelType.LLM, ModelType.TEXT_EMBEDDING]
@@ -302,7 +301,6 @@ class TestModelProviderService:
mock_provider_entity_llm.description = {"en_US": "OpenAI provider", "zh_Hans": "OpenAI 提供商"}
mock_provider_entity_llm.icon_small = {"en_US": "icon_small.png", "zh_Hans": "icon_small.png"}
mock_provider_entity_llm.icon_small_dark = None
mock_provider_entity_llm.icon_large = {"en_US": "icon_large.png", "zh_Hans": "icon_large.png"}
mock_provider_entity_llm.background = "#FF6B6B"
mock_provider_entity_llm.help = None
mock_provider_entity_llm.supported_model_types = [ModelType.LLM]
@@ -316,7 +314,6 @@ class TestModelProviderService:
mock_provider_entity_embedding.description = {"en_US": "Cohere provider", "zh_Hans": "Cohere 提供商"}
mock_provider_entity_embedding.icon_small = {"en_US": "icon_small.png", "zh_Hans": "icon_small.png"}
mock_provider_entity_embedding.icon_small_dark = None
mock_provider_entity_embedding.icon_large = {"en_US": "icon_large.png", "zh_Hans": "icon_large.png"}
mock_provider_entity_embedding.background = "#4ECDC4"
mock_provider_entity_embedding.help = None
mock_provider_entity_embedding.supported_model_types = [ModelType.TEXT_EMBEDDING]
@@ -419,7 +416,6 @@ class TestModelProviderService:
provider="openai",
label=I18nObject(en_US="OpenAI", zh_Hans="OpenAI"),
icon_small=I18nObject(en_US="icon_small.png", zh_Hans="icon_small.png"),
icon_large=I18nObject(en_US="icon_large.png", zh_Hans="icon_large.png"),
supported_model_types=[ModelType.LLM],
configurate_methods=[],
models=[],
@@ -431,7 +427,6 @@ class TestModelProviderService:
provider="openai",
label=I18nObject(en_US="OpenAI", zh_Hans="OpenAI"),
icon_small=I18nObject(en_US="icon_small.png", zh_Hans="icon_small.png"),
icon_large=I18nObject(en_US="icon_large.png", zh_Hans="icon_large.png"),
supported_model_types=[ModelType.LLM],
configurate_methods=[],
models=[],
@@ -655,7 +650,6 @@ class TestModelProviderService:
provider="openai",
label=I18nObject(en_US="OpenAI", zh_Hans="OpenAI"),
icon_small=I18nObject(en_US="icon_small.png", zh_Hans="icon_small.png"),
icon_large=I18nObject(en_US="icon_large.png", zh_Hans="icon_large.png"),
supported_model_types=[ModelType.LLM],
),
)
@@ -1027,7 +1021,6 @@ class TestModelProviderService:
label={"en_US": "OpenAI", "zh_Hans": "OpenAI"},
icon_small={"en_US": "icon_small.png", "zh_Hans": "icon_small.png"},
icon_small_dark=None,
icon_large={"en_US": "icon_large.png", "zh_Hans": "icon_large.png"},
),
model="gpt-3.5-turbo",
model_type=ModelType.LLM,
@@ -1045,7 +1038,6 @@ class TestModelProviderService:
label={"en_US": "OpenAI", "zh_Hans": "OpenAI"},
icon_small={"en_US": "icon_small.png", "zh_Hans": "icon_small.png"},
icon_small_dark=None,
icon_large={"en_US": "icon_large.png", "zh_Hans": "icon_large.png"},
),
model="gpt-4",
model_type=ModelType.LLM,

View File

@@ -32,7 +32,6 @@ def mock_provider_entity():
label=I18nObject(en_US="OpenAI", zh_Hans="OpenAI"),
description=I18nObject(en_US="OpenAI provider", zh_Hans="OpenAI 提供商"),
icon_small=I18nObject(en_US="icon.png", zh_Hans="icon.png"),
icon_large=I18nObject(en_US="icon.png", zh_Hans="icon.png"),
background="background.png",
help=None,
supported_model_types=[ModelType.LLM],

View File

@@ -27,7 +27,6 @@ def service_with_fake_configurations():
description=None,
icon_small=None,
icon_small_dark=None,
icon_large=None,
background=None,
help=None,
supported_model_types=[ModelType.LLM],

View File

@@ -93,7 +93,6 @@ function createMockProviderContext(overrides: Partial<ProviderContextState> = {}
provider: 'openai',
label: { en_US: 'OpenAI', zh_Hans: 'OpenAI' },
icon_small: { en_US: 'icon', zh_Hans: 'icon' },
icon_large: { en_US: 'icon', zh_Hans: 'icon' },
status: ModelStatusEnum.active,
models: [
{
@@ -711,7 +710,6 @@ describe('DebugWithSingleModel', () => {
provider: 'openai',
label: { en_US: 'OpenAI', zh_Hans: 'OpenAI' },
icon_small: { en_US: 'icon', zh_Hans: 'icon' },
icon_large: { en_US: 'icon', zh_Hans: 'icon' },
status: ModelStatusEnum.active,
models: [
{
@@ -742,7 +740,6 @@ describe('DebugWithSingleModel', () => {
provider: 'different-provider',
label: { en_US: 'Different Provider', zh_Hans: '不同提供商' },
icon_small: { en_US: 'icon', zh_Hans: 'icon' },
icon_large: { en_US: 'icon', zh_Hans: 'icon' },
status: ModelStatusEnum.active,
models: [],
},
@@ -925,7 +922,6 @@ describe('DebugWithSingleModel', () => {
provider: 'openai',
label: { en_US: 'OpenAI', zh_Hans: 'OpenAI' },
icon_small: { en_US: 'icon', zh_Hans: 'icon' },
icon_large: { en_US: 'icon', zh_Hans: 'icon' },
status: ModelStatusEnum.active,
models: [
{
@@ -975,7 +971,6 @@ describe('DebugWithSingleModel', () => {
provider: 'openai',
label: { en_US: 'OpenAI', zh_Hans: 'OpenAI' },
icon_small: { en_US: 'icon', zh_Hans: 'icon' },
icon_large: { en_US: 'icon', zh_Hans: 'icon' },
status: ModelStatusEnum.active,
models: [
{

View File

@@ -218,7 +218,6 @@ export type ModelProvider = {
}
icon_small: TypeWithI18N
icon_small_dark?: TypeWithI18N
icon_large: TypeWithI18N
background?: string
supported_model_types: ModelTypeEnum[]
configurate_methods: ConfigurationMethodEnum[]
@@ -254,7 +253,6 @@ export type ModelProvider = {
export type Model = {
provider: string
icon_large: TypeWithI18N
icon_small: TypeWithI18N
icon_small_dark?: TypeWithI18N
label: TypeWithI18N
@@ -267,7 +265,6 @@ export type DefaultModelResponse = {
model_type: ModelTypeEnum
provider: {
provider: string
icon_large: TypeWithI18N
icon_small: TypeWithI18N
}
}

View File

@ -219,7 +219,6 @@ const createModelItem = (overrides: Partial<ModelItem> = {}): ModelItem => ({
*/
const createModel = (overrides: Partial<Model> = {}): Model => ({
provider: 'openai',
icon_large: { en_US: 'icon-large.png', zh_Hans: 'icon-large.png' },
icon_small: { en_US: 'icon-small.png', zh_Hans: 'icon-small.png' },
label: { en_US: 'OpenAI', zh_Hans: 'OpenAI' },
models: [createModelItem()],