diff --git a/.github/workflows/build-api-image.yml b/.github/workflows/build-push.yml
similarity index 56%
rename from .github/workflows/build-api-image.yml
rename to .github/workflows/build-push.yml
index 0eb9e95b61..048f4cd942 100644
--- a/.github/workflows/build-api-image.yml
+++ b/.github/workflows/build-push.yml
@@ -1,17 +1,32 @@
-name: Build and Push API Image
+name: Build and Push API & Web
on:
push:
branches:
- - 'main'
- - 'deploy/dev'
+ - "main"
+ - "deploy/dev"
release:
- types: [ published ]
+ types: [published]
+
+env:
+ DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }}
+ DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
+ DIFY_WEB_IMAGE_NAME: ${{ vars.DIFY_WEB_IMAGE_NAME || 'langgenius/dify-web' }}
+ DIFY_API_IMAGE_NAME: ${{ vars.DIFY_API_IMAGE_NAME || 'langgenius/dify-api' }}
jobs:
build-and-push:
runs-on: ubuntu-latest
if: github.event.pull_request.draft == false
+ strategy:
+ matrix:
+ include:
+ - service_name: "web"
+ image_name_env: "DIFY_WEB_IMAGE_NAME"
+ context: "web"
+ - service_name: "api"
+ image_name_env: "DIFY_API_IMAGE_NAME"
+ context: "api"
steps:
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
@@ -22,14 +37,14 @@ jobs:
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
- username: ${{ secrets.DOCKERHUB_USER }}
- password: ${{ secrets.DOCKERHUB_TOKEN }}
+ username: ${{ env.DOCKERHUB_USER }}
+ password: ${{ env.DOCKERHUB_TOKEN }}
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
- images: langgenius/dify-api
+ images: ${{ env[matrix.image_name_env] }}
tags: |
type=raw,value=latest,enable=${{ startsWith(github.ref, 'refs/tags/') }}
type=ref,event=branch
@@ -39,22 +54,11 @@ jobs:
- name: Build and push
uses: docker/build-push-action@v5
with:
- context: "{{defaultContext}}:api"
+ context: "{{defaultContext}}:${{ matrix.context }}"
platforms: ${{ startsWith(github.ref, 'refs/tags/') && 'linux/amd64,linux/arm64' || 'linux/amd64' }}
- build-args: |
- COMMIT_SHA=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.revision'] }}
+ build-args: COMMIT_SHA=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.revision'] }}
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=gha
cache-to: type=gha,mode=max
-
- - name: Deploy to server
- if: github.ref == 'refs/heads/deploy/dev'
- uses: appleboy/ssh-action@v0.1.8
- with:
- host: ${{ secrets.SSH_HOST }}
- username: ${{ secrets.SSH_USER }}
- key: ${{ secrets.SSH_PRIVATE_KEY }}
- script: |
- ${{ secrets.SSH_SCRIPT }}
diff --git a/.github/workflows/build-web-image.yml b/.github/workflows/build-web-image.yml
deleted file mode 100644
index b77167c2a3..0000000000
--- a/.github/workflows/build-web-image.yml
+++ /dev/null
@@ -1,60 +0,0 @@
-name: Build and Push WEB Image
-
-on:
- push:
- branches:
- - 'main'
- - 'deploy/dev'
- release:
- types: [ published ]
-
-jobs:
- build-and-push:
- runs-on: ubuntu-latest
- if: github.event.pull_request.draft == false
- steps:
- - name: Set up QEMU
- uses: docker/setup-qemu-action@v3
-
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3
-
- - name: Login to Docker Hub
- uses: docker/login-action@v2
- with:
- username: ${{ secrets.DOCKERHUB_USER }}
- password: ${{ secrets.DOCKERHUB_TOKEN }}
-
- - name: Extract metadata (tags, labels) for Docker
- id: meta
- uses: docker/metadata-action@v5
- with:
- images: langgenius/dify-web
- tags: |
- type=raw,value=latest,enable=${{ startsWith(github.ref, 'refs/tags/') }}
- type=ref,event=branch
- type=sha,enable=true,priority=100,prefix=,suffix=,format=long
- type=raw,value=${{ github.ref_name }},enable=${{ startsWith(github.ref, 'refs/tags/') }}
-
- - name: Build and push
- uses: docker/build-push-action@v5
- with:
- context: "{{defaultContext}}:web"
- platforms: ${{ startsWith(github.ref, 'refs/tags/') && 'linux/amd64,linux/arm64' || 'linux/amd64' }}
- build-args: |
- COMMIT_SHA=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.revision'] }}
- push: true
- tags: ${{ steps.meta.outputs.tags }}
- labels: ${{ steps.meta.outputs.labels }}
- cache-from: type=gha
- cache-to: type=gha,mode=max
-
- - name: Deploy to server
- if: github.ref == 'refs/heads/deploy/dev'
- uses: appleboy/ssh-action@v0.1.8
- with:
- host: ${{ secrets.SSH_HOST }}
- username: ${{ secrets.SSH_USER }}
- key: ${{ secrets.SSH_PRIVATE_KEY }}
- script: |
- ${{ secrets.SSH_SCRIPT }}
diff --git a/.github/workflows/deploy-dev.yml b/.github/workflows/deploy-dev.yml
new file mode 100644
index 0000000000..47ca03c2eb
--- /dev/null
+++ b/.github/workflows/deploy-dev.yml
@@ -0,0 +1,24 @@
+name: Deploy Dev
+
+on:
+ workflow_run:
+ workflows: ["Build and Push API & Web"]
+ branches:
+ - "deploy/dev"
+ types:
+ - completed
+
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+ if: |
+ github.event.workflow_run.conclusion == 'success'
+ steps:
+ - name: Deploy to server
+ uses: appleboy/ssh-action@v0.1.8
+ with:
+ host: ${{ secrets.SSH_HOST }}
+ username: ${{ secrets.SSH_USER }}
+ key: ${{ secrets.SSH_PRIVATE_KEY }}
+ script: |
+ ${{ vars.SSH_SCRIPT || secrets.SSH_SCRIPT }}
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000000..ff61a00313
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,43 @@
+# Variables
+DOCKER_REGISTRY=langgenius
+WEB_IMAGE=$(DOCKER_REGISTRY)/dify-web
+API_IMAGE=$(DOCKER_REGISTRY)/dify-api
+VERSION=latest
+
+# Build Docker images
+build-web:
+ @echo "Building web Docker image: $(WEB_IMAGE):$(VERSION)..."
+ docker build -t $(WEB_IMAGE):$(VERSION) ./web
+ @echo "Web Docker image built successfully: $(WEB_IMAGE):$(VERSION)"
+
+build-api:
+ @echo "Building API Docker image: $(API_IMAGE):$(VERSION)..."
+ docker build -t $(API_IMAGE):$(VERSION) ./api
+ @echo "API Docker image built successfully: $(API_IMAGE):$(VERSION)"
+
+# Push Docker images
+push-web:
+ @echo "Pushing web Docker image: $(WEB_IMAGE):$(VERSION)..."
+ docker push $(WEB_IMAGE):$(VERSION)
+ @echo "Web Docker image pushed successfully: $(WEB_IMAGE):$(VERSION)"
+
+push-api:
+ @echo "Pushing API Docker image: $(API_IMAGE):$(VERSION)..."
+ docker push $(API_IMAGE):$(VERSION)
+ @echo "API Docker image pushed successfully: $(API_IMAGE):$(VERSION)"
+
+# Build all images
+build-all: build-web build-api
+
+# Push all images
+push-all: push-web push-api
+
+build-push-api: build-api push-api
+build-push-web: build-web push-web
+
+# Build and push all images
+build-push-all: build-all push-all
+ @echo "All Docker images have been built and pushed."
+
+# Phony targets
+.PHONY: build-web build-api push-web push-api build-all push-all build-push-api build-push-web build-push-all
diff --git a/api/core/application_manager.py b/api/core/application_manager.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/api/core/model_runtime/model_providers/_position.yaml b/api/core/model_runtime/model_providers/_position.yaml
index 2dcdc1bf2e..97116978cd 100644
--- a/api/core/model_runtime/model_providers/_position.yaml
+++ b/api/core/model_runtime/model_providers/_position.yaml
@@ -20,6 +20,7 @@
- jina
- chatglm
- xinference
+- yi
- openllm
- localai
- openai_api_compatible
diff --git a/api/core/model_runtime/model_providers/yi/__init__.py b/api/core/model_runtime/model_providers/yi/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/api/core/model_runtime/model_providers/yi/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/yi/_assets/icon_l_en.svg
new file mode 100644
index 0000000000..0efce4e85b
--- /dev/null
+++ b/api/core/model_runtime/model_providers/yi/_assets/icon_l_en.svg
@@ -0,0 +1,20 @@
+
diff --git a/api/core/model_runtime/model_providers/yi/_assets/icon_l_zh.svg b/api/core/model_runtime/model_providers/yi/_assets/icon_l_zh.svg
new file mode 100644
index 0000000000..951842da55
--- /dev/null
+++ b/api/core/model_runtime/model_providers/yi/_assets/icon_l_zh.svg
@@ -0,0 +1,20 @@
+
diff --git a/api/core/model_runtime/model_providers/yi/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/yi/_assets/icon_s_en.svg
new file mode 100644
index 0000000000..a813274466
--- /dev/null
+++ b/api/core/model_runtime/model_providers/yi/_assets/icon_s_en.svg
@@ -0,0 +1,7 @@
+
\ No newline at end of file
diff --git a/api/core/model_runtime/model_providers/yi/llm/__init__.py b/api/core/model_runtime/model_providers/yi/llm/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/api/core/model_runtime/model_providers/yi/llm/_position.yaml b/api/core/model_runtime/model_providers/yi/llm/_position.yaml
new file mode 100644
index 0000000000..12838d670f
--- /dev/null
+++ b/api/core/model_runtime/model_providers/yi/llm/_position.yaml
@@ -0,0 +1,3 @@
+- yi-34b-chat-0205
+- yi-34b-chat-200k
+- yi-vl-plus
diff --git a/api/core/model_runtime/model_providers/yi/llm/llm.py b/api/core/model_runtime/model_providers/yi/llm/llm.py
new file mode 100644
index 0000000000..8ad6462514
--- /dev/null
+++ b/api/core/model_runtime/model_providers/yi/llm/llm.py
@@ -0,0 +1,30 @@
+from collections.abc import Generator
+from typing import Optional, Union
+
+from core.model_runtime.entities.llm_entities import LLMResult
+from core.model_runtime.entities.message_entities import (
+ PromptMessage,
+ PromptMessageTool,
+)
+from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel
+
+
+class YiLargeLanguageModel(OAIAPICompatLargeLanguageModel):
+ def _invoke(self, model: str, credentials: dict,
+ prompt_messages: list[PromptMessage], model_parameters: dict,
+ tools: Optional[list[PromptMessageTool]] = None, stop: Optional[list[str]] = None,
+ stream: bool = True, user: Optional[str] = None) \
+ -> Union[LLMResult, Generator]:
+ self._add_custom_parameters(credentials)
+ return super()._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user)
+
+ def validate_credentials(self, model: str, credentials: dict) -> None:
+ self._add_custom_parameters(credentials)
+ super().validate_credentials(model, credentials)
+
+ @staticmethod
+ def _add_custom_parameters(credentials: dict) -> None:
+ credentials['mode'] = 'chat'
+
+ if 'endpoint_url' not in credentials or credentials['endpoint_url'] == "":
+ credentials['endpoint_url'] = 'https://api.lingyiwanwu.com/v1'
diff --git a/api/core/model_runtime/model_providers/yi/llm/yi-34b-chat-0205.yaml b/api/core/model_runtime/model_providers/yi/llm/yi-34b-chat-0205.yaml
new file mode 100644
index 0000000000..4d4148aa91
--- /dev/null
+++ b/api/core/model_runtime/model_providers/yi/llm/yi-34b-chat-0205.yaml
@@ -0,0 +1,28 @@
+model: yi-34b-chat-0205
+label:
+ zh_Hans: yi-34b-chat-0205
+ en_US: yi-34b-chat-0205
+model_type: llm
+features:
+ - agent-thought
+model_properties:
+ mode: chat
+ context_size: 4096
+parameter_rules:
+ - name: max_tokens
+ use_template: max_tokens
+ type: int
+ default: 512
+ min: 1
+ max: 4096
+ - name: temperature
+ use_template: temperature
+ type: float
+ default: 0.7
+ min: 0
+ max: 2
+pricing:
+ input: '0.0025'
+ output: '0.0025'
+ unit: '0.00001'
+ currency: RMB
diff --git a/api/core/model_runtime/model_providers/yi/llm/yi-34b-chat-200k.yaml b/api/core/model_runtime/model_providers/yi/llm/yi-34b-chat-200k.yaml
new file mode 100644
index 0000000000..4fbe84e9b7
--- /dev/null
+++ b/api/core/model_runtime/model_providers/yi/llm/yi-34b-chat-200k.yaml
@@ -0,0 +1,28 @@
+model: yi-34b-chat-200k
+label:
+ zh_Hans: yi-34b-chat-200k
+ en_US: yi-34b-chat-200k
+model_type: llm
+features:
+ - agent-thought
+model_properties:
+ mode: chat
+ context_size: 200000
+parameter_rules:
+ - name: max_tokens
+ use_template: max_tokens
+ type: int
+ default: 1024
+ min: 1
+ max: 200000
+ - name: temperature
+ use_template: temperature
+ type: float
+ default: 0.7
+ min: 0
+ max: 2
+pricing:
+ input: '0.012'
+ output: '0.012'
+ unit: '0.00001'
+ currency: RMB
diff --git a/api/core/model_runtime/model_providers/yi/llm/yi-vl-plus.yaml b/api/core/model_runtime/model_providers/yi/llm/yi-vl-plus.yaml
new file mode 100644
index 0000000000..6195051f16
--- /dev/null
+++ b/api/core/model_runtime/model_providers/yi/llm/yi-vl-plus.yaml
@@ -0,0 +1,28 @@
+model: yi-vl-plus
+label:
+ zh_Hans: yi-vl-plus
+ en_US: yi-vl-plus
+model_type: llm
+features:
+ - vision
+model_properties:
+ mode: chat
+ context_size: 4096
+parameter_rules:
+ - name: max_tokens
+ use_template: max_tokens
+ type: int
+ default: 512
+ min: 1
+ max: 4096
+ - name: temperature
+ use_template: temperature
+ type: float
+ default: 0.7
+ min: 0
+ max: 2
+pricing:
+ input: '0.01'
+ output: '0.03'
+ unit: '0.001'
+ currency: USD
diff --git a/api/core/model_runtime/model_providers/yi/yi.py b/api/core/model_runtime/model_providers/yi/yi.py
new file mode 100644
index 0000000000..691c7aa371
--- /dev/null
+++ b/api/core/model_runtime/model_providers/yi/yi.py
@@ -0,0 +1,32 @@
+import logging
+
+from core.model_runtime.entities.model_entities import ModelType
+from core.model_runtime.errors.validate import CredentialsValidateFailedError
+from core.model_runtime.model_providers.__base.model_provider import ModelProvider
+
+logger = logging.getLogger(__name__)
+
+
+class YiProvider(ModelProvider):
+
+ def validate_provider_credentials(self, credentials: dict) -> None:
+ """
+ Validate provider credentials
+ If validation fails, raise an exception.
+
+ :param credentials: provider credentials, credentials form defined in `provider_credential_schema`.
+ """
+ try:
+ model_instance = self.get_model_instance(ModelType.LLM)
+
+ # Use the `yi-34b-chat-0205` model for validation,
+ # regardless of whether a text completion model or a chat model is passed in.
+ model_instance.validate_credentials(
+ model='yi-34b-chat-0205',
+ credentials=credentials
+ )
+ except CredentialsValidateFailedError as ex:
+ raise ex
+ except Exception as ex:
+ logger.exception(f'{self.get_provider_schema().provider} credential validation failed')
+ raise ex
diff --git a/api/core/model_runtime/model_providers/yi/yi.yaml b/api/core/model_runtime/model_providers/yi/yi.yaml
new file mode 100644
index 0000000000..368c715456
--- /dev/null
+++ b/api/core/model_runtime/model_providers/yi/yi.yaml
@@ -0,0 +1,41 @@
+provider: yi
+label:
+ en_US: 01.AI
+ zh_Hans: 零一万物
+description:
+ en_US: Models provided by 01.AI, such as yi-34b-chat and yi-vl-plus.
+ zh_Hans: 零一万物提供的模型,例如 yi-34b-chat 和 yi-vl-plus。
+icon_small:
+ en_US: icon_s_en.svg
+icon_large:
+ en_US: icon_l_en.svg
+background: "#EFFDFD"
+help:
+ title:
+ en_US: Get your API Key from 01.ai
+ zh_Hans: 从零一万物获取 API Key
+ url:
+ en_US: https://platform.lingyiwanwu.com/apikeys
+supported_model_types:
+ - llm
+configurate_methods:
+ - predefined-model
+provider_credential_schema:
+ credential_form_schemas:
+ - variable: api_key
+ label:
+ en_US: API Key
+ type: secret-input
+ required: true
+ placeholder:
+ zh_Hans: 在此输入您的 API Key
+ en_US: Enter your API Key
+ - variable: endpoint_url
+ label:
+ zh_Hans: 自定义 API endpoint 地址
+ en_US: Custom API endpoint URL
+ type: text-input
+ required: false
+ placeholder:
+ zh_Hans: Base URL, e.g. https://api.lingyiwanwu.com/v1
+ en_US: Base URL, e.g. https://api.lingyiwanwu.com/v1
diff --git a/web/app/components/develop/template/template.zh.mdx b/web/app/components/develop/template/template.zh.mdx
index 1b83c6a1c3..6fd1cddc16 100644
--- a/web/app/components/develop/template/template.zh.mdx
+++ b/web/app/components/develop/template/template.zh.mdx
@@ -153,8 +153,6 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
"user": "abc-123"
}'
```
- ```
-
### blocking