add openai proxy site support.

zhuyunxiang 2023-05-16 14:19:30 +08:00
parent 815f794eef
commit 765788fc61
5 changed files with 14 additions and 0 deletions

@@ -16,6 +16,9 @@ API_URL=http://127.0.0.1:5001
# Web APP base URL
APP_URL=http://127.0.0.1:5001
# OpenAI API base URL
OPENAI_API_BASE=https://api.openai.com/v1
# celery configuration
CELERY_BROKER_URL=redis://:difyai123456@localhost:6379/1
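A minimal sketch of how such a variable is typically consumed, assuming a plain os.environ lookup with the same fallback shown above (illustrative only, not the project's actual helper):

import os

# Illustrative: read the proxy base URL from the environment, falling back
# to the official OpenAI endpoint when OPENAI_API_BASE is not set.
openai_api_base = os.environ.get('OPENAI_API_BASE', 'https://api.openai.com/v1')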

@@ -8,6 +8,7 @@ ENV DEPLOY_ENV PRODUCTION
ENV CONSOLE_URL http://127.0.0.1:5001
ENV API_URL http://127.0.0.1:5001
ENV APP_URL http://127.0.0.1:5001
ENV OPENAI_API_BASE https://api.openai.com/v1
EXPOSE 5001

@@ -29,6 +29,7 @@ DEFAULTS = {
'CONSOLE_URL': 'https://cloud.dify.ai',
'API_URL': 'https://api.dify.ai',
'APP_URL': 'https://udify.app',
'OPENAI_API_BASE': 'https://api.openai.com/v1',
'STORAGE_TYPE': 'local',
'STORAGE_LOCAL_PATH': 'storage',
'CHECK_UPDATE_URL': 'https://updates.dify.ai',
@@ -74,6 +75,7 @@ class Config:
self.CONSOLE_URL = get_env('CONSOLE_URL')
self.API_URL = get_env('API_URL')
self.APP_URL = get_env('APP_URL')
self.OPENAI_API_BASE = get_env('OPENAI_API_BASE')
self.CURRENT_VERSION = "0.2.0"
self.COMMIT_SHA = get_env('COMMIT_SHA')
self.EDITION = "SELF_HOSTED"
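A rough sketch of how get_env presumably resolves a key against the DEFAULTS table above (assumed behaviour, since the helper itself is outside this diff):

import os

DEFAULTS = {
    'OPENAI_API_BASE': 'https://api.openai.com/v1',
}

def get_env(key):
    # Prefer the process environment; otherwise fall back to the default table.
    return os.environ.get(key, DEFAULTS.get(key))

openai_api_base = get_env('OPENAI_API_BASE')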

@@ -1,5 +1,6 @@
from typing import Optional, Any, List
from flask import current_app
import openai
from llama_index.embeddings.base import BaseEmbedding
from llama_index.embeddings.openai import OpenAIEmbeddingMode, OpenAIEmbeddingModelType, _QUERY_MODE_MODEL_DICT, \
@@ -111,6 +112,9 @@ class OpenAIEmbedding(BaseEmbedding):
self.model = OpenAIEmbeddingModelType(model)
self.deployment_name = deployment_name
self.openai_api_key = openai_api_key
# Use the proxy OpenAI base URL when configured
if current_app.config['OPENAI_API_BASE'] is not None:
    openai.api_base = current_app.config['OPENAI_API_BASE']
@handle_llm_exceptions
def _get_query_embedding(self, query: str) -> List[float]:
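The assignment above sets the module-level api_base of the pre-1.0 openai Python SDK, so subsequent requests are sent to the configured proxy instead of api.openai.com. A small usage sketch with a placeholder key and a hypothetical proxy URL:

import openai

openai.api_key = 'sk-...'  # placeholder key
openai.api_base = 'https://my-openai-proxy.example.com/v1'  # hypothetical proxy site

# This embedding request is routed through the proxy base URL set above.
response = openai.Embedding.create(model='text-embedding-ada-002', input='hello world')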

@@ -17,6 +17,8 @@ services:
API_URL: http://localhost
# The URL for Web APP, refers to the Web App base URL of WEB service.
APP_URL: http://localhost
# OpenAI proxy API base URL.
OPENAI_API_BASE: https://api.openai.com/v1
# When enabled, migrations will be executed prior to application startup and the application will start after the migrations have completed.
MIGRATION_ENABLED: 'true'
# The configurations of postgres database connection.
@@ -119,6 +121,8 @@ services:
API_URL: http://localhost
# The URL for Web APP, refers to the Web App base URL of WEB service.
APP_URL: http://localhost
# OpenAI proxy API base URL.
OPENAI_API_BASE: https://api.openai.com/v1
# The configurations of postgres database connection.
# It is consistent with the configuration in the 'db' service below.
DB_USERNAME: postgres