mirror of https://github.com/langgenius/dify.git
test: Consolidate API CI test runner (#29440)
Signed-off-by: -LAN- <laipz8200@outlook.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
This commit is contained in:
parent 1e47ffb50c
commit 80c74cf725
@@ -0,0 +1,5 @@
+[run]
+omit =
+    api/tests/*
+    api/migrations/*
+    api/core/rag/datasource/vdb/*
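
Note: the [run] omit globs keep test code, migrations, and the vector-store adapters out of the coverage denominator. To sanity-check what a glob excludes, a minimal stdlib sketch (the listed paths are illustrative, not from the commit):

# Sketch: check which repo paths the new omit globs would exclude.
# fnmatch is a stdlib approximation of coverage.py's glob matching.
from fnmatch import fnmatch

OMIT = ["api/tests/*", "api/migrations/*", "api/core/rag/datasource/vdb/*"]
paths = [
    "api/tests/unit_tests/test_foo.py",
    "api/core/rag/datasource/vdb/qdrant/qdrant_vector.py",
    "api/core/app/app_config.py",
]
for p in paths:
    excluded = any(fnmatch(p, pattern) for pattern in OMIT)
    print(f"{p}: {'omitted' if excluded else 'measured'}")
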
@@ -71,18 +71,18 @@ jobs:
         run: |
           cp api/tests/integration_tests/.env.example api/tests/integration_tests/.env
 
-      - name: Run Workflow
-        run: uv run --project api bash dev/pytest/pytest_workflow.sh
-
-      - name: Run Tool
-        run: uv run --project api bash dev/pytest/pytest_tools.sh
-
-      - name: Run TestContainers
-        run: uv run --project api bash dev/pytest/pytest_testcontainers.sh
-
-      - name: Run Unit tests
-        run: |
-          uv run --project api bash dev/pytest/pytest_unit_tests.sh
+      - name: Run API Tests
+        env:
+          STORAGE_TYPE: opendal
+          OPENDAL_SCHEME: fs
+          OPENDAL_FS_ROOT: /tmp/dify-storage
+        run: |
+          uv run --project api pytest \
+            --timeout "${PYTEST_TIMEOUT:-180}" \
+            api/tests/integration_tests/workflow \
+            api/tests/integration_tests/tools \
+            api/tests/test_containers_integration_tests \
+            api/tests/unit_tests
 
       - name: Coverage Summary
         run: |
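
Note: the four separate runner steps collapse into a single pytest invocation, so coverage accumulates in one process and the OpenDAL storage variables apply to every suite. To replay the step locally, a hedged sketch (assumes uv is installed and the repo root is the working directory; the wrapper itself is not part of the commit):

# Sketch: replay the consolidated CI step locally with the same env and paths.
import os
import subprocess

env = {**os.environ,
       "STORAGE_TYPE": "opendal",
       "OPENDAL_SCHEME": "fs",
       "OPENDAL_FS_ROOT": "/tmp/dify-storage"}
subprocess.run(
    ["uv", "run", "--project", "api", "pytest",
     "--timeout", env.get("PYTEST_TIMEOUT", "180"),
     "api/tests/integration_tests/workflow",
     "api/tests/integration_tests/tools",
     "api/tests/test_containers_integration_tests",
     "api/tests/unit_tests"],
    env=env, check=True)
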
@@ -94,4 +94,3 @@ jobs:
           echo "### Test Coverage Summary :test_tube:" >> $GITHUB_STEP_SUMMARY
           echo "Total Coverage: ${TOTAL_COVERAGE}%" >> $GITHUB_STEP_SUMMARY
           uv run --project api coverage report --format=markdown >> $GITHUB_STEP_SUMMARY
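
Note: the summary step keeps working because pytest-cov still writes the JSON report (the XML report is dropped from pytest.ini later in this diff). For reference, coverage.py's JSON report exposes the same total under totals.percent_covered; a minimal reader sketch:

# Sketch: extract the total the CI summary prints, from coverage.py's JSON report.
import json

with open("coverage.json") as f:
    report = json.load(f)
total = report["totals"]["percent_covered"]  # coverage.py JSON report schema
print(f"Total Coverage: {total:.2f}%")
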
@@ -9,11 +9,21 @@ FILES_HEADERS: tuple[str, ...] = (*BASE_CORS_HEADERS, HEADER_NAME_CSRF_TOKEN)
 EXPOSED_HEADERS: tuple[str, ...] = ("X-Version", "X-Env", "X-Trace-Id")
 
 
-def init_app(app: DifyApp):
-    # register blueprint routers
+def _apply_cors_once(bp, /, **cors_kwargs):
+    """Make CORS idempotent so blueprints can be reused across multiple app instances."""
+    if getattr(bp, "_dify_cors_applied", False):
+        return
 
     from flask_cors import CORS
 
+    CORS(bp, **cors_kwargs)
+    bp._dify_cors_applied = True
+
+
+def init_app(app: DifyApp):
+    # register blueprint routers
+
     from controllers.console import bp as console_app_bp
     from controllers.files import bp as files_bp
     from controllers.inner_api import bp as inner_api_bp
@@ -22,7 +32,7 @@ def init_app(app: DifyApp):
     from controllers.trigger import bp as trigger_bp
     from controllers.web import bp as web_bp
 
-    CORS(
+    _apply_cors_once(
         service_api_bp,
         allow_headers=list(SERVICE_API_HEADERS),
         methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
@@ -30,7 +40,7 @@ def init_app(app: DifyApp):
     )
     app.register_blueprint(service_api_bp)
 
-    CORS(
+    _apply_cors_once(
         web_bp,
         resources={r"/*": {"origins": dify_config.WEB_API_CORS_ALLOW_ORIGINS}},
         supports_credentials=True,
@@ -40,7 +50,7 @@ def init_app(app: DifyApp):
     )
     app.register_blueprint(web_bp)
 
-    CORS(
+    _apply_cors_once(
         console_app_bp,
         resources={r"/*": {"origins": dify_config.CONSOLE_CORS_ALLOW_ORIGINS}},
         supports_credentials=True,
@@ -50,7 +60,7 @@ def init_app(app: DifyApp):
     )
     app.register_blueprint(console_app_bp)
 
-    CORS(
+    _apply_cors_once(
         files_bp,
         allow_headers=list(FILES_HEADERS),
         methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
@@ -62,7 +72,7 @@ def init_app(app: DifyApp):
     app.register_blueprint(mcp_bp)
 
     # Register trigger blueprint with CORS for webhook calls
-    CORS(
+    _apply_cors_once(
         trigger_bp,
         allow_headers=["Content-Type", "Authorization", "X-App-Code"],
         methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH", "HEAD"],
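
Note: flask_cors.CORS() attaches handlers to the blueprint every time it is called, so guarding it lets test suites build several app instances from the same imported blueprints without stacking duplicate handlers. The guard in isolation, as a standalone sketch with a throwaway blueprint rather than Dify's real ones:

# Sketch: the idempotency guard pattern. Requires flask and flask-cors.
from flask import Blueprint, Flask
from flask_cors import CORS

def apply_cors_once(bp, /, **cors_kwargs):
    if getattr(bp, "_cors_applied", False):
        return
    CORS(bp, **cors_kwargs)
    bp._cors_applied = True

bp = Blueprint("demo", __name__)
apply_cors_once(bp, supports_credentials=True)
apply_cors_once(bp, supports_credentials=True)  # no-op: handlers attached exactly once

app_a, app_b = Flask("a"), Flask("b")
app_a.register_blueprint(bp)  # one blueprint can back multiple app instances
app_b.register_blueprint(bp)
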
@@ -1,5 +1,5 @@
 [pytest]
-addopts = --cov=./api --cov-report=json --cov-report=xml
+addopts = --cov=./api --cov-report=json
 env =
     ANTHROPIC_API_KEY = sk-ant-api11-IamNotARealKeyJustForMockTestKawaiiiiiiiiii-NotBaka-ASkksz
     AZURE_OPENAI_API_BASE = https://difyai-openai.openai.azure.com
@@ -1,3 +1,4 @@
+import os
 import pathlib
 import random
 import secrets
@@ -32,6 +33,10 @@ def _load_env():
 
 
 _load_env()
+# Override storage root to tmp to avoid polluting repo during local runs
+os.environ["OPENDAL_FS_ROOT"] = "/tmp/dify-storage"
+os.environ.setdefault("STORAGE_TYPE", "opendal")
+os.environ.setdefault("OPENDAL_SCHEME", "fs")
 
 _CACHED_APP = create_app()
 
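
Note the asymmetry above: OPENDAL_FS_ROOT is force-assigned so local runs never write inside the repo, while STORAGE_TYPE and OPENDAL_SCHEME use setdefault and therefore yield to anything already exported. A two-line illustration (sketch, not from the commit):

# Sketch: direct assignment overrides the caller's environment, setdefault respects it.
import os

os.environ["OPENDAL_FS_ROOT"] = "/tmp/dify-storage"  # always wins
os.environ.setdefault("STORAGE_TYPE", "opendal")     # only fills a gap
print(os.environ["OPENDAL_FS_ROOT"], os.environ["STORAGE_TYPE"])
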
@@ -138,9 +138,9 @@ class DifyTestContainers:
             logger.warning("Failed to create plugin database: %s", e)
 
         # Set up storage environment variables
-        os.environ["STORAGE_TYPE"] = "opendal"
-        os.environ["OPENDAL_SCHEME"] = "fs"
-        os.environ["OPENDAL_FS_ROOT"] = "storage"
+        os.environ.setdefault("STORAGE_TYPE", "opendal")
+        os.environ.setdefault("OPENDAL_SCHEME", "fs")
+        os.environ.setdefault("OPENDAL_FS_ROOT", "/tmp/dify-storage")
 
         # Start Redis container for caching and session management
         # Redis is used for storing session data, cache entries, and temporary data
@@ -348,6 +348,13 @@ def _create_app_with_containers() -> Flask:
     """
     logger.info("Creating Flask application with test container configuration...")
 
+    # Ensure Redis client reconnects to the containerized Redis (no auth)
+    from extensions import ext_redis
+
+    ext_redis.redis_client._client = None
+    os.environ["REDIS_USERNAME"] = ""
+    os.environ["REDIS_PASSWORD"] = ""
+
     # Re-create the config after environment variables have been set
     from configs import dify_config
 
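
Note: redis_client here is a lazy wrapper, and clearing its cached _client forces the next call to reconnect using the environment in effect at that moment, which repoints it at the unauthenticated container. The pattern in generic form, a sketch assuming a lazily-connecting wrapper (the real one lives in extensions.ext_redis):

# Sketch: a lazily-connecting client whose cached connection can be invalidated.
import os

class LazyRedis:
    def __init__(self):
        self._client = None  # created on first use

    def _connect(self):
        import redis
        return redis.Redis(
            host=os.environ.get("REDIS_HOST", "localhost"),
            username=os.environ.get("REDIS_USERNAME") or None,
            password=os.environ.get("REDIS_PASSWORD") or None,
        )

    def get(self, key):
        if self._client is None:
            self._client = self._connect()
        return self._client.get(key)

client = LazyRedis()
client._client = None  # drop the cached connection; next call re-reads the env
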
@@ -486,3 +493,29 @@ def db_session_with_containers(flask_app_with_containers) -> Generator[Session,
     finally:
         session.close()
         logger.debug("Database session closed")
+
+
+@pytest.fixture(scope="package", autouse=True)
+def mock_ssrf_proxy_requests():
+    """
+    Avoid outbound network during containerized tests by stubbing SSRF proxy helpers.
+    """
+    from unittest.mock import patch
+
+    import httpx
+
+    def _fake_request(method, url, **kwargs):
+        request = httpx.Request(method=method, url=url)
+        return httpx.Response(200, request=request, content=b"")
+
+    with (
+        patch("core.helper.ssrf_proxy.make_request", side_effect=_fake_request),
+        patch("core.helper.ssrf_proxy.get", side_effect=lambda url, **kw: _fake_request("GET", url, **kw)),
+        patch("core.helper.ssrf_proxy.post", side_effect=lambda url, **kw: _fake_request("POST", url, **kw)),
+        patch("core.helper.ssrf_proxy.put", side_effect=lambda url, **kw: _fake_request("PUT", url, **kw)),
+        patch("core.helper.ssrf_proxy.patch", side_effect=lambda url, **kw: _fake_request("PATCH", url, **kw)),
+        patch("core.helper.ssrf_proxy.delete", side_effect=lambda url, **kw: _fake_request("DELETE", url, **kw)),
+        patch("core.helper.ssrf_proxy.head", side_effect=lambda url, **kw: _fake_request("HEAD", url, **kw)),
+    ):
+        yield
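
Note: the fixture stubs every verb helper in core.helper.ssrf_proxy with a canned 200 response so containerized tests never touch the network. If the helpers route through an httpx.Client, httpx's built-in MockTransport achieves the same effect; shown as an alternative sketch, not what this diff does:

# Sketch: httpx.MockTransport returns canned responses without any network I/O.
import httpx

def handler(request: httpx.Request) -> httpx.Response:
    return httpx.Response(200, request=request, content=b"")

client = httpx.Client(transport=httpx.MockTransport(handler))
resp = client.get("http://example.internal/healthz")  # hypothetical URL
assert resp.status_code == 200
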
@@ -240,8 +240,7 @@ class TestShardedRedisBroadcastChannelIntegration:
         for future in as_completed(producer_futures, timeout=30.0):
             sent_msgs.update(future.result())
 
-        subscription.close()
-        consumer_received_msgs = consumer_future.result(timeout=30.0)
+        consumer_received_msgs = consumer_future.result(timeout=60.0)
 
         assert sent_msgs == consumer_received_msgs
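
Note: the old code closed the subscription before the consumer future had finished, which could cut the stream off mid-drain; the fix lets the consumer run to completion and doubles its budget to 60s. The shape of the wait, as a generic sketch:

# Sketch: let the consumer drain on its own timeout instead of closing its source first.
import queue
from concurrent.futures import ThreadPoolExecutor, as_completed

q: "queue.Queue[int]" = queue.Queue()

def produce(n):
    for i in range(n):
        q.put(i)
    return n

def consume(expected):
    return len([q.get(timeout=5) for _ in range(expected)])

with ThreadPoolExecutor(max_workers=4) as pool:
    producers = [pool.submit(produce, 10) for _ in range(3)]
    consumer = pool.submit(consume, 30)
    sent = sum(f.result() for f in as_completed(producers, timeout=30.0))
    received = consumer.result(timeout=60.0)  # generous budget; no premature close
assert sent == received
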
@@ -26,16 +26,29 @@ redis_mock.hgetall = MagicMock(return_value={})
 redis_mock.hdel = MagicMock()
 redis_mock.incr = MagicMock(return_value=1)
 
+# Ensure OpenDAL fs writes to tmp to avoid polluting workspace
+os.environ.setdefault("OPENDAL_SCHEME", "fs")
+os.environ.setdefault("OPENDAL_FS_ROOT", "/tmp/dify-storage")
+os.environ.setdefault("STORAGE_TYPE", "opendal")
+
 # Add the API directory to Python path to ensure proper imports
 import sys
 
 sys.path.insert(0, PROJECT_DIR)
 
-# apply the mock to the Redis client in the Flask app
 from extensions import ext_redis
 
-redis_patcher = patch.object(ext_redis, "redis_client", redis_mock)
-redis_patcher.start()
+
+def _patch_redis_clients_on_loaded_modules():
+    """Ensure any module-level redis_client references point to the shared redis_mock."""
+    import sys
+
+    for module in list(sys.modules.values()):
+        if module is None:
+            continue
+        if hasattr(module, "redis_client"):
+            module.redis_client = redis_mock
 
 
 @pytest.fixture
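
Note: patch.object(ext_redis, "redis_client", ...) only swaps the attribute on ext_redis itself; any module that did `from extensions.ext_redis import redis_client` at import time keeps its own binding to the real object, which is why the helper walks sys.modules and rebinds each copy. A compact demonstration of that Python behavior (hypothetical module names):

# Sketch: `from m import name` copies the binding, so patching m.name alone is not enough.
import sys
import types

m = types.ModuleType("m")
m.redis_client = "real"
sys.modules["m"] = m

user = types.ModuleType("user")
user.redis_client = m.redis_client  # what `from m import redis_client` effectively does
sys.modules["user"] = user

m.redis_client = "mock"             # patching the source module...
print(user.redis_client)            # ...still prints "real"

for mod in list(sys.modules.values()):
    if mod is not None and hasattr(mod, "redis_client"):
        mod.redis_client = "mock"   # the conftest helper's fix: rebind every copy
print(user.redis_client)            # now "mock"
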
@@ -49,6 +62,15 @@ def _provide_app_context(app: Flask):
     yield
 
 
+@pytest.fixture(autouse=True)
+def _patch_redis_clients():
+    """Patch redis_client to MagicMock only for unit test executions."""
+    with patch.object(ext_redis, "redis_client", redis_mock):
+        _patch_redis_clients_on_loaded_modules()
+        yield
+
+
 @pytest.fixture(autouse=True)
 def reset_redis_mock():
     """reset the Redis mock before each test"""
@@ -63,3 +85,20 @@ def reset_redis_mock():
     redis_mock.hgetall.return_value = {}
     redis_mock.hdel.return_value = None
     redis_mock.incr.return_value = 1
+
+    # Keep any imported modules pointing at the mock between tests
+    _patch_redis_clients_on_loaded_modules()
+
+
+@pytest.fixture(autouse=True)
+def reset_secret_key():
+    """Ensure SECRET_KEY-dependent logic sees an empty config value by default."""
+
+    from configs import dify_config
+
+    original = dify_config.SECRET_KEY
+    dify_config.SECRET_KEY = ""
+    try:
+        yield
+    finally:
+        dify_config.SECRET_KEY = original
@@ -14,7 +14,9 @@ def get_example_bucket() -> str:
 
 
 def get_opendal_bucket() -> str:
-    return "./dify"
+    import os
+
+    return os.environ.get("OPENDAL_FS_ROOT", "/tmp/dify-storage")
 
 
 def get_example_filename() -> str:
@@ -21,20 +21,16 @@ class TestOpenDAL:
     )
 
     @pytest.fixture(scope="class", autouse=True)
-    def teardown_class(self, request):
+    def teardown_class(self):
         """Clean up after all tests in the class."""
+        yield
 
-        def cleanup():
-            folder = Path(get_opendal_bucket())
-            if folder.exists() and folder.is_dir():
-                for item in folder.iterdir():
-                    if item.is_file():
-                        item.unlink()
-                    elif item.is_dir():
-                        item.rmdir()
-                folder.rmdir()
-
-        return cleanup()
+        folder = Path(get_opendal_bucket())
+        if folder.exists() and folder.is_dir():
+            import shutil
+
+            shutil.rmtree(folder, ignore_errors=True)
 
     def test_save_and_exists(self):
         """Test saving data and checking existence."""
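
Note: the old fixture ran its cleanup during setup (`return cleanup()` calls the function immediately), wiping the bucket before the tests used it, and its manual iterdir walk could not remove nested directories. The yield form defers everything after the yield until the class's tests finish. The general shape:

# Sketch: yield-style fixture, where code after `yield` is the teardown phase.
import shutil
from pathlib import Path

import pytest

@pytest.fixture(scope="class", autouse=True)
def cleanup_bucket():
    yield  # tests in the class run here
    folder = Path("/tmp/dify-storage")
    if folder.exists() and folder.is_dir():
        shutil.rmtree(folder, ignore_errors=True)
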
@@ -1,20 +0,0 @@
-#!/bin/bash
-set -x
-
-SCRIPT_DIR="$(dirname "$(realpath "$0")")"
-cd "$SCRIPT_DIR/../.."
-
-# ModelRuntime
-dev/pytest/pytest_model_runtime.sh
-
-# Tools
-dev/pytest/pytest_tools.sh
-
-# Workflow
-dev/pytest/pytest_workflow.sh
-
-# Unit tests
-dev/pytest/pytest_unit_tests.sh
-
-# TestContainers tests
-dev/pytest/pytest_testcontainers.sh
@@ -1,9 +0,0 @@
-#!/bin/bash
-set -x
-
-SCRIPT_DIR="$(dirname "$(realpath "$0")")"
-cd "$SCRIPT_DIR/../.."
-
-PYTEST_TIMEOUT="${PYTEST_TIMEOUT:-120}"
-
-pytest --timeout "${PYTEST_TIMEOUT}" api/tests/artifact_tests/
@@ -0,0 +1,58 @@
+#!/bin/bash
+set -euo pipefail
+set -ex
+
+SCRIPT_DIR="$(dirname "$(realpath "$0")")"
+cd "$SCRIPT_DIR/../.."
+
+PYTEST_TIMEOUT="${PYTEST_TIMEOUT:-180}"
+
+# Ensure OpenDAL local storage works even if .env isn't loaded
+export STORAGE_TYPE=${STORAGE_TYPE:-opendal}
+export OPENDAL_SCHEME=${OPENDAL_SCHEME:-fs}
+export OPENDAL_FS_ROOT=${OPENDAL_FS_ROOT:-/tmp/dify-storage}
+mkdir -p "${OPENDAL_FS_ROOT}"
+
+# Prepare env files like CI
+cp -n docker/.env.example docker/.env || true
+cp -n docker/middleware.env.example docker/middleware.env || true
+cp -n api/tests/integration_tests/.env.example api/tests/integration_tests/.env || true
+
+# Expose service ports (same as CI) without leaving the repo dirty
+EXPOSE_BACKUPS=()
+for f in docker/docker-compose.yaml docker/tidb/docker-compose.yaml; do
+    if [[ -f "$f" ]]; then
+        cp "$f" "$f.ci.bak"
+        EXPOSE_BACKUPS+=("$f")
+    fi
+done
+if command -v yq >/dev/null 2>&1; then
+    sh .github/workflows/expose_service_ports.sh || true
+else
+    echo "skip expose_service_ports (yq not installed)" >&2
+fi
+
+# Optionally start middleware stack (db, redis, sandbox, ssrf proxy) to mirror CI
+STARTED_MIDDLEWARE=0
+if [[ "${SKIP_MIDDLEWARE:-0}" != "1" ]]; then
+    docker compose -f docker/docker-compose.middleware.yaml --env-file docker/middleware.env up -d db_postgres redis sandbox ssrf_proxy
+    STARTED_MIDDLEWARE=1
+    # Give services a moment to come up
+    sleep 5
+fi
+
+cleanup() {
+    if [[ $STARTED_MIDDLEWARE -eq 1 ]]; then
+        docker compose -f docker/docker-compose.middleware.yaml --env-file docker/middleware.env down
+    fi
+    for f in "${EXPOSE_BACKUPS[@]}"; do
+        mv "$f.ci.bak" "$f"
+    done
+}
+trap cleanup EXIT
+
+pytest --timeout "${PYTEST_TIMEOUT}" \
+    api/tests/integration_tests/workflow \
+    api/tests/integration_tests/tools \
+    api/tests/test_containers_integration_tests \
+    api/tests/unit_tests
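
Note: the new runner mirrors CI end to end: it seeds env files non-destructively (cp -n), backs up the compose files before expose_service_ports.sh mutates them, optionally boots the middleware stack (skippable via SKIP_MIDDLEWARE=1), and restores state through the EXIT trap. The trap idiom corresponds to Python's try/finally, sketched roughly here:

# Sketch: the script's trap-based cleanup, expressed as try/finally.
import os
import shutil
import subprocess

backups = []
for f in ["docker/docker-compose.yaml", "docker/tidb/docker-compose.yaml"]:
    if os.path.exists(f):
        shutil.copy(f, f + ".ci.bak")
        backups.append(f)
try:
    subprocess.run(["pytest", "--timeout", "180", "api/tests/unit_tests"], check=True)
finally:
    for f in backups:  # restores even if pytest fails, like `trap cleanup EXIT`
        shutil.move(f + ".ci.bak", f)
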
@@ -1,18 +0,0 @@
-#!/bin/bash
-set -x
-
-SCRIPT_DIR="$(dirname "$(realpath "$0")")"
-cd "$SCRIPT_DIR/../.."
-
-PYTEST_TIMEOUT="${PYTEST_TIMEOUT:-180}"
-
-pytest --timeout "${PYTEST_TIMEOUT}" api/tests/integration_tests/model_runtime/anthropic \
-    api/tests/integration_tests/model_runtime/azure_openai \
-    api/tests/integration_tests/model_runtime/openai api/tests/integration_tests/model_runtime/chatglm \
-    api/tests/integration_tests/model_runtime/google api/tests/integration_tests/model_runtime/xinference \
-    api/tests/integration_tests/model_runtime/huggingface_hub/test_llm.py \
-    api/tests/integration_tests/model_runtime/upstage \
-    api/tests/integration_tests/model_runtime/fireworks \
-    api/tests/integration_tests/model_runtime/nomic \
-    api/tests/integration_tests/model_runtime/mixedbread \
-    api/tests/integration_tests/model_runtime/voyage
@@ -1,9 +0,0 @@
-#!/bin/bash
-set -x
-
-SCRIPT_DIR="$(dirname "$(realpath "$0")")"
-cd "$SCRIPT_DIR/../.."
-
-PYTEST_TIMEOUT="${PYTEST_TIMEOUT:-120}"
-
-pytest --timeout "${PYTEST_TIMEOUT}" api/tests/test_containers_integration_tests
@@ -1,9 +0,0 @@
-#!/bin/bash
-set -x
-
-SCRIPT_DIR="$(dirname "$(realpath "$0")")"
-cd "$SCRIPT_DIR/../.."
-
-PYTEST_TIMEOUT="${PYTEST_TIMEOUT:-120}"
-
-pytest --timeout "${PYTEST_TIMEOUT}" api/tests/integration_tests/tools
@@ -1,9 +0,0 @@
-#!/bin/bash
-set -x
-
-SCRIPT_DIR="$(dirname "$(realpath "$0")")"
-cd "$SCRIPT_DIR/../.."
-
-PYTEST_TIMEOUT="${PYTEST_TIMEOUT:-120}"
-
-pytest --timeout "${PYTEST_TIMEOUT}" api/tests/integration_tests/workflow