mirror of https://github.com/langgenius/dify.git
Add lib test (#25410)
This commit is contained in:
parent e5122945fe
commit a1cf48f84e
@@ -0,0 +1,55 @@
from pathlib import Path

import pytest

from libs.file_utils import search_file_upwards


def test_search_file_upwards_found_in_parent(tmp_path: Path):
    base = tmp_path / "a" / "b" / "c"
    base.mkdir(parents=True)

    target = tmp_path / "a" / "target.txt"
    target.write_text("ok", encoding="utf-8")

    found = search_file_upwards(base, "target.txt", max_search_parent_depth=5)
    assert found == target


def test_search_file_upwards_found_in_current(tmp_path: Path):
    base = tmp_path / "x"
    base.mkdir()
    target = base / "here.txt"
    target.write_text("x", encoding="utf-8")

    found = search_file_upwards(base, "here.txt", max_search_parent_depth=1)
    assert found == target


def test_search_file_upwards_not_found_raises(tmp_path: Path):
    base = tmp_path / "m" / "n"
    base.mkdir(parents=True)
    with pytest.raises(ValueError) as exc:
        search_file_upwards(base, "missing.txt", max_search_parent_depth=3)
    # error message should contain file name and base path
    msg = str(exc.value)
    assert "missing.txt" in msg
    assert str(base) in msg


def test_search_file_upwards_root_breaks_and_raises():
    # Using the filesystem root triggers the 'break' branch (parent == current)
    with pytest.raises(ValueError):
        search_file_upwards(Path("/"), "__definitely_not_exists__.txt", max_search_parent_depth=1)


def test_search_file_upwards_depth_limit_raises(tmp_path: Path):
    base = tmp_path / "a" / "b" / "c"
    base.mkdir(parents=True)
    target = tmp_path / "a" / "target.txt"
    target.write_text("ok", encoding="utf-8")
    # The file is 2 levels up from `c` (in `a`), but the search depth is only 2.
    # The search path is `c` (depth 1) -> `b` (depth 2); the file is in `a` (would need depth 3).
    # So this should not find the file and should raise an error.
    with pytest.raises(ValueError):
        search_file_upwards(base, "target.txt", max_search_parent_depth=2)
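The implementation of search_file_upwards is not included in this diff. As a reading aid, here is a minimal sketch of the behaviour the tests above rely on: an upward walk from the starting directory bounded by max_search_parent_depth, a break once the filesystem root is reached, and a ValueError that names the file and the base path. The body is an assumption inferred from the tests, not the actual libs.file_utils code.

from pathlib import Path


def search_file_upwards(base: Path, filename: str, max_search_parent_depth: int) -> Path:
    # Hypothetical reconstruction, inferred only from the tests above.
    # Check `base` itself first (depth 1), then each parent, for at most
    # `max_search_parent_depth` directories in total.
    current = base
    for _ in range(max_search_parent_depth):
        candidate = current / filename
        if candidate.is_file():
            return candidate
        parent = current.parent
        if parent == current:
            # Reached the filesystem root; nowhere further to go.
            break
        current = parent
    # The message mirrors what the tests assert: it mentions the file name and the base path.
    raise ValueError(f"file {filename!r} not found upwards from {base}")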
@@ -0,0 +1,88 @@
import pytest

from core.llm_generator.output_parser.errors import OutputParserError
from libs.json_in_md_parser import (
    parse_and_check_json_markdown,
    parse_json_markdown,
)


def test_parse_json_markdown_triple_backticks_json():
    src = """
    ```json
    {"a": 1, "b": "x"}
    ```
    """
    assert parse_json_markdown(src) == {"a": 1, "b": "x"}


def test_parse_json_markdown_triple_backticks_generic():
    src = """
    ```
    {"k": [1, 2, 3]}
    ```
    """
    assert parse_json_markdown(src) == {"k": [1, 2, 3]}


def test_parse_json_markdown_single_backticks():
    src = '`{"x": true}`'
    assert parse_json_markdown(src) == {"x": True}


def test_parse_json_markdown_braces_only():
    src = ' {\n \t"ok": "yes"\n} '
    assert parse_json_markdown(src) == {"ok": "yes"}


def test_parse_json_markdown_not_found():
    with pytest.raises(ValueError):
        parse_json_markdown("no json here")


def test_parse_and_check_json_markdown_missing_key():
    src = """
    ```
    {"present": 1}
    ```
    """
    with pytest.raises(OutputParserError) as exc:
        parse_and_check_json_markdown(src, ["present", "missing"])
    assert "expected key `missing`" in str(exc.value)


def test_parse_and_check_json_markdown_invalid_json():
    src = """
    ```json
    {invalid json}
    ```
    """
    with pytest.raises(OutputParserError) as exc:
        parse_and_check_json_markdown(src, [])
    assert "got invalid json object" in str(exc.value)


def test_parse_and_check_json_markdown_success():
    src = """
    ```json
    {"present": 1, "other": 2}
    ```
    """
    obj = parse_and_check_json_markdown(src, ["present"])
    assert obj == {"present": 1, "other": 2}


def test_parse_and_check_json_markdown_multiple_blocks_fails():
    src = """
    ```json
    {"a": 1}
    ```
    Some text
    ```json
    {"b": 2}
    ```
    """
    # The current implementation is greedy and will match from the first
    # opening fence to the last closing fence, causing JSON decode failure.
    with pytest.raises(OutputParserError):
        parse_and_check_json_markdown(src, [])
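The module under test here is likewise not part of the diff. A plausible sketch of parse_json_markdown and parse_and_check_json_markdown, written only from what the tests above assert (the fence-extraction order, the greedy matching that makes the multi-block case fail, and the OutputParserError messages), could look like the following. Treat the bodies and the exact regexes as assumptions, not the real libs.json_in_md_parser code.

import json
import re

from core.llm_generator.output_parser.errors import OutputParserError


def parse_json_markdown(json_string: str) -> dict:
    # Hypothetical reconstruction: try a ```json fence, then a generic ``` fence,
    # then single backticks, and finally a bare {...} object. The greedy DOTALL
    # patterns span from the first opening fence to the last closing fence,
    # which is why the "multiple blocks" test above fails to decode.
    for pattern in (r"```json(.*)```", r"```(.*)```", r"`(.*)`"):
        match = re.search(pattern, json_string, re.DOTALL)
        if match:
            return json.loads(match.group(1).strip())
    start, end = json_string.find("{"), json_string.rfind("}")
    if start == -1 or end == -1:
        raise ValueError("could not find a json block in the input")
    return json.loads(json_string[start : end + 1])


def parse_and_check_json_markdown(text: str, expected_keys: list[str]) -> dict:
    # Hypothetical reconstruction: wrap decode failures and validate required
    # keys, mirroring the error-message fragments asserted in the tests above.
    try:
        json_obj = parse_json_markdown(text)
    except json.JSONDecodeError as e:
        raise OutputParserError(f"got invalid json object. error: {e}")
    for key in expected_keys:
        if key not in json_obj:
            raise OutputParserError(f"expected key `{key}` to be present, but got {json_obj}")
    return json_obj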
@@ -0,0 +1,25 @@
import orjson
import pytest

from libs.orjson import orjson_dumps


def test_orjson_dumps_round_trip_basic():
    obj = {"a": 1, "b": [1, 2, 3], "c": {"d": True}}
    s = orjson_dumps(obj)
    assert orjson.loads(s) == obj


def test_orjson_dumps_with_unicode_and_indent():
    obj = {"msg": "你好,Dify"}
    s = orjson_dumps(obj, option=orjson.OPT_INDENT_2)
    # contains indentation newline/spaces
    assert "\n" in s
    assert orjson.loads(s) == obj


def test_orjson_dumps_non_utf8_encoding_fails():
    obj = {"msg": "你好"}
    # orjson.dumps() always produces UTF-8 bytes; decoding with non-UTF8 fails.
    with pytest.raises(UnicodeDecodeError):
        orjson_dumps(obj, encoding="ascii")
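The orjson_dumps wrapper is also not shown in the diff. The tests only pin down that it returns a str, forwards option to orjson, and raises UnicodeDecodeError when asked to decode into a codec that cannot represent orjson's UTF-8 output. A minimal sketch consistent with that (the parameter names beyond those used in the tests, and their defaults, are assumptions) might be:

from typing import Any

import orjson


def orjson_dumps(obj: Any, *, encoding: str = "utf-8", option: int | None = None) -> str:
    # Hypothetical reconstruction: orjson.dumps() always returns UTF-8 bytes,
    # so the wrapper simply decodes them into a str. Decoding with a codec such
    # as "ascii" fails on non-ASCII payloads, which is what the last test asserts.
    return orjson.dumps(obj, option=option).decode(encoding)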