Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,7 @@ repos:
--disable=C3001,
--disable=R1702,
--disable=R0912,
--disable=R0915,
--max-line-length=120,
--max-statements=75,
]
Expand Down
3 changes: 0 additions & 3 deletions docs/zh/guide/async_op_llm_guide.md
Original file line number Diff line number Diff line change
Expand Up @@ -50,8 +50,6 @@ import json

@C.register_op()
class QAOp(BaseAsyncOp):
file_path: str = __file__ # 必须设置,用于自动查找 prompt 文件

async def async_execute(self):
"""执行问答逻辑"""
# 1. 读取输入
Expand Down Expand Up @@ -156,7 +154,6 @@ if __name__ == "__main__":
6. **调用 LLM**:使用 `await self.llm.achat(messages=messages, ...)`
7. **处理响应**:使用 `callback_fn` 处理或转换响应,返回处理后的结果
8. **应用上下文**:必须在 `FlowLLMApp()` 上下文里调用
9. **file_path**:Op 类中必须设置 `file_path = __file__`,用于自动查找 prompt 文件

---

Expand Down
2 changes: 0 additions & 2 deletions docs/zh/guide/cmd_service_guide.md
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,6 @@ from flowllm.core.op import BaseAsyncOp

@C.register_op()
class EchoOp(BaseAsyncOp):
file_path: str = __file__

async def async_execute(self):
text = self.context.get("text", "")
self.context.response.answer = f"echo: {text}"
Expand Down
2 changes: 0 additions & 2 deletions docs/zh/guide/http_service_guide.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,6 @@ from flowllm.core.op import BaseAsyncOp

@C.register_op()
class EchoOp(BaseAsyncOp):
file_path: str = __file__

async def async_execute(self):
text = self.context.get("text", "")
self.context.response.answer = f"echo: {text}"
Expand Down
2 changes: 0 additions & 2 deletions docs/zh/guide/http_stream_guide.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,6 @@ from flowllm.core.schema import FlowStreamChunk, Message

@C.register_op()
class StreamChatOp(BaseAsyncOp):
file_path: str = __file__

async def async_execute(self):
messages = self.context.messages
system_prompt = self.context.system_prompt
Expand Down
1 change: 0 additions & 1 deletion docs/zh/guide/mcp_service_guide.md
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@ from flowllm.core.op import BaseAsyncOp
@C.register_op()
class MockSearchOp(BaseAsyncOp):
"""Mock search operation that uses LLM to generate realistic search results."""
file_path: str = __file__

async def async_execute(self):
query = self.context.query
Expand Down
14 changes: 12 additions & 2 deletions flowllm/core/llm/lite_llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,12 @@ def stream_chat(
**self.kwargs,
**kwargs,
}
log_kwargs = {k: v for k, v in chat_kwargs.items() if k != "messages"}
log_kwargs: Dict[str, object] = {}
for k, v in chat_kwargs.items():
if k in ["messages", "tools"]:
log_kwargs[k] = len(v) if v is not None else 0
else:
log_kwargs[k] = v
logger.info(f"LiteLLM.stream_chat: {log_kwargs}")

for i in range(self.max_retries):
Expand Down Expand Up @@ -217,7 +222,12 @@ async def astream_chat(
**self.kwargs,
**kwargs,
}
log_kwargs = {k: v for k, v in chat_kwargs.items() if k != "messages"}
log_kwargs: Dict[str, object] = {}
for k, v in chat_kwargs.items():
if k in ["messages", "tools"]:
log_kwargs[k] = len(v) if v is not None else 0
else:
log_kwargs[k] = v
logger.info(f"LiteLLM.astream_chat: {log_kwargs}")

for i in range(self.max_retries):
Expand Down
14 changes: 12 additions & 2 deletions flowllm/core/llm/openai_compatible_llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,12 @@ def stream_chat(
**self.kwargs,
**kwargs,
}
log_kwargs = {k: v for k, v in chat_kwargs.items() if k != "messages"}
log_kwargs: Dict[str, object] = {}
for k, v in chat_kwargs.items():
if k in ["messages", "tools"]:
log_kwargs[k] = len(v) if v is not None else 0
else:
log_kwargs[k] = v
logger.info(f"OpenAICompatibleLLM.stream_chat: {log_kwargs}")

for i in range(self.max_retries):
Expand Down Expand Up @@ -208,7 +213,12 @@ async def astream_chat(
**self.kwargs,
**kwargs,
}
log_kwargs = {k: v for k, v in chat_kwargs.items() if k != "messages"}
log_kwargs: Dict[str, object] = {}
for k, v in chat_kwargs.items():
if k in ["messages", "tools"]:
log_kwargs[k] = len(v) if v is not None else 0
else:
log_kwargs[k] = v
logger.info(f"OpenAICompatibleLLM.astream_chat: {log_kwargs}")

for i in range(self.max_retries):
Expand Down
7 changes: 4 additions & 3 deletions flowllm/core/op/base_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
"""

import copy
import inspect
from abc import ABC, abstractmethod
from pathlib import Path
from typing import Callable, List, Union
Expand Down Expand Up @@ -78,8 +79,6 @@ def execute(self):
```
"""

file_path: str = __file__

def __new__(cls, *args, **kwargs):
"""Create a new instance and save initialization arguments for copying.

Expand Down Expand Up @@ -149,7 +148,9 @@ def __init__(
self.raise_exception: bool = raise_exception
self.enable_multithread: bool = enable_multithread
self.language: str = language or C.language
default_prompt_path: str = self.file_path.replace("op.py", "prompt.yaml")

subclass_file_path: str = inspect.getfile(self.__class__)
default_prompt_path: str = subclass_file_path.replace("op.py", "prompt.yaml")
self.prompt_path: Path = Path(prompt_path if prompt_path else default_prompt_path)
self.prompt = PromptHandler(language=self.language).load_prompt_by_file(self.prompt_path)
self._llm: BaseLLM | str = llm
Expand Down
32 changes: 32 additions & 0 deletions flowllm/core/schema/message.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,38 @@ def simple_dump(self, add_reasoning: bool = True) -> dict:

return result

def format_message(
    self,
    i: int | None = None,
    add_time_created: bool = False,
    use_name_first: bool = False,
    add_reasoning_content: bool = True,
    add_tool_calls: bool = True
) -> str:
    """Render this message as a single human-readable text block.

    Args:
        i: Optional round index; when given, the output is prefixed with
            ``"round{i} "``.
        add_time_created: When True, prepend ``"[<time_created>] "``.
        use_name_first: When True, label the message with ``self.name``
            (falling back to the role) instead of the role alone.
            NOTE(review): assumes ``self.role`` is an enum exposing
            ``.value`` — confirm against the Role definition.
        add_reasoning_content: When True, include ``self.reasoning_content``
            (if non-empty) before the main content.
        add_tool_calls: When True, append one ``" - tool_call=..."`` line per
            entry in ``self.tool_calls`` (if any).

    Returns:
        The assembled text with leading/trailing whitespace stripped.
    """
    content = ""
    # Optional "round<N> " prefix, useful for multi-turn transcripts.
    if i is not None:
        content += f"round{i} "

    if add_time_created:
        content += f"[{self.time_created}] "

    # Speaker label: prefer the message's name when requested, else the role.
    if use_name_first:
        content += f"{self.name or self.role.value}:\n"
    else:
        content += f"{self.role.value}:\n"

    # Reasoning text (if kept) is emitted before the visible content.
    if add_reasoning_content and self.reasoning_content:
        content += self.reasoning_content + "\n"

    if self.content:
        content += self.content + "\n"

    # One bullet per tool call so invocations stay readable in the transcript.
    if add_tool_calls and self.tool_calls:
        for tool_call in self.tool_calls:
            content += f" - tool_call={tool_call.name} params={tool_call.arguments}\n"

    return content.strip()


class Trajectory(BaseModel):
"""Represents a conversation trajectory with messages and optional scoring."""
Expand Down
21 changes: 16 additions & 5 deletions flowllm/core/vector_store/pgvector_vector_store.py
Original file line number Diff line number Diff line change
Expand Up @@ -168,12 +168,23 @@ def _build_sql_filters(
# Handle special keys that are stored as direct columns
if key == "unique_id":
# unique_id is a direct column, not in metadata JSONB
if use_async:
conditions.append(f"unique_id = ${param_idx}")
# Support both single value and list of values
if isinstance(filter_value, list):
if use_async:
placeholders = ", ".join(f"${param_idx + i}" for i in range(len(filter_value)))
conditions.append(f"unique_id IN ({placeholders})")
else:
placeholders = ", ".join(["%s"] * len(filter_value))
conditions.append(f"unique_id IN ({placeholders})")
params.extend([str(v) for v in filter_value])
param_idx += len(filter_value)
else:
conditions.append("unique_id = %s")
params.append(str(filter_value))
param_idx += 1
if use_async:
conditions.append(f"unique_id = ${param_idx}")
else:
conditions.append("unique_id = %s")
params.append(str(filter_value))
param_idx += 1
continue

# Strip "metadata." prefix if present (since we're already accessing metadata column)
Expand Down
21 changes: 19 additions & 2 deletions flowllm/core/vector_store/qdrant_vector_store.py
Original file line number Diff line number Diff line change
Expand Up @@ -190,9 +190,26 @@ def _build_qdrant_filters(filter_dict: Optional[Dict[str, Any]] = None):
for key, filter_value in filter_dict.items():
# Handle special keys that are stored at payload root level
if key == "unique_id":
qdrant_key = "original_id"
# unique_id is stored as original_id in Qdrant payload
# Support both single value and list of values
if isinstance(filter_value, list):
conditions.append(
FieldCondition(
key="original_id",
match=MatchAny(any=filter_value),
),
)
else:
conditions.append(
FieldCondition(
key="original_id",
match=MatchValue(value=filter_value),
),
)
continue

# Handle nested keys by prefixing with metadata.
elif not key.startswith("metadata."):
if not key.startswith("metadata."):
qdrant_key = f"metadata.{key}"
else:
qdrant_key = key
Expand Down
2 changes: 0 additions & 2 deletions flowllm/extensions/file_tool/edit_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,6 @@ class EditOp(BaseAsyncToolOp):
is specified. Supports creating new files when old_string is empty.
"""

file_path = __file__

def __init__(self, **kwargs):
kwargs.setdefault("raise_exception", False)
super().__init__(**kwargs)
Expand Down
2 changes: 0 additions & 2 deletions flowllm/extensions/file_tool/exit_plan_mode_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,6 @@ class ExitPlanModeOp(BaseAsyncToolOp):
to the user for approval before proceeding with implementation.
"""

file_path = __file__

def __init__(self, **kwargs):
kwargs.setdefault("raise_exception", False)
super().__init__(**kwargs)
Expand Down
2 changes: 0 additions & 2 deletions flowllm/extensions/file_tool/glob_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,6 @@ class GlobOp(BaseAsyncToolOp):
Supports gitignore patterns for filtering files.
"""

file_path = __file__

def __init__(self, gitignore_patterns: List[str] = None, **kwargs):
kwargs.setdefault("raise_exception", False)
super().__init__(**kwargs)
Expand Down
2 changes: 0 additions & 2 deletions flowllm/extensions/file_tool/grep_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,6 @@ class GrepOp(BaseAsyncToolOp):
Supports glob pattern filtering and result limiting.
"""

file_path = __file__

def __init__(self, **kwargs):
kwargs.setdefault("raise_exception", False)
super().__init__(**kwargs)
Expand Down
2 changes: 0 additions & 2 deletions flowllm/extensions/file_tool/ls_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,6 @@ class LSOp(BaseAsyncToolOp):
matching provided glob patterns.
"""

file_path = __file__

def __init__(self, **kwargs):
kwargs.setdefault("raise_exception", False)
super().__init__(**kwargs)
Expand Down
2 changes: 0 additions & 2 deletions flowllm/extensions/file_tool/read_file_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,6 @@ class ReadFileOp(BaseAsyncToolOp):
For text files, it can read specific line ranges using offset and limit.
"""

file_path = __file__

def __init__(self, **kwargs):
kwargs.setdefault("raise_exception", False)
super().__init__(**kwargs)
Expand Down
2 changes: 0 additions & 2 deletions flowllm/extensions/file_tool/read_many_files_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,6 @@ class ReadManyFilesOp(BaseAsyncToolOp):
and concatenates them with separators.
"""

file_path = __file__

def __init__(self, **kwargs):
kwargs.setdefault("raise_exception", False)
super().__init__(**kwargs)
Expand Down
2 changes: 0 additions & 2 deletions flowllm/extensions/file_tool/rip_grep_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,6 @@ class RipGrepOp(BaseAsyncToolOp):
Supports glob pattern filtering and result limiting.
"""

file_path = __file__

def __init__(self, **kwargs):
kwargs.setdefault("raise_exception", False)
super().__init__(**kwargs)
Expand Down
2 changes: 0 additions & 2 deletions flowllm/extensions/file_tool/shell_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,6 @@ class ShellOp(BaseAsyncToolOp):
Supports both foreground and background execution.
"""

file_path = __file__

def __init__(self, **kwargs):
kwargs.setdefault("raise_exception", False)
super().__init__(**kwargs)
Expand Down
2 changes: 0 additions & 2 deletions flowllm/extensions/file_tool/smart_edit_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,6 @@ class SmartEditOp(BaseAsyncToolOp):
exact matching, flexible matching (ignoring indentation), and regex-based matching.
"""

file_path = __file__

def __init__(self, **kwargs):
kwargs.setdefault("raise_exception", False)
super().__init__(**kwargs)
Expand Down
2 changes: 0 additions & 2 deletions flowllm/extensions/file_tool/task_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,6 @@ class TaskOp(BaseAsyncToolOp):
This operation delegates tasks to specialized subagents for autonomous execution.
"""

file_path = __file__

def __init__(self, **kwargs):
kwargs.setdefault("raise_exception", False)
super().__init__(**kwargs)
Expand Down
2 changes: 0 additions & 2 deletions flowllm/extensions/file_tool/write_file_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,6 @@ class WriteFileOp(BaseAsyncToolOp):
it will be created. If parent directories don't exist, they will be created automatically.
"""

file_path = __file__

def __init__(self, **kwargs):
kwargs.setdefault("raise_exception", False)
super().__init__(**kwargs)
Expand Down
2 changes: 0 additions & 2 deletions flowllm/extensions/file_tool/write_todos_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,6 @@ class WriteTodosOp(BaseAsyncToolOp):
through different statuses: pending, in_progress, completed, cancelled.
"""

file_path = __file__

def __init__(self, **kwargs):
kwargs.setdefault("raise_exception", False)
super().__init__(**kwargs)
Expand Down
2 changes: 0 additions & 2 deletions flowllm/gallery/chat_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,6 @@ class ChatOp(BaseAsyncOp):
and system_prompt to be present in the context.
"""

file_path: str = __file__

def __init__(
self,
llm: str = "qwen3_30b_instruct",
Expand Down
2 changes: 0 additions & 2 deletions flowllm/gallery/think_tool_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,6 @@
class ThinkToolOp(BaseAsyncToolOp):
"""Utility operation that prompts the model for explicit reflection text."""

file_path = __file__

def __init__(self, add_output_reflection: bool = False, **kwargs):
super().__init__(**kwargs)
self.add_output_reflection: bool = add_output_reflection
Expand Down
Loading
Loading