Geevarghese George committed
Commit 8ffce3a · 1 Parent(s): 4349024
add full flow
Browse files
- .gitignore +2 -1
- app.py +120 -7
- poetry.lock +81 -1
- pyproject.toml +2 -0
- src/upgrade_advisor/agents/gh.py +0 -24
- src/upgrade_advisor/agents/package.py +21 -20
- src/upgrade_advisor/agents/prompts.py +37 -14
- src/upgrade_advisor/agents/tools.py +56 -0
- src/upgrade_advisor/chat/prompts.py +2 -0
- src/upgrade_advisor/schema/schema.py +35 -0
- tests/test1.toml +24 -0
- tests/test2.toml +272 -0
.gitignore CHANGED
@@ -9,4 +9,5 @@
 *__pycache__
 .python-version
 .vscode/
-.pytest_cache/
+.pytest_cache/
+uploads/
app.py CHANGED
@@ -1,4 +1,6 @@
 import logging
+import shutil
+from pathlib import Path
 
 import gradio as gr
 from mcp import StdioServerParameters
@@ -27,6 +29,11 @@ logger = logging.getLogger(__name__)
 logger.setLevel(logging.INFO)
 logger.addHandler(logging.StreamHandler())
 
+# this is to use the gradio-upload-mcp server for file uploads
+uploads_dir = Path("uploads")
+uploads_dir.mkdir(exist_ok=True)
+uploads_dir = uploads_dir.resolve()
+
 
 def _monkeypatch_gradio_save_history():
     """Guard against non-int indices in Gradio's chat history saver.
@@ -59,8 +66,13 @@ def _monkeypatch_gradio_save_history():
 _monkeypatch_gradio_save_history()
 
 
-async def chat_fn(message, history):
+async def chat_fn(message, history, persisted_attachments=None):
     # parse incoming history is a list of dicts with 'role' and 'content' keys
+    from datetime import datetime
+
+    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+    logger.info(f"Received message: {message}")
+    logger.info(f"History: {history}")
     if len(history) > 0:
         summarized_history = await summarize_chat_history(
             history,
@@ -69,17 +81,62 @@ async def chat_fn(message, history):
         )
     else:
         summarized_history = ""
+    incoming_attachments = message.get("files", []) if isinstance(message, dict) else []
+    persisted_attachments = persisted_attachments or []
+    # If no new attachments are provided, keep using the previously persisted ones.
+    attachments = incoming_attachments or persisted_attachments
+    latest_attachment = attachments[-1] if attachments else []
+
+    logger.info(f"Summarized chat history:\n{summarized_history}")
+    logger.info(f"With attachments: {attachments} (incoming: {incoming_attachments})")
+    logger.info(f"Latest attachment: {latest_attachment}")
+    logger.info(f"Persisted attachments: {persisted_attachments}")
 
+    # if attachements are present message is a dict with 'text' and 'files' keys
+    message = message.get("text", "") if isinstance(message, dict) else message
+    # overwrite messages with the text content only
     message = message.strip()
     rewritten_message, is_rewritten_good = await qn_rewriter(
         message, summarized_history
     )
+
     if is_rewritten_good:
         logger.info(f"Rewritten question: {rewritten_message}")
     else:
         logger.info(f"Using original question: {message}")
         rewritten_message = None
     # Collect events from the agent run
+    # add chat summary to message
+    message = f"""
+    CHAT SUMMARY SO FAR:
+    {summarized_history}
+    CURRENT QUESTION FROM USER:
+    {message}
+    """
+    if len(attachments) > 0:
+        message += """Attached FILE:\n"""
+        # use the last file from the list of files only, as
+        # the single file is expected to be a pyproject.toml
+        # copy to uploads directory
+        if latest_attachment:
+            # take the last uploaded file
+            source_file = latest_attachment
+            file_name = f"{timestamp}_{Path(latest_attachment).name}"
+        elif len(persisted_attachments) > 0:
+            # take the last persisted file if no new uploads
+            source_file = persisted_attachments[-1]
+            file_name = f"{timestamp}_{Path(persisted_attachments[-1]).name}"
+        else:
+            source_file = None
+            file_name = None
+
+        logger.info(f"Copying uploaded file {source_file} to {uploads_dir}")
+        shutil.copy(source_file, uploads_dir / file_name)
+        message += f"""
+        FILE PATH: {uploads_dir / file_name}\n
+        """
+    logger.info(f"Final message to agent:\n{message}")
+    # Run the package discovery agent to build context
     context = agent.discover_package_info(
         user_input=message, reframed_question=rewritten_message
     )
@@ -94,12 +151,11 @@ async def chat_fn(message, history):
     return {
         "role": "assistant",
         "content": qa_answer,
-    }
+    }, attachments
 
 
 if __name__ == "__main__":
-    logger.info("Starting MCP client
-    logger.info(f"Using toolsets: {GITHUB_TOOLSETS}")
+    logger.info("Starting MCP client...")
 
     try:
         gh_mcp_params = StdioServerParameters(
@@ -125,12 +181,31 @@ if __name__ == "__main__":
             },
         )
         pypi_mcp_params = dict(
-            url="https://mcp-1st-birthday-pypi-mcp.hf.space/gradio_api/mcp/",
+            # url="https://mcp-1st-birthday-pypi-mcp.hf.space/gradio_api/mcp/",
+            url="https://mcp-1st-birthday-uv-pypi-mcp.hf.space/gradio_api/mcp/",
            transport="streamable-http",
         )
+        upload_mcp_params = StdioServerParameters(
+            command="uvx",
+            args=[
+                "--from",
+                "gradio[mcp]",
+                "gradio",
+                "upload-mcp",
+                # Base must be the Gradio root; upload-mcp adds
+                # /gradio_api/upload.
+                # The docs are misleading here, it has gradio_api/upload as the base.
+                "https://mcp-1st-birthday-uv-pypi-mcp.hf.space/",
+                uploads_dir.as_posix(),
+            ],
+        )
 
         pypi_mcp_client = MCPClient(
-            server_parameters=[
+            server_parameters=[
+                pypi_mcp_params,
+                gh_mcp_params,
+                upload_mcp_params,
+            ],
             structured_output=True,
         )
@@ -139,6 +214,9 @@ if __name__ == "__main__":
             model_id=AGENT_MODEL,
         )
 
+        # Gradio chat interface state to persist uploaded files
+        files_state = gr.State([])
+
         with pypi_mcp_client as toolset:
             logger.info("MCP clients connected successfully")
 
@@ -148,19 +226,54 @@ if __name__ == "__main__":
             )
             # link package_agent to the chat function
 
+            # attach files from local machine
             demo = gr.ChatInterface(
                 fn=chat_fn,
+                chatbot=gr.Chatbot(
+                    height=600,
+                    type="messages",
+                ),
                 title="Package Upgrade Advisor",
                 type="messages",
+                # additional_inputs_accordion="Attach pyproject.toml file",
+                textbox=gr.MultimodalTextbox(
+                    label="pyproject.toml",
+                    file_types=[".toml"],
+                    file_count="single",
+                    min_width=100,
+                    sources="upload",
+                    inputs=files_state,
+                ),
+                additional_inputs=[files_state],
+                additional_outputs=[files_state],
                 save_history=True,
                 examples=[
                     ["Tell me about the 'requests' package. How to use it with JSON ?"],
                     [get_example_requirements_question()],
                     [get_example_pyproject_question()],
                     ["Which version of 'pandas' is compatible with 'numpy' 2.0?"],
+                    [
+                        {
+                            "text": """Can I upgrade my dependencies from
+                            the attached pyproject.toml to work with
+                            python 3.14? Any suggestions on
+                            potential issues I should be aware of?""",
+                            "files": ["tests/test2.toml"],
+                        }
+                    ],
                 ],
+                stop_btn=True,
+                theme="compact",
             )
             demo.launch()
 
     finally:
-        logger.info("
+        logger.info("Cleaning up MCP client resources")
+        # remove contents of uploads_dir
+        for f in uploads_dir.iterdir():
+            try:
+                f.unlink()
+            except Exception:
+                logger.exception(f"Failed to delete uploaded file: {f}")
+
+        logger.info("Shutdown complete.")
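Note on the attachment handling above: an uploaded pyproject.toml only survives later turns because chat_fn both receives the Gradio state via additional_inputs and writes it back via additional_outputs. A minimal sketch of that round trip, assuming a Gradio 5.x ChatInterface that supports additional_outputs; echo_fn and files_state here are illustrative names, not part of this commit.

import gradio as gr

def echo_fn(message, history, persisted_files=None):
    # With a MultimodalTextbox, `message` arrives as a dict with "text" and "files".
    persisted_files = persisted_files or []
    new_files = message.get("files", []) if isinstance(message, dict) else []
    files = new_files or persisted_files  # fall back to files from earlier turns
    text = message.get("text", "") if isinstance(message, dict) else message
    reply = {"role": "assistant", "content": f"{text} (files seen: {files})"}
    # Returning `files` as the second value writes it back into files_state,
    # because files_state is listed in additional_outputs below.
    return reply, files

files_state = gr.State([])
demo = gr.ChatInterface(
    fn=echo_fn,
    type="messages",
    textbox=gr.MultimodalTextbox(file_types=[".toml"], file_count="single"),
    additional_inputs=[files_state],
    additional_outputs=[files_state],
)
demo.launch()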
poetry.lock CHANGED
@@ -142,6 +142,29 @@ files = [
     {file = "backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162"},
 ]
 
+[[package]]
+name = "beautifulsoup4"
+version = "4.14.2"
+description = "Screen-scraping library"
+optional = false
+python-versions = ">=3.7.0"
+groups = ["main"]
+files = [
+    {file = "beautifulsoup4-4.14.2-py3-none-any.whl", hash = "sha256:5ef6fa3a8cbece8488d66985560f97ed091e22bbc4e9c2338508a9d5de6d4515"},
+    {file = "beautifulsoup4-4.14.2.tar.gz", hash = "sha256:2a98ab9f944a11acee9cc848508ec28d9228abfd522ef0fad6a02a72e0ded69e"},
+]
+
+[package.dependencies]
+soupsieve = ">1.2"
+typing-extensions = ">=4.0.0"
+
+[package.extras]
+cchardet = ["cchardet"]
+chardet = ["chardet"]
+charset-normalizer = ["charset-normalizer"]
+html5lib = ["html5lib"]
+lxml = ["lxml"]
+
 [[package]]
 name = "brotli"
 version = "1.2.0"
@@ -1320,6 +1343,22 @@ profiling = ["gprof2dot"]
 rtd = ["ipykernel", "jupyter_sphinx", "mdit-py-plugins (>=0.5.0)", "myst-parser", "pyyaml", "sphinx", "sphinx-book-theme (>=1.0,<2.0)", "sphinx-copybutton", "sphinx-design"]
 testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions", "requests"]
 
+[[package]]
+name = "markdownify"
+version = "1.2.2"
+description = "Convert HTML to markdown."
+optional = false
+python-versions = "*"
+groups = ["main"]
+files = [
+    {file = "markdownify-1.2.2-py3-none-any.whl", hash = "sha256:3f02d3cc52714084d6e589f70397b6fc9f2f3a8531481bf35e8cc39f975e186a"},
+    {file = "markdownify-1.2.2.tar.gz", hash = "sha256:b274f1b5943180b031b699b199cbaeb1e2ac938b75851849a31fd0c3d6603d09"},
+]
+
+[package.dependencies]
+beautifulsoup4 = ">=4.9,<5"
+six = ">=1.15,<2"
+
 [[package]]
 name = "markupsafe"
 version = "3.0.3"
@@ -2864,6 +2903,18 @@ files = [
     {file = "socksio-1.0.0.tar.gz", hash = "sha256:f88beb3da5b5c38b9890469de67d0cb0f9d494b78b106ca1845f96c10b91c4ac"},
 ]
 
+[[package]]
+name = "soupsieve"
+version = "2.8"
+description = "A modern CSS selector implementation for Beautiful Soup."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "soupsieve-2.8-py3-none-any.whl", hash = "sha256:0cc76456a30e20f5d7f2e14a98a4ae2ee4e5abdc7c5ea0aafe795f344bc7984c"},
+    {file = "soupsieve-2.8.tar.gz", hash = "sha256:e2dd4a40a628cb5f28f6d4b0db8800b8f581b65bb380b97de22ba5ca8d72572f"},
+]
+
 [[package]]
 name = "sse-starlette"
 version = "3.0.3"
@@ -3174,6 +3225,35 @@ h2 = ["h2 (>=4,<5)"]
 socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
 zstd = ["zstandard (>=0.18.0)"]
 
+[[package]]
+name = "uv"
+version = "0.9.11"
+description = "An extremely fast Python package and project manager, written in Rust."
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+    {file = "uv-0.9.11-py3-none-linux_armv6l.whl", hash = "sha256:803f85cf25ab7f1fca10fe2e40a1b9f5b1d48efc25efd6651ba3c9668db6a19e"},
+    {file = "uv-0.9.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6a31b0bd4eaec59bf97816aefbcd75cae4fcc8875c4b19ef1846b7bff3d67c70"},
+    {file = "uv-0.9.11-py3-none-macosx_11_0_arm64.whl", hash = "sha256:48548a23fb5a103b8955dfafff7d79d21112b8e25ce5ff25e3468dc541b20e83"},
+    {file = "uv-0.9.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:cb680948e678590b5960744af2ecea6f2c0307dbb74ac44daf5c00e84ad8c09f"},
+    {file = "uv-0.9.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9ef1982295e5aaf909a9668d6fb6abfc5089666c699f585a36f3a67f1a22916a"},
+    {file = "uv-0.9.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92ff773aa4193148019533c55382c2f9c661824bbf0c2e03f12aeefc800ede57"},
+    {file = "uv-0.9.11-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:70137a46675bbecf3a8b43d292a61767f1b944156af3d0f8d5986292bd86f6cf"},
+    {file = "uv-0.9.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b5af9117bab6c4b3a1cacb0cddfb3cd540d0adfb13c7b8a9a318873cf2d07e52"},
+    {file = "uv-0.9.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8cc86940d9b3a425575f25dc45247be2fb31f7fed7bf3394ae9daadd466e5b80"},
+    {file = "uv-0.9.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e97906ca1b90dac91c23af20e282e2e37c8eb80c3721898733928a295f2defda"},
+    {file = "uv-0.9.11-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:d901269e1db72abc974ba61d37be6e56532e104922329e0b553d9df07ba224be"},
+    {file = "uv-0.9.11-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:8abfb7d4b136de3e92dd239ea9a51d4b7bbb970dc1b33bec84d08facf82b9a6e"},
+    {file = "uv-0.9.11-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:1f8afc13b3b94bce1e72514c598d41623387b2b61b68d7dbce9a01a0d8874860"},
+    {file = "uv-0.9.11-py3-none-musllinux_1_1_i686.whl", hash = "sha256:7d414cfa410f1850a244d87255f98d06ca61cc13d82f6413c4f03e9e0c9effc7"},
+    {file = "uv-0.9.11-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:edc14143d0ba086a7da4b737a77746bb36bc00e3d26466f180ea99e3bf795171"},
+    {file = "uv-0.9.11-py3-none-win32.whl", hash = "sha256:af5fd91eecaa04b4799f553c726307200f45da844d5c7c5880d64db4debdd5dc"},
+    {file = "uv-0.9.11-py3-none-win_amd64.whl", hash = "sha256:c65a024ad98547e32168f3a52360fe73ff39cd609a8fb9dd2509aac91483cfc8"},
+    {file = "uv-0.9.11-py3-none-win_arm64.whl", hash = "sha256:4907a696c745703542ed2559bdf5380b92c8b1d4bf290ebfed45bf9a2a2c6690"},
+    {file = "uv-0.9.11.tar.gz", hash = "sha256:605a7a57f508aabd029fc0c5ef5c60a556f8c50d32e194f1a300a9f4e87f18d4"},
+]
+
 [[package]]
 name = "uvicorn"
 version = "0.38.0"
@@ -3276,4 +3356,4 @@ files = [
 [metadata]
 lock-version = "2.1"
 python-versions = ">=3.10,<3.15"
-content-hash = "
+content-hash = "8c1bb4d6fd49cbec6bfde5966b6f2490e520425a5c5e491617163b523ec7f85f"
pyproject.toml CHANGED
@@ -19,6 +19,8 @@ dependencies = [
     "typer[all] (>=0.20.0,<0.21.0)",
     "gradio (>=5.49.1,<6.0.0)",
     "ddgs (>=9.9.1,<10.0.0)",
+    "uv (>=0.9.11,<0.10.0)",
+    "markdownify (>=1.2.2,<2.0.0)",
 ]
 
 
src/upgrade_advisor/agents/gh.py DELETED
@@ -1,24 +0,0 @@
-import logging
-
-from smolagents import CodeAgent
-
-logger = logging.getLogger(__name__)
-logger.setLevel(logging.INFO)
-logger.addHandler(logging.StreamHandler())
-
-
-class GitHubAgent:
-    """Agent that interacts with GitHub repositories using MCP tools."""
-
-    def __init__(self, model, tools=None):
-        self.model = model
-        if tools is None:
-            tools = []
-            logger.info("No tools provided; initializing with an empty toolset.")
-        self.agent = CodeAgent(
-            tools=tools,
-            model=model,
-            max_steps=10,
-            additional_authorized_imports=["json", "datetime", "math", "git"],
-        )
-        logger.info(f"GitHubAgent initialized with model and tools: {tools}.")
src/upgrade_advisor/agents/package.py CHANGED
@@ -1,8 +1,8 @@
 import json
 import logging
-from
+from pathlib import Path
+from typing import Iterator
 
-from pydantic import BaseModel
 from smolagents import CodeAgent
 from smolagents.mcp_client import MCPClient
 
@@ -14,28 +14,15 @@ from ..schema import ( # noqa
     PackageVersionResponseSchema,
 )
 from .prompts import get_package_discovery_prompt
+from .tools import ReadUploadFileTool
 
 logger = logging.getLogger(__name__)
 logger.setLevel(logging.INFO)
 logger.addHandler(logging.StreamHandler())
 logger.addHandler(logging.FileHandler("package_agent.log"))
 
-tool_schemas = {
-    # pypi_search returns full PyPI payload with info + releases
-    "PyPI_MCP_pypi_search": PackageSearchResponseSchema, # corrected schema
-    "PyPI_MCP_pypi_search_version": PackageVersionResponseSchema,
-    "PyPI_MCP_resolve_repo_from_url": GithubRepoSchema,
-    "PyPI_MCP_github_repo_and_releases": PackageGitHubandReleasesSchema,
-}
 
-
-def map_tool_call_to_schema(tool_name: str) -> Optional[type[BaseModel]]:
-    return tool_schemas.get(tool_name, None)
-
-
-# TODO: The response from the agent is not being properly
-# parsed into the expected schema.
-# See https://github.com/huggingface/smolagents/pull/1660
+UPLOADS_DIR = Path("uploads").resolve()
 
 
 class PackageDiscoveryAgent:
@@ -44,11 +31,16 @@ class PackageDiscoveryAgent:
     def __init__(self, model, tools=None):
         self.model = model
         if tools is None:
-
+            tool_list: list = []
             logger.info("No tools provided; initializing with an empty toolset.")
+        else:
+            tool_list = list(tools)
+
+        # additional custom tools
+        tool_list.append(ReadUploadFileTool(upload_root=UPLOADS_DIR))
 
         self.agent = CodeAgent(
-            tools=
+            tools=tool_list,
             model=model,
             max_steps=10,
             add_base_tools=True,
@@ -61,9 +53,18 @@
                 "typing",
                 "ast",
                 "packaging.version",
+                "packaging.specifiers",
+                "packaging.requirements",
+                "markdownify",
+                "sys",
+                "tomli",
+                "requests",
             ],
         )
-        logger.info(
+        logger.info(
+            f"""PackageDiscoveryAgent initialized with model and tools: \n
+            {[tool.name for tool in tool_list]}."""
+        )
 
     def _discover_package_info(
         self, user_input: str, reframed_question: str = None
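For context on how the reworked constructor is driven: the tools argument comes from whatever the MCP client exposes when used as a context manager, mirroring the wiring in app.py, and the agent adds its local ReadUploadFileTool on top. A rough sketch under those assumptions; the model class and model id below are placeholders, not taken from this repo.

from smolagents import CodeAgent, InferenceClientModel  # InferenceClientModel assumed available in this smolagents version
from smolagents.mcp_client import MCPClient

pypi_mcp_params = dict(
    url="https://mcp-1st-birthday-uv-pypi-mcp.hf.space/gradio_api/mcp/",
    transport="streamable-http",
)

model = InferenceClientModel(model_id="Qwen/Qwen2.5-Coder-32B-Instruct")  # placeholder model id

# The context manager yields the tools exported by the connected MCP server(s);
# PackageDiscoveryAgent appends ReadUploadFileTool to this list before building CodeAgent.
with MCPClient(server_parameters=[pypi_mcp_params], structured_output=True) as toolset:
    agent = CodeAgent(tools=list(toolset), model=model, add_base_tools=True, max_steps=10)
    print(agent.run("What is the latest release of the 'requests' package?"))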
src/upgrade_advisor/agents/prompts.py CHANGED
@@ -1,8 +1,12 @@
+import datetime
+
+
 def get_package_discovery_prompt(
     original_question: str, reframed_question: str = None
 ) -> str:
+    today_date = datetime.date.today().isoformat()
     user_input = f"""
-
+DETAILS OF THE DEVELOPER QUESTION:
 {original_question}
 """
     if reframed_question:
@@ -14,20 +18,38 @@ def get_package_discovery_prompt(
 packages.
 Your goal is to find relevant metadata about Python packages using the
 available tools and to compile a structured summary of your findings based
-on the user's question.
+on the user's question. If the user asks about upgrade recommendations,
+compatibility issues, known bugs, or best practices, you should gather
+relevant data and provide clear, actionable advice. You must verify the
+compatibility of packages with specific Python versions or other packages
+using the appropriate tools.
 
 The user may ask about package metadata, compatibility, known issues,
 upgrade recommendations, and best practices. For example, they may ask:
 - "What are the known issues with pandas version 1.2.0?"
 - "Is there a newer version of requests that fixes security vulnerabilities?"
 - "What are the upgrade recommendations for Django from 2.x to 3.x?"
-- "Given my
+- "Given my pyproject.toml, is there something I should be aware of
 before upgrading numpy to the latest version?"
 - "From my pyproject.toml, can you suggest any package upgrades or
 compatibility considerations if I upgrade scipy to version 1.7.0?"
+- "Based on my pyproject.toml, are my current package versions compatible
+with python 3.14?"
+- "How to safely upgrade the packages in pyproject.toml to their highest
+versions without breaking my project?"
+
+Your knowledge cutoff may prevent you from knowing what's recent.
+Always use the current date (ISO format YYYY-MM-DD): {today_date}
+when reasoning about dates and
+releases. Some tools also provide you the release date information, which
+you can transform to ISO format and make comparisons.
 
 The first step to tackle such questions is to gather relevant data about the
-packages involved using the available MCP tools.
+packages involved using the available MCP tools. Some tools like the
+`resolve_environment`
+can directly analyze a pyproject.toml content to find
+compatibility issues and upgrade suggestions.
+Use the tools to fetch
 package metadata, version history, release notes, compatibility info, and
 known issues. Then, analyze the collected data to identify any potential
 issues, improvements, or recommendations related to the user's question.
@@ -39,29 +61,30 @@ def get_package_discovery_prompt(
 `ast.literal_eval(tool_result)` to convert string representations of
 Python data structures into actual dicts/lists.
 - Always keep tool results as Python dicts/lists. Do not serialize them!!
+- Make sure the dict also contains a "reasoning" field that explains
+how you arrived at your final answer. Do not omit this field. Do not
+mention the tool names, rather what the tool helped you discover.
 - Return the final structured object using: final_answer(<python_dict>)
 - Ensure the returned object STRICTLY matches the expected schema for that tool.
 Do not add or rename keys. Keep value types correct.
+- To read the contents of any uploaded files, call the `read_upload_file` tool
+with the path you received (direct file IO like `open()` is blocked).
 
 {user_input}
 
 HINTS:
 - MCP tool outputs are often structured (Python dict/list). Use them directly.
-- If you get a string result,
+- If you get a string result, make sure to convert it to a Python dict/list before indexing
 like result["info"].
+- To send pyproject.toml content to the `resolve_environment` tool, you
+will need to use the `upload_file_to_gradio` tool first to upload the file.
 - Also be careful of the types. Some fields may be optional or missing.
 Some fields are ints/floats.
 - Always prefer MCP tool data over web search data for package metadata.
 - However, If you decide to use the `web_search`, you must ONLY rely on the
-official package website, PyPI page, or official GitHub repo.
-- Your knowledge cutoff may prevent you from knowing what's recent.
-So use the `time` module to get
-the current date if needed to reason about versions or releases.
+official package website, PyPI page, or official GitHub repo.
 - NEVER fabricate data. If you cannot find the info, say so.
-- For parsing version numbers, use the `packaging.version` module.
-- Never use ast/json modules outside the helpers; import them once at
-the top and only call _to_mapping / _extract_version_fallback.
+- For parsing version numbers, use the `packaging.version` module.
 - When you have gathered the required info, call final_answer with
-the BEST structured object
-that answers the user query according to the appropriate schema.
+the BEST structured object that answers the user query according to the appropriate schema.
 """
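The rewritten prompt leans on `packaging.version`/`packaging.specifiers`/`packaging.requirements` for compatibility reasoning; the kind of check the agent is being nudged to write looks roughly like the sketch below. The package name and version range are made-up examples.

from packaging.requirements import Requirement
from packaging.version import Version

req = Requirement("numpy>=1.26,<3.0")   # a dependency pin as it might appear in pyproject.toml
candidate = Version("2.0.0")             # the version the user wants to upgrade to

# Specifier containment is how the compatibility decision is usually made.
if candidate in req.specifier:
    print(f"{req.name} {candidate} satisfies '{req.specifier}'")
else:
    print(f"{req.name} {candidate} falls outside '{req.specifier}'")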
src/upgrade_advisor/agents/tools.py ADDED
@@ -0,0 +1,56 @@
+import logging
+from pathlib import Path
+
+from smolagents.tools import Tool
+
+logger = logging.getLogger(__name__)
+logger.setLevel(logging.INFO)
+logger.addHandler(logging.StreamHandler())
+
+
+class ReadUploadFileTool(Tool):
+    """Tool to safely read files saved in the `uploads` directory."""
+
+    name = "read_upload_file"
+    description = """
+    Read a user-uploaded text file from the uploads directory.
+    Input: `path` should be the absolute path you received (or a filename)
+    under the `uploads` folder. Returns the file contents as text."""
+    inputs = {
+        "path": {
+            "type": "string",
+            "description": "Absolute or relative path to the uploaded file \
+                that is present under the `uploads` directory.",
+        }
+    }
+    output_type = "string"
+
+    def __init__(self, upload_root: Path):
+        self.upload_root = upload_root.resolve()
+        super().__init__()
+
+    def forward(self, path: str) -> str:
+        file_path = Path(path).expanduser()
+        if not file_path.is_absolute():
+            file_path = self.upload_root / file_path
+
+        try:
+            resolved = file_path.resolve()
+        except FileNotFoundError as exc:
+            raise FileNotFoundError(f"File not found: {file_path}") from exc
+
+        if not resolved.exists():
+            raise FileNotFoundError(f"File not found: {resolved}")
+
+        try:
+            resolved.relative_to(self.upload_root)
+        except ValueError as exc:
+            raise ValueError(
+                f"Refusing to read '{resolved}': \
+                not inside uploads directory {self.upload_root}"
+            ) from exc
+
+        if resolved.is_dir():
+            raise IsADirectoryError(f"Refusing to read directory: {resolved}")
+
+        return resolved.read_text(encoding="utf-8")
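The containment check in forward() is what guards against path traversal: resolve the candidate, then require it to sit under the uploads root. A quick illustration of that idea in isolation; the paths below are illustrative only.

from pathlib import Path

upload_root = Path("uploads").resolve()

def is_inside_uploads(candidate: str) -> bool:
    resolved = (upload_root / candidate).resolve()
    try:
        # relative_to raises ValueError when `resolved` escapes upload_root (e.g. via "..")
        resolved.relative_to(upload_root)
        return True
    except ValueError:
        return False

print(is_inside_uploads("20250101_pyproject.toml"))  # True: stays under uploads/
print(is_inside_uploads("../pyproject.toml"))        # False: traversal rejected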
src/upgrade_advisor/chat/prompts.py CHANGED
@@ -14,6 +14,8 @@ def result_package_summary_prompt(
 
 Requirements:
 - Do not add speculation, hedging, or disclaimers.
+- Do not try to fix numbers, dates, package names, or versions unless
+they are clearly typos.
 - Do not mention the CONTEXT, your knowledge cutoff, or phrases like "according
 to the provided context."
 - DO NOT refer to the CONTEXT as "context" in your answer, just use
src/upgrade_advisor/schema/schema.py CHANGED
@@ -83,6 +83,41 @@ class ErrorResponseSchema(BaseModel):
     error: str = Field(..., description="Error message")
 
 
+class ResolvedDep(BaseModel):
+    name: str = Field(..., description="Name of the resolved dependency")
+    version: str = Field(..., description="Version of the resolved dependency")
+    via: List[str] = Field(
+        ..., description="List of packages that required this dependency"
+    )
+
+    metainfo: Optional[str] = Field(
+        None, description="Additional metadata information about the dependency"
+    )
+
+    def update_indirect_dep(self, indirect_dep: str):
+        """Updates the via list with an indirect dependency."""
+        self.via.append(indirect_dep)
+
+
+class ResolveResult(BaseModel):
+    deps: Dict[str, ResolvedDep] = Field(
+        ..., description="Mapping of package names to their resolved dependencies"
+    )
+
+
+class UVResolutionResultSchema(BaseModel):
+    python_version: str = Field(..., description="Python version used for resolution")
+    uv_version: str = Field(
+        ..., description="Version of the uv tool used for resolution"
+    )
+    errored: bool = Field(
+        ..., description="Indicates if there was an error during resolution"
+    )
+    output: ResolveResult = Field(
+        ..., description="Output in validated ResolveResult format"
+    )
+
+
 if __name__ == "__main__":
     # Example usage
     example_package_info = PackageInfoSchema(
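A uv resolution payload shaped like the new schema validates as in the sketch below, assuming pydantic v2 (`model_validate`). The models are trimmed copies of the classes above so the snippet is self-contained, and the sample data is invented.

from typing import Dict, List, Optional
from pydantic import BaseModel

# Trimmed copies of the new models, for a self-contained check.
class ResolvedDep(BaseModel):
    name: str
    version: str
    via: List[str]
    metainfo: Optional[str] = None

class ResolveResult(BaseModel):
    deps: Dict[str, ResolvedDep]

class UVResolutionResultSchema(BaseModel):
    python_version: str
    uv_version: str
    errored: bool
    output: ResolveResult

payload = {
    "python_version": "3.14",
    "uv_version": "0.9.11",
    "errored": False,
    "output": {
        "deps": {
            "requests": {"name": "requests", "version": "2.32.3", "via": ["fastapi"]},
        }
    },
}
result = UVResolutionResultSchema.model_validate(payload)  # pydantic v2 API
print(result.output.deps["requests"].version)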
tests/test1.toml ADDED
@@ -0,0 +1,24 @@
+[project]
+name = "test-package"
+version = "0.1.0"
+description = ""
+authors = [
+    { name = "Geevarghese George", email = "4496097+thatgeeman@users.noreply.github.com" },
+]
+license = { text = "MIT License" }
+requires-python = ">=3.10"
+dependencies = [
+    "gradio (>=5.49.1,<6.0.0)",
+    "mcp (>=1.21.1,<2.0.0)",
+    "pytest (>=9.0.1,<10.0.0)",
+    "pytest-asyncio (>=1.3.0,<2.0.0)",
+]
+
+
+[build-system]
+requires = ["poetry-core>=2.0.0,<3.0.0"]
+build-backend = "poetry.core.masonry.api"
+
+[tool.poetry]
+package-mode = true
+packages = [{ include = "src/server" }, { include = "src/tools" }]
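These fixtures are what the upload tool ultimately feeds to the agent; pulling the dependency pins out of such a file needs nothing beyond the standard library. A small sketch, assuming Python 3.11+ for tomllib (the path matches the fixture above).

import tomllib  # Python 3.11+; use the tomli backport on older versions

with open("tests/test1.toml", "rb") as fh:
    project = tomllib.load(fh)

print(project["project"]["requires-python"])   # ">=3.10"
for dep in project["project"]["dependencies"]:
    print(dep)                                  # e.g. "gradio (>=5.49.1,<6.0.0)"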
tests/test2.toml ADDED
@@ -0,0 +1,272 @@
+[build-system]
+requires = ["pdm-backend"]
+build-backend = "pdm.backend"
+
+[project]
+name = "fastapi"
+dynamic = ["version"]
+description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
+readme = "README.md"
+license = "MIT"
+license-files = ["LICENSE"]
+requires-python = ">=3.8"
+authors = [
+    { name = "Sebastián Ramírez", email = "tiangolo@gmail.com" },
+]
+classifiers = [
+    "Intended Audience :: Information Technology",
+    "Intended Audience :: System Administrators",
+    "Operating System :: OS Independent",
+    "Programming Language :: Python :: 3",
+    "Programming Language :: Python",
+    "Topic :: Internet",
+    "Topic :: Software Development :: Libraries :: Application Frameworks",
+    "Topic :: Software Development :: Libraries :: Python Modules",
+    "Topic :: Software Development :: Libraries",
+    "Topic :: Software Development",
+    "Typing :: Typed",
+    "Development Status :: 4 - Beta",
+    "Environment :: Web Environment",
+    "Framework :: AsyncIO",
+    "Framework :: FastAPI",
+    "Framework :: Pydantic",
+    "Framework :: Pydantic :: 1",
+    "Framework :: Pydantic :: 2",
+    "Intended Audience :: Developers",
+    "Programming Language :: Python :: 3 :: Only",
+    "Programming Language :: Python :: 3.8",
+    "Programming Language :: Python :: 3.9",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
+    "Programming Language :: Python :: 3.13",
+    "Programming Language :: Python :: 3.14",
+    "Topic :: Internet :: WWW/HTTP :: HTTP Servers",
+    "Topic :: Internet :: WWW/HTTP",
+]
+dependencies = [
+    "starlette>=0.40.0,<0.51.0",
+    "pydantic>=1.7.4,!=1.8,!=1.8.1,!=2.0.0,!=2.0.1,!=2.1.0,<3.0.0",
+    "typing-extensions>=4.8.0",
+    "annotated-doc>=0.0.2",
+]
+
+[project.urls]
+Homepage = "https://github.com/fastapi/fastapi"
+Documentation = "https://fastapi.tiangolo.com/"
+Repository = "https://github.com/fastapi/fastapi"
+Issues = "https://github.com/fastapi/fastapi/issues"
+Changelog = "https://fastapi.tiangolo.com/release-notes/"
+
+[project.optional-dependencies]
+
+standard = [
+    "fastapi-cli[standard] >=0.0.8",
+    # For the test client
+    "httpx >=0.23.0,<1.0.0",
+    # For templates
+    "jinja2 >=3.1.5",
+    # For forms and file uploads
+    "python-multipart >=0.0.18",
+    # To validate email fields
+    "email-validator >=2.0.0",
+    # Uvicorn with uvloop
+    "uvicorn[standard] >=0.12.0",
+    # TODO: this should be part of some pydantic optional extra dependencies
+    # # Settings management
+    # "pydantic-settings >=2.0.0",
+    # # Extra Pydantic data types
+    # "pydantic-extra-types >=2.0.0",
+]
+
+standard-no-fastapi-cloud-cli = [
+    "fastapi-cli[standard-no-fastapi-cloud-cli] >=0.0.8",
+    # For the test client
+    "httpx >=0.23.0,<1.0.0",
+    # For templates
+    "jinja2 >=3.1.5",
+    # For forms and file uploads
+    "python-multipart >=0.0.18",
+    # To validate email fields
+    "email-validator >=2.0.0",
+    # Uvicorn with uvloop
+    "uvicorn[standard] >=0.12.0",
+    # TODO: this should be part of some pydantic optional extra dependencies
+    # # Settings management
+    # "pydantic-settings >=2.0.0",
+    # # Extra Pydantic data types
+    # "pydantic-extra-types >=2.0.0",
+]
+
+all = [
+    "fastapi-cli[standard] >=0.0.8",
+    # # For the test client
+    "httpx >=0.23.0,<1.0.0",
+    # For templates
+    "jinja2 >=3.1.5",
+    # For forms and file uploads
+    "python-multipart >=0.0.18",
+    # For Starlette's SessionMiddleware, not commonly used with FastAPI
+    "itsdangerous >=1.1.0",
+    # For Starlette's schema generation, would not be used with FastAPI
+    "pyyaml >=5.3.1",
+    # For UJSONResponse
+    "ujson >=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0",
+    # For ORJSONResponse
+    "orjson >=3.2.1",
+    # To validate email fields
+    "email-validator >=2.0.0",
+    # Uvicorn with uvloop
+    "uvicorn[standard] >=0.12.0",
+    # Settings management
+    "pydantic-settings >=2.0.0",
+    # Extra Pydantic data types
+    "pydantic-extra-types >=2.0.0",
+]
+
+[project.scripts]
+fastapi = "fastapi.cli:main"
+
+[tool.pdm]
+version = { source = "file", path = "fastapi/__init__.py" }
+distribution = true
+
+[tool.pdm.build]
+source-includes = [
+    "tests/",
+    "docs_src/",
+    "requirements*.txt",
+    "scripts/",
+    # For a test
+    "docs/en/docs/img/favicon.png",
+]
+
+[tool.tiangolo._internal-slim-build.packages.fastapi-slim.project]
+name = "fastapi-slim"
+
+[tool.mypy]
+plugins = ["pydantic.mypy"]
+strict = true
+
+[[tool.mypy.overrides]]
+module = "fastapi.concurrency"
+warn_unused_ignores = false
+ignore_missing_imports = true
+
+[[tool.mypy.overrides]]
+module = "fastapi.tests.*"
+ignore_missing_imports = true
+check_untyped_defs = true
+
+[[tool.mypy.overrides]]
+module = "docs_src.*"
+disallow_incomplete_defs = false
+disallow_untyped_defs = false
+disallow_untyped_calls = false
+
+[tool.pytest.ini_options]
+addopts = [
+    "--strict-config",
+    "--strict-markers",
+    "--ignore=docs_src",
+]
+xfail_strict = true
+junit_family = "xunit2"
+filterwarnings = [
+    "error",
+    'ignore:starlette.middleware.wsgi is deprecated and will be removed in a future release\..*:DeprecationWarning:starlette',
+    # see https://trio.readthedocs.io/en/stable/history.html#trio-0-22-0-2022-09-28
+    "ignore:You seem to already have a custom.*:RuntimeWarning:trio",
+    # TODO: remove after upgrading SQLAlchemy to a version that includes the following changes
+    # https://github.com/sqlalchemy/sqlalchemy/commit/59521abcc0676e936b31a523bd968fc157fef0c2
+    'ignore:datetime\.datetime\.utcfromtimestamp\(\) is deprecated and scheduled for removal in a future version\..*:DeprecationWarning:sqlalchemy',
+    # Trio 24.1.0 raises a warning from attrs
+    # Ref: https://github.com/python-trio/trio/pull/3054
+    # Remove once there's a new version of Trio
+    'ignore:The `hash` argument is deprecated*:DeprecationWarning:trio',
+    # Ignore flaky coverage / pytest warning about SQLite connection, only applies to Python 3.13 and Pydantic v1
+    'ignore:Exception ignored in. <sqlite3\.Connection object.*:pytest.PytestUnraisableExceptionWarning',
+]
+
+[tool.coverage.run]
+parallel = true
+data_file = "coverage/.coverage"
+source = [
+    "docs_src",
+    "tests",
+    "fastapi"
+]
+context = '${CONTEXT}'
+dynamic_context = "test_function"
+omit = [
+    "docs_src/response_model/tutorial003_04.py",
+    "docs_src/response_model/tutorial003_04_py310.py",
+]
+
+[tool.coverage.report]
+show_missing = true
+sort = "-Cover"
+
+[tool.coverage.html]
+show_contexts = true
+
+[tool.ruff.lint]
+select = [
+    "E", # pycodestyle errors
+    "W", # pycodestyle warnings
+    "F", # pyflakes
+    "I", # isort
+    "B", # flake8-bugbear
+    "C4", # flake8-comprehensions
+    "UP", # pyupgrade
+]
+ignore = [
+    "E501", # line too long, handled by black
+    "B008", # do not perform function calls in argument defaults
+    "C901", # too complex
+    "W191", # indentation contains tabs
+]
+
+[tool.ruff.lint.per-file-ignores]
+"__init__.py" = ["F401"]
+"docs_src/dependencies/tutorial007.py" = ["F821"]
+"docs_src/dependencies/tutorial008.py" = ["F821"]
+"docs_src/dependencies/tutorial009.py" = ["F821"]
+"docs_src/dependencies/tutorial010.py" = ["F821"]
+"docs_src/custom_response/tutorial007.py" = ["B007"]
+"docs_src/dataclasses/tutorial003.py" = ["I001"]
+"docs_src/path_operation_advanced_configuration/tutorial007.py" = ["B904"]
+"docs_src/path_operation_advanced_configuration/tutorial007_pv1.py" = ["B904"]
+"docs_src/custom_request_and_route/tutorial002.py" = ["B904"]
+"docs_src/dependencies/tutorial008_an.py" = ["F821"]
+"docs_src/dependencies/tutorial008_an_py39.py" = ["F821"]
+"docs_src/query_params_str_validations/tutorial012_an.py" = ["B006"]
+"docs_src/query_params_str_validations/tutorial012_an_py39.py" = ["B006"]
+"docs_src/query_params_str_validations/tutorial013_an.py" = ["B006"]
+"docs_src/query_params_str_validations/tutorial013_an_py39.py" = ["B006"]
+"docs_src/security/tutorial004.py" = ["B904"]
+"docs_src/security/tutorial004_an.py" = ["B904"]
+"docs_src/security/tutorial004_an_py310.py" = ["B904"]
+"docs_src/security/tutorial004_an_py39.py" = ["B904"]
+"docs_src/security/tutorial004_py310.py" = ["B904"]
+"docs_src/security/tutorial005.py" = ["B904"]
+"docs_src/security/tutorial005_an.py" = ["B904"]
+"docs_src/security/tutorial005_an_py310.py" = ["B904"]
+"docs_src/security/tutorial005_an_py39.py" = ["B904"]
+"docs_src/security/tutorial005_py310.py" = ["B904"]
+"docs_src/security/tutorial005_py39.py" = ["B904"]
+"docs_src/dependencies/tutorial008b.py" = ["B904"]
+"docs_src/dependencies/tutorial008b_an.py" = ["B904"]
+"docs_src/dependencies/tutorial008b_an_py39.py" = ["B904"]
+
+
+[tool.ruff.lint.isort]
+known-third-party = ["fastapi", "pydantic", "starlette"]
+
+[tool.ruff.lint.pyupgrade]
+# Preserve types, even if a file imports `from __future__ import annotations`.
+keep-runtime-typing = true
+
+[tool.inline-snapshot]
+# default-flags=["fix"]
+# default-flags=["create"]