Implement Forgejo tool
All checks were successful
CI / Build and push Docker image (push) Successful in 1m28s
All checks were successful
CI / Build and push Docker image (push) Successful in 1m28s
This commit is contained in:
parent
6db4ea8e96
commit
4aaade7e69
55 changed files with 9186 additions and 336 deletions
|
@ -2,9 +2,11 @@ name: CI
|
|||
|
||||
on:
|
||||
push:
|
||||
branches: [main, dev]
|
||||
branches:
|
||||
- "*"
|
||||
pull_request:
|
||||
branches: [main]
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
build-and-push:
|
3
.gitignore
vendored
3
.gitignore
vendored
|
@ -161,3 +161,6 @@ cython_debug/
|
|||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
#.idea/
|
||||
|
||||
CLAUDE.md
|
||||
GEMINI.md
|
||||
|
|
202
LICENSE
202
LICENSE
|
@ -1,9 +1,201 @@
|
|||
MIT License
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
Copyright (c) 2025 tom
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
1. Definitions.
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
|
28
README.md
28
README.md
|
@ -13,6 +13,8 @@ The server provides the following tool endpoints:
|
|||
- **[Time Operations](docs/time.md)** (`/time`) - Time utilities and timezone operations
|
||||
- **[Weather Information](docs/weather.md)** (`/weather`) - Real-time weather data and forecasts
|
||||
- **[Search](docs/searxng.md)** (`/searxng`) - Web search through SearXNG instances
|
||||
- **[Forgejo Integration](docs/forgejo.md)** (`/forgejo`) - Interact with Forgejo (or Gitea)
|
||||
instances for repository, issue, and CI/CD operations
|
||||
|
||||
Each tool provides detailed endpoint documentation - click the links above to learn about specific
|
||||
endpoints and their capabilities.
|
||||
|
@ -52,11 +54,31 @@ all available endpoints.
|
|||
|
||||
## Configuration
|
||||
|
||||
The server can be configured through environment variables:
|
||||
The server can be configured through environment variables and a `config.yaml` file.
|
||||
|
||||
### Environment Variables
|
||||
|
||||
Environment variables take precedence over `config.yaml` settings.
|
||||
|
||||
- `MEMORY_FILE_PATH` - Path to the memory storage file (default: `/data/memory.json`)
|
||||
- Additional tool-specific configuration options are documented in the individual tool modules
|
||||
|
||||
### `config.yaml`
|
||||
|
||||
For more complex or static configurations, a `config.yaml` file can be used. This file should be
|
||||
placed in the root directory of the project. An example `config.yaml` might look like this:
|
||||
|
||||
```yaml
|
||||
forgejo:
|
||||
base_url: "https://git.example.com"
|
||||
api_key: "your_forgejo_api_key"
|
||||
searxng:
|
||||
base_url: "http://localhost:8080"
|
||||
```
|
||||
|
||||
This allows for structured configuration of various tools and server settings. Refer to individual
|
||||
tool documentation for specific `config.yaml` options.
|
||||
|
||||
## Architecture
|
||||
|
||||
This project follows a unified single-server architecture:
|
||||
|
@ -66,3 +88,7 @@ This project follows a unified single-server architecture:
|
|||
- **OpenAPI compliance** with full documentation and validation
|
||||
- **Docker-ready** with optimized multi-stage builds
|
||||
- **uv package management** for fast, reliable dependency handling
|
||||
|
||||
## License
|
||||
|
||||
This project is licensed under the Apache 2.0 License - see the [LICENSE](LICENSE) file for details.
|
||||
|
|
29
config.yaml
Normal file
29
config.yaml
Normal file
|
@ -0,0 +1,29 @@
|
|||
# Global application settings
|
||||
app:
|
||||
# Configuration for individual tools.
|
||||
# Each tool can have an 'enabled' flag and other tool-specific settings.
|
||||
tools:
|
||||
forgejo:
|
||||
enabled: true
|
||||
# Mapping of Forgejo base URLs to API tokens.
|
||||
# Example:
|
||||
# https://git.tomfos.tr: your_token_for_tomfos_tr
|
||||
# https://another-forgejo.com: your_token_for_another_forgejo
|
||||
tokens:
|
||||
# Add your Forgejo instance URLs and tokens here
|
||||
# https://git.tomfos.tr: "your_forgejo_token_here"
|
||||
memory:
|
||||
enabled: true
|
||||
file_path: "/data/memory.json"
|
||||
searxng:
|
||||
enabled: true
|
||||
base_url: "http://localhost:8080"
|
||||
time:
|
||||
enabled: true
|
||||
default_timezone: "UTC"
|
||||
weather:
|
||||
enabled: true
|
||||
default_location: "London, UK"
|
||||
web:
|
||||
enabled: true
|
||||
user_agent: "Mozilla/5.0 (X11; Linux x86_64; rv:128.0) Gecko/20100101 Firefox/128.0esr"
|
1377
docs/forgejo.md
Normal file
1377
docs/forgejo.md
Normal file
File diff suppressed because one or more lines are too long
|
@ -1,4 +1,9 @@
|
|||
"""OpenAPI MCP Server package."""
|
||||
"""This module initialises the OpenAPI MCP Server package.
|
||||
|
||||
It defines the package's metadata, including its version, title, and a brief
|
||||
description. This information is used by the server to provide details about
|
||||
itself in the OpenAPI documentation.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
|
|
@ -1,4 +1,9 @@
|
|||
"""Entry point for the OpenAPI MCP Server."""
|
||||
"""This module serves as the command-line entry point for the application.
|
||||
|
||||
When executed as the main program, it launches the OpenAPI MCP Server using
|
||||
Uvicorn. It retrieves the server configuration, creates the FastAPI application
|
||||
instance, and starts the server, making the API available for requests.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
@ -9,7 +14,11 @@ from .server import create_app
|
|||
|
||||
|
||||
def launch() -> None:
|
||||
"""Launch the OpenAPI MCP Server."""
|
||||
"""Initialises and runs the FastAPI application using Uvicorn.
|
||||
|
||||
This function creates the FastAPI application instance and then starts the
|
||||
Uvicorn server with the configured host, port, and logging level.
|
||||
"""
|
||||
app = create_app()
|
||||
|
||||
uvicorn.run(
|
||||
|
|
|
@ -1,3 +1,9 @@
|
|||
"""Core components for the OpenAPI MCP Server."""
|
||||
"""This module initialises the core package of the OpenAPI MCP Server.
|
||||
|
||||
The `core` package contains the essential components that form the backbone of
|
||||
the server, including configuration management, middleware, and the tool
|
||||
registry. These components are responsible for the overall structure and
|
||||
functioning of the application.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
|
|
@ -1,13 +1,145 @@
|
|||
"""Configuration management for the OpenAPI MCP Server."""
|
||||
"""This module manages the configuration for the OpenAPI MCP Server.
|
||||
|
||||
It provides a structured way to handle both server-level and tool-specific
|
||||
settings. The configuration is loaded from a combination of a YAML file
|
||||
(`config.yaml`) for application-wide settings and environment variables for
|
||||
server and security-related settings. This approach allows for a clear
|
||||
separation of concerns and flexible deployment.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Self
|
||||
|
||||
import yaml
|
||||
from pydantic import Field
|
||||
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||
|
||||
|
||||
class AppConfig:
|
||||
"""Handles application-wide configuration loaded from `config.yaml`.
|
||||
|
||||
This class is implemented as a singleton to ensure that the configuration
|
||||
is loaded only once and shared across the application. It reads settings
|
||||
for various tools and application defaults from the YAML file.
|
||||
|
||||
Attributes:
|
||||
tools: A dictionary containing the configuration for each tool.
|
||||
forgejo_tokens: A dictionary of authentication tokens for Forgejo instances.
|
||||
memory_file_path: The path to the JSON file used by the Memory tool.
|
||||
searxng_base_url: The base URL of the SearXNG instance.
|
||||
default_timezone: The default timezone for the Time tool.
|
||||
default_location: The default location for the Weather tool.
|
||||
web_user_agent: The user agent string for the Web tool HTTP requests.
|
||||
"""
|
||||
|
||||
_instance = None
|
||||
|
||||
tools: dict[str, dict]
|
||||
forgejo_tokens: dict[str, str]
|
||||
memory_file_path: str
|
||||
searxng_base_url: str
|
||||
default_timezone: str
|
||||
default_location: str
|
||||
web_user_agent: str
|
||||
|
||||
def __new__(cls) -> Self:
|
||||
"""Ensures that only one instance of the AppConfig is created.
|
||||
|
||||
This method implements the singleton pattern, returning the existing
|
||||
instance if one has already been created.
|
||||
|
||||
Returns:
|
||||
The singleton instance of the AppConfig.
|
||||
"""
|
||||
if cls._instance is None:
|
||||
cls._instance = super().__new__(cls)
|
||||
cls._instance._initialize() # noqa: SLF001
|
||||
return cls._instance
|
||||
|
||||
def _initialize(self) -> None:
|
||||
"""Initialises the configuration by loading it from `config.yaml`.
|
||||
|
||||
This method reads the YAML file and populates the configuration
|
||||
attributes. It sets default values for all settings, which can be
|
||||
overridden by the values in the configuration file.
|
||||
"""
|
||||
self.tools = {}
|
||||
self.forgejo_tokens = {}
|
||||
self.memory_file_path = "/data/memory.json" # Default value
|
||||
self.searxng_base_url = "http://localhost:8080" # Default value
|
||||
self.default_timezone = "UTC" # Default value
|
||||
self.default_location = "London, UK" # Default value
|
||||
self.web_user_agent = "Mozilla/5.0 (compatible; OpenAPI-MCP-Server/1.0)" # Default value
|
||||
config_path = Path("config.yaml")
|
||||
print(f"DEBUG: _initialize - config_path: {config_path}")
|
||||
print(f"DEBUG: _initialize - config_path.exists(): {config_path.exists()}")
|
||||
if config_path.exists():
|
||||
with config_path.open(encoding="utf-8") as f:
|
||||
config_data = yaml.safe_load(f)
|
||||
print(f"DEBUG: _initialize - config_data: {config_data}")
|
||||
if config_data:
|
||||
self.tools = config_data.get("app", {}).get("tools", {})
|
||||
# Extract forgejo tokens specifically
|
||||
forgejo_tool_config = self.tools.get("forgejo", {})
|
||||
print(f"DEBUG: _initialize - forgejo_tool_config: {forgejo_tool_config}")
|
||||
self.forgejo_tokens = forgejo_tool_config.get("tokens", {})
|
||||
print(f"DEBUG: _initialize - self.forgejo_tokens: {self.forgejo_tokens}")
|
||||
# Extract memory file path
|
||||
memory_tool_config = self.tools.get("memory", {})
|
||||
self.memory_file_path = memory_tool_config.get(
|
||||
"file_path", self.memory_file_path
|
||||
)
|
||||
# Extract SearXNG base URL
|
||||
searxng_tool_config = self.tools.get("searxng", {})
|
||||
self.searxng_base_url = searxng_tool_config.get(
|
||||
"base_url", self.searxng_base_url
|
||||
)
|
||||
# Extract default timezone
|
||||
time_tool_config = self.tools.get("time", {})
|
||||
self.default_timezone = time_tool_config.get(
|
||||
"default_timezone", self.default_timezone
|
||||
)
|
||||
# Extract default location
|
||||
weather_tool_config = self.tools.get("weather", {})
|
||||
self.default_location = weather_tool_config.get(
|
||||
"default_location", self.default_location
|
||||
)
|
||||
# Extract web user agent
|
||||
web_tool_config = self.tools.get("web", {})
|
||||
self.web_user_agent = web_tool_config.get("user_agent", self.web_user_agent)
|
||||
|
||||
|
||||
def get_app_config() -> AppConfig:
|
||||
"""Returns the singleton instance of the AppConfig.
|
||||
|
||||
This function is a convenient way to access the application-wide
|
||||
configuration from anywhere in the application.
|
||||
|
||||
Returns:
|
||||
The singleton AppConfig instance.
|
||||
"""
|
||||
return AppConfig()
|
||||
|
||||
|
||||
class ServerConfig(BaseSettings):
|
||||
"""Server configuration settings."""
|
||||
"""Manages the server configuration using environment variables.
|
||||
|
||||
This class uses Pydantic's BaseSettings to load configuration from a `.env`
|
||||
file or environment variables. It includes settings for the server's host,
|
||||
port, and CORS policies.
|
||||
|
||||
Attributes:
|
||||
host: The host address for the server.
|
||||
port: The port number for the server.
|
||||
debug: A flag to enable or disable debug mode.
|
||||
reload: A flag to enable or disable auto-reloading.
|
||||
cors_origins: A list of allowed CORS origins.
|
||||
cors_methods: A list of allowed CORS methods.
|
||||
cors_headers: A list of allowed CORS headers.
|
||||
cors_credentials: A flag to enable or disable CORS credentials.
|
||||
"""
|
||||
|
||||
model_config = SettingsConfigDict(
|
||||
env_file=".env",
|
||||
|
@ -15,7 +147,7 @@ class ServerConfig(BaseSettings):
|
|||
)
|
||||
|
||||
host: str = Field(default="0.0.0.0", alias="HOST")
|
||||
port: int = Field(default=8000, alias="PORT")
|
||||
port: int = Field(default=80, alias="PORT")
|
||||
debug: bool = Field(default=False, alias="DEBUG")
|
||||
reload: bool = Field(default=False, alias="RELOAD")
|
||||
|
||||
|
@ -26,4 +158,65 @@ class ServerConfig(BaseSettings):
|
|||
cors_credentials: bool = Field(default=True, alias="CORS_CREDENTIALS")
|
||||
|
||||
|
||||
class ForgejoConfig(BaseSettings):
|
||||
"""Manages the configuration for the Forgejo tool.
|
||||
|
||||
This class loads the base URL for the Forgejo instance from environment
|
||||
variables. It also provides a method to retrieve the appropriate
|
||||
authentication headers for making API requests to a Forgejo instance.
|
||||
|
||||
Attributes:
|
||||
base_url: The base URL of the Forgejo instance.
|
||||
"""
|
||||
|
||||
model_config = SettingsConfigDict(
|
||||
env_file=".env",
|
||||
case_sensitive=False,
|
||||
extra="ignore",
|
||||
env_prefix="FORGEJO_",
|
||||
)
|
||||
|
||||
base_url: str = Field(default="http://localhost:3000", alias="BASE_URL")
|
||||
|
||||
def get_headers(self, base_url: str) -> dict[str, str]:
|
||||
"""Constructs the authentication headers for a Forgejo API request.
|
||||
|
||||
This method retrieves the token for the specified Forgejo base URL from
|
||||
the AppConfig and creates the necessary 'Authorization' header.
|
||||
|
||||
Args:
|
||||
base_url: The base URL of the Forgejo instance for which to get
|
||||
the authentication headers.
|
||||
|
||||
Returns:
|
||||
A dictionary containing the 'Authorization' header with the token,
|
||||
or an empty dictionary if no token is found.
|
||||
"""
|
||||
app_config_instance = get_app_config()
|
||||
headers = {}
|
||||
token = app_config_instance.forgejo_tokens.get(base_url)
|
||||
if token:
|
||||
headers["Authorization"] = f"token {token}"
|
||||
return headers
|
||||
|
||||
def get_configured_instance_urls(self) -> list[str]:
|
||||
"""Returns a list of base URLs for configured Forgejo instances.
|
||||
|
||||
This method retrieves the keys (base URLs) from the `forgejo_tokens`
|
||||
dictionary in the `AppConfig` instance, which represents the Forgejo
|
||||
instances for which authentication tokens are available.
|
||||
|
||||
Returns:
|
||||
A list of strings, where each string is the base URL of a
|
||||
configured Forgejo instance.
|
||||
"""
|
||||
app_config_instance = get_app_config()
|
||||
return list(app_config_instance.forgejo_tokens.keys())
|
||||
|
||||
|
||||
config = ServerConfig()
|
||||
|
||||
|
||||
def get_forgejo_config() -> ForgejoConfig:
|
||||
"""Returns the ForgejoConfig instance."""
|
||||
return ForgejoConfig()
|
||||
|
|
|
@ -1,4 +1,10 @@
|
|||
"""Middleware setup for the FastAPI application."""
|
||||
"""This module handles the setup of middleware for the FastAPI application.
|
||||
|
||||
Middleware is used to add cross-cutting concerns to the application, such as
|
||||
handling Cross-Origin Resource Sharing (CORS). This module centralises the
|
||||
configuration of all middleware, ensuring that it is applied consistently
|
||||
across the entire application.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
@ -13,7 +19,15 @@ if TYPE_CHECKING:
|
|||
|
||||
|
||||
def setup_middleware(app: FastAPI) -> None:
|
||||
"""Setup middleware for the FastAPI application."""
|
||||
"""Configures and adds all necessary middleware to the FastAPI application.
|
||||
|
||||
This function sets up the `CORSMiddleware` based on the server's
|
||||
configuration, allowing for flexible control over cross-origin requests.
|
||||
|
||||
Args:
|
||||
app: The main FastAPI application instance to which the middleware
|
||||
will be added.
|
||||
"""
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=config.cors_origins,
|
||||
|
|
|
@ -1,4 +1,11 @@
|
|||
"""Tool registry for dynamic discovery and registration."""
|
||||
"""This module provides the ToolRegistry for dynamic discovery and registration.
|
||||
|
||||
The ToolRegistry is a central component of the server's modular architecture.
|
||||
It is responsible for discovering all available tools within the `tools`
|
||||
directory, loading them, and registering their API routes with the main FastAPI
|
||||
application. This allows for a plug-and-play system where new tools can be
|
||||
added to the server by simply creating a new tool module.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
@ -10,33 +17,67 @@ from typing import TYPE_CHECKING
|
|||
if TYPE_CHECKING:
|
||||
from fastapi import FastAPI
|
||||
|
||||
from openapi_mcp_server.core.config import AppConfig
|
||||
|
||||
from openapi_mcp_server.tools.base import BaseTool
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ToolRegistry:
|
||||
"""Registry for managing tool modules and their routes."""
|
||||
"""A registry for managing the discovery and registration of tool modules.
|
||||
|
||||
This class scans the `tools` directory for valid tool modules, loads them,
|
||||
and provides a mechanism to register their API routes with the main FastAPI
|
||||
application. It ensures that only enabled tools are loaded and registered.
|
||||
|
||||
Attributes:
|
||||
tools: A dictionary mapping tool names to their corresponding BaseTool
|
||||
instances.
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the tool registry."""
|
||||
"""Initialises the ToolRegistry.
|
||||
|
||||
This creates an empty dictionary to store the discovered tools.
|
||||
"""
|
||||
self.tools: dict[str, BaseTool] = {}
|
||||
|
||||
def discover_tools(self) -> None:
|
||||
"""Automatically discover and load all tool modules."""
|
||||
def discover_tools(self, app_config_instance: AppConfig) -> None:
|
||||
"""Discovers and loads all enabled tool modules from the `tools` directory.
|
||||
|
||||
This method iterates through the subdirectories of the `tools` directory,
|
||||
checking for valid tool modules that are enabled in the application
|
||||
configuration. It then loads the tool's routes and adds the tool
|
||||
instance to the registry.
|
||||
|
||||
Args:
|
||||
app_config_instance: The application configuration instance, used to
|
||||
check if a tool is enabled.
|
||||
"""
|
||||
tools_path = Path(__file__).parent.parent / "tools"
|
||||
|
||||
for tool_dir in tools_path.iterdir():
|
||||
tool_name = tool_dir.name
|
||||
if (
|
||||
tool_dir.is_dir()
|
||||
and tool_dir.name not in {"__pycache__", "base.py"}
|
||||
and tool_name not in {"__pycache__", "base.py"}
|
||||
and (tool_dir / "__init__.py").exists()
|
||||
and (tool_dir / "routes.py").exists()
|
||||
and app_config_instance.tools.get(tool_name, {}).get("enabled", False)
|
||||
):
|
||||
self._load_tool(tool_dir.name)
|
||||
|
||||
def _load_tool(self, tool_name: str) -> None:
|
||||
"""Load a specific tool module."""
|
||||
"""Loads a specific tool module by importing its `routes.py` file.
|
||||
|
||||
This internal method imports the specified tool's module and retrieves
|
||||
the `tool` instance from it. If the instance is a valid `BaseTool`,
|
||||
it is added to the registry.
|
||||
|
||||
Args:
|
||||
tool_name: The name of the tool to load.
|
||||
"""
|
||||
try:
|
||||
# Import the tool's routes module
|
||||
module = importlib.import_module(f"openapi_mcp_server.tools.{tool_name}.routes")
|
||||
|
@ -56,7 +97,15 @@ class ToolRegistry:
|
|||
logger.exception("Failed to load tool %s", tool_name)
|
||||
|
||||
def register_routes(self, app: FastAPI) -> None:
|
||||
"""Register all tool routes with the FastAPI application."""
|
||||
"""Registers the API routes for all loaded tools with the FastAPI app.
|
||||
|
||||
This method iterates through the discovered tools and includes their
|
||||
FastAPI routers in the main application, prefixing them with the
|
||||
tool's name.
|
||||
|
||||
Args:
|
||||
app: The main FastAPI application instance.
|
||||
"""
|
||||
for tool_name, tool in self.tools.items():
|
||||
# Create a router with tool-specific prefix
|
||||
router = tool.get_router()
|
||||
|
|
|
@ -1,3 +1,9 @@
|
|||
"""Shared data models for the OpenAPI MCP Server."""
|
||||
"""This module initialises the models package for the OpenAPI MCP Server.
|
||||
|
||||
The `models` package contains the Pydantic models that are shared across
|
||||
different parts of the application. This includes base models for common API
|
||||
responses, such as health checks, which ensure a consistent and standardised
|
||||
data structure throughout the server.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
|
|
@ -1,4 +1,10 @@
|
|||
"""Base data models for the OpenAPI MCP Server."""
|
||||
"""This module defines the base Pydantic models for the application.
|
||||
|
||||
These models are used to provide standardised responses for common API
|
||||
endpoints, such as health checks and error messages. By using these base
|
||||
models, the API maintains a consistent and predictable structure, which makes
|
||||
it easier for clients to handle responses.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
@ -6,13 +12,27 @@ from pydantic import BaseModel, Field
|
|||
|
||||
|
||||
class HealthResponse(BaseModel):
|
||||
"""Standard health check response."""
|
||||
"""Represents a standard health check response.
|
||||
|
||||
This model is used to indicate the health status of a service.
|
||||
|
||||
Attributes:
|
||||
status: The health status of the service (e.g., 'healthy').
|
||||
service: The name of the service being checked.
|
||||
"""
|
||||
|
||||
status: str = Field(default="healthy", description="Service health status")
|
||||
service: str = Field(..., description="Service name")
|
||||
|
||||
|
||||
class ErrorResponse(BaseModel):
|
||||
"""Standard error response."""
|
||||
"""Represents a standard error response.
|
||||
|
||||
This model is used to provide a consistent format for error messages
|
||||
returned by the API.
|
||||
|
||||
Attributes:
|
||||
detail: A string containing the error message.
|
||||
"""
|
||||
|
||||
detail: str = Field(..., description="Error message")
|
||||
|
|
|
@ -1,4 +1,11 @@
|
|||
"""Main FastAPI application factory."""
|
||||
"""This module is the main entry point for creating the FastAPI application.
|
||||
|
||||
It defines the application factory, `create_app`, which initialises and
|
||||
configures the FastAPI instance. This includes setting up metadata, middleware,
|
||||
and a global health check endpoint. The module also handles the dynamic
|
||||
discovery and registration of tool routes, allowing for a modular and extensible
|
||||
architecture where new tools can be added with minimal changes to the core server.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
@ -7,6 +14,7 @@ from typing import TYPE_CHECKING
|
|||
from fastapi import FastAPI
|
||||
|
||||
from . import __description__, __title__, __version__
|
||||
from .core.config import get_app_config
|
||||
from .core.middleware import setup_middleware
|
||||
from .core.registry import registry
|
||||
from .models.base import HealthResponse
|
||||
|
@ -16,13 +24,17 @@ if TYPE_CHECKING:
|
|||
|
||||
|
||||
def generate_unique_id(route: APIRoute) -> str:
|
||||
"""Generate a unique ID for a route.
|
||||
"""Generates a unique ID for a given FastAPI route.
|
||||
|
||||
This function is used by FastAPI to create unique operation IDs for the
|
||||
OpenAPI schema. It combines the first tag of the route with the route's name
|
||||
to ensure that each ID is unique and descriptive.
|
||||
|
||||
Args:
|
||||
route (APIRoute): The route to generate an ID for.
|
||||
route: The APIRoute object for which to generate the ID.
|
||||
|
||||
Returns:
|
||||
str: The unique ID for the route.
|
||||
A unique string identifier for the route.
|
||||
"""
|
||||
if route.tags:
|
||||
return f"{route.tags[0]}_{route.name}"
|
||||
|
@ -30,10 +42,14 @@ def generate_unique_id(route: APIRoute) -> str:
|
|||
|
||||
|
||||
def create_app() -> FastAPI:
|
||||
"""Create and configure the FastAPI application.
|
||||
"""Creates and configures the main FastAPI application instance.
|
||||
|
||||
This function initialises the FastAPI application with the project's metadata,
|
||||
sets up middleware, and registers all the tool routes that have been
|
||||
discovered by the ToolRegistry.
|
||||
|
||||
Returns:
|
||||
FastAPI: Configured FastAPI application instance.
|
||||
The configured FastAPI application instance.
|
||||
"""
|
||||
app = FastAPI(
|
||||
title=__title__,
|
||||
|
@ -50,15 +66,22 @@ def create_app() -> FastAPI:
|
|||
# Add global health endpoint
|
||||
@app.get("/health", response_model=HealthResponse, tags=["health"])
|
||||
def health_check() -> HealthResponse:
|
||||
"""Global health check endpoint.
|
||||
"""Provides a global health check endpoint for the server.
|
||||
|
||||
This endpoint can be used to monitor the status of the service.
|
||||
|
||||
Returns:
|
||||
HealthResponse: Health status of the service.
|
||||
A HealthResponse object indicating the service is running.
|
||||
"""
|
||||
return HealthResponse(service="openapi-mcp-server")
|
||||
|
||||
app_config = get_app_config()
|
||||
|
||||
# Discover and register tool routes
|
||||
registry.discover_tools()
|
||||
registry.discover_tools(app_config)
|
||||
registry.register_routes(app)
|
||||
|
||||
return app
|
||||
|
||||
|
||||
app = create_app()
|
||||
|
|
|
@ -1,3 +1,9 @@
|
|||
"""Tool implementations for the OpenAPI MCP Server."""
|
||||
"""This module initialises the tools package for the OpenAPI MCP Server.
|
||||
|
||||
The `tools` package is a central part of the server's modular architecture. It
|
||||
contains all the individual tool modules that provide the server's core
|
||||
functionality. Each subdirectory within this package represents a distinct tool,
|
||||
which is dynamically discovered and registered by the server at startup.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
|
|
@ -1,4 +1,9 @@
|
|||
"""Base classes for MCP tools."""
|
||||
"""This module defines the base class for all tools within the OpenAPI MCP Server.
|
||||
|
||||
It provides a common interface and foundational structure that all tool
|
||||
implementations must adhere to. This ensures consistency across different
|
||||
tools and simplifies their integration into the main FastAPI application.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
@ -10,21 +15,43 @@ if TYPE_CHECKING:
|
|||
|
||||
|
||||
class BaseTool(ABC):
|
||||
"""Base class for all MCP tools."""
|
||||
"""Abstract base class for all tools in the OpenAPI MCP Server.
|
||||
|
||||
All concrete tool implementations must inherit from this class and implement
|
||||
the `get_router` method to expose their API endpoints.
|
||||
|
||||
Attributes:
|
||||
name: The unique name of the tool.
|
||||
description: A brief description of the tool's functionality.
|
||||
"""
|
||||
|
||||
def __init__(self, name: str, description: str) -> None:
|
||||
"""Initialize the tool with name and description."""
|
||||
"""Initialises the BaseTool with a name and description.
|
||||
|
||||
Args:
|
||||
name: The unique name of the tool.
|
||||
description: A brief description of the tool's functionality.
|
||||
"""
|
||||
self.name = name
|
||||
self.description = description
|
||||
|
||||
@abstractmethod
|
||||
def get_router(self) -> APIRouter:
|
||||
"""Return the FastAPI router for this tool."""
|
||||
"""Abstract method to be implemented by concrete tool classes.
|
||||
|
||||
def get_health_endpoint(self) -> dict[str, str]:
|
||||
"""Standard health check response for this tool.
|
||||
This method should return a FastAPI `APIRouter` instance that defines
|
||||
the API endpoints for the tool.
|
||||
|
||||
Returns:
|
||||
dict[str, str]: Health status dictionary.
|
||||
An APIRouter instance containing the tool's routes.
|
||||
"""
|
||||
|
||||
def get_health_endpoint(self) -> dict[str, str]:
|
||||
"""Provides a standard health check response for the tool.
|
||||
|
||||
This method can be used to verify that the tool is operational.
|
||||
|
||||
Returns:
|
||||
A dictionary indicating the health status and the name of the tool.
|
||||
"""
|
||||
return {"status": "healthy", "service": self.name}
|
||||
|
|
8
openapi_mcp_server/tools/forgejo/__init__.py
Normal file
8
openapi_mcp_server/tools/forgejo/__init__.py
Normal file
|
@ -0,0 +1,8 @@
|
|||
"""Forgejo tool package for the OpenAPI MCP server.
|
||||
|
||||
Provides API endpoints for interacting with Forgejo instances (self-hosted Git forges).
|
||||
Supports repository operations, CI/CD monitoring via Forgejo Actions, and issue/PR management.
|
||||
|
||||
The tool registers automatically with the server's tool registry and supports authentication
|
||||
via configurable personal access tokens per Forgejo instance.
|
||||
"""
|
289
openapi_mcp_server/tools/forgejo/models.py
Normal file
289
openapi_mcp_server/tools/forgejo/models.py
Normal file
|
@ -0,0 +1,289 @@
|
|||
"""Pydantic models for Forgejo API responses.
|
||||
|
||||
Contains data models representing Forgejo entities: users, repositories, branches,
|
||||
files, commits, workflow runs and jobs, issues, pull requests, comments, labels,
|
||||
and milestones.
|
||||
|
||||
Models are organised hierarchically with base entities composed into complex structures.
|
||||
For example, ForgejoPullRequest inherits from ForgejoIssue, reflecting how Forgejo
|
||||
treats PRs as specialised issues.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime # noqa: TC003
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class ForgejoInstance(BaseModel):
|
||||
"""Represents a configured Forgejo instance."""
|
||||
|
||||
base_url: str = Field(description="The base URL of the Forgejo instance.")
|
||||
|
||||
|
||||
class ForgejoVersion(BaseModel):
|
||||
"""Represents the version information of a Forgejo instance."""
|
||||
|
||||
version: str = Field(description="The version string of the Forgejo instance.")
|
||||
|
||||
|
||||
class ForgejoUser(BaseModel):
|
||||
"""Represents a Forgejo user."""
|
||||
|
||||
id: int = Field(description="The user's ID.")
|
||||
login: str = Field(description="The user's login name.")
|
||||
|
||||
|
||||
class ForgejoRepository(BaseModel):
|
||||
"""Represents a Forgejo repository."""
|
||||
|
||||
id: int = Field(description="The repository's ID.")
|
||||
name: str = Field(description="The repository's name.")
|
||||
full_name: str = Field(description="The full name of the repository (owner/name).")
|
||||
owner: ForgejoUser = Field(description="The owner of the repository.")
|
||||
html_url: str = Field(description="The URL to the repository's page.")
|
||||
description: str | None = Field(description="The repository's description.")
|
||||
fork: bool = Field(description="Whether the repository is a fork.")
|
||||
empty: bool = Field(description="Whether the repository is empty.")
|
||||
private: bool = Field(description="Whether the repository is private.")
|
||||
archived: bool = Field(description="Whether the repository is archived.")
|
||||
mirror: bool = Field(description="Whether the repository is a mirror.")
|
||||
size: int = Field(description="Size of the repository in kilobytes.")
|
||||
created_at: datetime = Field(description="The creation time of the repository.")
|
||||
updated_at: datetime = Field(description="The last update time of the repository.")
|
||||
pushed_at: datetime | None = Field(description="The last push time of the repository.")
|
||||
default_branch: str = Field(description="The name of the default branch.")
|
||||
|
||||
|
||||
class ForgejoCommitUser(BaseModel):
|
||||
"""Represents a commit author or committer."""
|
||||
|
||||
name: str = Field(description="The name of the commit user.")
|
||||
email: str = Field(description="The email of the commit user.")
|
||||
username: str | None = Field(default=None, description="The username of the commit user.")
|
||||
|
||||
|
||||
class ForgejoCommit(BaseModel):
|
||||
"""Represents a detailed Forgejo commit."""
|
||||
|
||||
id: str = Field(description="The SHA of the commit.")
|
||||
message: str = Field(description="The commit message.")
|
||||
url: str = Field(description="The URL to the commit.")
|
||||
author: ForgejoCommitUser = Field(description="The author of the commit.")
|
||||
committer: ForgejoCommitUser = Field(description="The committer of the commit.")
|
||||
timestamp: datetime = Field(description="The timestamp of the commit.")
|
||||
|
||||
|
||||
class ForgejoBranch(BaseModel):
|
||||
"""Represents a Forgejo branch."""
|
||||
|
||||
name: str = Field(description="The name of the branch.")
|
||||
commit: ForgejoCommit = Field(description="The latest commit on the branch.")
|
||||
protected: bool = Field(description="Whether the branch is protected.")
|
||||
|
||||
|
||||
class ForgejoFile(BaseModel):
|
||||
"""Represents a file or directory content in a Forgejo repository."""
|
||||
|
||||
name: str = Field(description="The name of the file or directory.")
|
||||
path: str = Field(description="The path of the file or directory within the repository.")
|
||||
sha: str = Field(description="The SHA of the file or directory.")
|
||||
type: str = Field(description='The type of the entry (e.g., "file", "dir").')
|
||||
size: int = Field(description="The size of the file in bytes.")
|
||||
encoding: str | None = Field(description='The encoding of the file content (e.g., "base64").')
|
||||
content: str | None = Field(description="The Base64 encoded content of the file.")
|
||||
url: str = Field(description="The API URL to the file or directory.")
|
||||
html_url: str | None = Field(description="The HTML URL to the file or directory.")
|
||||
git_url: str | None = Field(description="The Git URL to the file or directory.")
|
||||
download_url: str | None = Field(description="The download URL for the file.")
|
||||
|
||||
|
||||
class ForgejoWorkflowRun(BaseModel):
|
||||
"""Represents a Forgejo Actions workflow run."""
|
||||
|
||||
id: int = Field(description="The ID of the workflow run.")
|
||||
name: str = Field(description="The name of the workflow.")
|
||||
head_branch: str = Field(description="The branch the workflow run was triggered on.")
|
||||
head_sha: str = Field(description="The SHA of the head commit.")
|
||||
url: str = Field(description="The API URL to the workflow run.")
|
||||
html_url: str = Field(description="The HTML URL to the workflow run.")
|
||||
status: str = Field(
|
||||
description='The status of the workflow run (e.g., "completed", "in_progress").'
|
||||
)
|
||||
conclusion: str | None = Field(
|
||||
description='The conclusion of the workflow run (e.g., "success", "failure").'
|
||||
)
|
||||
created_at: datetime = Field(description="The creation time of the workflow run.")
|
||||
updated_at: datetime = Field(description="The last update time of the workflow run.")
|
||||
run_number: int = Field(description="The run number of the workflow.")
|
||||
head_commit: ForgejoCommit | None = Field(description="The head commit of the workflow run.")
|
||||
|
||||
|
||||
class ForgejoWorkflowStep(BaseModel):
|
||||
"""Represents a step within a Forgejo Actions workflow job."""
|
||||
|
||||
name: str = Field(description="The name of the step.")
|
||||
status: str = Field(description='The status of the step (e.g., "completed").')
|
||||
conclusion: str | None = Field(description='The conclusion of the step (e.g., "success").')
|
||||
number: int = Field(description="The step number.")
|
||||
started_at: datetime | None = Field(description="The start time of the step.")
|
||||
completed_at: datetime | None = Field(description="The completion time of the step.")
|
||||
|
||||
|
||||
class ForgejoWorkflowJob(BaseModel):
|
||||
"""Represents a Forgejo Actions workflow job."""
|
||||
|
||||
id: int = Field(description="The ID of the job.")
|
||||
run_id: int = Field(description="The ID of the workflow run this job belongs to.")
|
||||
run_url: str = Field(description="The API URL to the workflow run.")
|
||||
head_sha: str = Field(description="The SHA of the head commit.")
|
||||
url: str = Field(description="The API URL to the job.")
|
||||
html_url: str = Field(description="The HTML URL to the job.")
|
||||
status: str = Field(description='The status of the job (e.g., "completed").')
|
||||
conclusion: str | None = Field(description='The conclusion of the job (e.g., "success").')
|
||||
started_at: datetime = Field(description="The start time of the job.")
|
||||
completed_at: datetime | None = Field(description="The completion time of the job.")
|
||||
name: str = Field(description="The name of the job.")
|
||||
steps: list[ForgejoWorkflowStep] | None = Field(description="The steps within the job.")
|
||||
runner_id: int | None = Field(description="The ID of the runner that executed the job.")
|
||||
runner_name: str | None = Field(description="The name of the runner that executed the job.")
|
||||
runner_group_id: int | None = Field(description="The ID of the runner group.")
|
||||
runner_group_name: str | None = Field(description="The name of the runner group.")
|
||||
|
||||
|
||||
class ForgejoJobLog(BaseModel):
|
||||
"""Represents the log content of a Forgejo Actions job."""
|
||||
|
||||
log_content: str = Field(description="The raw log content of the job.")
|
||||
|
||||
|
||||
class ForgejoLabel(BaseModel):
|
||||
"""Represents a label associated with an issue or pull request."""
|
||||
|
||||
id: int = Field(description="The ID of the label.")
|
||||
name: str = Field(description="The name of the label.")
|
||||
color: str = Field(description="The color of the label (hex code).")
|
||||
description: str | None = Field(description="The description of the label.")
|
||||
|
||||
|
||||
class ForgejoMilestone(BaseModel):
|
||||
"""Represents a milestone for issues or pull requests."""
|
||||
|
||||
id: int = Field(description="The ID of the milestone.")
|
||||
title: str = Field(description="The title of the milestone.")
|
||||
description: str | None = Field(description="The description of the milestone.")
|
||||
state: str = Field(description='The state of the milestone (e.g., "open", "closed").')
|
||||
open_issues: int = Field(description="The number of open issues in the milestone.")
|
||||
closed_issues: int = Field(description="The number of closed issues in the milestone.")
|
||||
created_at: datetime = Field(description="The creation time of the milestone.")
|
||||
updated_at: datetime = Field(description="The last update time of the milestone.")
|
||||
closed_at: datetime | None = Field(description="The closing time of the milestone.")
|
||||
due_on: datetime | None = Field(description="The due date of the milestone.")
|
||||
|
||||
|
||||
class ForgejoPullRequestMeta(BaseModel):
|
||||
"""Represents metadata for a pull request associated with an issue."""
|
||||
|
||||
merged: bool = Field(description="Whether the pull request has been merged.")
|
||||
merged_at: datetime | None = Field(description="The time the pull request was merged.")
|
||||
|
||||
|
||||
class ForgejoIssue(BaseModel):
|
||||
"""Represents a Forgejo issue."""
|
||||
|
||||
id: int = Field(description="The ID of the issue.")
|
||||
url: str = Field(description="The API URL to the issue.")
|
||||
html_url: str = Field(description="The HTML URL to the issue.")
|
||||
title: str = Field(description="The title of the issue.")
|
||||
body: str | None = Field(description="The body (description) of the issue.")
|
||||
state: str = Field(description='The state of the issue (e.g., "open", "closed").')
|
||||
comments: int = Field(description="The number of comments on the issue.")
|
||||
created_at: datetime = Field(description="The creation time of the issue.")
|
||||
updated_at: datetime = Field(description="The last update time of the issue.")
|
||||
closed_at: datetime | None = Field(description="The closing time of the issue.")
|
||||
assignee: ForgejoUser | None = Field(description="The assignee of the issue.")
|
||||
assignees: list[ForgejoUser] | None = Field(description="The assignees of the issue.")
|
||||
user: ForgejoUser = Field(description="The user who created the issue.")
|
||||
labels: list[ForgejoLabel] | None = Field(description="The labels associated with the issue.")
|
||||
milestone: ForgejoMilestone | None = Field(description="The milestone of the issue.")
|
||||
pull_request: ForgejoPullRequestMeta | None = Field(
|
||||
description="Pull request metadata if the issue is a pull request."
|
||||
)
|
||||
|
||||
|
||||
class ForgejoIssueComment(BaseModel):
|
||||
"""Represents a comment on a Forgejo issue."""
|
||||
|
||||
id: int = Field(description="The ID of the comment.")
|
||||
body: str = Field(description="The content of the comment.")
|
||||
created_at: datetime = Field(description="The creation time of the comment.")
|
||||
updated_at: datetime = Field(description="The last update time of the comment.")
|
||||
user: ForgejoUser = Field(description="The user who created the comment.")
|
||||
html_url: str = Field(description="The HTML URL to the comment.")
|
||||
issue_url: str = Field(description="The API URL to the issue the comment belongs to.")
|
||||
|
||||
|
||||
class ForgejoPRBranchInfo(BaseModel):
|
||||
"""Represents branch information for a pull request head or base."""
|
||||
|
||||
label: str = Field(description="The label of the branch (e.g., 'owner:branch-name').")
|
||||
ref: str = Field(description="The name of the branch.")
|
||||
sha: str = Field(description="The SHA of the latest commit on the branch.")
|
||||
repo_id: int = Field(description="The ID of the repository.")
|
||||
repository: ForgejoRepository = Field(description="The repository details.")
|
||||
|
||||
|
||||
class ForgejoPullRequest(ForgejoIssue):
|
||||
"""Represents a Forgejo pull request, inheriting from ForgejoIssue."""
|
||||
|
||||
number: int = Field(description="The pull request number.")
|
||||
merged_at: datetime | None = Field(description="The time the pull request was merged.")
|
||||
merged_commit_id: str | None = Field(description="The SHA of the merge commit.")
|
||||
head: ForgejoPRBranchInfo = Field(description="Information about the head branch.")
|
||||
base: ForgejoPRBranchInfo = Field(description="Information about the base branch.")
|
||||
merge_base: str = Field(description="The SHA of the merge base commit.")
|
||||
mergeable: bool = Field(description="Whether the pull request is mergeable.")
|
||||
merged: bool = Field(description="Whether the pull request has been merged.")
|
||||
status: int = Field(
|
||||
description="The status of the pull request (e.g., 1 for open, 2 for closed, 3 for merged)."
|
||||
)
|
||||
diff_url: str = Field(description="The URL to the diff of the pull request.")
|
||||
patch_url: str = Field(description="The URL to the patch of the pull request.")
|
||||
|
||||
|
||||
class ForgejoPullRequestComment(BaseModel):
|
||||
"""Represents a comment on a Forgejo pull request review."""
|
||||
|
||||
id: int = Field(description="The ID of the comment.")
|
||||
body: str = Field(description="The content of the comment.")
|
||||
created_at: datetime = Field(description="The creation time of the comment.")
|
||||
updated_at: datetime = Field(description="The last update time of the comment.")
|
||||
user: ForgejoUser = Field(description="The user who created the comment.")
|
||||
html_url: str = Field(description="The HTML URL to the comment.")
|
||||
pull_request_url: str = Field(
|
||||
description="The API URL to the pull request the comment belongs to."
|
||||
)
|
||||
commit_id: str = Field(description="The SHA of the commit the comment was made on.")
|
||||
diff_hunk: str = Field(description="The diff hunk for the comment.")
|
||||
path: str = Field(description="The file path the comment was made on.")
|
||||
position: int = Field(description="The position of the comment in the diff.")
|
||||
original_position: int = Field(description="The original position of the comment in the diff.")
|
||||
original_commit_id: str = Field(
|
||||
description="The SHA of the original commit the comment was made on."
|
||||
)
|
||||
pull_request_review_id: int = Field(description="The ID of the pull request review.")
|
||||
resolver: ForgejoUser | None = Field(description="The user who resolved the comment.")
|
||||
|
||||
|
||||
# Rebuild models to resolve forward references
|
||||
ForgejoRepository.model_rebuild()
|
||||
ForgejoCommit.model_rebuild()
|
||||
ForgejoBranch.model_rebuild()
|
||||
ForgejoFile.model_rebuild()
|
||||
ForgejoWorkflowRun.model_rebuild()
|
||||
ForgejoWorkflowJob.model_rebuild()
|
||||
ForgejoMilestone.model_rebuild()
|
||||
ForgejoIssue.model_rebuild()
|
||||
ForgejoPullRequest.model_rebuild()
|
||||
ForgejoPullRequestComment.model_rebuild()
|
542
openapi_mcp_server/tools/forgejo/routes.py
Normal file
542
openapi_mcp_server/tools/forgejo/routes.py
Normal file
|
@ -0,0 +1,542 @@
|
|||
"""FastAPI routes for the Forgejo tool.
|
||||
|
||||
Defines HTTP endpoints for interacting with Forgejo instances, providing access to
|
||||
repositories, branches, files, commits, workflow runs, issues, and pull requests.
|
||||
|
||||
Routes handle authentication via configured tokens and return Pydantic models for
|
||||
type safety and OpenAPI documentation. Includes proper error handling and consistent
|
||||
timeout configurations across all endpoints.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import httpx
|
||||
from fastapi import APIRouter, HTTPException, Response
|
||||
|
||||
from openapi_mcp_server.core.config import get_forgejo_config
|
||||
from openapi_mcp_server.tools.base import BaseTool
|
||||
from openapi_mcp_server.tools.forgejo.models import (
|
||||
ForgejoBranch,
|
||||
ForgejoCommit,
|
||||
ForgejoFile,
|
||||
ForgejoInstance,
|
||||
ForgejoIssue,
|
||||
ForgejoIssueComment,
|
||||
ForgejoJobLog,
|
||||
ForgejoPullRequest,
|
||||
ForgejoPullRequestComment,
|
||||
ForgejoRepository,
|
||||
ForgejoVersion,
|
||||
ForgejoWorkflowJob,
|
||||
ForgejoWorkflowRun,
|
||||
)
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get("/instances", response_model=list[ForgejoInstance])
|
||||
async def list_forgejo_instances() -> list[ForgejoInstance]:
|
||||
"""List the Forgejo instances for which this application has authentication tokens.
|
||||
|
||||
Returns:
|
||||
list[ForgejoInstance]: A list of Forgejo instances with configured tokens.
|
||||
"""
|
||||
return [
|
||||
ForgejoInstance(base_url=url) for url in get_forgejo_config().get_configured_instance_urls()
|
||||
]
|
||||
|
||||
|
||||
@router.get("/")
|
||||
async def read_root() -> dict:
|
||||
"""Root endpoint for the Forgejo tool.
|
||||
|
||||
Returns:
|
||||
dict: A welcome message.
|
||||
"""
|
||||
return {"message": "Forgejo tool is working!"}
|
||||
|
||||
|
||||
@router.get("/version", response_model=ForgejoVersion)
|
||||
async def get_version() -> ForgejoVersion:
|
||||
"""Get the version of the Forgejo instance.
|
||||
|
||||
Returns:
|
||||
ForgejoVersion: The version information.
|
||||
|
||||
Raises:
|
||||
HTTPException: If the request to the Forgejo instance fails.
|
||||
"""
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.get(
|
||||
f"{get_forgejo_config().base_url}/api/v1/version", timeout=10.0
|
||||
)
|
||||
response.raise_for_status() # Raise an exception for HTTP errors (4xx or 5xx)
|
||||
return ForgejoVersion(**(await response.json()))
|
||||
except httpx.RequestError as e:
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to connect to Forgejo instance: {e}"
|
||||
) from e
|
||||
except httpx.HTTPStatusError as e:
|
||||
raise HTTPException(
|
||||
status_code=e.response.status_code,
|
||||
detail=f"Forgejo API returned an error: {e.response.text}",
|
||||
) from e
|
||||
|
||||
|
||||
@router.get("/repos/{owner}/{repo}/branches", response_model=list[ForgejoBranch])
|
||||
async def list_branches(owner: str, repo: str) -> list[ForgejoBranch]:
|
||||
"""List all branches for a repository.
|
||||
|
||||
Returns:
|
||||
list[ForgejoBranch]: A list of branches.
|
||||
|
||||
Raises:
|
||||
HTTPException: If the request to the Forgejo instance fails.
|
||||
"""
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.get(
|
||||
f"{get_forgejo_config().base_url}/api/v1/repos/{owner}/{repo}/branches",
|
||||
timeout=10.0,
|
||||
)
|
||||
response.raise_for_status()
|
||||
return [ForgejoBranch(**branch) for branch in await response.json()]
|
||||
except httpx.RequestError as e:
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to connect to Forgejo instance: {e}"
|
||||
) from e
|
||||
except httpx.HTTPStatusError as e:
|
||||
raise HTTPException(
|
||||
status_code=e.response.status_code,
|
||||
detail=f"Forgejo API returned an error: {e.response.text}",
|
||||
) from e
|
||||
|
||||
|
||||
@router.get("/repos/{owner}/{repo}/contents/{filepath:path}", response_model=list[ForgejoFile])
|
||||
async def list_files_in_directory(
|
||||
owner: str, repo: str, filepath: str, ref: str | None = None
|
||||
) -> list[ForgejoFile]:
|
||||
"""List the contents of a directory or get a file's content.
|
||||
|
||||
Returns:
|
||||
list[ForgejoFile]: A list of file/directory contents.
|
||||
|
||||
Raises:
|
||||
HTTPException: If the request to the Forgejo instance fails.
|
||||
"""
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
params = {"ref": ref} if ref else {}
|
||||
response = await client.get(
|
||||
f"{get_forgejo_config().base_url}/api/v1/repos/{owner}/{repo}/contents/{filepath}",
|
||||
params=params,
|
||||
timeout=10.0,
|
||||
)
|
||||
response.raise_for_status()
|
||||
# The API returns a list if it's a directory, or a single dict if it's a file
|
||||
json_response = await response.json()
|
||||
if isinstance(json_response, list):
|
||||
return [ForgejoFile(**item) for item in json_response]
|
||||
return [ForgejoFile(**json_response)]
|
||||
except httpx.RequestError as e:
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to connect to Forgejo instance: {e}"
|
||||
) from e
|
||||
except httpx.HTTPStatusError as e:
|
||||
raise HTTPException(
|
||||
status_code=e.response.status_code,
|
||||
detail=f"Forgejo API returned an error: {e.response.text}",
|
||||
) from e
|
||||
|
||||
|
||||
@router.get("/repos/{owner}/{repo}/commits", response_model=list[ForgejoCommit])
|
||||
async def list_commits(owner: str, repo: str, sha: str | None = None) -> list[ForgejoCommit]:
|
||||
"""List commits for a repository.
|
||||
|
||||
Returns:
|
||||
list[ForgejoCommit]: A list of commits.
|
||||
|
||||
Raises:
|
||||
HTTPException: If the request to the Forgejo instance fails.
|
||||
"""
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
params = {"sha": sha} if sha else {}
|
||||
response = await client.get(
|
||||
f"{get_forgejo_config().base_url}/api/v1/repos/{owner}/{repo}/commits",
|
||||
params=params,
|
||||
timeout=10.0,
|
||||
)
|
||||
response.raise_for_status()
|
||||
return [ForgejoCommit(**commit) for commit in await response.json()]
|
||||
except httpx.RequestError as e:
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to connect to Forgejo instance: {e}"
|
||||
) from e
|
||||
except httpx.HTTPStatusError as e:
|
||||
raise HTTPException(
|
||||
status_code=e.response.status_code,
|
||||
detail=f"Forgejo API returned an error: {e.response.text}",
|
||||
) from e
|
||||
|
||||
|
||||
@router.get("/repos/{owner}/{repo}/actions/tasks", response_model=list[ForgejoWorkflowRun])
|
||||
async def list_workflow_runs(owner: str, repo: str) -> list[ForgejoWorkflowRun]:
|
||||
"""List workflow runs (tasks) for a repository.
|
||||
|
||||
Returns:
|
||||
list[ForgejoWorkflowRun]: A list of workflow runs.
|
||||
|
||||
Raises:
|
||||
HTTPException: If the request to the Forgejo instance fails.
|
||||
"""
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.get(
|
||||
f"{get_forgejo_config().base_url}/api/v1/repos/{owner}/{repo}/actions/tasks",
|
||||
headers=get_forgejo_config().get_headers(get_forgejo_config().base_url),
|
||||
timeout=10.0,
|
||||
)
|
||||
response.raise_for_status()
|
||||
return [ForgejoWorkflowRun(**run) for run in response.json()]
|
||||
except httpx.RequestError as e:
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to connect to Forgejo instance: {e}"
|
||||
) from e
|
||||
except httpx.HTTPStatusError as e:
|
||||
raise HTTPException(
|
||||
status_code=e.response.status_code,
|
||||
detail=f"Forgejo API returned an error: {e.response.text}",
|
||||
) from e
|
||||
|
||||
|
||||
@router.get(
    "/repos/{owner}/{repo}/actions/runs/{run_id}/jobs", response_model=list[ForgejoWorkflowJob]
)
async def list_workflow_jobs(owner: str, repo: str, run_id: int) -> list[ForgejoWorkflowJob]:
    """List workflow jobs for a specific workflow run.

    Args:
        owner: Repository owner.
        repo: Repository name.
        run_id: Identifier of the workflow run.

    Returns:
        list[ForgejoWorkflowJob]: A list of workflow jobs.

    Raises:
        HTTPException: If the request to the Forgejo instance fails.
    """
    config = get_forgejo_config()
    try:
        async with httpx.AsyncClient() as client:
            response = await client.get(
                f"{config.base_url}/api/v1/repos/{owner}/{repo}/actions/runs/{run_id}/jobs",
                headers=config.get_headers(config.base_url),
                timeout=10.0,
            )
            response.raise_for_status()
            # httpx.Response.json() is synchronous -- awaiting it raises TypeError.
            # The API returns a dictionary with a "jobs" key.
            return [ForgejoWorkflowJob(**job) for job in response.json()["jobs"]]
    except httpx.RequestError as e:
        raise HTTPException(
            status_code=500, detail=f"Failed to connect to Forgejo instance: {e}"
        ) from e
    except httpx.HTTPStatusError as e:
        raise HTTPException(
            status_code=e.response.status_code,
            detail=f"Forgejo API returned an error: {e.response.text}",
        ) from e
|
||||
|
||||
|
||||
@router.get("/repos/{owner}/{repo}/actions/jobs/{job_id}/log", response_model=ForgejoJobLog)
|
||||
async def get_job_log(owner: str, repo: str, job_id: int) -> ForgejoJobLog:
|
||||
"""Get the log for a specific workflow job.
|
||||
|
||||
Returns:
|
||||
ForgejoJobLog: The log content of the job.
|
||||
|
||||
Raises:
|
||||
HTTPException: If the request to the Forgejo instance fails.
|
||||
"""
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.get(
|
||||
f"{get_forgejo_config().base_url}/api/v1/repos/{owner}/{repo}/actions/jobs/{job_id}/log",
|
||||
headers=get_forgejo_config().get_headers(get_forgejo_config().base_url),
|
||||
timeout=10.0,
|
||||
)
|
||||
response.raise_for_status()
|
||||
return ForgejoJobLog(log_content=response.text)
|
||||
except httpx.RequestError as e:
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to connect to Forgejo instance: {e}"
|
||||
) from e
|
||||
except httpx.HTTPStatusError as e:
|
||||
raise HTTPException(
|
||||
status_code=e.response.status_code,
|
||||
detail=f"Forgejo API returned an error: {e.response.text}",
|
||||
) from e
|
||||
|
||||
|
||||
@router.get("/repos/{owner}/{repo}/issues", response_model=list[ForgejoIssue])
|
||||
async def list_issues(owner: str, repo: str, state: str | None = None) -> list[ForgejoIssue]:
|
||||
"""List issues for a repository.
|
||||
|
||||
Returns:
|
||||
list[ForgejoIssue]: A list of issues.
|
||||
|
||||
Raises:
|
||||
HTTPException: If the request to the Forgejo instance fails.
|
||||
"""
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
params = {"state": state} if state else {}
|
||||
response = await client.get(
|
||||
f"{get_forgejo_config().base_url}/api/v1/repos/{owner}/{repo}/issues",
|
||||
headers=get_forgejo_config().get_headers(get_forgejo_config().base_url),
|
||||
params=params,
|
||||
timeout=10.0,
|
||||
)
|
||||
response.raise_for_status()
|
||||
return [ForgejoIssue(**issue) for issue in await response.json()]
|
||||
except httpx.RequestError as e:
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to connect to Forgejo instance: {e}"
|
||||
) from e
|
||||
except httpx.HTTPStatusError as e:
|
||||
raise HTTPException(
|
||||
status_code=e.response.status_code,
|
||||
detail=f"Forgejo API returned an error: {e.response.text}",
|
||||
) from e
|
||||
|
||||
|
||||
@router.get("/repos/{owner}/{repo}/issues/{index}", response_model=ForgejoIssue)
|
||||
async def get_issue(owner: str, repo: str, index: int) -> ForgejoIssue:
|
||||
"""Get a specific issue from a repository.
|
||||
|
||||
Returns:
|
||||
ForgejoIssue: The issue details.
|
||||
|
||||
Raises:
|
||||
HTTPException: If the request to the Forgejo instance fails.
|
||||
"""
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.get(
|
||||
f"{get_forgejo_config().base_url}/api/v1/repos/{owner}/{repo}/issues/{index}",
|
||||
headers=get_forgejo_config().get_headers(get_forgejo_config().base_url),
|
||||
timeout=10.0,
|
||||
)
|
||||
response.raise_for_status()
|
||||
return ForgejoIssue(**(await response.json()))
|
||||
except httpx.RequestError as e:
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to connect to Forgejo instance: {e}"
|
||||
) from e
|
||||
except httpx.HTTPStatusError as e:
|
||||
raise HTTPException(
|
||||
status_code=e.response.status_code,
|
||||
detail=f"Forgejo API returned an error: {e.response.text}",
|
||||
) from e
|
||||
|
||||
|
||||
@router.get(
    "/repos/{owner}/{repo}/issues/{index}/comments", response_model=list[ForgejoIssueComment]
)
async def list_issue_comments(owner: str, repo: str, index: int) -> list[ForgejoIssueComment]:
    """List comments for a specific issue.

    Args:
        owner: Repository owner.
        repo: Repository name.
        index: Issue number within the repository.

    Returns:
        list[ForgejoIssueComment]: A list of issue comments.

    Raises:
        HTTPException: If the request to the Forgejo instance fails.
    """
    config = get_forgejo_config()
    try:
        async with httpx.AsyncClient() as client:
            response = await client.get(
                f"{config.base_url}/api/v1/repos/{owner}/{repo}/issues/{index}/comments",
                headers=config.get_headers(config.base_url),
                timeout=10.0,
            )
            response.raise_for_status()
            # httpx.Response.json() is synchronous -- awaiting it raises TypeError.
            return [ForgejoIssueComment(**comment) for comment in response.json()]
    except httpx.RequestError as e:
        raise HTTPException(
            status_code=500, detail=f"Failed to connect to Forgejo instance: {e}"
        ) from e
    except httpx.HTTPStatusError as e:
        raise HTTPException(
            status_code=e.response.status_code,
            detail=f"Forgejo API returned an error: {e.response.text}",
        ) from e
|
||||
|
||||
|
||||
@router.get("/repos/{owner}/{repo}/pulls", response_model=list[ForgejoPullRequest])
|
||||
async def list_pull_requests(
|
||||
owner: str, repo: str, state: str | None = None
|
||||
) -> list[ForgejoPullRequest]:
|
||||
"""List pull requests for a repository.
|
||||
|
||||
Returns:
|
||||
list[ForgejoPullRequest]: A list of pull requests.
|
||||
|
||||
Raises:
|
||||
HTTPException: If the request to the Forgejo instance fails.
|
||||
"""
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
params = {"state": state} if state else {}
|
||||
response = await client.get(
|
||||
f"{get_forgejo_config().base_url}/api/v1/repos/{owner}/{repo}/pulls",
|
||||
headers=get_forgejo_config().get_headers(get_forgejo_config().base_url),
|
||||
params=params,
|
||||
timeout=10.0,
|
||||
)
|
||||
response.raise_for_status()
|
||||
return [ForgejoPullRequest(**pr) for pr in await response.json()]
|
||||
except httpx.RequestError as e:
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to connect to Forgejo instance: {e}"
|
||||
) from e
|
||||
except httpx.HTTPStatusError as e:
|
||||
raise HTTPException(
|
||||
status_code=e.response.status_code,
|
||||
detail=f"Forgejo API returned an error: {e.response.text}",
|
||||
) from e
|
||||
|
||||
|
||||
@router.get("/repos/{owner}/{repo}/pulls/{index}", response_model=ForgejoPullRequest)
|
||||
async def get_pull_request(owner: str, repo: str, index: int) -> ForgejoPullRequest:
|
||||
"""Get a specific pull request from a repository.
|
||||
|
||||
Returns:
|
||||
ForgejoPullRequest: The pull request details.
|
||||
|
||||
Raises:
|
||||
HTTPException: If the request to the Forgejo instance fails.
|
||||
"""
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.get(
|
||||
f"{get_forgejo_config().base_url}/api/v1/repos/{owner}/{repo}/pulls/{index}",
|
||||
headers=get_forgejo_config().get_headers(get_forgejo_config().base_url),
|
||||
timeout=10.0,
|
||||
)
|
||||
response.raise_for_status()
|
||||
return ForgejoPullRequest(**(await response.json()))
|
||||
except httpx.RequestError as e:
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to connect to Forgejo instance: {e}"
|
||||
) from e
|
||||
except httpx.HTTPStatusError as e:
|
||||
raise HTTPException(
|
||||
status_code=e.response.status_code,
|
||||
detail=f"Forgejo API returned an error: {e.response.text}",
|
||||
) from e
|
||||
|
||||
|
||||
@router.get(
    "/repos/{owner}/{repo}/pulls/{index}/reviews", response_model=list[ForgejoPullRequestComment]
)
async def list_pull_request_comments(
    owner: str, repo: str, index: int
) -> list[ForgejoPullRequestComment]:
    """List comments for a specific pull request review.

    Args:
        owner: Repository owner.
        repo: Repository name.
        index: Pull request number within the repository.

    Returns:
        list[ForgejoPullRequestComment]: A list of pull request comments.

    Raises:
        HTTPException: If the request to the Forgejo instance fails.
    """
    config = get_forgejo_config()
    try:
        async with httpx.AsyncClient() as client:
            response = await client.get(
                f"{config.base_url}/api/v1/repos/{owner}/{repo}/pulls/{index}/reviews",
                headers=config.get_headers(config.base_url),
                timeout=10.0,
            )
            response.raise_for_status()
            # httpx.Response.json() is synchronous -- awaiting it raises TypeError.
            return [ForgejoPullRequestComment(**comment) for comment in response.json()]
    except httpx.RequestError as e:
        raise HTTPException(
            status_code=500, detail=f"Failed to connect to Forgejo instance: {e}"
        ) from e
    except httpx.HTTPStatusError as e:
        raise HTTPException(
            status_code=e.response.status_code,
            detail=f"Forgejo API returned an error: {e.response.text}",
        ) from e
|
||||
|
||||
|
||||
@router.get("/repos/search", response_model=list[ForgejoRepository])
|
||||
async def search_repos(q: str | None = None) -> list[ForgejoRepository]:
|
||||
"""Search for repositories.
|
||||
|
||||
Returns:
|
||||
list[ForgejoRepository]: A list of matching repositories.
|
||||
|
||||
Raises:
|
||||
HTTPException: If the request to the Forgejo instance fails.
|
||||
"""
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
params = {"q": q} if q else {}
|
||||
response = await client.get(
|
||||
f"{get_forgejo_config().base_url}/api/v1/repos/search",
|
||||
params=params,
|
||||
timeout=10.0,
|
||||
)
|
||||
response.raise_for_status()
|
||||
return response.json()
|
||||
except httpx.RequestError as e:
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to connect to Forgejo instance: {e}"
|
||||
) from e
|
||||
except httpx.HTTPStatusError as e:
|
||||
raise HTTPException(
|
||||
status_code=e.response.status_code,
|
||||
detail=f"Forgejo API returned an error: {e.response.text}",
|
||||
) from e
|
||||
|
||||
|
||||
@router.get("/repos/{owner}/{repo}/raw/{filepath:path}")
|
||||
async def get_raw_file(owner: str, repo: str, filepath: str) -> Response:
|
||||
"""Get the raw content of a file from a repository.
|
||||
|
||||
Returns:
|
||||
Response: The raw content of the file.
|
||||
|
||||
Raises:
|
||||
HTTPException: If the request to the Forgejo instance fails.
|
||||
"""
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.get(
|
||||
f"{get_forgejo_config().base_url}/api/v1/repos/{owner}/{repo}/raw/{filepath}",
|
||||
headers=get_forgejo_config().get_headers(get_forgejo_config().base_url),
|
||||
timeout=10.0,
|
||||
)
|
||||
response.raise_for_status()
|
||||
return Response(content=response.content, media_type=response.headers["Content-Type"])
|
||||
except httpx.RequestError as e:
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to connect to Forgejo instance: {e}"
|
||||
) from e
|
||||
except httpx.HTTPStatusError as e:
|
||||
raise HTTPException(
|
||||
status_code=e.response.status_code,
|
||||
detail=f"Forgejo API returned an error: {e.response.text}",
|
||||
) from e
|
||||
|
||||
|
||||
class ForgejoTool(BaseTool):
    """Tool wrapper that exposes the Forgejo API routes to the application."""

    def __init__(self) -> None:
        """Initialise the Forgejo tool with its name and description."""
        super().__init__("forgejo", "Tool for interacting with Forgejo instances.")

    def get_router(self) -> APIRouter:
        """Return the FastAPI router containing this tool's endpoints.

        Returns:
            APIRouter: The module-level router with all Forgejo routes registered.
        """
        return router


tool = ForgejoTool()
|
|
@ -1,3 +1,8 @@
|
|||
"""Memory/storage functionality tool."""
|
||||
"""This module initialises the memory tool package.
|
||||
|
||||
It provides functionalities for storing, retrieving, and managing memories
|
||||
and their associated entities. This tool is designed to act as a simple
|
||||
knowledge base for the system.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
|
|
@ -1,4 +1,9 @@
|
|||
"""Data models for memory/storage tool."""
|
||||
"""This module defines the Pydantic models for the Memory tool.
|
||||
|
||||
These models are used to structure and validate the data for creating, retrieving,
|
||||
and managing memories and their associated entities. The models ensure that the
|
||||
API requests and responses for the Memory tool are consistent and well-defined.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
@ -6,7 +11,14 @@ from pydantic import BaseModel, Field
|
|||
|
||||
|
||||
class Memory(BaseModel):
|
||||
"""A timestamped fact that references one or more entities."""
|
||||
"""Represents a single, timestamped memory associated with one or more entities.
|
||||
|
||||
Attributes:
|
||||
id: A unique identifier for the memory.
|
||||
content: The detailed information or fact being stored.
|
||||
entities: A list of entity names that this memory references.
|
||||
timestamp: The ISO timestamp indicating when the memory was created.
|
||||
"""
|
||||
|
||||
id: str = Field(..., description="Unique identifier for this memory")
|
||||
content: str = Field(
|
||||
|
@ -20,7 +32,13 @@ class Memory(BaseModel):
|
|||
|
||||
|
||||
class Entity(BaseModel):
|
||||
"""Simple entity reference."""
|
||||
"""Represents an entity that can be associated with memories.
|
||||
|
||||
Attributes:
|
||||
name: The name of the entity.
|
||||
entity_type: The type of the entity (e.g., 'person', 'place').
|
||||
memory_count: The number of memories that reference this entity.
|
||||
"""
|
||||
|
||||
name: str = Field(..., description="The name of the entity")
|
||||
entity_type: str = Field(default="general", description="The type of the entity")
|
||||
|
@ -28,14 +46,24 @@ class Entity(BaseModel):
|
|||
|
||||
|
||||
class MemoryGraph(BaseModel):
|
||||
"""Collection of memories and entities."""
|
||||
"""Represents a collection of memories and entities.
|
||||
|
||||
Attributes:
|
||||
memories: A list of memories.
|
||||
entities: A list of entities.
|
||||
"""
|
||||
|
||||
memories: list[Memory]
|
||||
entities: list[Entity]
|
||||
|
||||
|
||||
class CreateMemoryRequest(BaseModel):
|
||||
"""Request to create a new memory."""
|
||||
"""Represents a request to create a new memory.
|
||||
|
||||
Attributes:
|
||||
content: The detailed information to be stored in the memory.
|
||||
entities: A list of entity names to associate with the memory.
|
||||
"""
|
||||
|
||||
content: str = Field(
|
||||
...,
|
||||
|
@ -48,27 +76,50 @@ class CreateMemoryRequest(BaseModel):
|
|||
|
||||
|
||||
class SearchMemoryRequest(BaseModel):
|
||||
"""Request to search memories."""
|
||||
"""Represents a request to search for memories.
|
||||
|
||||
Attributes:
|
||||
query: The search term to find within memory content or entity names.
|
||||
limit: The maximum number of memories to return.
|
||||
"""
|
||||
|
||||
query: str = Field(..., description="Search term to find in memory content or entity names")
|
||||
limit: int = Field(default=10, description="Maximum number of memories to return")
|
||||
|
||||
|
||||
class GetEntityRequest(BaseModel):
|
||||
"""Request to get memories for specific entities."""
|
||||
"""Represents a request to retrieve memories for specific entities.
|
||||
|
||||
Attributes:
|
||||
entities: A list of entity names to retrieve memories for.
|
||||
limit: The maximum number of memories to return per entity.
|
||||
"""
|
||||
|
||||
entities: list[str] = Field(..., description="List of entity names to retrieve memories for")
|
||||
limit: int = Field(default=5, description="Maximum number of memories to return")
|
||||
|
||||
|
||||
class DeleteMemoryRequest(BaseModel):
|
||||
"""Request to delete specific memories."""
|
||||
"""Represents a request to delete specific memories.
|
||||
|
||||
Attributes:
|
||||
memory_ids: A list of memory IDs to be deleted.
|
||||
"""
|
||||
|
||||
memory_ids: list[str] = Field(..., description="List of memory IDs to delete")
|
||||
|
||||
|
||||
class MemorySummary(BaseModel):
|
||||
"""Summary statistics about stored memories."""
|
||||
"""Represents summary statistics about the stored memories.
|
||||
|
||||
Attributes:
|
||||
total_memories: The total number of memories stored.
|
||||
total_entities: The total number of unique entities.
|
||||
oldest_memory: The ISO timestamp of the oldest memory.
|
||||
latest_memory: The ISO timestamp of the latest memory.
|
||||
memory_timespan_days: The number of days between the oldest and latest memory.
|
||||
top_entities: A list of the most frequently referenced entities.
|
||||
"""
|
||||
|
||||
total_memories: int = Field(..., description="Total number of memories stored")
|
||||
total_entities: int = Field(..., description="Total number of unique entities")
|
||||
|
|
|
@ -1,4 +1,10 @@
|
|||
"""API routes for memory/storage tool."""
|
||||
"""This module defines the API routes for the Memory tool.
|
||||
|
||||
It provides a comprehensive set of endpoints for managing a simple memory system.
|
||||
This includes creating, retrieving, searching, and deleting memories, as well as
|
||||
getting summary statistics. The tool is designed to store and recall timestamped
|
||||
facts associated with various entities.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
@ -27,10 +33,18 @@ from .storage import (
|
|||
|
||||
|
||||
class MemoryTool(BaseTool):
|
||||
"""Simplified memory system for storing timestamped facts."""
|
||||
"""A tool for managing a simple, file-based memory system.
|
||||
|
||||
This tool allows for the storage and retrieval of timestamped facts, which are
|
||||
associated with one or more entities. It provides a structured way to manage
|
||||
a knowledge base over time.
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the memory tool."""
|
||||
"""Initialises the MemoryTool.
|
||||
|
||||
Sets up the tool's name and a description of its capabilities.
|
||||
"""
|
||||
super().__init__(
|
||||
name="memory",
|
||||
description="A simple memory system for storing timestamped facts about entities",
|
||||
|
@ -38,10 +52,18 @@ class MemoryTool(BaseTool):
|
|||
|
||||
@staticmethod
|
||||
def create_memory(req: CreateMemoryRequest) -> Memory:
|
||||
"""Store a new memory/fact.
|
||||
"""Stores a new memory or fact in the memory graph.
|
||||
|
||||
This method creates a new memory with a unique ID and timestamp. It also
|
||||
updates the associated entities, creating them if they do not already
|
||||
exist, and increments their memory counts.
|
||||
|
||||
Args:
|
||||
req: A CreateMemoryRequest object containing the content of the memory
|
||||
and the entities it is associated with.
|
||||
|
||||
Returns:
|
||||
Memory: The newly created memory with auto-generated timestamp and ID.
|
||||
The newly created Memory object.
|
||||
"""
|
||||
graph = read_memory_graph()
|
||||
|
||||
|
@ -71,10 +93,15 @@ class MemoryTool(BaseTool):
|
|||
|
||||
@staticmethod
|
||||
def get_all_memories(limit: int = 20) -> MemoryGraph:
|
||||
"""Get all memories and entities.
|
||||
"""Retrieves all stored memories and entities.
|
||||
|
||||
The memories are sorted by timestamp in descending order (newest first).
|
||||
|
||||
Args:
|
||||
limit: The maximum number of memories to return.
|
||||
|
||||
Returns:
|
||||
MemoryGraph: All stored memories and entities, sorted by timestamp (newest first).
|
||||
A MemoryGraph object containing all stored memories and entities.
|
||||
"""
|
||||
graph = read_memory_graph()
|
||||
# Sort memories by timestamp (newest first)
|
||||
|
@ -85,10 +112,17 @@ class MemoryTool(BaseTool):
|
|||
|
||||
@staticmethod
|
||||
def search_memories(req: SearchMemoryRequest) -> MemoryGraph:
|
||||
"""Search memories by content or entity names.
|
||||
"""Searches for memories by their content or associated entity names.
|
||||
|
||||
The search is case-insensitive. The results are sorted by timestamp in
|
||||
descending order (newest first).
|
||||
|
||||
Args:
|
||||
req: A SearchMemoryRequest object containing the search query and limit.
|
||||
|
||||
Returns:
|
||||
MemoryGraph: Filtered memories matching the search query.
|
||||
A MemoryGraph object containing the memories and entities that match
|
||||
the search query.
|
||||
"""
|
||||
graph = read_memory_graph()
|
||||
query = req.query.lower()
|
||||
|
@ -119,10 +153,16 @@ class MemoryTool(BaseTool):
|
|||
|
||||
@staticmethod
|
||||
def get_entity_memories(req: GetEntityRequest) -> MemoryGraph:
|
||||
"""Get memories for specific entities.
|
||||
"""Retrieves all memories associated with specific entities.
|
||||
|
||||
The results are sorted by timestamp in descending order (newest first).
|
||||
|
||||
Args:
|
||||
req: A GetEntityRequest object containing the list of entity names.
|
||||
|
||||
Returns:
|
||||
MemoryGraph: Memories that reference the specified entities.
|
||||
A MemoryGraph object containing the memories and entities associated
|
||||
with the specified entity names.
|
||||
"""
|
||||
graph = read_memory_graph()
|
||||
|
||||
|
@ -145,10 +185,18 @@ class MemoryTool(BaseTool):
|
|||
|
||||
@staticmethod
|
||||
def delete_memories(req: DeleteMemoryRequest) -> dict[str, str]:
|
||||
"""Delete specific memories by ID.
|
||||
"""Deletes specific memories from the memory graph by their IDs.
|
||||
|
||||
After deleting the memories, this method recalculates the memory counts
|
||||
for all entities and removes any entities that are no longer referenced
|
||||
by any memories.
|
||||
|
||||
Args:
|
||||
req: A DeleteMemoryRequest object containing the list of memory IDs
|
||||
to be deleted.
|
||||
|
||||
Returns:
|
||||
dict: Success message with count of deleted memories.
|
||||
A dictionary with a message indicating how many memories were deleted.
|
||||
"""
|
||||
graph = read_memory_graph()
|
||||
original_count = len(graph.memories)
|
||||
|
@ -183,10 +231,14 @@ class MemoryTool(BaseTool):
|
|||
|
||||
@staticmethod
|
||||
def get_summary() -> MemorySummary:
|
||||
"""Get summary statistics about stored memories.
|
||||
"""Retrieves a summary of the memory graph statistics.
|
||||
|
||||
This includes the total number of memories and entities, the timestamps of
|
||||
the oldest and latest memories, the timespan in days, and a list of the
|
||||
most frequently referenced entities.
|
||||
|
||||
Returns:
|
||||
MemorySummary: Statistics about memories and entities.
|
||||
A MemorySummary object containing the statistics.
|
||||
"""
|
||||
graph = read_memory_graph()
|
||||
|
||||
|
@ -226,7 +278,13 @@ class MemoryTool(BaseTool):
|
|||
)
|
||||
|
||||
def get_router(self) -> APIRouter:
|
||||
"""Return the FastAPI router for memory tool endpoints."""
|
||||
"""Creates and returns the FastAPI router for the memory tool.
|
||||
|
||||
The router exposes endpoints for all the memory management operations.
|
||||
|
||||
Returns:
|
||||
An APIRouter instance with the defined memory tool endpoints.
|
||||
"""
|
||||
router = APIRouter()
|
||||
|
||||
router.add_api_route(
|
||||
|
|
|
@ -1,38 +1,44 @@
|
|||
"""Storage implementation for memory tool."""
|
||||
"""This module handles the storage and retrieval of the memory graph.
|
||||
|
||||
It provides functions for reading from and writing to a JSON file that persists
|
||||
the state of the Memory tool. This includes all memories and entities. The
|
||||
module also contains utility functions for generating unique memory IDs and
|
||||
timestamps, ensuring that all memories are properly tracked and organised.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
from datetime import UTC, datetime
|
||||
from pathlib import Path
|
||||
|
||||
from fastapi import HTTPException
|
||||
|
||||
from .models import Entity, Memory, MemoryGraph
|
||||
from openapi_mcp_server.core.config import get_app_config
|
||||
|
||||
MEMORY_FILE_PATH_ENV = os.getenv("MEMORY_FILE_PATH", "memory.json")
|
||||
MEMORY_FILE_PATH = Path(
|
||||
MEMORY_FILE_PATH_ENV
|
||||
if Path(MEMORY_FILE_PATH_ENV).is_absolute()
|
||||
else Path(__file__).parent / MEMORY_FILE_PATH_ENV
|
||||
)
|
||||
from .models import Entity, Memory, MemoryGraph
|
||||
|
||||
|
||||
def read_memory_graph() -> MemoryGraph:
|
||||
"""Read the memory graph from file.
|
||||
"""Reads the memory graph from the configured JSON file.
|
||||
|
||||
If the memory file does not exist, it returns an empty MemoryGraph. If the
|
||||
file is corrupted or in a legacy format, it also returns an empty graph.
|
||||
|
||||
Returns:
|
||||
MemoryGraph: The memory graph loaded from storage.
|
||||
A MemoryGraph object loaded from the storage file.
|
||||
|
||||
Raises:
|
||||
HTTPException: If the memory file is not found or permission is denied.
|
||||
HTTPException: If there is a permission error when reading the file.
|
||||
"""
|
||||
if not MEMORY_FILE_PATH.exists():
|
||||
app_config = get_app_config()
|
||||
memory_file_path = Path(app_config.memory_file_path)
|
||||
|
||||
if not memory_file_path.exists():
|
||||
return MemoryGraph(memories=[], entities=[])
|
||||
|
||||
try:
|
||||
with MEMORY_FILE_PATH.open(encoding="utf-8") as f:
|
||||
with memory_file_path.open(encoding="utf-8") as f:
|
||||
data = json.load(f)
|
||||
|
||||
memories = [Memory(**m) for m in data.get("memories", [])]
|
||||
|
@ -42,7 +48,7 @@ def read_memory_graph() -> MemoryGraph:
|
|||
except PermissionError as e:
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail=f"Permission denied when reading memory file: {MEMORY_FILE_PATH}",
|
||||
detail=f"Permission denied when reading memory file: {memory_file_path}",
|
||||
) from e
|
||||
except json.JSONDecodeError:
|
||||
# Handle legacy format or corrupted file
|
||||
|
@ -50,39 +56,50 @@ def read_memory_graph() -> MemoryGraph:
|
|||
|
||||
|
||||
def save_memory_graph(graph: MemoryGraph) -> None:
|
||||
"""Save the memory graph to file.
|
||||
"""Saves the provided memory graph to the configured JSON file.
|
||||
|
||||
Args:
|
||||
graph: The MemoryGraph object to be saved.
|
||||
|
||||
Raises:
|
||||
HTTPException: If the memory file is not found or permission is denied.
|
||||
HTTPException: If there is a permission error when writing to the file.
|
||||
"""
|
||||
app_config = get_app_config()
|
||||
memory_file_path = Path(app_config.memory_file_path)
|
||||
|
||||
data = {
|
||||
"memories": [m.model_dump() for m in graph.memories],
|
||||
"entities": [e.model_dump() for e in graph.entities],
|
||||
}
|
||||
|
||||
try:
|
||||
with MEMORY_FILE_PATH.open("w", encoding="utf-8") as f:
|
||||
memory_file_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
with memory_file_path.open("w", encoding="utf-8") as f:
|
||||
json.dump(data, f, indent=2)
|
||||
except PermissionError as e:
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail=f"Permission denied when writing to memory file: {MEMORY_FILE_PATH}",
|
||||
detail=f"Permission denied when writing to memory file: {memory_file_path}",
|
||||
) from e
|
||||
|
||||
|
||||
def generate_memory_id() -> str:
|
||||
"""Generate a unique ID for a memory.
|
||||
"""Generates a unique, timestamp-based ID for a new memory.
|
||||
|
||||
The format of the ID is 'mem_YYYYMMDD_HHMMSS_ffffff'.
|
||||
|
||||
Returns:
|
||||
str: Unique memory identifier with timestamp.
|
||||
A unique string identifier for a memory.
|
||||
"""
|
||||
return f"mem_{datetime.now(UTC).strftime('%Y%m%d_%H%M%S_%f')}"
|
||||
|
||||
|
||||
def get_current_timestamp() -> str:
|
||||
"""Get current UTC timestamp in ISO format.
|
||||
"""Gets the current UTC timestamp in ISO 8601 format.
|
||||
|
||||
The timestamp is appended with a 'Z' to indicate UTC.
|
||||
|
||||
Returns:
|
||||
str: UTC timestamp in ISO format with Z suffix.
|
||||
The current UTC timestamp as an ISO formatted string.
|
||||
"""
|
||||
return datetime.now(UTC).isoformat() + "Z"
|
||||
|
|
|
@ -1,3 +1,8 @@
|
|||
"""Search functionality via SearXNG."""
|
||||
"""This module initialises the SearXNG tool package.
|
||||
|
||||
It provides functionalities for performing web searches by proxying requests
|
||||
to a configured SearXNG instance. This tool allows the system to access and
|
||||
utilise information from the internet.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
|
|
@ -1,4 +1,10 @@
|
|||
"""Data models for SearXNG search tool."""
|
||||
"""This module defines the Pydantic models for the SearXNG tool.
|
||||
|
||||
These models are used to structure and validate the data for search operations,
|
||||
including search requests, search results, and responses for available categories
|
||||
and engines. The models ensure that the API requests and responses for the
|
||||
SearXNG tool are consistent and well-defined.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
@ -8,7 +14,16 @@ from pydantic import BaseModel, Field
|
|||
|
||||
|
||||
class SearchRequest(BaseModel):
|
||||
"""Request model for search operations."""
|
||||
"""Represents a request to perform a search using the SearXNG tool.
|
||||
|
||||
Attributes:
|
||||
query: The search query string.
|
||||
categories: Comma-separated list of categories to search in.
|
||||
engines: Comma-separated list of search engines to use.
|
||||
language: The language code for the search (e.g., 'en', 'de').
|
||||
format: The desired response format (e.g., 'json', 'csv').
|
||||
pageno: The page number of the search results to retrieve.
|
||||
"""
|
||||
|
||||
query: str = Field(..., description="Search query string")
|
||||
categories: str | None = Field(
|
||||
|
@ -21,7 +36,15 @@ class SearchRequest(BaseModel):
|
|||
|
||||
|
||||
class SearchResult(BaseModel):
|
||||
"""Individual search result model."""
|
||||
"""Represents a single search result returned by the SearXNG tool.
|
||||
|
||||
Attributes:
|
||||
title: The title of the search result.
|
||||
url: The URL of the search result.
|
||||
content: A snippet or summary of the result's content.
|
||||
engine: The search engine that provided the result.
|
||||
category: The category to which the result belongs.
|
||||
"""
|
||||
|
||||
title: str = Field(..., description="Result title")
|
||||
url: str = Field(..., description="Result URL")
|
||||
|
@ -31,7 +54,16 @@ class SearchResult(BaseModel):
|
|||
|
||||
|
||||
class SearchResponse(BaseModel):
|
||||
"""Response model for search operations."""
|
||||
"""Represents the full response from a search operation.
|
||||
|
||||
Attributes:
|
||||
query: The original search query.
|
||||
number_of_results: The total number of results found.
|
||||
results: A list of the search results.
|
||||
infoboxes: A list of information boxes related to the query.
|
||||
suggestions: A list of search suggestions.
|
||||
engines: A list of the engines used for the search.
|
||||
"""
|
||||
|
||||
query: str = Field(..., description="Original search query")
|
||||
number_of_results: int = Field(..., description="Total number of results found")
|
||||
|
@ -42,21 +74,36 @@ class SearchResponse(BaseModel):
|
|||
|
||||
|
||||
class CategoriesResponse(BaseModel):
|
||||
"""Response model for available categories."""
|
||||
"""Represents the response containing the available search categories.
|
||||
|
||||
Attributes:
|
||||
categories: A list of the available search categories.
|
||||
note: An optional note providing additional information.
|
||||
"""
|
||||
|
||||
categories: list[str] = Field(..., description="Available search categories")
|
||||
note: str | None = Field(None, description="Additional notes about the response")
|
||||
|
||||
|
||||
class EnginesResponse(BaseModel):
|
||||
"""Response model for available engines."""
|
||||
"""Represents the response containing the available search engines.
|
||||
|
||||
Attributes:
|
||||
engines: A list of the available search engines.
|
||||
note: An optional note providing additional information.
|
||||
"""
|
||||
|
||||
engines: list[str] = Field(..., description="Available search engines")
|
||||
note: str | None = Field(None, description="Additional notes about the response")
|
||||
|
||||
|
||||
class HealthResponse(BaseModel):
|
||||
"""Health response model for SearXNG service."""
|
||||
"""Represents the health status of the SearXNG service.
|
||||
|
||||
Attributes:
|
||||
status: The health status of the service (e.g., 'OK').
|
||||
searxng_url: The URL of the SearXNG instance being checked.
|
||||
"""
|
||||
|
||||
status: str = Field(..., description="Service health status")
|
||||
searxng_url: str = Field(..., description="SearXNG instance URL")
|
||||
|
|
|
@ -1,13 +1,19 @@
|
|||
"""API routes for SearXNG search tool."""
|
||||
"""This module defines the API routes for the SearXNG tool.
|
||||
|
||||
It provides a proxy to a SearXNG instance, allowing users to perform web
|
||||
searches across various search engines and categories. The tool also offers
|
||||
endpoints to discover the available search categories and engines from the
|
||||
configured SearXNG instance.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
import requests
|
||||
from fastapi import APIRouter, HTTPException
|
||||
|
||||
from openapi_mcp_server.core.config import get_app_config
|
||||
from openapi_mcp_server.tools.base import BaseTool
|
||||
|
||||
from .models import (
|
||||
|
@ -18,21 +24,34 @@ from .models import (
|
|||
SearchResult,
|
||||
)
|
||||
|
||||
SEARXNG_BASE_URL = os.getenv("SEARXNG_BASE_URL", "http://localhost:8080")
|
||||
|
||||
|
||||
class SearxngTool(BaseTool):
|
||||
"""SearXNG search proxy tool."""
|
||||
"""A tool for proxying search requests to a SearXNG instance.
|
||||
|
||||
This tool provides an OpenAPI-compliant interface for interacting with a
|
||||
SearXNG search server, enabling web searches and retrieval of available
|
||||
search configurations.
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the searxng tool."""
|
||||
"""Initialises the SearxngTool.
|
||||
|
||||
Sets up the tool's name and a description of its capabilities.
|
||||
"""
|
||||
super().__init__(
|
||||
name="searxng",
|
||||
description="Proxy server for SearXNG search instances with OpenAPI compatibility",
|
||||
)
|
||||
|
||||
def get_router(self) -> APIRouter:
|
||||
"""Return the FastAPI router for searxng tool endpoints."""
|
||||
"""Creates and returns the FastAPI router for the SearXNG tool.
|
||||
|
||||
The router exposes endpoints for performing searches and retrieving
|
||||
available search categories and engines.
|
||||
|
||||
Returns:
|
||||
An APIRouter instance with the defined SearXNG tool endpoints.
|
||||
"""
|
||||
router = APIRouter()
|
||||
|
||||
@router.post(
|
||||
|
@ -41,18 +60,24 @@ class SearxngTool(BaseTool):
|
|||
summary="Search the web across multiple search engines",
|
||||
)
|
||||
def search(request: SearchRequest) -> SearchResponse:
|
||||
"""Search the web across multiple search engines.
|
||||
"""Performs a web search using the configured SearXNG instance.
|
||||
|
||||
This endpoint takes a search query and various parameters to control
|
||||
the search, such as categories, engines, and language.
|
||||
|
||||
Args:
|
||||
request: A SearchRequest object containing the search parameters.
|
||||
|
||||
Returns:
|
||||
SearchResponse: Search results from SearXNG.
|
||||
A SearchResponse object containing the search results.
|
||||
"""
|
||||
return SearxngTool._perform_search(
|
||||
request.query,
|
||||
request.categories,
|
||||
request.engines,
|
||||
request.language,
|
||||
request.format,
|
||||
request.pageno,
|
||||
request.language or "en",
|
||||
request.format or "json",
|
||||
request.pageno or 1,
|
||||
)
|
||||
|
||||
@router.get(
|
||||
|
@ -61,18 +86,23 @@ class SearxngTool(BaseTool):
|
|||
summary="Get available search categories (general, images, news, etc)",
|
||||
)
|
||||
def categories() -> CategoriesResponse:
|
||||
"""Get available search categories from SearXNG.
|
||||
"""Retrieves the available search categories from the SearXNG instance.
|
||||
|
||||
If the SearXNG instance is unavailable, it returns a default list of
|
||||
common categories.
|
||||
|
||||
Returns:
|
||||
CategoriesResponse: Available search categories.
|
||||
A CategoriesResponse object containing the list of available
|
||||
search categories.
|
||||
"""
|
||||
app_config = get_app_config()
|
||||
try:
|
||||
response = requests.get(f"{SEARXNG_BASE_URL}/config", timeout=10)
|
||||
response = requests.get(f"{app_config.searxng_base_url}/config", timeout=10)
|
||||
response.raise_for_status()
|
||||
config = response.json()
|
||||
|
||||
categories = config.get("categories", [])
|
||||
return CategoriesResponse(categories=categories or [])
|
||||
return CategoriesResponse(categories=categories or [], note=None)
|
||||
|
||||
except Exception as e:
|
||||
return CategoriesResponse(
|
||||
|
@ -97,19 +127,21 @@ class SearxngTool(BaseTool):
|
|||
summary="Get list of available search engines (Google, Bing, DuckDuckGo, etc)",
|
||||
)
|
||||
def engines() -> EnginesResponse:
|
||||
"""Get available search engines from SearXNG.
|
||||
"""Retrieves the available search engines from the SearXNG instance.
|
||||
|
||||
Returns:
|
||||
EnginesResponse: Available search engines.
|
||||
An EnginesResponse object containing the list of available
|
||||
search engines.
|
||||
"""
|
||||
app_config = get_app_config()
|
||||
try:
|
||||
response = requests.get(f"{SEARXNG_BASE_URL}/config", timeout=10)
|
||||
response = requests.get(f"{app_config.searxng_base_url}/config", timeout=10)
|
||||
response.raise_for_status()
|
||||
config = response.json()
|
||||
|
||||
engines = config.get("engines", [])
|
||||
engine_names = [engine.get("name") for engine in engines if engine.get("name")]
|
||||
return EnginesResponse(engines=engine_names)
|
||||
return EnginesResponse(engines=engine_names, note=None)
|
||||
|
||||
except Exception as e:
|
||||
return EnginesResponse(
|
||||
|
@ -128,14 +160,27 @@ class SearxngTool(BaseTool):
|
|||
response_format: str = "json",
|
||||
pageno: int = 1,
|
||||
) -> SearchResponse:
|
||||
"""Internal function to perform the actual search.
|
||||
"""Performs the actual search operation by querying the SearXNG instance.
|
||||
|
||||
This static method constructs the search request, sends it to the
|
||||
SearXNG server, and processes the response.
|
||||
|
||||
Args:
|
||||
query: The search query string.
|
||||
categories: A comma-separated list of categories to search in.
|
||||
engines: A comma-separated list of search engines to use.
|
||||
language: The language code for the search.
|
||||
response_format: The desired response format (only 'json' is supported).
|
||||
pageno: The page number of the search results.
|
||||
|
||||
Returns:
|
||||
SearchResponse: Search results from SearXNG.
|
||||
A SearchResponse object containing the search results.
|
||||
|
||||
Raises:
|
||||
HTTPException: If search fails or service is unavailable.
|
||||
HTTPException: If the search request fails, the service is
|
||||
unavailable, or the response is invalid.
|
||||
"""
|
||||
app_config = get_app_config()
|
||||
params = {"q": query, "format": response_format, "lang": language, "pageno": pageno}
|
||||
|
||||
if categories:
|
||||
|
@ -145,7 +190,9 @@ class SearxngTool(BaseTool):
|
|||
params["engines"] = engines
|
||||
|
||||
try:
|
||||
response = requests.get(f"{SEARXNG_BASE_URL}/search", params=params, timeout=30)
|
||||
response = requests.get(
|
||||
f"{app_config.searxng_base_url}/search", params=params, timeout=30
|
||||
)
|
||||
response.raise_for_status()
|
||||
|
||||
if response_format.lower() != "json":
|
||||
|
|
|
@ -1,3 +1,9 @@
|
|||
"""Time-related operations tool."""
|
||||
"""This module initialises the Time tool package.
|
||||
|
||||
It provides functionalities for various time-related operations, including
|
||||
retrieving the current time, converting between timezones, calculating elapsed
|
||||
time, and parsing different timestamp formats. This tool ensures accurate and
|
||||
flexible time management within the system.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
|
|
@ -1,4 +1,10 @@
|
|||
"""Data models for time operations tool."""
|
||||
"""This module defines the Pydantic models for the Time tool.
|
||||
|
||||
These models are used to structure and validate the data for various time-related
|
||||
operations, such as time conversion, elapsed time calculation, and timestamp
|
||||
parsing. The models ensure that the API requests and responses for the Time tool
|
||||
are consistent and well-defined.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
@ -8,7 +14,13 @@ from pydantic import BaseModel, Field
|
|||
|
||||
|
||||
class ConvertTimeInput(BaseModel):
|
||||
"""Input model for converting time between timezones."""
|
||||
"""Represents the input for converting a timestamp between timezones.
|
||||
|
||||
Attributes:
|
||||
timestamp: The ISO 8601 formatted time string to be converted.
|
||||
from_tz: The original IANA timezone of the input timestamp.
|
||||
to_tz: The target IANA timezone to which the time should be converted.
|
||||
"""
|
||||
|
||||
timestamp: str = Field(
|
||||
..., description="ISO 8601 formatted time string (e.g., 2024-01-01T12:00:00Z)"
|
||||
|
@ -20,7 +32,13 @@ class ConvertTimeInput(BaseModel):
|
|||
|
||||
|
||||
class ElapsedTimeInput(BaseModel):
|
||||
"""Input model for calculating elapsed time."""
|
||||
"""Represents the input for calculating the elapsed time between two timestamps.
|
||||
|
||||
Attributes:
|
||||
start: The starting timestamp in ISO 8601 format.
|
||||
end: The ending timestamp in ISO 8601 format.
|
||||
units: The units in which to return the elapsed time.
|
||||
"""
|
||||
|
||||
start: str = Field(..., description="Start timestamp in ISO 8601 format")
|
||||
end: str = Field(..., description="End timestamp in ISO 8601 format")
|
||||
|
@ -30,7 +48,12 @@ class ElapsedTimeInput(BaseModel):
|
|||
|
||||
|
||||
class ParseTimestampInput(BaseModel):
|
||||
"""Input model for parsing timestamp strings."""
|
||||
"""Represents the input for parsing a flexible timestamp string.
|
||||
|
||||
Attributes:
|
||||
timestamp: The timestamp string to be parsed.
|
||||
timezone: The timezone to assume if none is specified in the string.
|
||||
"""
|
||||
|
||||
timestamp: str = Field(
|
||||
..., description="Flexible input timestamp string (e.g., 2024-06-01 12:00 PM)"
|
||||
|
@ -39,38 +62,64 @@ class ParseTimestampInput(BaseModel):
|
|||
|
||||
|
||||
class UnixToIsoInput(BaseModel):
|
||||
"""Input model for converting Unix timestamp to ISO format."""
|
||||
"""Represents the input for converting a Unix timestamp to ISO 8601 format.
|
||||
|
||||
Attributes:
|
||||
timestamp: The Unix epoch timestamp (seconds since 1970-01-01).
|
||||
timezone: The target timezone for the output ISO string.
|
||||
"""
|
||||
|
||||
timestamp: float = Field(..., description="Unix epoch timestamp (seconds since 1970-01-01)")
|
||||
timezone: str = Field("UTC", description="Target timezone for output (defaults to UTC)")
|
||||
|
||||
|
||||
class TimeResponse(BaseModel):
|
||||
"""Response model for UTC time."""
|
||||
"""Represents the response containing the current UTC time.
|
||||
|
||||
Attributes:
|
||||
utc: The current UTC time in ISO 8601 format.
|
||||
"""
|
||||
|
||||
utc: str = Field(..., description="UTC time in ISO format")
|
||||
|
||||
|
||||
class ConvertedTimeResponse(BaseModel):
|
||||
"""Response model for converted time."""
|
||||
"""Represents the response containing a converted time.
|
||||
|
||||
Attributes:
|
||||
converted_time: The converted time in ISO 8601 format.
|
||||
"""
|
||||
|
||||
converted_time: str = Field(..., description="Converted time in ISO format")
|
||||
|
||||
|
||||
class ElapsedTimeResponse(BaseModel):
|
||||
"""Response model for elapsed time calculation."""
|
||||
"""Represents the response for an elapsed time calculation.
|
||||
|
||||
Attributes:
|
||||
elapsed: The elapsed time in the specified units.
|
||||
unit: The unit of the elapsed time.
|
||||
"""
|
||||
|
||||
elapsed: float = Field(..., description="Elapsed time in specified units")
|
||||
unit: str = Field(..., description="Unit of elapsed time")
|
||||
|
||||
|
||||
class ParsedTimestampResponse(BaseModel):
|
||||
"""Response model for parsed timestamp."""
|
||||
"""Represents the response containing a parsed timestamp.
|
||||
|
||||
Attributes:
|
||||
utc: The parsed timestamp in UTC ISO 8601 format.
|
||||
"""
|
||||
|
||||
utc: str = Field(..., description="Parsed timestamp in UTC ISO format")
|
||||
|
||||
|
||||
class UnixToIsoResponse(BaseModel):
|
||||
"""Response model for Unix to ISO conversion."""
|
||||
"""Represents the response for a Unix to ISO conversion.
|
||||
|
||||
Attributes:
|
||||
iso_time: The ISO 8601 formatted timestamp.
|
||||
"""
|
||||
|
||||
iso_time: str = Field(..., description="ISO formatted timestamp")
|
||||
|
|
|
@ -1,4 +1,10 @@
|
|||
"""API routes for time operations tool."""
|
||||
"""This module defines the API routes for the Time tool.
|
||||
|
||||
It provides a comprehensive set of endpoints for time-related operations,
|
||||
including retrieving the current time, converting between timezones, calculating
|
||||
elapsed time, and parsing various timestamp formats. The tool is designed to be
|
||||
a reliable and secure source for time information within the system.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
@ -8,6 +14,7 @@ import pytz
|
|||
from dateutil import parser as dateutil_parser
|
||||
from fastapi import APIRouter, HTTPException
|
||||
|
||||
from openapi_mcp_server.core.config import get_app_config
|
||||
from openapi_mcp_server.tools.base import BaseTool
|
||||
|
||||
from .models import (
|
||||
|
@ -24,10 +31,17 @@ from .models import (
|
|||
|
||||
|
||||
class TimeTool(BaseTool):
|
||||
"""Secure time utilities tool."""
|
||||
"""A tool for performing various time-related operations.
|
||||
|
||||
This tool provides a suite of utilities for handling time, including
|
||||
timezone conversions, timestamp parsing, and elapsed time calculations.
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the time tool."""
|
||||
"""Initialises the TimeTool.
|
||||
|
||||
Sets up the tool's name and a description of its capabilities.
|
||||
"""
|
||||
super().__init__(
|
||||
name="time",
|
||||
description="Provides secure UTC/local time retrieval and formatting",
|
||||
|
@ -35,16 +49,17 @@ class TimeTool(BaseTool):
|
|||
|
||||
@staticmethod
|
||||
def get_current(timezone: str = "UTC") -> TimeResponse:
|
||||
"""Get current time in specified timezone (defaults to UTC).
|
||||
"""Retrieves the current time in the specified timezone.
|
||||
|
||||
Args:
|
||||
timezone: IANA timezone name (e.g. 'UTC', 'America/New_York')
|
||||
timezone: The IANA timezone name (e.g., 'UTC', 'America/New_York').
|
||||
Defaults to 'UTC'.
|
||||
|
||||
Returns:
|
||||
TimeResponse: Current time in ISO format for specified timezone.
|
||||
A TimeResponse object containing the current time in ISO format.
|
||||
|
||||
Raises:
|
||||
HTTPException: If timezone is invalid.
|
||||
HTTPException: If the provided timezone is invalid.
|
||||
"""
|
||||
try:
|
||||
tz = pytz.timezone(timezone)
|
||||
|
@ -55,16 +70,17 @@ class TimeTool(BaseTool):
|
|||
|
||||
@staticmethod
|
||||
def unix_to_iso(data: UnixToIsoInput) -> UnixToIsoResponse:
|
||||
"""Convert Unix epoch timestamp to ISO format.
|
||||
"""Converts a Unix epoch timestamp to an ISO 8601 formatted string.
|
||||
|
||||
Args:
|
||||
data: Unix timestamp and optional timezone
|
||||
data: A UnixToIsoInput object containing the Unix timestamp and an
|
||||
optional target timezone.
|
||||
|
||||
Returns:
|
||||
UnixToIsoResponse: ISO formatted timestamp.
|
||||
A UnixToIsoResponse object with the ISO formatted timestamp.
|
||||
|
||||
Raises:
|
||||
HTTPException: If timestamp or timezone is invalid.
|
||||
HTTPException: If the timestamp or timezone is invalid.
|
||||
"""
|
||||
try:
|
||||
dt = datetime.fromtimestamp(data.timestamp, tz=UTC)
|
||||
|
@ -79,13 +95,17 @@ class TimeTool(BaseTool):
|
|||
|
||||
@staticmethod
|
||||
def convert_time(data: ConvertTimeInput) -> ConvertedTimeResponse:
|
||||
"""Convert a timestamp from one timezone to another.
|
||||
"""Converts a timestamp from one timezone to another.
|
||||
|
||||
Args:
|
||||
data: A ConvertTimeInput object containing the timestamp and the
|
||||
source and target timezones.
|
||||
|
||||
Returns:
|
||||
ConvertedTimeResponse: Timestamp converted to target timezone.
|
||||
A ConvertedTimeResponse object with the converted timestamp.
|
||||
|
||||
Raises:
|
||||
HTTPException: If timezone or timestamp is invalid.
|
||||
HTTPException: If the timestamp or any of the timezones are invalid.
|
||||
"""
|
||||
try:
|
||||
from_zone = pytz.timezone(data.from_tz)
|
||||
|
@ -103,13 +123,17 @@ class TimeTool(BaseTool):
|
|||
|
||||
@staticmethod
|
||||
def elapsed_time(data: ElapsedTimeInput) -> ElapsedTimeResponse:
|
||||
"""Calculate the difference between two timestamps in chosen units.
|
||||
"""Calculates the elapsed time between two timestamps.
|
||||
|
||||
Args:
|
||||
data: An ElapsedTimeInput object containing the start and end
|
||||
timestamps and the desired units for the result.
|
||||
|
||||
Returns:
|
||||
ElapsedTimeResponse: Time difference in specified units.
|
||||
An ElapsedTimeResponse object with the calculated elapsed time.
|
||||
|
||||
Raises:
|
||||
HTTPException: If timestamps are invalid.
|
||||
HTTPException: If the timestamps are invalid.
|
||||
"""
|
||||
try:
|
||||
start_dt = dateutil_parser.parse(data.start)
|
||||
|
@ -130,13 +154,17 @@ class TimeTool(BaseTool):
|
|||
|
||||
@staticmethod
|
||||
def parse_timestamp(data: ParseTimestampInput) -> ParsedTimestampResponse:
|
||||
"""Parse human-friendly input timestamp and return standardized UTC ISO time.
|
||||
"""Parses a human-readable timestamp string into a standardised format.
|
||||
|
||||
Args:
|
||||
data: A ParseTimestampInput object containing the timestamp string
|
||||
and an optional timezone.
|
||||
|
||||
Returns:
|
||||
ParsedTimestampResponse: Standardized UTC timestamp.
|
||||
A ParsedTimestampResponse object with the timestamp in UTC ISO format.
|
||||
|
||||
Raises:
|
||||
HTTPException: If timestamp cannot be parsed.
|
||||
HTTPException: If the timestamp string cannot be parsed.
|
||||
"""
|
||||
try:
|
||||
tz = pytz.timezone(data.timezone)
|
||||
|
@ -150,11 +178,21 @@ class TimeTool(BaseTool):
|
|||
|
||||
@staticmethod
|
||||
def list_time_zones() -> list[str]:
|
||||
"""Return a list of all valid IANA time zones."""
|
||||
"""Retrieves a list of all valid IANA timezone names.
|
||||
|
||||
Returns:
|
||||
A list of strings, where each string is a valid IANA timezone.
|
||||
"""
|
||||
return list(pytz.all_timezones)
|
||||
|
||||
def get_router(self) -> APIRouter:
|
||||
"""Return the FastAPI router for time tool endpoints."""
|
||||
"""Creates and returns the FastAPI router for the time tool.
|
||||
|
||||
The router exposes endpoints for all the time-related operations.
|
||||
|
||||
Returns:
|
||||
An APIRouter instance with the defined time tool endpoints.
|
||||
"""
|
||||
router = APIRouter()
|
||||
|
||||
@router.get(
|
||||
|
@ -162,7 +200,12 @@ class TimeTool(BaseTool):
|
|||
response_model=TimeResponse,
|
||||
summary="Get current time in specified IANA timezone (defaults to UTC)",
|
||||
)
|
||||
def get_time(timezone: str = "UTC") -> TimeResponse:
|
||||
def get_time(timezone: str = get_app_config().default_timezone) -> TimeResponse:
|
||||
"""Retrieves the current time in the specified timezone.
|
||||
|
||||
Returns:
|
||||
TimeResponse: The current time in the specified timezone.
|
||||
"""
|
||||
return TimeTool.get_current(timezone)
|
||||
|
||||
@router.post(
|
||||
|
@ -171,6 +214,11 @@ class TimeTool(BaseTool):
|
|||
summary="Convert Unix epoch timestamp to ISO format",
|
||||
)
|
||||
def unix_to_iso(data: UnixToIsoInput) -> UnixToIsoResponse:
|
||||
"""Converts a Unix epoch timestamp to an ISO 8601 formatted string.
|
||||
|
||||
Returns:
|
||||
UnixToIsoResponse: The ISO 8601 formatted timestamp.
|
||||
"""
|
||||
return TimeTool.unix_to_iso(data)
|
||||
|
||||
@router.post(
|
||||
|
@ -179,6 +227,11 @@ class TimeTool(BaseTool):
|
|||
summary="Convert timestamp from one timezone to another",
|
||||
)
|
||||
def convert_time(data: ConvertTimeInput) -> ConvertedTimeResponse:
|
||||
"""Converts a timestamp from one timezone to another.
|
||||
|
||||
Returns:
|
||||
ConvertedTimeResponse: The converted timestamp.
|
||||
"""
|
||||
return TimeTool.convert_time(data)
|
||||
|
||||
@router.post(
|
||||
|
@ -187,6 +240,11 @@ class TimeTool(BaseTool):
|
|||
summary="Calculate time difference between two timestamps",
|
||||
)
|
||||
def elapsed_time(data: ElapsedTimeInput) -> ElapsedTimeResponse:
|
||||
"""Calculates the elapsed time between two timestamps.
|
||||
|
||||
Returns:
|
||||
ElapsedTimeResponse: The elapsed time.
|
||||
"""
|
||||
return TimeTool.elapsed_time(data)
|
||||
|
||||
@router.post(
|
||||
|
@ -198,10 +256,20 @@ class TimeTool(BaseTool):
|
|||
),
|
||||
)
|
||||
def parse_timestamp(data: ParseTimestampInput) -> ParsedTimestampResponse:
|
||||
"""Parses a human-readable timestamp string into a standardised format.
|
||||
|
||||
Returns:
|
||||
ParsedTimestampResponse: The parsed timestamp in UTC ISO format.
|
||||
"""
|
||||
return TimeTool.parse_timestamp(data)
|
||||
|
||||
@router.get("/list_time_zones", summary="Get list of all valid IANA timezone names")
|
||||
def list_time_zones() -> list[str]:
|
||||
"""Retrieves a list of all valid IANA timezone names.
|
||||
|
||||
Returns:
|
||||
list[str]: A list of strings, where each string is a valid IANA timezone.
|
||||
"""
|
||||
return TimeTool.list_time_zones()
|
||||
|
||||
return router
|
||||
|
|
|
@ -1,3 +1,8 @@
|
|||
"""Weather information tool."""
|
||||
"""This module initialises the Weather tool package.
|
||||
|
||||
It provides functionalities for retrieving real-time weather information and
|
||||
forecasts. This tool allows the system to access and utilise up-to-date weather
|
||||
data for various locations.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
|
|
@ -1,4 +1,11 @@
|
|||
"""Data models for weather information tool."""
|
||||
"""This module defines the Pydantic models for the Weather tool.
|
||||
|
||||
These models are used to structure and validate the data returned by the weather
|
||||
API. They include models for current weather conditions, hourly forecasts, and
|
||||
the overall weather forecast output. The models are designed to be used by the
|
||||
Weather tool's routes to ensure that the API responses are consistent and
|
||||
well-defined.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
@ -6,7 +13,13 @@ from pydantic import BaseModel, Field
|
|||
|
||||
|
||||
class CurrentWeather(BaseModel):
|
||||
"""Current weather conditions model."""
|
||||
"""Represents the current weather conditions at a specific location.
|
||||
|
||||
Attributes:
|
||||
time: The ISO 8601 formatted timestamp for the weather data.
|
||||
temperature_2m: The air temperature at 2 metres above the ground.
|
||||
wind_speed_10m: The wind speed at 10 metres above the ground.
|
||||
"""
|
||||
|
||||
time: str = Field(..., description="ISO 8601 format timestamp")
|
||||
temperature_2m: float = Field(
|
||||
|
@ -20,7 +33,14 @@ class CurrentWeather(BaseModel):
|
|||
|
||||
|
||||
class HourlyUnits(BaseModel):
|
||||
"""Units for hourly weather data."""
|
||||
"""Represents the units for the hourly weather data.
|
||||
|
||||
Attributes:
|
||||
time: The unit for the time values (e.g., "iso8601").
|
||||
temperature_2m: The unit for the temperature values (e.g., "°C").
|
||||
relative_humidity_2m: The unit for the relative humidity values (e.g., "%").
|
||||
wind_speed_10m: The unit for the wind speed values (e.g., "km/h").
|
||||
"""
|
||||
|
||||
time: str
|
||||
temperature_2m: str
|
||||
|
@ -29,7 +49,14 @@ class HourlyUnits(BaseModel):
|
|||
|
||||
|
||||
class HourlyData(BaseModel):
|
||||
"""Hourly weather data model."""
|
||||
"""Represents the hourly weather forecast data.
|
||||
|
||||
Attributes:
|
||||
time: A list of ISO 8601 formatted timestamps for the forecast.
|
||||
temperature_2m: A list of temperature values at 2 metres.
|
||||
relative_humidity_2m: A list of relative humidity values at 2 metres.
|
||||
wind_speed_10m: A list of wind speed values at 10 metres.
|
||||
"""
|
||||
|
||||
time: list[str]
|
||||
temperature_2m: list[float]
|
||||
|
@ -38,7 +65,23 @@ class HourlyData(BaseModel):
|
|||
|
||||
|
||||
class WeatherForecastOutput(BaseModel):
|
||||
"""Complete weather forecast output model."""
|
||||
"""Represents the complete weather forecast output from the API.
|
||||
|
||||
This model combines the current weather conditions, hourly forecast data,
|
||||
and metadata about the forecast location.
|
||||
|
||||
Attributes:
|
||||
latitude: The latitude of the forecast location.
|
||||
longitude: The longitude of the forecast location.
|
||||
generationtime_ms: The time taken to generate the forecast in milliseconds.
|
||||
utc_offset_seconds: The UTC offset in seconds.
|
||||
timezone: The timezone of the forecast location (e.g., "GMT").
|
||||
timezone_abbreviation: The abbreviated timezone name (e.g., "GMT").
|
||||
elevation: The elevation of the forecast location in metres.
|
||||
current: The current weather conditions.
|
||||
hourly_units: The units for the hourly forecast data.
|
||||
hourly: The hourly forecast data.
|
||||
"""
|
||||
|
||||
latitude: float
|
||||
longitude: float
|
||||
|
|
|
@ -1,4 +1,10 @@
|
|||
"""API routes for weather information tool."""
|
||||
"""This module defines the API routes for the Weather tool.
|
||||
|
||||
It provides endpoints for retrieving weather forecasts using the Open-Meteo API.
|
||||
The tool can determine the user's location from coordinates or a configured
|
||||
default location and automatically selects the appropriate temperature unit
|
||||
(Celsius or Fahrenheit) based on the country.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
@ -7,7 +13,10 @@ from typing import Annotated
|
|||
import requests
|
||||
import reverse_geocoder as rg
|
||||
from fastapi import APIRouter, HTTPException, Query
|
||||
from geopy.adapters import RequestsAdapter
|
||||
from geopy.geocoders import Nominatim
|
||||
|
||||
from openapi_mcp_server.core.config import get_app_config
|
||||
from openapi_mcp_server.tools.base import BaseTool
|
||||
|
||||
from .models import WeatherForecastOutput
|
||||
|
@ -17,17 +26,53 @@ FAHRENHEIT_COUNTRIES = {"US", "LR", "MM"}
|
|||
|
||||
|
||||
class WeatherTool(BaseTool):
|
||||
"""Weather forecast tool using Open-Meteo API."""
|
||||
"""A tool for retrieving weather forecasts using the Open-Meteo API.
|
||||
|
||||
This tool provides functionality to get the current weather and hourly
|
||||
forecast for a given geographical location.
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the weather tool."""
|
||||
"""Initialises the WeatherTool.
|
||||
|
||||
Sets up the tool's name, description, and a geolocator for converting
|
||||
location names into coordinates.
|
||||
"""
|
||||
super().__init__(
|
||||
name="weather",
|
||||
description="Provides weather retrieval by latitude and longitude using Open-Meteo",
|
||||
)
|
||||
self.geolocator = Nominatim(
|
||||
user_agent="openapi-mcp-server", adapter_factory=RequestsAdapter
|
||||
)
|
||||
|
||||
def geocode_location(self, location: str) -> tuple[float, float] | None:
|
||||
"""Converts a location name into geographical coordinates.
|
||||
|
||||
Args:
|
||||
location: The name of the location to geocode (e.g., "London, UK").
|
||||
|
||||
Returns:
|
||||
A tuple containing the latitude and longitude, or None if the
|
||||
location cannot be found.
|
||||
"""
|
||||
coords = None
|
||||
try:
|
||||
location_data = self.geolocator.geocode(location)
|
||||
if location_data:
|
||||
coords = float(location_data.latitude), float(location_data.longitude) # type: ignore[attr-defined]
|
||||
except Exception:
|
||||
pass # Geocoding failed, coords remains None
|
||||
return coords
|
||||
|
||||
def get_router(self) -> APIRouter:
|
||||
"""Return the FastAPI router for weather tool endpoints."""
|
||||
"""Creates and returns the FastAPI router for the weather tool.
|
||||
|
||||
The router exposes an endpoint for retrieving weather forecasts.
|
||||
|
||||
Returns:
|
||||
An APIRouter instance with the defined weather endpoints.
|
||||
"""
|
||||
router = APIRouter()
|
||||
|
||||
@router.get(
|
||||
|
@ -36,25 +81,49 @@ class WeatherTool(BaseTool):
|
|||
summary="Get current weather conditions and hourly forecast by coordinates",
|
||||
)
|
||||
def forecast(
|
||||
latitude: Annotated[
|
||||
float | None, Query(description="Latitude for the location (e.g. 52.52)")
|
||||
] = None,
|
||||
latitude: Annotated[float, Query(description="Latitude for the location (e.g. 52.52)")],
|
||||
longitude: Annotated[
|
||||
float | None, Query(description="Longitude for the location (e.g. 13.41)")
|
||||
] = None,
|
||||
float, Query(description="Longitude for the location (e.g. 13.41)")
|
||||
],
|
||||
) -> WeatherForecastOutput:
|
||||
"""Retrieves current weather conditions and hourly forecast for the given coordinates.
|
||||
"""Retrieves the weather forecast for a given location.
|
||||
|
||||
Temperature unit (Celsius/Fahrenheit) is determined automatically based on location.
|
||||
This endpoint fetches the current weather conditions and an hourly
|
||||
forecast. The temperature unit (Celsius or Fahrenheit) is determined
|
||||
automatically based on the location's country. If no coordinates
|
||||
are provided, it falls back to a default location if configured.
|
||||
|
||||
Args:
|
||||
latitude: The latitude of the location.
|
||||
longitude: The longitude of the location.
|
||||
|
||||
Returns:
|
||||
WeatherForecastOutput: Current weather conditions and forecast data.
|
||||
A WeatherForecastOutput object containing the current weather
|
||||
and forecast data.
|
||||
|
||||
Raises:
|
||||
HTTPException: If weather API request fails or returns invalid data.
|
||||
HTTPException: If the coordinates are missing and no default
|
||||
location is set, if the default location cannot be geocoded,
|
||||
or if there is an error communicating with the weather API.
|
||||
"""
|
||||
# If coordinates not provided, try to use default location
|
||||
if latitude is None or longitude is None:
|
||||
raise HTTPException(status_code=422, detail="Latitude and longitude are required.")
|
||||
default_location = get_app_config().default_location
|
||||
if default_location:
|
||||
coords = self.geocode_location(default_location)
|
||||
if coords:
|
||||
latitude, longitude = coords
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=422,
|
||||
detail=f"Could not geocode default location: {default_location}",
|
||||
)
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=422,
|
||||
detail="Latitude and longitude are required, "
|
||||
"and no default location is configured.",
|
||||
)
|
||||
# Determine temperature unit based on location
|
||||
try:
|
||||
geo_results = rg.search((latitude, longitude), mode=1)
|
||||
|
|
|
@ -1,3 +1,8 @@
|
|||
"""Web content parsing tool."""
|
||||
"""This module initialises the Web tool package.
|
||||
|
||||
It provides functionalities for parsing and extracting content from web pages.
|
||||
This tool enables the system to process and understand information from the
|
||||
internet, converting raw HTML into more structured and readable formats.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
|
|
@ -1,4 +1,10 @@
|
|||
"""Data models for web content parsing tool."""
|
||||
"""This module defines the Pydantic models for the Web tool.
|
||||
|
||||
These models are used to structure and validate the data for parsing web content.
|
||||
They include models for the web request, the parsed content response, and the
|
||||
raw HTML response. The models ensure that the API requests and responses are
|
||||
consistent and well-defined.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
@ -6,7 +12,17 @@ from pydantic import BaseModel, Field, HttpUrl
|
|||
|
||||
|
||||
class WebRequest(BaseModel):
|
||||
"""Request model for web content parsing."""
|
||||
"""Represents a request to parse and extract content from a URL.
|
||||
|
||||
Attributes:
|
||||
url: The URL to parse and extract content from.
|
||||
favor_recall: Whether to favour recall over precision in extraction.
|
||||
with_metadata: Whether to include metadata in the extraction.
|
||||
include_formatting: Whether to keep formatting in the output.
|
||||
include_images: Whether to include images in the output.
|
||||
include_links: Whether to include links in the output.
|
||||
include_tables: Whether to include tables in the output.
|
||||
"""
|
||||
|
||||
url: HttpUrl = Field(..., description="URL to parse and extract content from")
|
||||
favor_recall: bool | None = Field(True, description="Favor recall over precision in extraction")
|
||||
|
@ -18,7 +34,12 @@ class WebRequest(BaseModel):
|
|||
|
||||
|
||||
class WebResponse(BaseModel):
|
||||
"""Response model for parsed web content."""
|
||||
"""Represents the response with parsed web content.
|
||||
|
||||
Attributes:
|
||||
url: The original URL that was parsed.
|
||||
content: The extracted content in Markdown format with metadata frontmatter.
|
||||
"""
|
||||
|
||||
url: str = Field(..., description="Original URL that was parsed")
|
||||
content: str = Field(
|
||||
|
@ -28,7 +49,14 @@ class WebResponse(BaseModel):
|
|||
|
||||
|
||||
class WebRawResponse(BaseModel):
|
||||
"""Response model for raw web content."""
|
||||
"""Represents the response with raw web content.
|
||||
|
||||
Attributes:
|
||||
url: The original URL that was fetched.
|
||||
status_code: The HTTP status code of the response.
|
||||
headers: The response headers.
|
||||
content: The raw HTML content of the response.
|
||||
"""
|
||||
|
||||
url: str = Field(..., description="Original URL that was fetched")
|
||||
status_code: int = Field(..., description="HTTP status code")
|
||||
|
|
|
@ -1,4 +1,11 @@
|
|||
"""API routes for web content parsing tool."""
|
||||
"""This module defines the API routes for the Web tool.
|
||||
|
||||
It provides endpoints for parsing web pages to extract clean, readable content
|
||||
in Markdown format using the trafilatura library. The tool offers options to
|
||||
include or exclude various elements like metadata, formatting, images, links,
|
||||
and tables. It also provides an endpoint to fetch the raw HTML content of a
|
||||
web page.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
@ -9,23 +16,38 @@ import trafilatura
|
|||
from fastapi import APIRouter, HTTPException, Query
|
||||
from pydantic import HttpUrl # noqa: TC002
|
||||
|
||||
from openapi_mcp_server.core.config import get_app_config
|
||||
from openapi_mcp_server.tools.base import BaseTool
|
||||
|
||||
from .models import WebRawResponse, WebRequest, WebResponse
|
||||
|
||||
|
||||
class WebTool(BaseTool):
|
||||
"""Web content parsing tool."""
|
||||
"""A tool for parsing and extracting content from web pages.
|
||||
|
||||
This tool uses the trafilatura library to extract the main content from a
|
||||
web page and format it as Markdown. It provides options to control the
|
||||
extraction process and can return either the parsed content or the raw HTML.
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the web tool."""
|
||||
"""Initialises the WebTool.
|
||||
|
||||
Sets up the tool's name and a description of its capabilities.
|
||||
"""
|
||||
super().__init__(
|
||||
name="web",
|
||||
description="Extract and parse web page content into Markdown using trafilatura",
|
||||
)
|
||||
|
||||
def get_router(self) -> APIRouter:
|
||||
"""Return the FastAPI router for web tool endpoints."""
|
||||
"""Creates and returns the FastAPI router for the web tool.
|
||||
|
||||
The router exposes endpoints for parsing web pages and fetching raw HTML.
|
||||
|
||||
Returns:
|
||||
An APIRouter instance with the defined web tool endpoints.
|
||||
"""
|
||||
router = APIRouter()
|
||||
|
||||
@router.post(
|
||||
|
@ -34,18 +56,24 @@ class WebTool(BaseTool):
|
|||
summary="Extract and parse webpage content into clean markdown",
|
||||
)
|
||||
def read(request: WebRequest) -> WebResponse:
|
||||
"""Extract and parse webpage content into clean markdown.
|
||||
"""Extracts and parses the content of a web page into clean Markdown.
|
||||
|
||||
This endpoint takes a URL and a set of options to control the parsing
|
||||
process. It returns the extracted content in a structured format.
|
||||
|
||||
Args:
|
||||
request: A WebRequest object containing the URL and parsing options.
|
||||
|
||||
Returns:
|
||||
WebResponse: Parsed web content.
|
||||
A WebResponse object containing the parsed web content.
|
||||
"""
|
||||
return WebTool._parse_web_page(
|
||||
str(request.url),
|
||||
request.with_metadata,
|
||||
request.include_formatting,
|
||||
request.include_images,
|
||||
request.include_links,
|
||||
request.include_tables,
|
||||
request.with_metadata or True,
|
||||
request.include_formatting or True,
|
||||
request.include_images or True,
|
||||
request.include_links or True,
|
||||
request.include_tables or True,
|
||||
)
|
||||
|
||||
@router.get(
|
||||
|
@ -54,23 +82,31 @@ class WebTool(BaseTool):
|
|||
summary="Fetch raw HTML content and headers from any URL",
|
||||
)
|
||||
def raw(
|
||||
url: Annotated[
|
||||
HttpUrl | None, Query(description="URL to fetch raw content and headers from")
|
||||
] = None,
|
||||
url: Annotated[HttpUrl, Query(description="URL to fetch raw content and headers from")],
|
||||
) -> WebRawResponse:
|
||||
"""Fetch raw HTML content and headers from URL.
|
||||
"""Fetches the raw HTML content and headers from a given URL.
|
||||
|
||||
This endpoint is useful for retrieving the original, unprocessed content of
|
||||
a web page.
|
||||
|
||||
Args:
|
||||
url: The URL from which to fetch the raw content.
|
||||
|
||||
Returns:
|
||||
WebRawResponse: Raw HTML content and headers.
|
||||
A WebRawResponse object containing the raw HTML content, status code,
|
||||
and headers.
|
||||
|
||||
Raises:
|
||||
HTTPException: If request fails or URL is invalid.
|
||||
HTTPException: If the URL is missing, the request fails, or an
|
||||
internal error occurs.
|
||||
"""
|
||||
if not url:
|
||||
raise HTTPException(status_code=422, detail="URL query parameter is required.")
|
||||
|
||||
try:
|
||||
response = requests.get(str(url), timeout=30)
|
||||
config = get_app_config()
|
||||
headers = {"User-Agent": config.web_user_agent}
|
||||
response = requests.get(str(url), headers=headers, timeout=30)
|
||||
response.raise_for_status()
|
||||
|
||||
headers = dict(response.headers)
|
||||
|
@ -103,24 +139,37 @@ class WebTool(BaseTool):
|
|||
include_links: bool = True,
|
||||
include_tables: bool = True,
|
||||
) -> WebResponse:
|
||||
"""Internal function to perform the actual web page parsing.
|
||||
"""Performs the actual web page parsing using the trafilatura library.
|
||||
|
||||
This static method handles the fetching and extraction of content from a
|
||||
given URL based on the provided options.
|
||||
|
||||
Args:
|
||||
url: The URL of the web page to parse.
|
||||
with_metadata: Whether to include metadata in the extraction.
|
||||
include_formatting: Whether to keep formatting in the output.
|
||||
include_images: Whether to include images in the output.
|
||||
include_links: Whether to include links in the output.
|
||||
include_tables: Whether to include tables in the output.
|
||||
|
||||
Returns:
|
||||
WebResponse: Parsed web content.
|
||||
A WebResponse object containing the parsed web content.
|
||||
|
||||
Raises:
|
||||
HTTPException: If parsing fails or URL is invalid.
|
||||
HTTPException: If the URL cannot be fetched, content cannot be
|
||||
extracted, or another error occurs during parsing.
|
||||
"""
|
||||
try:
|
||||
downloaded = trafilatura.fetch_url(url)
|
||||
config = get_app_config()
|
||||
headers = {"User-Agent": config.web_user_agent}
|
||||
response = requests.get(url, headers=headers, timeout=30)
|
||||
response.raise_for_status()
|
||||
downloaded = response.text
|
||||
if not downloaded:
|
||||
raise HTTPException(
|
||||
status_code=404, detail=f"Unable to fetch content from URL: {url}"
|
||||
)
|
||||
|
||||
config = trafilatura.settings.use_config()
|
||||
config.set("DEFAULT", "EXTRACTION_TIMEOUT", "30")
|
||||
|
||||
extracted_content = trafilatura.extract(
|
||||
downloaded,
|
||||
output_format="markdown",
|
||||
|
@ -130,7 +179,6 @@ class WebTool(BaseTool):
|
|||
include_images=include_images,
|
||||
include_links=include_links,
|
||||
include_tables=include_tables,
|
||||
config=config,
|
||||
)
|
||||
|
||||
if not extracted_content:
|
||||
|
|
114
pyproject.toml
114
pyproject.toml
|
@ -3,37 +3,48 @@ name = "openapi-mcp-server"
|
|||
version = "0.1.0"
|
||||
description = "An OpenAPI-compatible server for the Multi-agent Conversation Platform (MCP)."
|
||||
readme = "README.md"
|
||||
license = { text = "MIT" }
|
||||
license = { text = "Apache-2.0" }
|
||||
authors = [{ name = "Tom Foster", email = "tom@tcpip.uk" }]
|
||||
maintainers = [{ name = "Tom Foster", email = "tom@tcpip.uk" }]
|
||||
requires-python = ">=3.13"
|
||||
classifiers = [
|
||||
"Development Status :: 4 - Beta",
|
||||
"License :: OSI Approved :: MIT License",
|
||||
"Topic :: Communications :: Chat",
|
||||
"Framework :: AsyncIO",
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.13",
|
||||
"Development Status :: 4 - Beta",
|
||||
"License :: OSI Approved :: Apache Software License",
|
||||
"Topic :: Communications :: Chat",
|
||||
"Framework :: AsyncIO",
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.13",
|
||||
]
|
||||
dependencies = [
|
||||
"fastapi>=0",
|
||||
"pydantic>=2",
|
||||
"pydantic-settings>=2",
|
||||
"python-dateutil>=2",
|
||||
"python-multipart>=0",
|
||||
"pytz>=2025",
|
||||
"requests>=2",
|
||||
"reverse-geocoder>=1",
|
||||
"trafilatura>=2",
|
||||
"uvicorn[standard]>=0",
|
||||
"fastapi>=0",
|
||||
"pydantic>=2",
|
||||
"pydantic-settings>=2",
|
||||
"python-dateutil>=2",
|
||||
"python-multipart>=0",
|
||||
"pytz>=2025",
|
||||
"httpx>=0",
|
||||
"requests>=2",
|
||||
"reverse-geocoder>=1",
|
||||
"trafilatura>=2",
|
||||
"uvicorn[standard]>=0",
|
||||
"pyyaml>=6",
|
||||
"geopy>=2",
|
||||
]
|
||||
|
||||
[project.urls]
|
||||
Homepage = "https://git.tomfos.tr/tom/openapi-mcp-server"
|
||||
|
||||
[dependency-groups]
|
||||
dev = ["ruff>=0"]
|
||||
dev = [
|
||||
"ruff>=0",
|
||||
"pytest>=8",
|
||||
"pytest-asyncio>=0",
|
||||
"pytest-cov>=0",
|
||||
"pytest-mock>=0",
|
||||
"httpx>=0",
|
||||
"respx>=0",
|
||||
]
|
||||
|
||||
[tool.uv]
|
||||
package = true
|
||||
|
@ -64,26 +75,26 @@ skip-magic-trailing-comma = false
|
|||
[tool.ruff.lint]
|
||||
fixable = ["ALL"]
|
||||
ignore = [
|
||||
"ANN401", # use of Any type
|
||||
"BLE001", # blind Exception usage
|
||||
"COM812", # missing trailing comma
|
||||
"CPY", # flake8-copyright
|
||||
"FBT", # boolean arguments
|
||||
"PLR0912", # too many branches
|
||||
"PLR0913", # too many arguments
|
||||
"PLR0915", # too many statements
|
||||
"PLR0917", # too many positional arguments
|
||||
"PLR6301", # method could be static
|
||||
"RUF029", # async methods that don't await
|
||||
"S104", # binding to all interfaces
|
||||
"S110", # passed exceptions
|
||||
"TRY301", # raise inside try block
|
||||
"ANN401", # use of Any type
|
||||
"BLE001", # blind Exception usage
|
||||
"COM812", # missing trailing comma
|
||||
"CPY", # flake8-copyright
|
||||
"FBT", # boolean arguments
|
||||
"PLR0912", # too many branches
|
||||
"PLR0913", # too many arguments
|
||||
"PLR0915", # too many statements
|
||||
"PLR0917", # too many positional arguments
|
||||
"PLR6301", # method could be static
|
||||
"RUF029", # async methods that don't await
|
||||
"S104", # binding to all interfaces
|
||||
"S110", # passed exceptions
|
||||
"TRY301", # raise inside try block
|
||||
]
|
||||
select = ["ALL"]
|
||||
unfixable = [
|
||||
"F841", # local variable assigned but never used
|
||||
"RUF100", # unused noqa comments
|
||||
"T201", # don't strip print statement
|
||||
"F841", # local variable assigned but never used
|
||||
"RUF100", # unused noqa comments
|
||||
"T201", # don't strip print statement
|
||||
]
|
||||
|
||||
[tool.ruff.lint.isort]
|
||||
|
@ -92,3 +103,34 @@ required-imports = ["from __future__ import annotations"]
|
|||
|
||||
[tool.ruff.lint.pydocstyle]
|
||||
convention = "google"
|
||||
|
||||
[tool.ruff.lint.per-file-ignores]
|
||||
"tests/**/*.py" = [
|
||||
"ANN202", # missing return type annotation for private function
|
||||
"ANN401", # use of Any type
|
||||
"PLR2004", # magic value used in comparison
|
||||
"S101", # use of assert detected
|
||||
"S106", # possible hardcoded password
|
||||
"SLF001", # private member accessed
|
||||
]
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
testpaths = ["tests"]
|
||||
python_files = ["test_*.py"]
|
||||
python_classes = ["Test*"]
|
||||
python_functions = ["test_*"]
|
||||
asyncio_mode = "auto"
|
||||
addopts = [
|
||||
"--strict-markers",
|
||||
"--strict-config",
|
||||
"--cov=openapi_mcp_server",
|
||||
"--cov-report=html",
|
||||
"--cov-report=term-missing",
|
||||
"--cov-fail-under=60",
|
||||
]
|
||||
markers = [
|
||||
"unit: Unit tests",
|
||||
"integration: Integration tests",
|
||||
"e2e: End-to-end tests",
|
||||
"slow: Slow running tests",
|
||||
]
|
||||
|
|
1
tests/__init__.py
Normal file
1
tests/__init__.py
Normal file
|
@ -0,0 +1 @@
|
|||
from __future__ import annotations
|
281
tests/conftest.py
Normal file
281
tests/conftest.py
Normal file
|
@ -0,0 +1,281 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Any
|
||||
from unittest.mock import Mock
|
||||
|
||||
import pytest
|
||||
import yaml
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
from openapi_mcp_server.server import create_app
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Generator
|
||||
|
||||
from fastapi import FastAPI
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def test_config(temp_memory_file):
|
||||
"""Override configuration values for testing."""
|
||||
original_env = os.environ.copy()
|
||||
|
||||
# Create test config file
|
||||
test_config_path = Path("test_config.yaml")
|
||||
test_config_data = {
|
||||
"app": {
|
||||
"tools": {
|
||||
"memory": {"file_path": temp_memory_file},
|
||||
"searxng": {"base_url": "https://searx.example.com"},
|
||||
"forgejo": {"tokens": {"https://forgejo.example.com": "test_token_123"}},
|
||||
"time": {"default_timezone": "UTC"},
|
||||
"weather": {"default_location": "London, UK"},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Write test config
|
||||
with test_config_path.open("w", encoding="utf-8") as f:
|
||||
yaml.dump(test_config_data, f)
|
||||
|
||||
# Set test environment variables
|
||||
os.environ.update({
|
||||
"OPENAPI_MCP_SERVER_DEBUG": "true",
|
||||
"OPENAPI_MCP_SERVER_HOST": "127.0.0.1",
|
||||
"OPENAPI_MCP_SERVER_PORT": "8080",
|
||||
"OPENAPI_MCP_SERVER_RELOAD": "false",
|
||||
})
|
||||
|
||||
# Backup and replace config
|
||||
original_config_path = Path("config.yaml")
|
||||
config_backup = None
|
||||
if original_config_path.exists():
|
||||
config_backup = original_config_path.read_text()
|
||||
original_config_path.unlink()
|
||||
|
||||
test_config_path.rename("config.yaml")
|
||||
|
||||
# Force re-initialization of AppConfig after config.yaml is in place
|
||||
from openapi_mcp_server.core.config import AppConfig, get_app_config
|
||||
|
||||
AppConfig._instance = None
|
||||
get_app_config()
|
||||
|
||||
yield
|
||||
|
||||
# Restore original environment
|
||||
os.environ.clear()
|
||||
os.environ.update(original_env)
|
||||
|
||||
# Restore config file
|
||||
if Path("config.yaml").exists():
|
||||
Path("config.yaml").unlink()
|
||||
if config_backup:
|
||||
original_config_path.write_text(config_backup)
|
||||
|
||||
# Clean up test memory file
|
||||
test_memory_path = Path("/tmp/test_memory.json")
|
||||
if test_memory_path.exists():
|
||||
test_memory_path.unlink()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def app() -> FastAPI:
|
||||
"""Create FastAPI app instance for testing."""
|
||||
return create_app()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def client(app: FastAPI) -> TestClient:
|
||||
"""Create test client for API calls."""
|
||||
return TestClient(app)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_httpx_client():
|
||||
"""Mock httpx.AsyncClient for external API calls."""
|
||||
mock_client = Mock()
|
||||
mock_response = Mock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {"message": "mocked response"}
|
||||
mock_response.text = "mocked text response"
|
||||
mock_response.content = b"mocked content"
|
||||
mock_response.headers = {"Content-Type": "application/json"}
|
||||
mock_response.raise_for_status.return_value = None
|
||||
mock_client.get.return_value = mock_response
|
||||
mock_client.post.return_value = mock_response
|
||||
return mock_client
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_requests():
|
||||
"""Mock requests module for external API calls."""
|
||||
mock_response = Mock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.json.return_value = {"message": "mocked response"}
|
||||
mock_response.text = "mocked text response"
|
||||
mock_response.content = b"mocked content"
|
||||
mock_response.headers = {"Content-Type": "application/json"}
|
||||
mock_response.raise_for_status.return_value = None
|
||||
return mock_response
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def temp_memory_file() -> Generator[str]:
|
||||
"""Create temporary file for memory storage testing."""
|
||||
with tempfile.NamedTemporaryFile(encoding="utf-8", mode="w", suffix=".json", delete=False) as f:
|
||||
temp_file = f.name
|
||||
|
||||
yield temp_file
|
||||
|
||||
# Clean up
|
||||
with contextlib.suppress(FileNotFoundError):
|
||||
os.unlink(temp_file)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def sample_memory_data() -> list[dict[str, Any]]:
|
||||
"""Sample memory data for testing."""
|
||||
return [
|
||||
{
|
||||
"id": "1",
|
||||
"content": "John likes pizza",
|
||||
"entities": ["John"],
|
||||
"timestamp": "2024-01-01T12:00:00Z",
|
||||
},
|
||||
{
|
||||
"id": "2",
|
||||
"content": "Alice works at Google",
|
||||
"entities": ["Alice", "Google"],
|
||||
"timestamp": "2024-01-01T13:00:00Z",
|
||||
},
|
||||
{
|
||||
"id": "3",
|
||||
"content": "Bob is learning Python",
|
||||
"entities": ["Bob", "Python"],
|
||||
"timestamp": "2024-01-01T14:00:00Z",
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def sample_weather_data() -> dict[str, Any]:
|
||||
"""Sample weather API response for testing."""
|
||||
return {
|
||||
"latitude": 52.52,
|
||||
"longitude": 13.41,
|
||||
"timezone": "Europe/Berlin",
|
||||
"timezone_abbreviation": "CET",
|
||||
"current": {
|
||||
"time": "2024-01-01T12:00",
|
||||
"temperature_2m": 15.5,
|
||||
"wind_speed_10m": 10.2,
|
||||
},
|
||||
"current_units": {
|
||||
"time": "iso8601",
|
||||
"temperature_2m": "°C",
|
||||
"wind_speed_10m": "km/h",
|
||||
},
|
||||
"hourly": {
|
||||
"time": [
|
||||
"2024-01-01T12:00",
|
||||
"2024-01-01T13:00",
|
||||
"2024-01-01T14:00",
|
||||
],
|
||||
"temperature_2m": [15.5, 16.0, 15.8],
|
||||
"relative_humidity_2m": [65, 62, 68],
|
||||
"wind_speed_10m": [10.2, 9.8, 11.1],
|
||||
},
|
||||
"hourly_units": {
|
||||
"time": "iso8601",
|
||||
"temperature_2m": "°C",
|
||||
"relative_humidity_2m": "%",
|
||||
"wind_speed_10m": "km/h",
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def sample_searxng_data() -> dict[str, Any]:
|
||||
"""Sample SearXNG API response for testing."""
|
||||
return {
|
||||
"query": "test query",
|
||||
"number_of_results": 2,
|
||||
"results": [
|
||||
{
|
||||
"title": "Test Result 1",
|
||||
"url": "https://example.com/1",
|
||||
"content": "This is the first test result",
|
||||
"engine": "google",
|
||||
"score": 0.95,
|
||||
},
|
||||
{
|
||||
"title": "Test Result 2",
|
||||
"url": "https://example.com/2",
|
||||
"content": "This is the second test result",
|
||||
"engine": "bing",
|
||||
"score": 0.88,
|
||||
},
|
||||
],
|
||||
"suggestions": ["test suggestion"],
|
||||
"answers": [],
|
||||
"corrections": [],
|
||||
"infoboxes": [],
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def sample_forgejo_data() -> dict[str, Any]:
|
||||
"""Sample Forgejo API response data for testing."""
|
||||
return {
|
||||
"version": "1.21.0",
|
||||
"repositories": [
|
||||
{
|
||||
"id": 1,
|
||||
"name": "test-repo",
|
||||
"full_name": "testuser/test-repo",
|
||||
"owner": {
|
||||
"id": 1,
|
||||
"login": "testuser",
|
||||
},
|
||||
"html_url": "https://forgejo.example.com/testuser/test-repo",
|
||||
"description": "Test repository",
|
||||
"fork": False,
|
||||
"empty": False,
|
||||
"private": False,
|
||||
"archived": False,
|
||||
"mirror": False,
|
||||
"size": 1024,
|
||||
"created_at": "2024-01-01T12:00:00Z",
|
||||
"updated_at": "2024-01-01T12:00:00Z",
|
||||
"pushed_at": "2024-01-01T12:00:00Z",
|
||||
"default_branch": "main",
|
||||
}
|
||||
],
|
||||
"branches": [
|
||||
{
|
||||
"name": "main",
|
||||
"commit": {
|
||||
"id": "abc123",
|
||||
"message": "Initial commit",
|
||||
"url": "https://forgejo.example.com/testuser/test-repo/commit/abc123",
|
||||
"author": {
|
||||
"name": "Test User",
|
||||
"email": "test@example.com",
|
||||
"username": "testuser",
|
||||
},
|
||||
"committer": {
|
||||
"name": "Test User",
|
||||
"email": "test@example.com",
|
||||
"username": "testuser",
|
||||
},
|
||||
"timestamp": "2024-01-01T12:00:00Z",
|
||||
},
|
||||
"protected": False,
|
||||
}
|
||||
],
|
||||
}
|
1
tests/e2e/__init__.py
Normal file
1
tests/e2e/__init__.py
Normal file
|
@ -0,0 +1 @@
|
|||
from __future__ import annotations
|
1
tests/fixtures/__init__.py
vendored
Normal file
1
tests/fixtures/__init__.py
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
from __future__ import annotations
|
176
tests/fixtures/mock_responses.py
vendored
Normal file
176
tests/fixtures/mock_responses.py
vendored
Normal file
|
@ -0,0 +1,176 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
|
||||
class MockWeatherResponse:
|
||||
"""Mock weather API response data."""
|
||||
|
||||
@staticmethod
|
||||
def success_response() -> dict[str, Any]:
|
||||
return {
|
||||
"latitude": 52.52,
|
||||
"longitude": 13.41,
|
||||
"timezone": "Europe/Berlin",
|
||||
"timezone_abbreviation": "CET",
|
||||
"current": {
|
||||
"time": "2024-01-01T12:00",
|
||||
"temperature_2m": 15.5,
|
||||
"wind_speed_10m": 10.2,
|
||||
},
|
||||
"current_units": {
|
||||
"time": "iso8601",
|
||||
"temperature_2m": "°C",
|
||||
"wind_speed_10m": "km/h",
|
||||
},
|
||||
"hourly": {
|
||||
"time": [
|
||||
"2024-01-01T12:00",
|
||||
"2024-01-01T13:00",
|
||||
"2024-01-01T14:00",
|
||||
],
|
||||
"temperature_2m": [15.5, 16.0, 15.8],
|
||||
"relative_humidity_2m": [65, 62, 68],
|
||||
"wind_speed_10m": [10.2, 9.8, 11.1],
|
||||
},
|
||||
"hourly_units": {
|
||||
"time": "iso8601",
|
||||
"temperature_2m": "°C",
|
||||
"relative_humidity_2m": "%",
|
||||
"wind_speed_10m": "km/h",
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
class MockSearXNGResponse:
|
||||
"""Mock SearXNG API response data."""
|
||||
|
||||
@staticmethod
|
||||
def search_response() -> dict[str, Any]:
|
||||
return {
|
||||
"query": "test query",
|
||||
"number_of_results": 2,
|
||||
"results": [
|
||||
{
|
||||
"title": "Test Result 1",
|
||||
"url": "https://example.com/1",
|
||||
"content": "This is the first test result",
|
||||
"engine": "google",
|
||||
"score": 0.95,
|
||||
},
|
||||
{
|
||||
"title": "Test Result 2",
|
||||
"url": "https://example.com/2",
|
||||
"content": "This is the second test result",
|
||||
"engine": "bing",
|
||||
"score": 0.88,
|
||||
},
|
||||
],
|
||||
"suggestions": ["test suggestion"],
|
||||
"answers": [],
|
||||
"corrections": [],
|
||||
"infoboxes": [],
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def categories_response() -> dict[str, Any]:
|
||||
return {
|
||||
"categories": [
|
||||
"general",
|
||||
"images",
|
||||
"news",
|
||||
"videos",
|
||||
"music",
|
||||
"it",
|
||||
"science",
|
||||
]
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def engines_response() -> dict[str, Any]:
|
||||
return {
|
||||
"engines": [
|
||||
{"name": "google", "categories": ["general"]},
|
||||
{"name": "bing", "categories": ["general"]},
|
||||
{"name": "duckduckgo", "categories": ["general"]},
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
class MockForgejoResponse:
|
||||
"""Mock Forgejo API response data."""
|
||||
|
||||
@staticmethod
|
||||
def version_response() -> dict[str, Any]:
|
||||
return {"version": "1.21.0"}
|
||||
|
||||
@staticmethod
|
||||
def repositories_response() -> list[dict[str, Any]]:
|
||||
return [
|
||||
{
|
||||
"id": 1,
|
||||
"name": "test-repo",
|
||||
"full_name": "testuser/test-repo",
|
||||
"owner": {
|
||||
"id": 1,
|
||||
"login": "testuser",
|
||||
},
|
||||
"html_url": "https://forgejo.example.com/testuser/test-repo",
|
||||
"description": "Test repository",
|
||||
"fork": False,
|
||||
"empty": False,
|
||||
"private": False,
|
||||
"archived": False,
|
||||
"mirror": False,
|
||||
"size": 1024,
|
||||
"created_at": "2024-01-01T12:00:00Z",
|
||||
"updated_at": "2024-01-01T12:00:00Z",
|
||||
"pushed_at": "2024-01-01T12:00:00Z",
|
||||
"default_branch": "main",
|
||||
}
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
def branches_response() -> list[dict[str, Any]]:
|
||||
return [
|
||||
{
|
||||
"name": "main",
|
||||
"commit": {
|
||||
"id": "abc123",
|
||||
"message": "Initial commit",
|
||||
"url": "https://forgejo.example.com/testuser/test-repo/commit/abc123",
|
||||
"author": {
|
||||
"name": "Test User",
|
||||
"email": "test@example.com",
|
||||
"username": "testuser",
|
||||
},
|
||||
"committer": {
|
||||
"name": "Test User",
|
||||
"email": "test@example.com",
|
||||
"username": "testuser",
|
||||
},
|
||||
"timestamp": "2024-01-01T12:00:00Z",
|
||||
},
|
||||
"protected": False,
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
class MockWebResponse:
|
||||
"""Mock web scraping response data."""
|
||||
|
||||
@staticmethod
|
||||
def trafilatura_response() -> str:
|
||||
return "# Test Article\n\nThis is a test article extracted from a webpage."
|
||||
|
||||
@staticmethod
|
||||
def raw_html_response() -> str:
|
||||
return """
|
||||
<html>
|
||||
<head><title>Test Page</title></head>
|
||||
<body>
|
||||
<h1>Test Article</h1>
|
||||
<p>This is a test article.</p>
|
||||
</body>
|
||||
</html>
|
||||
"""
|
207
tests/fixtures/test_data.py
vendored
Normal file
207
tests/fixtures/test_data.py
vendored
Normal file
|
@ -0,0 +1,207 @@
|
|||
from __future__ import annotations
|
||||
|
||||
|
||||
class TimeTestData:
|
||||
"""Test data for time tool tests."""
|
||||
|
||||
VALID_TIMEZONES = [
|
||||
"UTC",
|
||||
"Europe/London",
|
||||
"America/New_York",
|
||||
"Asia/Tokyo",
|
||||
"Australia/Sydney",
|
||||
]
|
||||
|
||||
INVALID_TIMEZONES = [
|
||||
"Invalid/Timezone",
|
||||
"UTC+5",
|
||||
"",
|
||||
"America/NonExistent",
|
||||
"NotATimezone",
|
||||
]
|
||||
|
||||
UNIX_TIMESTAMPS = [
|
||||
1704067200, # 2024-01-01 00:00:00 UTC
|
||||
1704153600, # 2024-01-02 00:00:00 UTC
|
||||
1704240000, # 2024-01-03 00:00:00 UTC
|
||||
]
|
||||
|
||||
ISO_TIMESTAMPS = [
|
||||
"2024-01-01T00:00:00Z",
|
||||
"2024-01-02T00:00:00Z",
|
||||
"2024-01-03T00:00:00Z",
|
||||
]
|
||||
|
||||
HUMAN_TIMESTAMPS = [
|
||||
"2024-01-01 12:00:00",
|
||||
"January 1, 2024 12:00 PM",
|
||||
"2024-01-01T12:00:00",
|
||||
"01/01/2024 12:00",
|
||||
]
|
||||
|
||||
|
||||
class MemoryTestData:
|
||||
"""Test data for memory tool tests."""
|
||||
|
||||
SAMPLE_MEMORIES = [
|
||||
{
|
||||
"content": "John likes pizza",
|
||||
"entities": ["John"],
|
||||
},
|
||||
{
|
||||
"content": "Alice works at Google",
|
||||
"entities": ["Alice", "Google"],
|
||||
},
|
||||
{
|
||||
"content": "Bob is learning Python programming",
|
||||
"entities": ["Bob", "Python"],
|
||||
},
|
||||
{
|
||||
"content": "Sarah has a cat named Whiskers",
|
||||
"entities": ["Sarah", "Whiskers"],
|
||||
},
|
||||
]
|
||||
|
||||
SEARCH_QUERIES = [
|
||||
{"keywords": ["pizza"], "expected_count": 1},
|
||||
{"keywords": ["Google"], "expected_count": 1},
|
||||
{"keywords": ["Python"], "expected_count": 1},
|
||||
{"keywords": ["programming"], "expected_count": 1},
|
||||
{"keywords": ["cat"], "expected_count": 1},
|
||||
{"keywords": ["nonexistent"], "expected_count": 0},
|
||||
]
|
||||
|
||||
ENTITY_QUERIES = [
|
||||
{"entities": ["John"], "expected_count": 1},
|
||||
{"entities": ["Alice"], "expected_count": 1},
|
||||
{"entities": ["Bob"], "expected_count": 1},
|
||||
{"entities": ["Sarah"], "expected_count": 1},
|
||||
{"entities": ["Nonexistent"], "expected_count": 0},
|
||||
]
|
||||
|
||||
|
||||
class WeatherTestData:
|
||||
"""Test data for weather tool tests."""
|
||||
|
||||
VALID_COORDINATES = [
|
||||
{"latitude": 52.52, "longitude": 13.41}, # Berlin
|
||||
{"latitude": 51.5074, "longitude": -0.1278}, # London
|
||||
{"latitude": 40.7128, "longitude": -74.0060}, # New York
|
||||
]
|
||||
|
||||
INVALID_COORDINATES = [
|
||||
{"latitude": 91.0, "longitude": 0.0}, # Invalid latitude
|
||||
{"latitude": 0.0, "longitude": 181.0}, # Invalid longitude
|
||||
{"latitude": -91.0, "longitude": 0.0}, # Invalid latitude
|
||||
{"latitude": 0.0, "longitude": -181.0}, # Invalid longitude
|
||||
]
|
||||
|
||||
GEOCODING_LOCATIONS = [
|
||||
{"location": "London, UK", "expected_lat": 51.5074, "expected_lon": -0.1278},
|
||||
{"location": "New York, NY", "expected_lat": 40.7128, "expected_lon": -74.0060},
|
||||
{"location": "Berlin, Germany", "expected_lat": 52.52, "expected_lon": 13.41},
|
||||
]
|
||||
|
||||
|
||||
class WebTestData:
|
||||
"""Test data for web tool tests."""
|
||||
|
||||
VALID_URLS = [
|
||||
"https://example.com",
|
||||
"https://httpbin.org/html",
|
||||
"https://www.github.com",
|
||||
]
|
||||
|
||||
INVALID_URLS = [
|
||||
"not-a-url",
|
||||
"ftp://example.com",
|
||||
"https://",
|
||||
"",
|
||||
"https://nonexistent-domain-12345.com",
|
||||
]
|
||||
|
||||
EXPECTED_CONTENT_TYPES = [
|
||||
"text/html",
|
||||
"application/json",
|
||||
"text/plain",
|
||||
]
|
||||
|
||||
|
||||
class SearXNGTestData:
|
||||
"""Test data for SearXNG tool tests."""
|
||||
|
||||
SEARCH_QUERIES = [
|
||||
{"query": "python programming", "category": "general"},
|
||||
{"query": "machine learning", "category": "it"},
|
||||
{"query": "climate change", "category": "science"},
|
||||
]
|
||||
|
||||
INVALID_QUERIES = [
|
||||
{"query": "", "category": "general"},
|
||||
{"query": "test", "category": "invalid_category"},
|
||||
]
|
||||
|
||||
CATEGORIES = [
|
||||
"general",
|
||||
"images",
|
||||
"news",
|
||||
"videos",
|
||||
"music",
|
||||
"it",
|
||||
"science",
|
||||
]
|
||||
|
||||
ENGINES = [
|
||||
"google",
|
||||
"bing",
|
||||
"duckduckgo",
|
||||
"startpage",
|
||||
"yahoo",
|
||||
]
|
||||
|
||||
|
||||
class ForgejoTestData:
|
||||
"""Test data for Forgejo tool tests."""
|
||||
|
||||
REPOSITORIES = [
|
||||
{"owner": "testuser", "repo": "test-repo"},
|
||||
{"owner": "orgname", "repo": "project-repo"},
|
||||
]
|
||||
|
||||
BRANCHES = [
|
||||
"main",
|
||||
"develop",
|
||||
"feature/new-feature",
|
||||
"hotfix/urgent-fix",
|
||||
]
|
||||
|
||||
FILE_PATHS = [
|
||||
"README.md",
|
||||
"src/main.py",
|
||||
"docs/api.md",
|
||||
"tests/test_main.py",
|
||||
]
|
||||
|
||||
COMMIT_SHAS = [
|
||||
"abc123def456",
|
||||
"def456ghi789",
|
||||
"ghi789jkl012",
|
||||
]
|
||||
|
||||
WORKFLOW_RUNS = [
|
||||
{"id": 1, "status": "completed", "conclusion": "success"},
|
||||
{"id": 2, "status": "in_progress", "conclusion": None},
|
||||
{"id": 3, "status": "completed", "conclusion": "failure"},
|
||||
]
|
||||
|
||||
ISSUES = [
|
||||
{"index": 1, "title": "Bug in authentication", "state": "open"},
|
||||
{"index": 2, "title": "Feature request", "state": "closed"},
|
||||
{"index": 3, "title": "Documentation update", "state": "open"},
|
||||
]
|
||||
|
||||
PULL_REQUESTS = [
|
||||
{"index": 1, "title": "Fix authentication bug", "state": "open"},
|
||||
{"index": 2, "title": "Add new feature", "state": "merged"},
|
||||
{"index": 3, "title": "Update documentation", "state": "closed"},
|
||||
]
|
1
tests/integration/__init__.py
Normal file
1
tests/integration/__init__.py
Normal file
|
@ -0,0 +1 @@
|
|||
from __future__ import annotations
|
3740
tests/integration/test_forgejo_endpoints.py
Normal file
3740
tests/integration/test_forgejo_endpoints.py
Normal file
File diff suppressed because it is too large
Load diff
118
tests/integration/test_memory_endpoints.py
Normal file
118
tests/integration/test_memory_endpoints.py
Normal file
|
@ -0,0 +1,118 @@
|
|||
"""Integration tests for the Memory tool endpoints.
|
||||
|
||||
These tests verify the functionality of the Memory API endpoints by making actual HTTP requests
|
||||
to the FastAPI application. They focus on ensuring that the endpoints return the expected
|
||||
data structures and handle various scenarios correctly, including successful responses and
|
||||
error conditions.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
from openapi_mcp_server.server import app
|
||||
from openapi_mcp_server.tools.memory.models import (
|
||||
CreateMemoryRequest,
|
||||
DeleteMemoryRequest,
|
||||
GetEntityRequest,
|
||||
SearchMemoryRequest,
|
||||
)
|
||||
|
||||
client = TestClient(app)
|
||||
|
||||
|
||||
def test_create_memory() -> None:
|
||||
"""Test the /memory/create endpoint.
|
||||
|
||||
This test verifies that a new memory can be successfully created by sending a POST request
|
||||
with valid memory content and associated entities. It checks for a 200 OK status code
|
||||
and that the response contains the newly created memory's ID and content.
|
||||
"""
|
||||
request_data = CreateMemoryRequest(content="This is a test memory.", entities=["test_entity"])
|
||||
response = client.post("/memory/create", json=request_data.model_dump())
|
||||
assert response.status_code == 200
|
||||
response_json = response.json()
|
||||
assert "id" in response_json
|
||||
assert "timestamp" in response_json
|
||||
assert response_json["content"] == "This is a test memory."
|
||||
assert len(response_json["entities"]) == 1
|
||||
assert response_json["entities"][0]["name"] == "test_entity"
|
||||
assert response_json["entities"][0]["entity_type"] == "generic"
|
||||
|
||||
|
||||
def test_get_all_memories() -> None:
|
||||
"""Test the /memory/all endpoint.
|
||||
|
||||
This test verifies that the endpoint correctly retrieves all stored memories.
|
||||
It checks for a 200 OK status code and that the response contains a list of memories
|
||||
and entities within a MemoryGraph structure.
|
||||
"""
|
||||
response = client.get("/memory/all")
|
||||
assert response.status_code == 200
|
||||
assert "memories" in response.json()
|
||||
assert "entities" in response.json()
|
||||
assert isinstance(response.json()["memories"], list)
|
||||
assert isinstance(response.json()["entities"], list)
|
||||
|
||||
|
||||
def test_search_memories() -> None:
|
||||
"""Test the /memory/search endpoint.
|
||||
|
||||
This test verifies that the endpoint can search for memories based on a query string.
|
||||
It sends a POST request with a search query and asserts that the response is a 200 OK
|
||||
and contains a list of matching memories.
|
||||
"""
|
||||
request_data = SearchMemoryRequest(query="test", limit=10)
|
||||
response = client.post("/memory/search", json=request_data.model_dump())
|
||||
assert response.status_code == 200
|
||||
assert "memories" in response.json()
|
||||
assert isinstance(response.json()["memories"], list)
|
||||
|
||||
|
||||
def test_get_entity_memories() -> None:
|
||||
"""Test the /memory/entity endpoint.
|
||||
|
||||
This test verifies that the endpoint can retrieve memories associated with specific entities.
|
||||
It sends a POST request with a list of entity names and asserts a 200 OK response
|
||||
containing memories linked to those entities.
|
||||
"""
|
||||
request_data = GetEntityRequest(entities=["test_entity"], limit=10)
|
||||
response = client.post("/memory/entity", json=request_data.model_dump())
|
||||
assert response.status_code == 200
|
||||
assert "memories" in response.json()
|
||||
assert isinstance(response.json()["memories"], list)
|
||||
|
||||
|
||||
def test_delete_memories() -> None:
|
||||
"""Test the /memory/delete endpoint.
|
||||
|
||||
This test verifies that specific memories can be deleted by their IDs.
|
||||
It sends a POST request with a list of memory IDs to delete and asserts a 200 OK response
|
||||
and a confirmation message indicating the number of memories deleted.
|
||||
Note: This test assumes there are memories to delete. In a real scenario, you might create
|
||||
memories first within the test or use a fixture.
|
||||
"""
|
||||
# To make this test effective, you would typically create a memory first
|
||||
# and then attempt to delete its ID.
|
||||
# For now, we'll send an empty list or a dummy ID.
|
||||
request_data = DeleteMemoryRequest(memory_ids=[]) # Or a dummy ID like ["non_existent_id"]
|
||||
response = client.post("/memory/delete", json=request_data.model_dump())
|
||||
assert response.status_code == 200
|
||||
assert "message" in response.json()
|
||||
|
||||
|
||||
def test_get_memory_summary() -> None:
|
||||
"""Test the /memory/stats endpoint.
|
||||
|
||||
This test verifies that the endpoint returns summary statistics about the memory graph.
|
||||
It checks for a 200 OK status code and that the response contains expected summary fields
|
||||
like total_memories, total_entities, etc.
|
||||
"""
|
||||
response = client.get("/memory/stats")
|
||||
assert response.status_code == 200
|
||||
assert "total_memories" in response.json()
|
||||
assert "total_entities" in response.json()
|
||||
assert "oldest_memory" in response.json()
|
||||
assert "latest_memory" in response.json()
|
||||
assert "memory_timespan_days" in response.json()
|
||||
assert "top_entities" in response.json()
|
92
tests/integration/test_searxng_endpoints.py
Normal file
92
tests/integration/test_searxng_endpoints.py
Normal file
|
@ -0,0 +1,92 @@
|
|||
"""Integration tests for the SearXNG tool endpoints.
|
||||
|
||||
These tests verify the functionality of the SearXNG API endpoints by making actual HTTP requests
|
||||
to the FastAPI application. They focus on ensuring that the endpoints return the expected
|
||||
data structures and handle various scenarios correctly, including successful responses and
|
||||
error conditions. Mocking is used to prevent actual external network calls to the SearXNG instance.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
from openapi_mcp_server.server import app
|
||||
from openapi_mcp_server.tools.searxng.models import SearchRequest
|
||||
|
||||
client = TestClient(app)
|
||||
|
||||
|
||||
@patch("requests.get")
|
||||
def test_search_web(mock_get) -> None:
|
||||
"""Test the /searxng/search endpoint.
|
||||
|
||||
This test verifies that the endpoint correctly performs a web search and returns results.
|
||||
It mocks the external call to the SearXNG instance to control the response and ensure
|
||||
the endpoint processes it as expected. It checks for a 200 OK status code and the
|
||||
presence of key fields in the search response.
|
||||
"""
|
||||
mock_get.return_value.status_code = 200
|
||||
mock_get.return_value.json.return_value = {
|
||||
"query": "test query",
|
||||
"number_of_results": 1,
|
||||
"results": [
|
||||
{
|
||||
"title": "Test Result",
|
||||
"url": "http://example.com",
|
||||
"content": "This is a test snippet.",
|
||||
"engine": "test_engine",
|
||||
"category": "general",
|
||||
}
|
||||
],
|
||||
"infoboxes": [],
|
||||
"suggestions": [],
|
||||
"engines": [],
|
||||
}
|
||||
|
||||
request_data = SearchRequest(query="test query")
|
||||
response = client.post("/searxng/search", json=request_data.model_dump())
|
||||
|
||||
assert response.status_code == 200
|
||||
assert response.json()["query"] == "test query"
|
||||
assert len(response.json()["results"]) == 1
|
||||
assert response.json()["results"][0]["title"] == "Test Result"
|
||||
|
||||
|
||||
@patch("requests.get")
|
||||
def test_get_categories(mock_get) -> None:
|
||||
"""Test the /searxng/categories endpoint.
|
||||
|
||||
This test verifies that the endpoint correctly retrieves available search categories.
|
||||
It mocks the external call to the SearXNG instance's config endpoint and checks for
|
||||
a 200 OK status code and the presence of categories in the response.
|
||||
"""
|
||||
mock_get.return_value.status_code = 200
|
||||
mock_get.return_value.json.return_value = {"categories": ["general", "images"]}
|
||||
|
||||
response = client.get("/searxng/categories")
|
||||
|
||||
assert response.status_code == 200
|
||||
assert "categories" in response.json()
|
||||
assert "general" in response.json()["categories"]
|
||||
assert "images" in response.json()["categories"]
|
||||
|
||||
|
||||
@patch("requests.get")
|
||||
def test_get_engines(mock_get) -> None:
|
||||
"""Test the /searxng/engines endpoint.
|
||||
|
||||
This test verifies that the endpoint correctly retrieves available search engines.
|
||||
It mocks the external call to the SearXNG instance's config endpoint and checks for
|
||||
a 200 OK status code and the presence of engines in the response.
|
||||
"""
|
||||
mock_get.return_value.status_code = 200
|
||||
mock_get.return_value.json.return_value = {"engines": [{"name": "Google"}, {"name": "Bing"}]}
|
||||
|
||||
response = client.get("/searxng/engines")
|
||||
|
||||
assert response.status_code == 200
|
||||
assert "engines" in response.json()
|
||||
assert "Google" in response.json()["engines"]
|
||||
assert "Bing" in response.json()["engines"]
|
113
tests/integration/test_time_endpoints.py
Normal file
113
tests/integration/test_time_endpoints.py
Normal file
|
@ -0,0 +1,113 @@
|
|||
"""Integration tests for the Time tool endpoints.
|
||||
|
||||
These tests verify the functionality of the Time API endpoints by making actual HTTP requests
|
||||
to the FastAPI application. They focus on ensuring that the endpoints return the expected
|
||||
data structures and handle various scenarios correctly, including successful responses and
|
||||
error conditions.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
from openapi_mcp_server.server import app
|
||||
from openapi_mcp_server.tools.time.models import (
|
||||
ConvertTimeInput,
|
||||
ElapsedTimeInput,
|
||||
ParseTimestampInput,
|
||||
UnixToIsoInput,
|
||||
)
|
||||
|
||||
client = TestClient(app)
|
||||
|
||||
|
||||
def test_get_current_time() -> None:
|
||||
"""Test the /time/get_time endpoint.
|
||||
|
||||
This test verifies that the endpoint returns the current time in the specified timezone.
|
||||
It checks for a 200 OK status code and that the response contains a non-empty 'utc' field.
|
||||
"""
|
||||
response = client.get("/time/get_time?timezone=UTC")
|
||||
assert response.status_code == 200
|
||||
assert "utc" in response.json()
|
||||
assert response.json()["utc"] is not None
|
||||
|
||||
|
||||
def test_unix_to_iso() -> None:
|
||||
"""Test the /time/unix_to_iso endpoint.
|
||||
|
||||
This test verifies that the endpoint correctly converts a Unix timestamp to ISO 8601 format.
|
||||
It sends a POST request with a Unix timestamp and checks for a 200 OK status code
|
||||
and a valid ISO formatted time in the response.
|
||||
"""
|
||||
request_data = UnixToIsoInput(
|
||||
timestamp=1678886400, timezone="UTC"
|
||||
) # March 15, 2023 12:00:00 PM UTC
|
||||
response = client.post("/time/unix_to_iso", json=request_data.model_dump())
|
||||
assert response.status_code == 200
|
||||
assert "iso_time" in response.json()
|
||||
assert response.json()["iso_time"].startswith("2023-03-15T12:00:00") or response.json()[
|
||||
"iso_time"
|
||||
].startswith("2023-03-15T12:00:00+00:00")
|
||||
|
||||
|
||||
def test_convert_time() -> None:
|
||||
"""Test the /time/convert_time endpoint.
|
||||
|
||||
This test verifies that the endpoint correctly converts a timestamp between timezones.
|
||||
It sends a POST request with a timestamp, source timezone, and target timezone,
|
||||
then checks for a 200 OK status code and a valid converted time.
|
||||
"""
|
||||
request_data = ConvertTimeInput(
|
||||
timestamp="2023-03-15T12:00:00", from_tz="America/New_York", to_tz="Europe/London"
|
||||
)
|
||||
response = client.post("/time/convert_time", json=request_data.model_dump())
|
||||
assert response.status_code == 200
|
||||
assert "converted_time" in response.json()
|
||||
assert response.json()["converted_time"].startswith(
|
||||
"2023-03-15T16:00:00"
|
||||
) # 12 PM NY is 4 PM London
|
||||
|
||||
|
||||
def test_elapsed_time() -> None:
|
||||
"""Test the /time/elapsed_time endpoint.
|
||||
|
||||
This test verifies that the endpoint correctly calculates the elapsed time between two timestamps.
|
||||
It sends a POST request with start and end timestamps and the desired unit,
|
||||
then checks for a 200 OK status code and the correct elapsed time.
|
||||
"""
|
||||
request_data = ElapsedTimeInput(
|
||||
start="2023-03-15T12:00:00Z", end="2023-03-15T13:00:00Z", units="hours"
|
||||
)
|
||||
response = client.post("/time/elapsed_time", json=request_data.model_dump())
|
||||
assert response.status_code == 200
|
||||
assert "elapsed" in response.json()
|
||||
assert response.json()["elapsed"] == 1.0
|
||||
assert response.json()["unit"] == "hours"
|
||||
|
||||
|
||||
def test_parse_timestamp() -> None:
|
||||
"""Test the /time/parse_timestamp endpoint.
|
||||
|
||||
This test verifies that the endpoint can parse a human-readable timestamp string
|
||||
into a standardized UTC ISO format. It sends a POST request with a timestamp string
|
||||
and checks for a 200 OK status code and a valid UTC ISO formatted time.
|
||||
"""
|
||||
request_data = ParseTimestampInput(timestamp="June 1st 2024 3:30 PM", timezone="UTC")
|
||||
response = client.post("/time/parse_timestamp", json=request_data.model_dump())
|
||||
assert response.status_code == 200
|
||||
assert "utc" in response.json()
|
||||
assert response.json()["utc"].startswith("2024-06-01T15:30:00")
|
||||
|
||||
|
||||
def test_list_time_zones() -> None:
|
||||
"""Test the /time/list_time_zones endpoint.
|
||||
|
||||
This test verifies that the endpoint returns a list of all valid IANA timezone names.
|
||||
It checks for a 200 OK status code and that the response is a non-empty list of strings.
|
||||
"""
|
||||
response = client.get("/time/list_time_zones")
|
||||
assert response.status_code == 200
|
||||
assert isinstance(response.json(), list)
|
||||
assert len(response.json()) > 0
|
||||
assert "UTC" in response.json()
|
245
tests/integration/test_weather_endpoints.py
Normal file
245
tests/integration/test_weather_endpoints.py
Normal file
|
@ -0,0 +1,245 @@
|
|||
"""Integration tests for the Weather tool endpoints.
|
||||
|
||||
These tests verify the functionality of the Weather API endpoints by making actual HTTP requests
|
||||
to the FastAPI application. They focus on ensuring that the endpoints return the expected
|
||||
data structures and handle various scenarios correctly, including successful responses and
|
||||
error conditions. Mocking is used to prevent actual external network calls to the weather API
|
||||
and geocoding services.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import requests
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
from openapi_mcp_server.core.config import get_app_config
|
||||
from openapi_mcp_server.server import app
|
||||
|
||||
client = TestClient(app)
|
||||
|
||||
|
||||
@patch("requests.get")
|
||||
@patch("openapi_mcp_server.tools.weather.routes.WeatherTool.geocode_location")
|
||||
@patch("reverse_geocoder.search")
|
||||
def test_get_weather_forecast_with_coords(
|
||||
mock_rg_search: MagicMock,
|
||||
mock_geocode_location: MagicMock,
|
||||
mock_requests_get: MagicMock,
|
||||
) -> None:
|
||||
"""Test the /weather/forecast endpoint with explicit coordinates.
|
||||
|
||||
This test verifies that the endpoint correctly retrieves weather data when latitude
|
||||
and longitude are provided. It mocks the external API calls to ensure a controlled
|
||||
test environment and checks for a 200 OK status code and the expected structure
|
||||
of the weather forecast response.
|
||||
"""
|
||||
# Mock external API responses
|
||||
mock_requests_get.return_value.status_code = 200
|
||||
mock_requests_get.return_value.json.return_value = {
|
||||
"latitude": 52.52,
|
||||
"longitude": 13.41,
|
||||
"generationtime_ms": 0.123,
|
||||
"utc_offset_seconds": 0,
|
||||
"timezone": "Europe/Berlin",
|
||||
"timezone_abbreviation": "CEST",
|
||||
"elevation": 38.0,
|
||||
"current_units": {"time": "iso8601", "temperature_2m": "°C", "wind_speed_10m": "km/h"},
|
||||
"current": {"time": "2023-10-27T10:00", "temperature_2m": 10.5, "wind_speed_10m": 15.0},
|
||||
"hourly_units": {
|
||||
"time": "iso8601",
|
||||
"temperature_2m": "°C",
|
||||
"relative_humidity_2m": "%",
|
||||
"wind_speed_10m": "km/h",
|
||||
},
|
||||
"hourly": {
|
||||
"time": ["2023-10-27T10:00", "2023-10-27T11:00"],
|
||||
"temperature_2m": [10.5, 11.0],
|
||||
"relative_humidity_2m": [80, 75],
|
||||
"wind_speed_10m": [15.0, 12.0],
|
||||
},
|
||||
}
|
||||
|
||||
# Mock reverse geocoder to return a country code (e.g., Germany for Celsius)
|
||||
mock_rg_search.return_value = [{"cc": "DE"}]
|
||||
|
||||
response = client.get("/weather/forecast?latitude=52.52&longitude=13.41")
|
||||
|
||||
assert response.status_code == 200
|
||||
assert response.json()["latitude"] == 52.52
|
||||
assert response.json()["current"]["temperature_2m"] == 10.5
|
||||
mock_requests_get.assert_called_once_with(
|
||||
"https://api.open-meteo.com/v1/forecast",
|
||||
params={
|
||||
"latitude": 52.52,
|
||||
"longitude": 13.41,
|
||||
"current": "temperature_2m,wind_speed_10m",
|
||||
"hourly": "temperature_2m,relative_humidity_2m,wind_speed_10m",
|
||||
"timezone": "auto",
|
||||
"temperature_unit": "celsius",
|
||||
},
|
||||
timeout=30,
|
||||
)
|
||||
|
||||
|
||||
@patch("requests.get")
|
||||
@patch("openapi_mcp_server.tools.weather.routes.WeatherTool.geocode_location")
|
||||
@patch("reverse_geocoder.search")
|
||||
def test_get_weather_forecast_with_default_location(
|
||||
mock_rg_search: MagicMock,
|
||||
mock_geocode_location: MagicMock,
|
||||
mock_requests_get: MagicMock,
|
||||
) -> None:
|
||||
"""Test the /weather/forecast endpoint with a default location.
|
||||
|
||||
This test verifies that the endpoint correctly uses the configured default location
|
||||
when no coordinates are provided. It mocks the geocoding service to return coordinates
|
||||
for the default location and the external weather API. It checks for a 200 OK status
|
||||
code and the expected weather data.
|
||||
"""
|
||||
# Temporarily set a default location in the app config for this test
|
||||
original_default_location = get_app_config().default_location
|
||||
get_app_config().default_location = "London, UK"
|
||||
|
||||
# Mock geocoding to return coordinates for "London, UK"
|
||||
mock_geocode_location.return_value = (51.5, -0.1)
|
||||
|
||||
# Mock external API responses
|
||||
mock_requests_get.return_value.status_code = 200
|
||||
mock_requests_get.return_value.json.return_value = {
|
||||
"latitude": 51.5,
|
||||
"longitude": -0.1,
|
||||
"generationtime_ms": 0.123,
|
||||
"utc_offset_seconds": 0,
|
||||
"timezone": "Europe/London",
|
||||
"timezone_abbreviation": "BST",
|
||||
"elevation": 24.0,
|
||||
"current_units": {"time": "iso8601", "temperature_2m": "°C", "wind_speed_10m": "km/h"},
|
||||
"current": {"time": "2023-10-27T10:00", "temperature_2m": 12.0, "wind_speed_10m": 10.0},
|
||||
"hourly_units": {
|
||||
"time": "iso8601",
|
||||
"temperature_2m": "°C",
|
||||
"relative_humidity_2m": "%",
|
||||
"wind_speed_10m": "km/h",
|
||||
},
|
||||
"hourly": {
|
||||
"time": ["2023-10-27T10:00", "2023-10-27T11:00"],
|
||||
"temperature_2m": [12.0, 12.5],
|
||||
"relative_humidity_2m": [85, 80],
|
||||
"wind_speed_10m": [10.0, 8.0],
|
||||
},
|
||||
}
|
||||
|
||||
# Mock reverse geocoder to return a country code (e.g., UK for Celsius)
|
||||
mock_rg_search.return_value = [{"cc": "GB"}]
|
||||
|
||||
response = client.get("/weather/forecast")
|
||||
|
||||
assert response.status_code == 200
|
||||
assert response.json()["latitude"] == 51.5
|
||||
assert response.json()["current"]["temperature_2m"] == 12.0
|
||||
mock_geocode_location.assert_called_once_with("London, UK")
|
||||
mock_requests_get.assert_called_once_with(
|
||||
"https://api.open-meteo.com/v1/forecast",
|
||||
params={
|
||||
"latitude": 51.5,
|
||||
"longitude": -0.1,
|
||||
"current": "temperature_2m,wind_speed_10m",
|
||||
"hourly": "temperature_2m,relative_humidity_2m,wind_speed_10m",
|
||||
"timezone": "auto",
|
||||
"temperature_unit": "celsius",
|
||||
},
|
||||
timeout=30,
|
||||
)
|
||||
|
||||
# Restore original default location
|
||||
get_app_config().default_location = original_default_location
|
||||
|
||||
|
||||
@patch("requests.get")
|
||||
@patch("openapi_mcp_server.tools.weather.routes.WeatherTool.geocode_location")
|
||||
@patch("reverse_geocoder.search")
|
||||
def test_get_weather_forecast_no_coords_no_default(
|
||||
mock_rg_search: MagicMock,
|
||||
mock_geocode_location: MagicMock,
|
||||
mock_requests_get: MagicMock,
|
||||
) -> None:
|
||||
"""Test the /weather/forecast endpoint when no coordinates and no default location are set.
|
||||
|
||||
This test verifies that the endpoint returns a 422 Unprocessable Entity error when
|
||||
neither explicit coordinates nor a default location are provided. It ensures that
|
||||
the API correctly handles this missing input scenario.
|
||||
"""
|
||||
# Temporarily clear default location in the app config for this test
|
||||
original_default_location = get_app_config().default_location
|
||||
get_app_config().default_location = None
|
||||
|
||||
response = client.get("/weather/forecast")
|
||||
|
||||
assert response.status_code == 422
|
||||
assert "Latitude and longitude are required" in response.json()["detail"]
|
||||
mock_geocode_location.assert_not_called()
|
||||
mock_requests_get.assert_not_called()
|
||||
|
||||
# Restore original default location
|
||||
get_app_config().default_location = original_default_location
|
||||
|
||||
|
||||
@patch("requests.get")
|
||||
@patch("openapi_mcp_server.tools.weather.routes.WeatherTool.geocode_location")
|
||||
@patch("reverse_geocoder.search")
|
||||
def test_get_weather_forecast_geocode_failure(
|
||||
mock_rg_search: MagicMock,
|
||||
mock_geocode_location: MagicMock,
|
||||
mock_requests_get: MagicMock,
|
||||
) -> None:
|
||||
"""Test the /weather/forecast endpoint when geocoding of default location fails.
|
||||
|
||||
This test verifies that the endpoint returns a 422 Unprocessable Entity error when
|
||||
a default location is set but cannot be geocoded into coordinates. It ensures that
|
||||
the API correctly handles geocoding failures.
|
||||
"""
|
||||
# Temporarily set a default location that will fail geocoding
|
||||
original_default_location = get_app_config().default_location
|
||||
get_app_config().default_location = "Invalid Location That Does Not Exist"
|
||||
|
||||
# Mock geocoding to return None (failure)
|
||||
mock_geocode_location.return_value = None
|
||||
|
||||
response = client.get("/weather/forecast")
|
||||
|
||||
assert response.status_code == 422
|
||||
assert "Could not geocode default location" in response.json()["detail"]
|
||||
mock_geocode_location.assert_called_once_with("Invalid Location That Does Not Exist")
|
||||
mock_requests_get.assert_not_called()
|
||||
|
||||
# Restore original default location
|
||||
get_app_config().default_location = original_default_location
|
||||
|
||||
|
||||
@patch("requests.get")
|
||||
@patch("openapi_mcp_server.tools.weather.routes.WeatherTool.geocode_location")
|
||||
@patch("reverse_geocoder.search")
|
||||
def test_get_weather_forecast_api_error(
|
||||
mock_rg_search: MagicMock,
|
||||
mock_geocode_location: MagicMock,
|
||||
mock_requests_get: MagicMock,
|
||||
) -> None:
|
||||
"""Test the /weather/forecast endpoint when the external weather API returns an error.
|
||||
|
||||
This test verifies that the endpoint correctly handles errors from the Open-Meteo API.
|
||||
It mocks the external API call to raise an exception (e.g., connection error) and checks
|
||||
for a 503 Service Unavailable status code.
|
||||
"""
|
||||
# Mock external API to raise an exception
|
||||
mock_requests_get.side_effect = requests.exceptions.RequestException("Connection error")
|
||||
|
||||
# Mock reverse geocoder to return a country code (e.g., Germany for Celsius)
|
||||
mock_rg_search.return_value = [{"cc": "DE"}]
|
||||
|
||||
response = client.get("/weather/forecast?latitude=52.52&longitude=13.41")
|
||||
|
||||
assert response.status_code == 503
|
||||
assert "Error connecting to Open-Meteo API" in response.json()["detail"]
|
||||
mock_requests_get.assert_called_once()
|
134
tests/integration/test_web_endpoints.py
Normal file
134
tests/integration/test_web_endpoints.py
Normal file
|
@ -0,0 +1,134 @@
|
|||
"""Integration tests for the Web tool endpoints.
|
||||
|
||||
These tests verify the functionality of the Web API endpoints by making actual HTTP requests
|
||||
to the FastAPI application. They focus on ensuring that the endpoints return the expected
|
||||
data structures and handle various scenarios correctly, including successful responses and
|
||||
error conditions. Mocking is used to prevent actual external network calls.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
from openapi_mcp_server.server import app
|
||||
from openapi_mcp_server.tools.web.models import WebRequest
|
||||
|
||||
client = TestClient(app)
|
||||
|
||||
|
||||
@patch("trafilatura.fetch_url")
|
||||
@patch("trafilatura.extract")
|
||||
def test_web_read_success(
|
||||
mock_trafilatura_extract: MagicMock,
|
||||
mock_trafilatura_fetch_url: MagicMock,
|
||||
) -> None:
|
||||
"""Test the /web/web_read endpoint for successful content extraction.
|
||||
|
||||
This test verifies that the endpoint correctly extracts and returns web content
|
||||
in Markdown format. It mocks the `trafilatura` library functions to simulate
|
||||
a successful content fetch and extraction, and checks for a 200 OK status code
|
||||
and the expected content in the response.
|
||||
"""
|
||||
mock_trafilatura_fetch_url.return_value = (
|
||||
"<html><body><h1>Test</h1><p>Content</p></body></html>"
|
||||
)
|
||||
mock_trafilatura_extract.return_value = "# Test\n\nContent"
|
||||
|
||||
request_data = WebRequest(url="http://example.com")
|
||||
response = client.post("/web/web_read", json=request_data.model_dump())
|
||||
|
||||
assert response.status_code == 200
|
||||
assert response.json()["url"] == "http://example.com"
|
||||
assert response.json()["content"] == "# Test\n\nContent"
|
||||
mock_trafilatura_fetch_url.assert_called_once_with("http://example.com")
|
||||
mock_trafilatura_extract.assert_called_once()
|
||||
|
||||
|
||||
@patch("trafilatura.fetch_url")
@patch("trafilatura.extract")
def test_web_read_fetch_failure(
    mock_extract: MagicMock,
    mock_fetch: MagicMock,
) -> None:
    """Verify /web/web_read returns 404 when the page cannot be fetched.

    trafilatura.fetch_url returning None signals a fetch failure; the
    endpoint must surface that as a 404 and must never call the extractor.
    """
    mock_fetch.return_value = None

    payload = WebRequest(url="http://nonexistent.com").model_dump()
    response = client.post("/web/web_read", json=payload)

    assert response.status_code == 404
    assert "Unable to fetch content from URL" in response.json()["detail"]
    mock_fetch.assert_called_once_with("http://nonexistent.com")
    mock_extract.assert_not_called()
|
||||
|
||||
|
||||
@patch("trafilatura.fetch_url")
@patch("trafilatura.extract")
def test_web_read_extraction_failure(
    mock_extract: MagicMock,
    mock_fetch: MagicMock,
) -> None:
    """Verify /web/web_read returns 422 when extraction yields nothing.

    The fetch succeeds but trafilatura.extract returns None, meaning no
    meaningful content could be pulled out; the endpoint must report this
    as 422 Unprocessable Entity.
    """
    mock_fetch.return_value = (
        "<html><body><p>No extractable content</p></body></html>"
    )
    mock_extract.return_value = None

    payload = WebRequest(url="http://example.com/empty").model_dump()
    response = client.post("/web/web_read", json=payload)

    assert response.status_code == 422
    assert "Unable to extract content from URL" in response.json()["detail"]
    mock_fetch.assert_called_once_with("http://example.com/empty")
    mock_extract.assert_called_once()
|
||||
|
||||
|
||||
@patch("requests.get")
def test_web_raw_success(mock_get: MagicMock) -> None:
    """Verify /web/web_raw returns raw HTML plus status code and headers.

    requests.get is mocked with a canned 200 response so no real HTTP
    call is made; the endpoint is expected to forward the URL with a
    30-second timeout and echo content, status, and headers back.
    """
    canned = MagicMock()
    canned.status_code = 200
    canned.text = "<html>Raw HTML</html>"
    canned.headers = {"Content-Type": "text/html"}
    mock_get.return_value = canned

    response = client.get("/web/web_raw?url=http://example.com/raw")

    assert response.status_code == 200
    body = response.json()
    assert body["url"] == "http://example.com/raw"
    assert body["content"] == "<html>Raw HTML</html>"
    assert body["status_code"] == 200
    assert body["headers"]["Content-Type"] == "text/html"
    mock_get.assert_called_once_with("http://example.com/raw", timeout=30)
|
||||
|
||||
|
||||
@patch("requests.get")
def test_web_raw_fetch_failure(mock_requests_get: MagicMock) -> None:
    """Verify /web/web_raw returns 503 when the upstream request fails.

    requests.get is made to raise RequestException, simulating a network
    or connection error; the endpoint must map that failure to a 503
    Service Unavailable with a descriptive detail message.

    Fix: this test referenced `requests` although the module never
    imported it, so it crashed with NameError before exercising the
    endpoint; `import requests` is now present at module level.
    """
    mock_requests_get.side_effect = requests.exceptions.RequestException(
        "Connection error"
    )

    response = client.get("/web/web_raw?url=http://nonexistent.com/raw")

    assert response.status_code == 503
    assert "Unable to fetch content from URL" in response.json()["detail"]
    mock_requests_get.assert_called_once_with(
        "http://nonexistent.com/raw", timeout=30
    )
|
39
tests/test_health.py
Normal file
39
tests/test_health.py
Normal file
|
@ -0,0 +1,39 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.mark.integration
def test_health_endpoint(client) -> None:
    """The global /health endpoint reports the service as healthy."""
    response = client.get("/health")

    assert response.status_code == 200

    payload = response.json()
    assert payload["service"] == "openapi-mcp-server"
    assert payload["status"] == "healthy"
|
||||
|
||||
|
||||
@pytest.mark.integration
def test_health_endpoint_response_schema(client) -> None:
    """/health returns a dict whose `service` and `status` are strings."""
    response = client.get("/health")

    assert response.status_code == 200

    payload = response.json()
    assert isinstance(payload, dict)
    for field in ("service", "status"):
        assert field in payload
        assert isinstance(payload[field], str)
|
||||
|
||||
|
||||
@pytest.mark.integration
def test_health_endpoint_headers(client) -> None:
    """/health responds with a JSON content type."""
    response = client.get("/health")

    assert response.status_code == 200
    content_type = response.headers.get("content-type", "")
    assert "application/json" in content_type
|
78
tests/test_server.py
Normal file
78
tests/test_server.py
Normal file
|
@ -0,0 +1,78 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.mark.integration
def test_server_creation(app) -> None:
    """The FastAPI application object exists and exposes app interfaces."""
    assert app is not None
    for attr in ("routes", "openapi"):
        assert hasattr(app, attr)
|
||||
|
||||
|
||||
@pytest.mark.integration
def test_openapi_docs_endpoint(client) -> None:
    """The Swagger UI at /docs is served as HTML."""
    response = client.get("/docs")
    assert response.status_code == 200
    assert "text/html" in response.headers.get("content-type", "")
|
||||
|
||||
|
||||
@pytest.mark.integration
def test_redoc_endpoint(client) -> None:
    """The ReDoc page at /redoc is served as HTML."""
    response = client.get("/redoc")
    assert response.status_code == 200
    assert "text/html" in response.headers.get("content-type", "")
|
||||
|
||||
|
||||
@pytest.mark.integration
def test_openapi_json_endpoint(client) -> None:
    """/openapi.json serves the schema with its top-level sections."""
    response = client.get("/openapi.json")
    assert response.status_code == 200
    assert "application/json" in response.headers.get("content-type", "")

    schema = response.json()
    for section in ("openapi", "info", "paths"):
        assert section in schema
|
||||
|
||||
|
||||
@pytest.mark.integration
def test_tool_endpoints_registered(client) -> None:
    """Every tool router contributes at least one path to the schema."""
    response = client.get("/openapi.json")
    assert response.status_code == 200

    paths = response.json()["paths"]

    # One prefix per mounted tool router; each must appear in the schema.
    expected_prefixes = (
        "/web/",
        "/memory/",
        "/time/",
        "/weather/",
        "/searxng/",
        "/forgejo/",
    )
    for prefix in expected_prefixes:
        assert any(
            path.startswith(prefix) for path in paths
        ), f"No endpoints found for tool with prefix {prefix}"
|
||||
|
||||
|
||||
@pytest.mark.integration
def test_cors_headers(client) -> None:
    """A CORS allow-origin header is present on responses.

    NOTE(review): many CORS middlewares only emit this header when the
    request carries an Origin header — confirm the middleware config if
    this assertion ever flakes.
    """
    response = client.get("/health")
    assert response.status_code == 200

    headers = response.headers
    assert (
        "access-control-allow-origin" in headers
        or "Access-Control-Allow-Origin" in headers
    )
|
||||
|
||||
|
||||
@pytest.mark.integration
def test_404_handling(client) -> None:
    """Unknown routes produce a 404 response carrying a `detail` field."""
    response = client.get("/nonexistent/endpoint")
    assert response.status_code == 404

    assert "detail" in response.json()
|
1
tests/unit/__init__.py
Normal file
1
tests/unit/__init__.py
Normal file
|
@ -0,0 +1 @@
|
|||
from __future__ import annotations
|
1
tests/unit/tools/__init__.py
Normal file
1
tests/unit/tools/__init__.py
Normal file
|
@ -0,0 +1 @@
|
|||
from __future__ import annotations
|
378
uv.lock
generated
378
uv.lock
generated
|
@ -35,11 +35,11 @@ wheels = [
|
|||
|
||||
[[package]]
|
||||
name = "certifi"
|
||||
version = "2025.6.15"
|
||||
version = "2025.7.9"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/73/f7/f14b46d4bcd21092d7d3ccef689615220d8a08fb25e564b65d20738e672e/certifi-2025.6.15.tar.gz", hash = "sha256:d747aa5a8b9bbbb1bb8c22bb13e22bd1f18e9796defa16bab421f7f7a317323b", size = 158753, upload-time = "2025-06-15T02:45:51.329Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/de/8a/c729b6b60c66a38f590c4e774decc4b2ec7b0576be8f1aa984a53ffa812a/certifi-2025.7.9.tar.gz", hash = "sha256:c1d2ec05395148ee10cf672ffc28cd37ea0ab0d99f9cc74c43e588cbd111b079", size = 160386, upload-time = "2025-07-09T02:13:58.874Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/84/ae/320161bd181fc06471eed047ecce67b693fd7515b16d495d8932db763426/certifi-2025.6.15-py3-none-any.whl", hash = "sha256:2e0c7ce7cb5d8f8634ca55d2ba7e6ec2689a2fd6537d8dec1296a477a4910057", size = 157650, upload-time = "2025-06-15T02:45:49.977Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/66/f3/80a3f974c8b535d394ff960a11ac20368e06b736da395b551a49ce950cce/certifi-2025.7.9-py3-none-any.whl", hash = "sha256:d842783a14f8fdd646895ac26f719a061408834473cfc10203f6a575beb15d39", size = 159230, upload-time = "2025-07-09T02:13:57.007Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -99,9 +99,40 @@ wheels = [
|
|||
{ url = "https://files.pythonhosted.org/packages/8e/ca/6a667ccbe649856dcd3458bab80b016681b274399d6211187c6ab969fc50/courlan-1.3.2-py3-none-any.whl", hash = "sha256:d0dab52cf5b5b1000ee2839fbc2837e93b2514d3cb5bb61ae158a55b7a04c6be", size = 33848, upload-time = "2024-10-29T16:40:18.325Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "coverage"
|
||||
version = "7.9.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/04/b7/c0465ca253df10a9e8dae0692a4ae6e9726d245390aaef92360e1d6d3832/coverage-7.9.2.tar.gz", hash = "sha256:997024fa51e3290264ffd7492ec97d0690293ccd2b45a6cd7d82d945a4a80c8b", size = 813556, upload-time = "2025-07-03T10:54:15.101Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/94/9d/7a8edf7acbcaa5e5c489a646226bed9591ee1c5e6a84733c0140e9ce1ae1/coverage-7.9.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:985abe7f242e0d7bba228ab01070fde1d6c8fa12f142e43debe9ed1dde686038", size = 212367, upload-time = "2025-07-03T10:53:25.811Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e8/9e/5cd6f130150712301f7e40fb5865c1bc27b97689ec57297e568d972eec3c/coverage-7.9.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82c3939264a76d44fde7f213924021ed31f55ef28111a19649fec90c0f109e6d", size = 212632, upload-time = "2025-07-03T10:53:27.075Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a8/de/6287a2c2036f9fd991c61cefa8c64e57390e30c894ad3aa52fac4c1e14a8/coverage-7.9.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae5d563e970dbe04382f736ec214ef48103d1b875967c89d83c6e3f21706d5b3", size = 245793, upload-time = "2025-07-03T10:53:28.408Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/06/cc/9b5a9961d8160e3cb0b558c71f8051fe08aa2dd4b502ee937225da564ed1/coverage-7.9.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdd612e59baed2a93c8843c9a7cb902260f181370f1d772f4842987535071d14", size = 243006, upload-time = "2025-07-03T10:53:29.754Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/49/d9/4616b787d9f597d6443f5588619c1c9f659e1f5fc9eebf63699eb6d34b78/coverage-7.9.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:256ea87cb2a1ed992bcdfc349d8042dcea1b80436f4ddf6e246d6bee4b5d73b6", size = 244990, upload-time = "2025-07-03T10:53:31.098Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/48/83/801cdc10f137b2d02b005a761661649ffa60eb173dcdaeb77f571e4dc192/coverage-7.9.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f44ae036b63c8ea432f610534a2668b0c3aee810e7037ab9d8ff6883de480f5b", size = 245157, upload-time = "2025-07-03T10:53:32.717Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c8/a4/41911ed7e9d3ceb0ffb019e7635468df7499f5cc3edca5f7dfc078e9c5ec/coverage-7.9.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:82d76ad87c932935417a19b10cfe7abb15fd3f923cfe47dbdaa74ef4e503752d", size = 243128, upload-time = "2025-07-03T10:53:34.009Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/10/41/344543b71d31ac9cb00a664d5d0c9ef134a0fe87cb7d8430003b20fa0b7d/coverage-7.9.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:619317bb86de4193debc712b9e59d5cffd91dc1d178627ab2a77b9870deb2868", size = 244511, upload-time = "2025-07-03T10:53:35.434Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d5/81/3b68c77e4812105e2a060f6946ba9e6f898ddcdc0d2bfc8b4b152a9ae522/coverage-7.9.2-cp313-cp313-win32.whl", hash = "sha256:0a07757de9feb1dfafd16ab651e0f628fd7ce551604d1bf23e47e1ddca93f08a", size = 214765, upload-time = "2025-07-03T10:53:36.787Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/06/a2/7fac400f6a346bb1a4004eb2a76fbff0e242cd48926a2ce37a22a6a1d917/coverage-7.9.2-cp313-cp313-win_amd64.whl", hash = "sha256:115db3d1f4d3f35f5bb021e270edd85011934ff97c8797216b62f461dd69374b", size = 215536, upload-time = "2025-07-03T10:53:38.188Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/08/47/2c6c215452b4f90d87017e61ea0fd9e0486bb734cb515e3de56e2c32075f/coverage-7.9.2-cp313-cp313-win_arm64.whl", hash = "sha256:48f82f889c80af8b2a7bb6e158d95a3fbec6a3453a1004d04e4f3b5945a02694", size = 213943, upload-time = "2025-07-03T10:53:39.492Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a3/46/e211e942b22d6af5e0f323faa8a9bc7c447a1cf1923b64c47523f36ed488/coverage-7.9.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:55a28954545f9d2f96870b40f6c3386a59ba8ed50caf2d949676dac3ecab99f5", size = 213088, upload-time = "2025-07-03T10:53:40.874Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d2/2f/762551f97e124442eccd907bf8b0de54348635b8866a73567eb4e6417acf/coverage-7.9.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cdef6504637731a63c133bb2e6f0f0214e2748495ec15fe42d1e219d1b133f0b", size = 213298, upload-time = "2025-07-03T10:53:42.218Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7a/b7/76d2d132b7baf7360ed69be0bcab968f151fa31abe6d067f0384439d9edb/coverage-7.9.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bcd5ebe66c7a97273d5d2ddd4ad0ed2e706b39630ed4b53e713d360626c3dbb3", size = 256541, upload-time = "2025-07-03T10:53:43.823Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a0/17/392b219837d7ad47d8e5974ce5f8dc3deb9f99a53b3bd4d123602f960c81/coverage-7.9.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9303aed20872d7a3c9cb39c5d2b9bdbe44e3a9a1aecb52920f7e7495410dfab8", size = 252761, upload-time = "2025-07-03T10:53:45.19Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d5/77/4256d3577fe1b0daa8d3836a1ebe68eaa07dd2cbaf20cf5ab1115d6949d4/coverage-7.9.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc18ea9e417a04d1920a9a76fe9ebd2f43ca505b81994598482f938d5c315f46", size = 254917, upload-time = "2025-07-03T10:53:46.931Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/53/99/fc1a008eef1805e1ddb123cf17af864743354479ea5129a8f838c433cc2c/coverage-7.9.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6406cff19880aaaadc932152242523e892faff224da29e241ce2fca329866584", size = 256147, upload-time = "2025-07-03T10:53:48.289Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/92/c0/f63bf667e18b7f88c2bdb3160870e277c4874ced87e21426128d70aa741f/coverage-7.9.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2d0d4f6ecdf37fcc19c88fec3e2277d5dee740fb51ffdd69b9579b8c31e4232e", size = 254261, upload-time = "2025-07-03T10:53:49.99Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8c/32/37dd1c42ce3016ff8ec9e4b607650d2e34845c0585d3518b2a93b4830c1a/coverage-7.9.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c33624f50cf8de418ab2b4d6ca9eda96dc45b2c4231336bac91454520e8d1fac", size = 255099, upload-time = "2025-07-03T10:53:51.354Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/da/2e/af6b86f7c95441ce82f035b3affe1cd147f727bbd92f563be35e2d585683/coverage-7.9.2-cp313-cp313t-win32.whl", hash = "sha256:1df6b76e737c6a92210eebcb2390af59a141f9e9430210595251fbaf02d46926", size = 215440, upload-time = "2025-07-03T10:53:52.808Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4d/bb/8a785d91b308867f6b2e36e41c569b367c00b70c17f54b13ac29bcd2d8c8/coverage-7.9.2-cp313-cp313t-win_amd64.whl", hash = "sha256:f5fd54310b92741ebe00d9c0d1d7b2b27463952c022da6d47c175d246a98d1bd", size = 216537, upload-time = "2025-07-03T10:53:54.273Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1d/a0/a6bffb5e0f41a47279fd45a8f3155bf193f77990ae1c30f9c224b61cacb0/coverage-7.9.2-cp313-cp313t-win_arm64.whl", hash = "sha256:c48c2375287108c887ee87d13b4070a381c6537d30e8487b24ec721bf2a781cb", size = 214398, upload-time = "2025-07-03T10:53:56.715Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/38/bbe2e63902847cf79036ecc75550d0698af31c91c7575352eb25190d0fb3/coverage-7.9.2-py3-none-any.whl", hash = "sha256:e425cd5b00f6fc0ed7cdbd766c70be8baab4b7839e4d4fe5fac48581dd968ea4", size = 204005, upload-time = "2025-07-03T10:54:13.491Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dateparser"
|
||||
version = "1.2.1"
|
||||
version = "1.2.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "python-dateutil" },
|
||||
|
@ -109,23 +140,44 @@ dependencies = [
|
|||
{ name = "regex" },
|
||||
{ name = "tzlocal" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/bd/3f/d3207a05f5b6a78c66d86631e60bfba5af163738a599a5b9aa2c2737a09e/dateparser-1.2.1.tar.gz", hash = "sha256:7e4919aeb48481dbfc01ac9683c8e20bfe95bb715a38c1e9f6af889f4f30ccc3", size = 309924, upload-time = "2025-02-05T12:34:55.593Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a9/30/064144f0df1749e7bb5faaa7f52b007d7c2d08ec08fed8411aba87207f68/dateparser-1.2.2.tar.gz", hash = "sha256:986316f17cb8cdc23ea8ce563027c5ef12fc725b6fb1d137c14ca08777c5ecf7", size = 329840, upload-time = "2025-06-26T09:29:23.211Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/cf/0a/981c438c4cd84147c781e4e96c1d72df03775deb1bc76c5a6ee8afa89c62/dateparser-1.2.1-py3-none-any.whl", hash = "sha256:bdcac262a467e6260030040748ad7c10d6bacd4f3b9cdb4cfd2251939174508c", size = 295658, upload-time = "2025-02-05T12:34:53.1Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/87/22/f020c047ae1346613db9322638186468238bcfa8849b4668a22b97faad65/dateparser-1.2.2-py3-none-any.whl", hash = "sha256:5a5d7211a09013499867547023a2a0c91d5a27d15dd4dbcea676ea9fe66f2482", size = 315453, upload-time = "2025-06-26T09:29:21.412Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fastapi"
|
||||
version = "0.115.12"
|
||||
version = "0.116.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pydantic" },
|
||||
{ name = "starlette" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f4/55/ae499352d82338331ca1e28c7f4a63bfd09479b16395dce38cf50a39e2c2/fastapi-0.115.12.tar.gz", hash = "sha256:1e2c2a2646905f9e83d32f04a3f86aff4a286669c6c950ca95b5fd68c2602681", size = 295236, upload-time = "2025-03-23T22:55:43.822Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/20/38/e1da78736143fd885c36213a3ccc493c384ae8fea6a0f0bc272ef42ebea8/fastapi-0.116.0.tar.gz", hash = "sha256:80dc0794627af0390353a6d1171618276616310d37d24faba6648398e57d687a", size = 296518, upload-time = "2025-07-07T15:09:27.82Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/50/b3/b51f09c2ba432a576fe63758bddc81f78f0c6309d9e5c10d194313bf021e/fastapi-0.115.12-py3-none-any.whl", hash = "sha256:e94613d6c05e27be7ffebdd6ea5f388112e5e430c8f7d6494a9d1d88d43e814d", size = 95164, upload-time = "2025-03-23T22:55:42.101Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/68/d80347fe2360445b5f58cf290e588a4729746e7501080947e6cdae114b1f/fastapi-0.116.0-py3-none-any.whl", hash = "sha256:fdcc9ed272eaef038952923bef2b735c02372402d1203ee1210af4eea7a78d2b", size = 95625, upload-time = "2025-07-07T15:09:26.348Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "geographiclib"
|
||||
version = "2.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/96/cd/90271fd195d79a9c2af0ca21632b297a6cc3e852e0413a2e4519e67be213/geographiclib-2.0.tar.gz", hash = "sha256:f7f41c85dc3e1c2d3d935ec86660dc3b2c848c83e17f9a9e51ba9d5146a15859", size = 36720, upload-time = "2022-04-23T13:01:11.495Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/9f/5a/a26132406f1f40cf51ea349a5f11b0a46cec02a2031ff82e391c2537247a/geographiclib-2.0-py3-none-any.whl", hash = "sha256:6b7225248e45ff7edcee32becc4e0a1504c606ac5ee163a5656d482e0cd38734", size = 40324, upload-time = "2022-04-23T13:01:09.958Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "geopy"
|
||||
version = "2.4.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "geographiclib" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/0e/fd/ef6d53875ceab72c1fad22dbed5ec1ad04eb378c2251a6a8024bad890c3b/geopy-2.4.1.tar.gz", hash = "sha256:50283d8e7ad07d89be5cb027338c6365a32044df3ae2556ad3f52f4840b3d0d1", size = 117625, upload-time = "2023-11-23T21:49:32.734Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e5/15/cf2a69ade4b194aa524ac75112d5caac37414b20a3a03e6865dfe0bd1539/geopy-2.4.1-py3-none-any.whl", hash = "sha256:ae8b4bc5c1131820f4d75fce9d4aaaca0c85189b3aa5d64c3dcaf5e3b7b882a7", size = 125437, upload-time = "2023-11-23T21:49:30.421Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -153,6 +205,19 @@ wheels = [
|
|||
{ url = "https://files.pythonhosted.org/packages/05/49/8872130016209c20436ce0c1067de1cf630755d0443d068a5bc17fa95015/htmldate-1.9.3-py3-none-any.whl", hash = "sha256:3fadc422cf3c10a5cdb5e1b914daf37ec7270400a80a1b37e2673ff84faaaff8", size = 31565, upload-time = "2024-12-30T12:52:32.145Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "httpcore"
|
||||
version = "1.0.9"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "certifi" },
|
||||
{ name = "h11" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "httptools"
|
||||
version = "0.6.4"
|
||||
|
@ -168,6 +233,21 @@ wheels = [
|
|||
{ url = "https://files.pythonhosted.org/packages/4d/dc/7decab5c404d1d2cdc1bb330b1bf70e83d6af0396fd4fc76fc60c0d522bf/httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8", size = 87682, upload-time = "2024-10-16T19:44:46.46Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "httpx"
|
||||
version = "0.28.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "anyio" },
|
||||
{ name = "certifi" },
|
||||
{ name = "httpcore" },
|
||||
{ name = "idna" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "idna"
|
||||
version = "3.10"
|
||||
|
@ -177,6 +257,15 @@ wheels = [
|
|||
{ url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "iniconfig"
|
||||
version = "2.1.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "justext"
|
||||
version = "3.0.2"
|
||||
|
@ -233,32 +322,32 @@ wheels = [
|
|||
|
||||
[[package]]
|
||||
name = "numpy"
|
||||
version = "2.3.0"
|
||||
version = "2.3.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f3/db/8e12381333aea300890829a0a36bfa738cac95475d88982d538725143fd9/numpy-2.3.0.tar.gz", hash = "sha256:581f87f9e9e9db2cba2141400e160e9dd644ee248788d6f90636eeb8fd9260a6", size = 20382813, upload-time = "2025-06-07T14:54:32.608Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/2e/19/d7c972dfe90a353dbd3efbbe1d14a5951de80c99c9dc1b93cd998d51dc0f/numpy-2.3.1.tar.gz", hash = "sha256:1ec9ae20a4226da374362cca3c62cd753faf2f951440b0e3b98e93c235441d2b", size = 20390372, upload-time = "2025-06-21T12:28:33.469Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/73/fc/1d67f751fd4dbafc5780244fe699bc4084268bad44b7c5deb0492473127b/numpy-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5754ab5595bfa2c2387d241296e0381c21f44a4b90a776c3c1d39eede13a746a", size = 20889633, upload-time = "2025-06-07T14:44:06.839Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e8/95/73ffdb69e5c3f19ec4530f8924c4386e7ba097efc94b9c0aff607178ad94/numpy-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d11fa02f77752d8099573d64e5fe33de3229b6632036ec08f7080f46b6649959", size = 14151683, upload-time = "2025-06-07T14:44:28.847Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/64/d5/06d4bb31bb65a1d9c419eb5676173a2f90fd8da3c59f816cc54c640ce265/numpy-2.3.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:aba48d17e87688a765ab1cd557882052f238e2f36545dfa8e29e6a91aef77afe", size = 5102683, upload-time = "2025-06-07T14:44:38.417Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/12/8b/6c2cef44f8ccdc231f6b56013dff1d71138c48124334aded36b1a1b30c5a/numpy-2.3.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:4dc58865623023b63b10d52f18abaac3729346a7a46a778381e0e3af4b7f3beb", size = 6640253, upload-time = "2025-06-07T14:44:49.359Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/62/aa/fca4bf8de3396ddb59544df9b75ffe5b73096174de97a9492d426f5cd4aa/numpy-2.3.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:df470d376f54e052c76517393fa443758fefcdd634645bc9c1f84eafc67087f0", size = 14258658, upload-time = "2025-06-07T14:45:10.156Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1c/12/734dce1087eed1875f2297f687e671cfe53a091b6f2f55f0c7241aad041b/numpy-2.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:87717eb24d4a8a64683b7a4e91ace04e2f5c7c77872f823f02a94feee186168f", size = 16628765, upload-time = "2025-06-07T14:45:35.076Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/48/03/ffa41ade0e825cbcd5606a5669962419528212a16082763fc051a7247d76/numpy-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d8fa264d56882b59dcb5ea4d6ab6f31d0c58a57b41aec605848b6eb2ef4a43e8", size = 15564335, upload-time = "2025-06-07T14:45:58.797Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/07/58/869398a11863310aee0ff85a3e13b4c12f20d032b90c4b3ee93c3b728393/numpy-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e651756066a0eaf900916497e20e02fe1ae544187cb0fe88de981671ee7f6270", size = 18360608, upload-time = "2025-06-07T14:46:25.687Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/8a/5756935752ad278c17e8a061eb2127c9a3edf4ba2c31779548b336f23c8d/numpy-2.3.0-cp313-cp313-win32.whl", hash = "sha256:e43c3cce3b6ae5f94696669ff2a6eafd9a6b9332008bafa4117af70f4b88be6f", size = 6310005, upload-time = "2025-06-07T14:50:13.138Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/08/60/61d60cf0dfc0bf15381eaef46366ebc0c1a787856d1db0c80b006092af84/numpy-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:81ae0bf2564cf475f94be4a27ef7bcf8af0c3e28da46770fc904da9abd5279b5", size = 12729093, upload-time = "2025-06-07T14:50:31.82Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/66/31/2f2f2d2b3e3c32d5753d01437240feaa32220b73258c9eef2e42a0832866/numpy-2.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:c8738baa52505fa6e82778580b23f945e3578412554d937093eac9205e845e6e", size = 9885689, upload-time = "2025-06-07T14:50:47.888Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f1/89/c7828f23cc50f607ceb912774bb4cff225ccae7131c431398ad8400e2c98/numpy-2.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:39b27d8b38942a647f048b675f134dd5a567f95bfff481f9109ec308515c51d8", size = 20986612, upload-time = "2025-06-07T14:46:56.077Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dd/46/79ecf47da34c4c50eedec7511e53d57ffdfd31c742c00be7dc1d5ffdb917/numpy-2.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0eba4a1ea88f9a6f30f56fdafdeb8da3774349eacddab9581a21234b8535d3d3", size = 14298953, upload-time = "2025-06-07T14:47:18.053Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/59/44/f6caf50713d6ff4480640bccb2a534ce1d8e6e0960c8f864947439f0ee95/numpy-2.3.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:b0f1f11d0a1da54927436505a5a7670b154eac27f5672afc389661013dfe3d4f", size = 5225806, upload-time = "2025-06-07T14:47:27.524Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a6/43/e1fd1aca7c97e234dd05e66de4ab7a5be54548257efcdd1bc33637e72102/numpy-2.3.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:690d0a5b60a47e1f9dcec7b77750a4854c0d690e9058b7bef3106e3ae9117808", size = 6735169, upload-time = "2025-06-07T14:47:38.057Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/89/f76f93b06a03177c0faa7ca94d0856c4e5c4bcaf3c5f77640c9ed0303e1c/numpy-2.3.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:8b51ead2b258284458e570942137155978583e407babc22e3d0ed7af33ce06f8", size = 14330701, upload-time = "2025-06-07T14:47:59.113Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/aa/f5/4858c3e9ff7a7d64561b20580cf7cc5d085794bd465a19604945d6501f6c/numpy-2.3.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:aaf81c7b82c73bd9b45e79cfb9476cb9c29e937494bfe9092c26aece812818ad", size = 16692983, upload-time = "2025-06-07T14:48:24.196Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/08/17/0e3b4182e691a10e9483bcc62b4bb8693dbf9ea5dc9ba0b77a60435074bb/numpy-2.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:f420033a20b4f6a2a11f585f93c843ac40686a7c3fa514060a97d9de93e5e72b", size = 15641435, upload-time = "2025-06-07T14:48:47.712Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4e/d5/463279fda028d3c1efa74e7e8d507605ae87f33dbd0543cf4c4527c8b882/numpy-2.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d344ca32ab482bcf8735d8f95091ad081f97120546f3d250240868430ce52555", size = 18433798, upload-time = "2025-06-07T14:49:14.866Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0e/1e/7a9d98c886d4c39a2b4d3a7c026bffcf8fbcaf518782132d12a301cfc47a/numpy-2.3.0-cp313-cp313t-win32.whl", hash = "sha256:48a2e8eaf76364c32a1feaa60d6925eaf32ed7a040183b807e02674305beef61", size = 6438632, upload-time = "2025-06-07T14:49:25.67Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fe/ab/66fc909931d5eb230107d016861824f335ae2c0533f422e654e5ff556784/numpy-2.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ba17f93a94e503551f154de210e4d50c5e3ee20f7e7a1b5f6ce3f22d419b93bb", size = 12868491, upload-time = "2025-06-07T14:49:44.898Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ee/e8/2c8a1c9e34d6f6d600c83d5ce5b71646c32a13f34ca5c518cc060639841c/numpy-2.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:f14e016d9409680959691c109be98c436c6249eaf7f118b424679793607b5944", size = 9935345, upload-time = "2025-06-07T14:50:02.311Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d4/bd/35ad97006d8abff8631293f8ea6adf07b0108ce6fec68da3c3fcca1197f2/numpy-2.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:25a1992b0a3fdcdaec9f552ef10d8103186f5397ab45e2d25f8ac51b1a6b97e8", size = 20889381, upload-time = "2025-06-21T12:19:04.103Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f1/4f/df5923874d8095b6062495b39729178eef4a922119cee32a12ee1bd4664c/numpy-2.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7dea630156d39b02a63c18f508f85010230409db5b2927ba59c8ba4ab3e8272e", size = 14152726, upload-time = "2025-06-21T12:19:25.599Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8c/0f/a1f269b125806212a876f7efb049b06c6f8772cf0121139f97774cd95626/numpy-2.3.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:bada6058dd886061f10ea15f230ccf7dfff40572e99fef440a4a857c8728c9c0", size = 5105145, upload-time = "2025-06-21T12:19:34.782Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/63/a7f7fd5f375b0361682f6ffbf686787e82b7bbd561268e4f30afad2bb3c0/numpy-2.3.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:a894f3816eb17b29e4783e5873f92faf55b710c2519e5c351767c51f79d8526d", size = 6639409, upload-time = "2025-06-21T12:19:45.228Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bf/0d/1854a4121af895aab383f4aa233748f1df4671ef331d898e32426756a8a6/numpy-2.3.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:18703df6c4a4fee55fd3d6e5a253d01c5d33a295409b03fda0c86b3ca2ff41a1", size = 14257630, upload-time = "2025-06-21T12:20:06.544Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/50/30/af1b277b443f2fb08acf1c55ce9d68ee540043f158630d62cef012750f9f/numpy-2.3.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:5902660491bd7a48b2ec16c23ccb9124b8abfd9583c5fdfa123fe6b421e03de1", size = 16627546, upload-time = "2025-06-21T12:20:31.002Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6e/ec/3b68220c277e463095342d254c61be8144c31208db18d3fd8ef02712bcd6/numpy-2.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:36890eb9e9d2081137bd78d29050ba63b8dab95dff7912eadf1185e80074b2a0", size = 15562538, upload-time = "2025-06-21T12:20:54.322Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/77/2b/4014f2bcc4404484021c74d4c5ee8eb3de7e3f7ac75f06672f8dcf85140a/numpy-2.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a780033466159c2270531e2b8ac063704592a0bc62ec4a1b991c7c40705eb0e8", size = 18360327, upload-time = "2025-06-21T12:21:21.053Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/40/8d/2ddd6c9b30fcf920837b8672f6c65590c7d92e43084c25fc65edc22e93ca/numpy-2.3.1-cp313-cp313-win32.whl", hash = "sha256:39bff12c076812595c3a306f22bfe49919c5513aa1e0e70fac756a0be7c2a2b8", size = 6312330, upload-time = "2025-06-21T12:25:07.447Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dd/c8/beaba449925988d415efccb45bf977ff8327a02f655090627318f6398c7b/numpy-2.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:8d5ee6eec45f08ce507a6570e06f2f879b374a552087a4179ea7838edbcbfa42", size = 12731565, upload-time = "2025-06-21T12:25:26.444Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0b/c3/5c0c575d7ec78c1126998071f58facfc124006635da75b090805e642c62e/numpy-2.3.1-cp313-cp313-win_arm64.whl", hash = "sha256:0c4d9e0a8368db90f93bd192bfa771ace63137c3488d198ee21dfb8e7771916e", size = 10190262, upload-time = "2025-06-21T12:25:42.196Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/19/a029cd335cf72f79d2644dcfc22d90f09caa86265cbbde3b5702ccef6890/numpy-2.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:b0b5397374f32ec0649dd98c652a1798192042e715df918c20672c62fb52d4b8", size = 20987593, upload-time = "2025-06-21T12:21:51.664Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/25/91/8ea8894406209107d9ce19b66314194675d31761fe2cb3c84fe2eeae2f37/numpy-2.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c5bdf2015ccfcee8253fb8be695516ac4457c743473a43290fd36eba6a1777eb", size = 14300523, upload-time = "2025-06-21T12:22:13.583Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a6/7f/06187b0066eefc9e7ce77d5f2ddb4e314a55220ad62dd0bfc9f2c44bac14/numpy-2.3.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d70f20df7f08b90a2062c1f07737dd340adccf2068d0f1b9b3d56e2038979fee", size = 5227993, upload-time = "2025-06-21T12:22:22.53Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e8/ec/a926c293c605fa75e9cfb09f1e4840098ed46d2edaa6e2152ee35dc01ed3/numpy-2.3.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:2fb86b7e58f9ac50e1e9dd1290154107e47d1eef23a0ae9145ded06ea606f992", size = 6736652, upload-time = "2025-06-21T12:22:33.629Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e3/62/d68e52fb6fde5586650d4c0ce0b05ff3a48ad4df4ffd1b8866479d1d671d/numpy-2.3.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:23ab05b2d241f76cb883ce8b9a93a680752fbfcbd51c50eff0b88b979e471d8c", size = 14331561, upload-time = "2025-06-21T12:22:55.056Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fc/ec/b74d3f2430960044bdad6900d9f5edc2dc0fb8bf5a0be0f65287bf2cbe27/numpy-2.3.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:ce2ce9e5de4703a673e705183f64fd5da5bf36e7beddcb63a25ee2286e71ca48", size = 16693349, upload-time = "2025-06-21T12:23:20.53Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0d/15/def96774b9d7eb198ddadfcbd20281b20ebb510580419197e225f5c55c3e/numpy-2.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c4913079974eeb5c16ccfd2b1f09354b8fed7e0d6f2cab933104a09a6419b1ee", size = 15642053, upload-time = "2025-06-21T12:23:43.697Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/57/c3203974762a759540c6ae71d0ea2341c1fa41d84e4971a8e76d7141678a/numpy-2.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:010ce9b4f00d5c036053ca684c77441f2f2c934fd23bee058b4d6f196efd8280", size = 18434184, upload-time = "2025-06-21T12:24:10.708Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/8a/ccdf201457ed8ac6245187850aff4ca56a79edbea4829f4e9f14d46fa9a5/numpy-2.3.1-cp313-cp313t-win32.whl", hash = "sha256:6269b9edfe32912584ec496d91b00b6d34282ca1d07eb10e82dfc780907d6c2e", size = 6440678, upload-time = "2025-06-21T12:24:21.596Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f1/7e/7f431d8bd8eb7e03d79294aed238b1b0b174b3148570d03a8a8a8f6a0da9/numpy-2.3.1-cp313-cp313t-win_amd64.whl", hash = "sha256:2a809637460e88a113e186e87f228d74ae2852a2e0c44de275263376f17b5bdc", size = 12870697, upload-time = "2025-06-21T12:24:40.644Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d4/ca/af82bf0fad4c3e573c6930ed743b5308492ff19917c7caaf2f9b6f9e2e98/numpy-2.3.1-cp313-cp313t-win_arm64.whl", hash = "sha256:eccb9a159db9aed60800187bc47a6d3451553f0e1b08b068d8b277ddfbb9b244", size = 10260376, upload-time = "2025-06-21T12:24:56.884Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -267,11 +356,14 @@ version = "0.1.0"
|
|||
source = { editable = "." }
|
||||
dependencies = [
|
||||
{ name = "fastapi" },
|
||||
{ name = "geopy" },
|
||||
{ name = "httpx" },
|
||||
{ name = "pydantic" },
|
||||
{ name = "pydantic-settings" },
|
||||
{ name = "python-dateutil" },
|
||||
{ name = "python-multipart" },
|
||||
{ name = "pytz" },
|
||||
{ name = "pyyaml" },
|
||||
{ name = "requests" },
|
||||
{ name = "reverse-geocoder" },
|
||||
{ name = "trafilatura" },
|
||||
|
@ -280,17 +372,26 @@ dependencies = [
|
|||
|
||||
[package.dev-dependencies]
|
||||
dev = [
|
||||
{ name = "httpx" },
|
||||
{ name = "pytest" },
|
||||
{ name = "pytest-asyncio" },
|
||||
{ name = "pytest-cov" },
|
||||
{ name = "pytest-mock" },
|
||||
{ name = "respx" },
|
||||
{ name = "ruff" },
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "fastapi", specifier = ">=0" },
|
||||
{ name = "geopy", specifier = ">=2" },
|
||||
{ name = "httpx", specifier = ">=0" },
|
||||
{ name = "pydantic", specifier = ">=2" },
|
||||
{ name = "pydantic-settings", specifier = ">=2" },
|
||||
{ name = "python-dateutil", specifier = ">=2" },
|
||||
{ name = "python-multipart", specifier = ">=0" },
|
||||
{ name = "pytz", specifier = ">=2025" },
|
||||
{ name = "pyyaml", specifier = ">=6" },
|
||||
{ name = "requests", specifier = ">=2" },
|
||||
{ name = "reverse-geocoder", specifier = ">=1" },
|
||||
{ name = "trafilatura", specifier = ">=2" },
|
||||
|
@ -298,7 +399,33 @@ requires-dist = [
|
|||
]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
dev = [{ name = "ruff", specifier = ">=0" }]
|
||||
dev = [
|
||||
{ name = "httpx", specifier = ">=0" },
|
||||
{ name = "pytest", specifier = ">=8" },
|
||||
{ name = "pytest-asyncio", specifier = ">=0" },
|
||||
{ name = "pytest-cov", specifier = ">=0" },
|
||||
{ name = "pytest-mock", specifier = ">=0" },
|
||||
{ name = "respx", specifier = ">=0" },
|
||||
{ name = "ruff", specifier = ">=0" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "packaging"
|
||||
version = "25.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pluggy"
|
||||
version = "1.6.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pydantic"
|
||||
|
@ -345,16 +472,79 @@ wheels = [
|
|||
|
||||
[[package]]
|
||||
name = "pydantic-settings"
|
||||
version = "2.9.1"
|
||||
version = "2.10.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pydantic" },
|
||||
{ name = "python-dotenv" },
|
||||
{ name = "typing-inspection" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/67/1d/42628a2c33e93f8e9acbde0d5d735fa0850f3e6a2f8cb1eb6c40b9a732ac/pydantic_settings-2.9.1.tar.gz", hash = "sha256:c509bf79d27563add44e8446233359004ed85066cd096d8b510f715e6ef5d268", size = 163234, upload-time = "2025-04-18T16:44:48.265Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/68/85/1ea668bbab3c50071ca613c6ab30047fb36ab0da1b92fa8f17bbc38fd36c/pydantic_settings-2.10.1.tar.gz", hash = "sha256:06f0062169818d0f5524420a360d632d5857b83cffd4d42fe29597807a1614ee", size = 172583, upload-time = "2025-06-24T13:26:46.841Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b6/5f/d6d641b490fd3ec2c4c13b4244d68deea3a1b970a97be64f34fb5504ff72/pydantic_settings-2.9.1-py3-none-any.whl", hash = "sha256:59b4f431b1defb26fe620c71a7d3968a710d719f5f4cdbbdb7926edeb770f6ef", size = 44356, upload-time = "2025-04-18T16:44:46.617Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/58/f0/427018098906416f580e3cf1366d3b1abfb408a0652e9f31600c24a1903c/pydantic_settings-2.10.1-py3-none-any.whl", hash = "sha256:a60952460b99cf661dc25c29c0ef171721f98bfcb52ef8d9ea4c943d7c8cc796", size = 45235, upload-time = "2025-06-24T13:26:45.485Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pygments"
|
||||
version = "2.19.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytest"
|
||||
version = "8.4.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
||||
{ name = "iniconfig" },
|
||||
{ name = "packaging" },
|
||||
{ name = "pluggy" },
|
||||
{ name = "pygments" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/08/ba/45911d754e8eba3d5a841a5ce61a65a685ff1798421ac054f85aa8747dfb/pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c", size = 1517714, upload-time = "2025-06-18T05:48:06.109Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/29/16/c8a903f4c4dffe7a12843191437d7cd8e32751d5de349d45d3fe69544e87/pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", size = 365474, upload-time = "2025-06-18T05:48:03.955Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytest-asyncio"
|
||||
version = "1.0.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pytest" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d0/d4/14f53324cb1a6381bef29d698987625d80052bb33932d8e7cbf9b337b17c/pytest_asyncio-1.0.0.tar.gz", hash = "sha256:d15463d13f4456e1ead2594520216b225a16f781e144f8fdf6c5bb4667c48b3f", size = 46960, upload-time = "2025-05-26T04:54:40.484Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/30/05/ce271016e351fddc8399e546f6e23761967ee09c8c568bbfbecb0c150171/pytest_asyncio-1.0.0-py3-none-any.whl", hash = "sha256:4f024da9f1ef945e680dc68610b52550e36590a67fd31bb3b4943979a1f90ef3", size = 15976, upload-time = "2025-05-26T04:54:39.035Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytest-cov"
|
||||
version = "6.2.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "coverage" },
|
||||
{ name = "pluggy" },
|
||||
{ name = "pytest" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/18/99/668cade231f434aaa59bbfbf49469068d2ddd945000621d3d165d2e7dd7b/pytest_cov-6.2.1.tar.gz", hash = "sha256:25cc6cc0a5358204b8108ecedc51a9b57b34cc6b8c967cc2c01a4e00d8a67da2", size = 69432, upload-time = "2025-06-12T10:47:47.684Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/16/4ea354101abb1287856baa4af2732be351c7bee728065aed451b678153fd/pytest_cov-6.2.1-py3-none-any.whl", hash = "sha256:f5bc4c23f42f1cdd23c70b1dab1bbaef4fc505ba950d53e0081d0730dd7e86d5", size = 24644, upload-time = "2025-06-12T10:47:45.932Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytest-mock"
|
||||
version = "3.14.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pytest" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/71/28/67172c96ba684058a4d24ffe144d64783d2a270d0af0d9e792737bddc75c/pytest_mock-3.14.1.tar.gz", hash = "sha256:159e9edac4c451ce77a5cdb9fc5d1100708d2dd4ba3c3df572f14097351af80e", size = 33241, upload-time = "2025-05-26T13:58:45.167Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b2/05/77b60e520511c53d1c1ca75f1930c7dd8e971d0c4379b7f4b3f9644685ba/pytest_mock-3.14.1-py3-none-any.whl", hash = "sha256:178aefcd11307d874b4cd3100344e7e2d888d9791a6a1d9bfe90fbc1b74fd1d0", size = 9923, upload-time = "2025-05-26T13:58:43.487Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -371,11 +561,11 @@ wheels = [
|
|||
|
||||
[[package]]
|
||||
name = "python-dotenv"
|
||||
version = "1.1.0"
|
||||
version = "1.1.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/88/2c/7bb1416c5620485aa793f2de31d3df393d3686aa8a8506d11e10e13c5baf/python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5", size = 39920, upload-time = "2025-03-25T10:14:56.835Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256, upload-time = "2025-03-25T10:14:55.034Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -451,6 +641,18 @@ wheels = [
|
|||
{ url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 64847, upload-time = "2025-06-09T16:43:05.728Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "respx"
|
||||
version = "0.22.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "httpx" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f4/7c/96bd0bc759cf009675ad1ee1f96535edcb11e9666b985717eb8c87192a95/respx-0.22.0.tar.gz", hash = "sha256:3c8924caa2a50bd71aefc07aa812f2466ff489f1848c96e954a5362d17095d91", size = 28439, upload-time = "2024-12-19T22:33:59.374Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/8e/67/afbb0978d5399bc9ea200f1d4489a23c9a1dad4eee6376242b8182389c79/respx-0.22.0-py2.py3-none-any.whl", hash = "sha256:631128d4c9aba15e56903fb5f66fb1eff412ce28dd387ca3a81339e52dbd3ad0", size = 25127, upload-time = "2024-12-19T22:33:57.837Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "reverse-geocoder"
|
||||
version = "1.5.1"
|
||||
|
@ -463,56 +665,56 @@ sdist = { url = "https://files.pythonhosted.org/packages/0b/0f/b7d5d4b36553731f1
|
|||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.11.13"
|
||||
version = "0.12.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ed/da/9c6f995903b4d9474b39da91d2d626659af3ff1eeb43e9ae7c119349dba6/ruff-0.11.13.tar.gz", hash = "sha256:26fa247dc68d1d4e72c179e08889a25ac0c7ba4d78aecfc835d49cbfd60bf514", size = 4282054, upload-time = "2025-06-05T21:00:15.721Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/6c/3d/d9a195676f25d00dbfcf3cf95fdd4c685c497fcfa7e862a44ac5e4e96480/ruff-0.12.2.tar.gz", hash = "sha256:d7b4f55cd6f325cb7621244f19c873c565a08aff5a4ba9c69aa7355f3f7afd3e", size = 4432239, upload-time = "2025-07-03T16:40:19.566Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/7d/ce/a11d381192966e0b4290842cc8d4fac7dc9214ddf627c11c1afff87da29b/ruff-0.11.13-py3-none-linux_armv6l.whl", hash = "sha256:4bdfbf1240533f40042ec00c9e09a3aade6f8c10b6414cf11b519488d2635d46", size = 10292516, upload-time = "2025-06-05T20:59:32.944Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/78/db/87c3b59b0d4e753e40b6a3b4a2642dfd1dcaefbff121ddc64d6c8b47ba00/ruff-0.11.13-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:aef9c9ed1b5ca28bb15c7eac83b8670cf3b20b478195bd49c8d756ba0a36cf48", size = 11106083, upload-time = "2025-06-05T20:59:37.03Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/77/79/d8cec175856ff810a19825d09ce700265f905c643c69f45d2b737e4a470a/ruff-0.11.13-py3-none-macosx_11_0_arm64.whl", hash = "sha256:53b15a9dfdce029c842e9a5aebc3855e9ab7771395979ff85b7c1dedb53ddc2b", size = 10436024, upload-time = "2025-06-05T20:59:39.741Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8b/5b/f6d94f2980fa1ee854b41568368a2e1252681b9238ab2895e133d303538f/ruff-0.11.13-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab153241400789138d13f362c43f7edecc0edfffce2afa6a68434000ecd8f69a", size = 10646324, upload-time = "2025-06-05T20:59:42.185Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/9c/b4c2acf24ea4426016d511dfdc787f4ce1ceb835f3c5fbdbcb32b1c63bda/ruff-0.11.13-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c51f93029d54a910d3d24f7dd0bb909e31b6cd989a5e4ac513f4eb41629f0dc", size = 10174416, upload-time = "2025-06-05T20:59:44.319Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f3/10/e2e62f77c65ede8cd032c2ca39c41f48feabedb6e282bfd6073d81bb671d/ruff-0.11.13-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1808b3ed53e1a777c2ef733aca9051dc9bf7c99b26ece15cb59a0320fbdbd629", size = 11724197, upload-time = "2025-06-05T20:59:46.935Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bb/f0/466fe8469b85c561e081d798c45f8a1d21e0b4a5ef795a1d7f1a9a9ec182/ruff-0.11.13-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:d28ce58b5ecf0f43c1b71edffabe6ed7f245d5336b17805803312ec9bc665933", size = 12511615, upload-time = "2025-06-05T20:59:49.534Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/17/0e/cefe778b46dbd0cbcb03a839946c8f80a06f7968eb298aa4d1a4293f3448/ruff-0.11.13-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55e4bc3a77842da33c16d55b32c6cac1ec5fb0fbec9c8c513bdce76c4f922165", size = 12117080, upload-time = "2025-06-05T20:59:51.654Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5d/2c/caaeda564cbe103bed145ea557cb86795b18651b0f6b3ff6a10e84e5a33f/ruff-0.11.13-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:633bf2c6f35678c56ec73189ba6fa19ff1c5e4807a78bf60ef487b9dd272cc71", size = 11326315, upload-time = "2025-06-05T20:59:54.469Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/75/f0/782e7d681d660eda8c536962920c41309e6dd4ebcea9a2714ed5127d44bd/ruff-0.11.13-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ffbc82d70424b275b089166310448051afdc6e914fdab90e08df66c43bb5ca9", size = 11555640, upload-time = "2025-06-05T20:59:56.986Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5d/d4/3d580c616316c7f07fb3c99dbecfe01fbaea7b6fd9a82b801e72e5de742a/ruff-0.11.13-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a9ddd3ec62a9a89578c85842b836e4ac832d4a2e0bfaad3b02243f930ceafcc", size = 10507364, upload-time = "2025-06-05T20:59:59.154Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5a/dc/195e6f17d7b3ea6b12dc4f3e9de575db7983db187c378d44606e5d503319/ruff-0.11.13-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d237a496e0778d719efb05058c64d28b757c77824e04ffe8796c7436e26712b7", size = 10141462, upload-time = "2025-06-05T21:00:01.481Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/8e/39a094af6967faa57ecdeacb91bedfb232474ff8c3d20f16a5514e6b3534/ruff-0.11.13-py3-none-musllinux_1_2_i686.whl", hash = "sha256:26816a218ca6ef02142343fd24c70f7cd8c5aa6c203bca284407adf675984432", size = 11121028, upload-time = "2025-06-05T21:00:04.06Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5a/c0/b0b508193b0e8a1654ec683ebab18d309861f8bd64e3a2f9648b80d392cb/ruff-0.11.13-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:51c3f95abd9331dc5b87c47ac7f376db5616041173826dfd556cfe3d4977f492", size = 11602992, upload-time = "2025-06-05T21:00:06.249Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7c/91/263e33ab93ab09ca06ce4f8f8547a858cc198072f873ebc9be7466790bae/ruff-0.11.13-py3-none-win32.whl", hash = "sha256:96c27935418e4e8e77a26bb05962817f28b8ef3843a6c6cc49d8783b5507f250", size = 10474944, upload-time = "2025-06-05T21:00:08.459Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/46/f4/7c27734ac2073aae8efb0119cae6931b6fb48017adf048fdf85c19337afc/ruff-0.11.13-py3-none-win_amd64.whl", hash = "sha256:29c3189895a8a6a657b7af4e97d330c8a3afd2c9c8f46c81e2fc5a31866517e3", size = 11548669, upload-time = "2025-06-05T21:00:11.147Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ec/bf/b273dd11673fed8a6bd46032c0ea2a04b2ac9bfa9c628756a5856ba113b0/ruff-0.11.13-py3-none-win_arm64.whl", hash = "sha256:b4385285e9179d608ff1d2fb9922062663c658605819a6876d8beef0c30b7f3b", size = 10683928, upload-time = "2025-06-05T21:00:13.758Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/74/b6/2098d0126d2d3318fd5bec3ad40d06c25d377d95749f7a0c5af17129b3b1/ruff-0.12.2-py3-none-linux_armv6l.whl", hash = "sha256:093ea2b221df1d2b8e7ad92fc6ffdca40a2cb10d8564477a987b44fd4008a7be", size = 10369761, upload-time = "2025-07-03T16:39:38.847Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b1/4b/5da0142033dbe155dc598cfb99262d8ee2449d76920ea92c4eeb9547c208/ruff-0.12.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:09e4cf27cc10f96b1708100fa851e0daf21767e9709e1649175355280e0d950e", size = 11155659, upload-time = "2025-07-03T16:39:42.294Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3e/21/967b82550a503d7c5c5c127d11c935344b35e8c521f52915fc858fb3e473/ruff-0.12.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:8ae64755b22f4ff85e9c52d1f82644abd0b6b6b6deedceb74bd71f35c24044cc", size = 10537769, upload-time = "2025-07-03T16:39:44.75Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/33/91/00cff7102e2ec71a4890fb7ba1803f2cdb122d82787c7d7cf8041fe8cbc1/ruff-0.12.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3eb3a6b2db4d6e2c77e682f0b988d4d61aff06860158fdb413118ca133d57922", size = 10717602, upload-time = "2025-07-03T16:39:47.652Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9b/eb/928814daec4e1ba9115858adcda44a637fb9010618721937491e4e2283b8/ruff-0.12.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:73448de992d05517170fc37169cbca857dfeaeaa8c2b9be494d7bcb0d36c8f4b", size = 10198772, upload-time = "2025-07-03T16:39:49.641Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/50/fa/f15089bc20c40f4f72334f9145dde55ab2b680e51afb3b55422effbf2fb6/ruff-0.12.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b8b94317cbc2ae4a2771af641739f933934b03555e51515e6e021c64441532d", size = 11845173, upload-time = "2025-07-03T16:39:52.069Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/43/9f/1f6f98f39f2b9302acc161a4a2187b1e3a97634fe918a8e731e591841cf4/ruff-0.12.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:45fc42c3bf1d30d2008023a0a9a0cfb06bf9835b147f11fe0679f21ae86d34b1", size = 12553002, upload-time = "2025-07-03T16:39:54.551Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d8/70/08991ac46e38ddd231c8f4fd05ef189b1b94be8883e8c0c146a025c20a19/ruff-0.12.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce48f675c394c37e958bf229fb5c1e843e20945a6d962cf3ea20b7a107dcd9f4", size = 12171330, upload-time = "2025-07-03T16:39:57.55Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/88/a9/5a55266fec474acfd0a1c73285f19dd22461d95a538f29bba02edd07a5d9/ruff-0.12.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:793d8859445ea47591272021a81391350205a4af65a9392401f418a95dfb75c9", size = 11774717, upload-time = "2025-07-03T16:39:59.78Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/87/e5/0c270e458fc73c46c0d0f7cf970bb14786e5fdb88c87b5e423a4bd65232b/ruff-0.12.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6932323db80484dda89153da3d8e58164d01d6da86857c79f1961934354992da", size = 11646659, upload-time = "2025-07-03T16:40:01.934Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b7/b6/45ab96070c9752af37f0be364d849ed70e9ccede07675b0ec4e3ef76b63b/ruff-0.12.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6aa7e623a3a11538108f61e859ebf016c4f14a7e6e4eba1980190cacb57714ce", size = 10604012, upload-time = "2025-07-03T16:40:04.363Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/91/26a6e6a424eb147cc7627eebae095cfa0b4b337a7c1c413c447c9ebb72fd/ruff-0.12.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2a4a20aeed74671b2def096bdf2eac610c7d8ffcbf4fb0e627c06947a1d7078d", size = 10176799, upload-time = "2025-07-03T16:40:06.514Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/0c/9f344583465a61c8918a7cda604226e77b2c548daf8ef7c2bfccf2b37200/ruff-0.12.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:71a4c550195612f486c9d1f2b045a600aeba851b298c667807ae933478fcef04", size = 11241507, upload-time = "2025-07-03T16:40:08.708Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1c/b7/99c34ded8fb5f86c0280278fa89a0066c3760edc326e935ce0b1550d315d/ruff-0.12.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:4987b8f4ceadf597c927beee65a5eaf994c6e2b631df963f86d8ad1bdea99342", size = 11717609, upload-time = "2025-07-03T16:40:10.836Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/51/de/8589fa724590faa057e5a6d171e7f2f6cffe3287406ef40e49c682c07d89/ruff-0.12.2-py3-none-win32.whl", hash = "sha256:369ffb69b70cd55b6c3fc453b9492d98aed98062db9fec828cdfd069555f5f1a", size = 10523823, upload-time = "2025-07-03T16:40:13.203Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/94/47/8abf129102ae4c90cba0c2199a1a9b0fa896f6f806238d6f8c14448cc748/ruff-0.12.2-py3-none-win_amd64.whl", hash = "sha256:dca8a3b6d6dc9810ed8f328d406516bf4d660c00caeaef36eb831cf4871b0639", size = 11629831, upload-time = "2025-07-03T16:40:15.478Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/1f/72d2946e3cc7456bb837e88000eb3437e55f80db339c840c04015a11115d/ruff-0.12.2-py3-none-win_arm64.whl", hash = "sha256:48d6c6bfb4761df68bc05ae630e24f506755e702d4fb08f08460be778c7ccb12", size = 10735334, upload-time = "2025-07-03T16:40:17.677Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "scipy"
|
||||
version = "1.15.3"
|
||||
version = "1.16.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "numpy" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/0f/37/6964b830433e654ec7485e45a00fc9a27cf868d622838f6b6d9c5ec0d532/scipy-1.15.3.tar.gz", hash = "sha256:eae3cf522bc7df64b42cad3925c876e1b0b6c35c1337c93e12c0f366f55b0eaf", size = 59419214, upload-time = "2025-05-08T16:13:05.955Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/81/18/b06a83f0c5ee8cddbde5e3f3d0bb9b702abfa5136ef6d4620ff67df7eee5/scipy-1.16.0.tar.gz", hash = "sha256:b5ef54021e832869c8cfb03bc3bf20366cbcd426e02a58e8a58d7584dfbb8f62", size = 30581216, upload-time = "2025-06-22T16:27:55.782Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/73/18/ec27848c9baae6e0d6573eda6e01a602e5649ee72c27c3a8aad673ebecfd/scipy-1.15.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2c620736bcc334782e24d173c0fdbb7590a0a436d2fdf39310a8902505008759", size = 38728256, upload-time = "2025-05-08T16:06:58.696Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/74/cd/1aef2184948728b4b6e21267d53b3339762c285a46a274ebb7863c9e4742/scipy-1.15.3-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:7e11270a000969409d37ed399585ee530b9ef6aa99d50c019de4cb01e8e54e62", size = 30109540, upload-time = "2025-05-08T16:07:04.209Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5b/d8/59e452c0a255ec352bd0a833537a3bc1bfb679944c4938ab375b0a6b3a3e/scipy-1.15.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:8c9ed3ba2c8a2ce098163a9bdb26f891746d02136995df25227a20e71c396ebb", size = 22383115, upload-time = "2025-05-08T16:07:08.998Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/08/f5/456f56bbbfccf696263b47095291040655e3cbaf05d063bdc7c7517f32ac/scipy-1.15.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:0bdd905264c0c9cfa74a4772cdb2070171790381a5c4d312c973382fc6eaf730", size = 25163884, upload-time = "2025-05-08T16:07:14.091Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a2/66/a9618b6a435a0f0c0b8a6d0a2efb32d4ec5a85f023c2b79d39512040355b/scipy-1.15.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79167bba085c31f38603e11a267d862957cbb3ce018d8b38f79ac043bc92d825", size = 35174018, upload-time = "2025-05-08T16:07:19.427Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b5/09/c5b6734a50ad4882432b6bb7c02baf757f5b2f256041da5df242e2d7e6b6/scipy-1.15.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9deabd6d547aee2c9a81dee6cc96c6d7e9a9b1953f74850c179f91fdc729cb7", size = 37269716, upload-time = "2025-05-08T16:07:25.712Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/77/0a/eac00ff741f23bcabd352731ed9b8995a0a60ef57f5fd788d611d43d69a1/scipy-1.15.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:dde4fc32993071ac0c7dd2d82569e544f0bdaff66269cb475e0f369adad13f11", size = 36872342, upload-time = "2025-05-08T16:07:31.468Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fe/54/4379be86dd74b6ad81551689107360d9a3e18f24d20767a2d5b9253a3f0a/scipy-1.15.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f77f853d584e72e874d87357ad70f44b437331507d1c311457bed8ed2b956126", size = 39670869, upload-time = "2025-05-08T16:07:38.002Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/87/2e/892ad2862ba54f084ffe8cc4a22667eaf9c2bcec6d2bff1d15713c6c0703/scipy-1.15.3-cp313-cp313-win_amd64.whl", hash = "sha256:b90ab29d0c37ec9bf55424c064312930ca5f4bde15ee8619ee44e69319aab163", size = 40988851, upload-time = "2025-05-08T16:08:33.671Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1b/e9/7a879c137f7e55b30d75d90ce3eb468197646bc7b443ac036ae3fe109055/scipy-1.15.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3ac07623267feb3ae308487c260ac684b32ea35fd81e12845039952f558047b8", size = 38863011, upload-time = "2025-05-08T16:07:44.039Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/51/d1/226a806bbd69f62ce5ef5f3ffadc35286e9fbc802f606a07eb83bf2359de/scipy-1.15.3-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:6487aa99c2a3d509a5227d9a5e889ff05830a06b2ce08ec30df6d79db5fcd5c5", size = 30266407, upload-time = "2025-05-08T16:07:49.891Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e5/9b/f32d1d6093ab9eeabbd839b0f7619c62e46cc4b7b6dbf05b6e615bbd4400/scipy-1.15.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:50f9e62461c95d933d5c5ef4a1f2ebf9a2b4e83b0db374cb3f1de104d935922e", size = 22540030, upload-time = "2025-05-08T16:07:54.121Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e7/29/c278f699b095c1a884f29fda126340fcc201461ee8bfea5c8bdb1c7c958b/scipy-1.15.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:14ed70039d182f411ffc74789a16df3835e05dc469b898233a245cdfd7f162cb", size = 25218709, upload-time = "2025-05-08T16:07:58.506Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/24/18/9e5374b617aba742a990581373cd6b68a2945d65cc588482749ef2e64467/scipy-1.15.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a769105537aa07a69468a0eefcd121be52006db61cdd8cac8a0e68980bbb723", size = 34809045, upload-time = "2025-05-08T16:08:03.929Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e1/fe/9c4361e7ba2927074360856db6135ef4904d505e9b3afbbcb073c4008328/scipy-1.15.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9db984639887e3dffb3928d118145ffe40eff2fa40cb241a306ec57c219ebbbb", size = 36703062, upload-time = "2025-05-08T16:08:09.558Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b7/8e/038ccfe29d272b30086b25a4960f757f97122cb2ec42e62b460d02fe98e9/scipy-1.15.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:40e54d5c7e7ebf1aa596c374c49fa3135f04648a0caabcb66c52884b943f02b4", size = 36393132, upload-time = "2025-05-08T16:08:15.34Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/10/7e/5c12285452970be5bdbe8352c619250b97ebf7917d7a9a9e96b8a8140f17/scipy-1.15.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5e721fed53187e71d0ccf382b6bf977644c533e506c4d33c3fb24de89f5c3ed5", size = 38979503, upload-time = "2025-05-08T16:08:21.513Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/81/06/0a5e5349474e1cbc5757975b21bd4fad0e72ebf138c5592f191646154e06/scipy-1.15.3-cp313-cp313t-win_amd64.whl", hash = "sha256:76ad1fb5f8752eabf0fa02e4cc0336b4e8f021e2d5f061ed37d6d264db35e3ca", size = 40308097, upload-time = "2025-05-08T16:08:27.627Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/46/95/0746417bc24be0c2a7b7563946d61f670a3b491b76adede420e9d173841f/scipy-1.16.0-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:e9f414cbe9ca289a73e0cc92e33a6a791469b6619c240aa32ee18abdce8ab451", size = 36418162, upload-time = "2025-06-22T16:19:56.3Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/19/5a/914355a74481b8e4bbccf67259bbde171348a3f160b67b4945fbc5f5c1e5/scipy-1.16.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:bbba55fb97ba3cdef9b1ee973f06b09d518c0c7c66a009c729c7d1592be1935e", size = 28465985, upload-time = "2025-06-22T16:20:01.238Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/58/46/63477fc1246063855969cbefdcee8c648ba4b17f67370bd542ba56368d0b/scipy-1.16.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:58e0d4354eacb6004e7aa1cd350e5514bd0270acaa8d5b36c0627bb3bb486974", size = 20737961, upload-time = "2025-06-22T16:20:05.913Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/93/86/0fbb5588b73555e40f9d3d6dde24ee6fac7d8e301a27f6f0cab9d8f66ff2/scipy-1.16.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:75b2094ec975c80efc273567436e16bb794660509c12c6a31eb5c195cbf4b6dc", size = 23377941, upload-time = "2025-06-22T16:20:10.668Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ca/80/a561f2bf4c2da89fa631b3cbf31d120e21ea95db71fd9ec00cb0247c7a93/scipy-1.16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6b65d232157a380fdd11a560e7e21cde34fdb69d65c09cb87f6cc024ee376351", size = 33196703, upload-time = "2025-06-22T16:20:16.097Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/11/6b/3443abcd0707d52e48eb315e33cc669a95e29fc102229919646f5a501171/scipy-1.16.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1d8747f7736accd39289943f7fe53a8333be7f15a82eea08e4afe47d79568c32", size = 35083410, upload-time = "2025-06-22T16:20:21.734Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/ab/eb0fc00e1e48961f1bd69b7ad7e7266896fe5bad4ead91b5fc6b3561bba4/scipy-1.16.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:eb9f147a1b8529bb7fec2a85cf4cf42bdfadf9e83535c309a11fdae598c88e8b", size = 35387829, upload-time = "2025-06-22T16:20:27.548Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/57/9e/d6fc64e41fad5d481c029ee5a49eefc17f0b8071d636a02ceee44d4a0de2/scipy-1.16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d2b83c37edbfa837a8923d19c749c1935ad3d41cf196006a24ed44dba2ec4358", size = 37841356, upload-time = "2025-06-22T16:20:35.112Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7c/a7/4c94bbe91f12126b8bf6709b2471900577b7373a4fd1f431f28ba6f81115/scipy-1.16.0-cp313-cp313-win_amd64.whl", hash = "sha256:79a3c13d43c95aa80b87328a46031cf52508cf5f4df2767602c984ed1d3c6bbe", size = 38403710, upload-time = "2025-06-22T16:21:54.473Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/20/965da8497f6226e8fa90ad3447b82ed0e28d942532e92dd8b91b43f100d4/scipy-1.16.0-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:f91b87e1689f0370690e8470916fe1b2308e5b2061317ff76977c8f836452a47", size = 36813833, upload-time = "2025-06-22T16:20:43.925Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/28/f4/197580c3dac2d234e948806e164601c2df6f0078ed9f5ad4a62685b7c331/scipy-1.16.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:88a6ca658fb94640079e7a50b2ad3b67e33ef0f40e70bdb7dc22017dae73ac08", size = 28974431, upload-time = "2025-06-22T16:20:51.302Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8a/fc/e18b8550048d9224426e76906694c60028dbdb65d28b1372b5503914b89d/scipy-1.16.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:ae902626972f1bd7e4e86f58fd72322d7f4ec7b0cfc17b15d4b7006efc385176", size = 21246454, upload-time = "2025-06-22T16:20:57.276Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8c/48/07b97d167e0d6a324bfd7484cd0c209cc27338b67e5deadae578cf48e809/scipy-1.16.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:8cb824c1fc75ef29893bc32b3ddd7b11cf9ab13c1127fe26413a05953b8c32ed", size = 23772979, upload-time = "2025-06-22T16:21:03.363Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4c/4f/9efbd3f70baf9582edf271db3002b7882c875ddd37dc97f0f675ad68679f/scipy-1.16.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:de2db7250ff6514366a9709c2cba35cb6d08498e961cba20d7cff98a7ee88938", size = 33341972, upload-time = "2025-06-22T16:21:11.14Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3f/dc/9e496a3c5dbe24e76ee24525155ab7f659c20180bab058ef2c5fa7d9119c/scipy-1.16.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e85800274edf4db8dd2e4e93034f92d1b05c9421220e7ded9988b16976f849c1", size = 35185476, upload-time = "2025-06-22T16:21:19.156Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/b3/21001cff985a122ba434c33f2c9d7d1dc3b669827e94f4fc4e1fe8b9dfd8/scipy-1.16.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4f720300a3024c237ace1cb11f9a84c38beb19616ba7c4cdcd771047a10a1706", size = 35570990, upload-time = "2025-06-22T16:21:27.797Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e5/d3/7ba42647d6709251cdf97043d0c107e0317e152fa2f76873b656b509ff55/scipy-1.16.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:aad603e9339ddb676409b104c48a027e9916ce0d2838830691f39552b38a352e", size = 37950262, upload-time = "2025-06-22T16:21:36.976Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/c4/231cac7a8385394ebbbb4f1ca662203e9d8c332825ab4f36ffc3ead09a42/scipy-1.16.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f56296fefca67ba605fd74d12f7bd23636267731a72cb3947963e76b8c0a25db", size = 38515076, upload-time = "2025-06-22T16:21:45.694Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -574,11 +776,11 @@ wheels = [
|
|||
|
||||
[[package]]
|
||||
name = "typing-extensions"
|
||||
version = "4.14.0"
|
||||
version = "4.14.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d1/bc/51647cd02527e87d05cb083ccc402f93e441606ff1f01739a62c8ad09ba5/typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4", size = 107423, upload-time = "2025-06-02T14:52:11.399Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673, upload-time = "2025-07-04T13:28:34.16Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/69/e0/552843e0d356fbb5256d21449fa957fa4eff3bbc135a74a691ee70c7c5da/typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af", size = 43839, upload-time = "2025-06-02T14:52:10.026Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906, upload-time = "2025-07-04T13:28:32.743Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -616,24 +818,24 @@ wheels = [
|
|||
|
||||
[[package]]
|
||||
name = "urllib3"
|
||||
version = "2.4.0"
|
||||
version = "2.5.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/8a/78/16493d9c386d8e60e442a35feac5e00f0913c0f4b7c217c11e8ec2ff53e0/urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466", size = 390672, upload-time = "2025-04-10T15:23:39.232Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/6b/11/cc635220681e93a0183390e26485430ca2c7b5f9d33b15c74c2861cb8091/urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813", size = 128680, upload-time = "2025-04-10T15:23:37.377Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "uvicorn"
|
||||
version = "0.34.3"
|
||||
version = "0.35.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "click" },
|
||||
{ name = "h11" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/de/ad/713be230bcda622eaa35c28f0d328c3675c371238470abdea52417f17a8e/uvicorn-0.34.3.tar.gz", hash = "sha256:35919a9a979d7a59334b6b10e05d77c1d0d574c50e0fc98b8b1a0f165708b55a", size = 76631, upload-time = "2025-06-01T07:48:17.531Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/5e/42/e0e305207bb88c6b8d3061399c6a961ffe5fbb7e2aa63c9234df7259e9cd/uvicorn-0.35.0.tar.gz", hash = "sha256:bc662f087f7cf2ce11a1d7fd70b90c9f98ef2e2831556dd078d131b96cc94a01", size = 78473, upload-time = "2025-06-28T16:15:46.058Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/0d/8adfeaa62945f90d19ddc461c55f4a50c258af7662d34b6a3d5d1f8646f6/uvicorn-0.34.3-py3-none-any.whl", hash = "sha256:16246631db62bdfbf069b0645177d6e8a77ba950cfedbfd093acef9444e4d885", size = 62431, upload-time = "2025-06-01T07:48:15.664Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d2/e2/dc81b1bd1dcfe91735810265e9d26bc8ec5da45b4c0f6237e286819194c3/uvicorn-0.35.0-py3-none-any.whl", hash = "sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a", size = 66406, upload-time = "2025-06-28T16:15:44.816Z" },
|
||||
]
|
||||
|
||||
[package.optional-dependencies]
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue