Python: updated pydantic 2.10.3 works again #9923

Merged Dec 12, 2024 · 25 commits (changes shown from 23 commits)

Commits
75f9da3
updated pydantic 10.3 works again
eavanvalkenburg Dec 10, 2024
1d9a0e2
add refresh to unit tests to make sure new packages are picked up
eavanvalkenburg Dec 10, 2024
ccb5d3e
reinstall
eavanvalkenburg Dec 10, 2024
60c0d9b
U instead
eavanvalkenburg Dec 10, 2024
70d2449
add prerelease
eavanvalkenburg Dec 10, 2024
8950516
pinned to 2.10.4 or 2.9
eavanvalkenburg Dec 10, 2024
a368035
revert qdrant collection change
eavanvalkenburg Dec 10, 2024
a9eeb33
added logging to info to pytest runner
eavanvalkenburg Dec 11, 2024
5b94309
added duration param so we can spot slow tests and increased timeout
eavanvalkenburg Dec 11, 2024
219e709
added sample skip logic to getting started and learn resources
eavanvalkenburg Dec 11, 2024
6e44ff3
small update to types
eavanvalkenburg Dec 11, 2024
39344f2
back to dict
eavanvalkenburg Dec 11, 2024
4023ebb
moved asyncio into settings
eavanvalkenburg Dec 11, 2024
f4237fd
removed loose brackets
eavanvalkenburg Dec 12, 2024
e17a99c
add per test timeout
eavanvalkenburg Dec 12, 2024
a89e0ea
some cleanup and scope altered
eavanvalkenburg Dec 12, 2024
908c133
tweaks and scope to session
eavanvalkenburg Dec 12, 2024
5869a11
hopefully fixed event loop
eavanvalkenburg Dec 12, 2024
75dfeef
fixed endpoint types used in test completions
eavanvalkenburg Dec 12, 2024
cf07b21
forgot embeddings
eavanvalkenburg Dec 12, 2024
f867a5b
split ollama tests off
eavanvalkenburg Dec 12, 2024
95cd576
added missing ollama mark
eavanvalkenburg Dec 12, 2024
9ca3451
added logic for bedrock and azure openai to skip if not available
eavanvalkenburg Dec 12, 2024
133c31e
fixed bedrock
eavanvalkenburg Dec 12, 2024
41478c5
xfail flaky test
eavanvalkenburg Dec 12, 2024
73 changes: 54 additions & 19 deletions .github/workflows/python-integration-tests.yml
@@ -121,21 +121,6 @@ jobs:
- name: Install dependencies
run: |
uv sync --all-extras --dev
- name: Install Ollama
if: matrix.os == 'ubuntu-latest'
run: |
curl -fsSL https://ollama.com/install.sh | sh
ollama serve &
sleep 5
- name: Pull model in Ollama
if: matrix.os == 'ubuntu-latest'
run: |
ollama pull ${{ vars.OLLAMA_CHAT_MODEL_ID }}
ollama pull ${{ vars.OLLAMA_CHAT_MODEL_ID_IMAGE }}
ollama pull ${{ vars.OLLAMA_CHAT_MODEL_ID_TOOL_CALL }}
ollama pull ${{ vars.OLLAMA_TEXT_MODEL_ID }}
ollama pull ${{ vars.OLLAMA_EMBEDDING_MODEL_ID }}
ollama list
- name: Google auth
uses: google-github-actions/auth@v2
with:
@@ -158,10 +143,59 @@ jobs:
subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
- name: Run Integration Tests
id: run_tests_ai_services
timeout-minutes: 25
shell: bash
run: |
uv run pytest -v -n logical --dist loadfile --dist worksteal ./tests/integration/completions ./tests/integration/embeddings ./tests/samples ./tests/integration/cross_language
uv run pytest -v --log-cli-level=INFO --durations=20 -n logical --dist loadfile --dist worksteal -m "not ollama" ./tests/integration/completions ./tests/integration/embeddings ./tests/samples ./tests/integration/cross_language

python-merge-gate-ollama:
name: Python Pre-Merge Integration Tests - Ollama
needs: paths-filter
if: github.event_name != 'pull_request' && github.event_name != 'schedule' && needs.paths-filter.outputs.pythonChanges == 'true'
strategy:
max-parallel: 1
fail-fast: false
matrix:
python-version: ["3.11"]
os: [ubuntu-latest]
defaults:
run:
working-directory: python
runs-on: ${{ matrix.os }}
environment: "integration"
env:
UV_PYTHON: ${{ matrix.python-version }}
COMPLETIONS_CONCEPT_SAMPLE: "true"
steps:
- uses: actions/checkout@v4
- name: Set up uv
uses: astral-sh/setup-uv@v4
with:
version: "0.5.x"
enable-cache: true
cache-suffix: ${{ runner.os }}-${{ matrix.python-version }}
- name: Install dependencies
run: |
uv sync --all-extras --dev
- name: Install Ollama
if: matrix.os == 'ubuntu-latest'
run: |
curl -fsSL https://ollama.com/install.sh | sh
ollama serve &
sleep 5
- name: Pull model in Ollama
if: matrix.os == 'ubuntu-latest'
run: |
ollama pull ${{ vars.OLLAMA_CHAT_MODEL_ID }}
ollama pull ${{ vars.OLLAMA_CHAT_MODEL_ID_IMAGE }}
ollama pull ${{ vars.OLLAMA_CHAT_MODEL_ID_TOOL_CALL }}
ollama pull ${{ vars.OLLAMA_TEXT_MODEL_ID }}
ollama pull ${{ vars.OLLAMA_EMBEDDING_MODEL_ID }}
ollama list
- name: Run Integration Tests
id: run_tests_ai_services
shell: bash
run: |
uv run pytest -v --log-cli-level=INFO --durations=0 -n logical --dist loadfile --dist worksteal -m ollama --timeout=300 ./tests/integration/completions ./tests/integration/embeddings

python-merge-gate-memory:
name: Python Pre-Merge Integration Tests - Memory (incl samples using those)
@@ -215,10 +249,10 @@ jobs:
subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
- name: Run Integration Tests
id: run_tests_memory
timeout-minutes: 10
timeout-minutes: 15
shell: bash
run: |
uv run pytest -v -n logical --dist loadfile --dist worksteal ./tests/integration/memory ./tests/samples
uv run pytest -v --log-cli-level=INFO --durations=20 -n logical --dist loadfile --dist worksteal ./tests/integration/memory ./tests/samples

python-integration-tests:
name: Python Integration Tests - Scheduled run
@@ -352,6 +386,7 @@ jobs:
needs:
[
python-merge-gate-ai-services,
python-merge-gate-ollama,
python-merge-gate-memory,
python-integration-tests,
]
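The Ollama tests were split into their own merge-gate job: the main AI-services gate now runs pytest with `-m "not ollama"`, while the new job installs Ollama, pulls the models, and runs `-m ollama` with a per-test `--timeout=300`. A minimal sketch of a test the new job would pick up (the test name, fixture, and body are hypothetical; only the `ollama` marker name comes from this PR):

```python
import pytest


@pytest.mark.ollama  # registered in pyproject.toml; selected with `-m ollama`, skipped with `-m "not ollama"`
async def test_ollama_smoke(ollama_client):  # the `ollama_client` fixture is hypothetical
    """Only runs in the Ollama job, where `ollama serve` is up and the models are pulled."""
    reply = await ollama_client.chat("Say hi")
    assert reply
```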
2 changes: 1 addition & 1 deletion .github/workflows/python-unit-tests.yml
@@ -48,7 +48,7 @@ jobs:
enable-cache: true
cache-suffix: ${{ runner.os }}-${{ matrix.python-version }}
- name: Install the project
run: uv sync --all-extras --dev
run: uv sync --all-extras --dev -U --prerelease=if-necessary-or-explicit
- name: Test with pytest
env:
PYTHON_GIL: ${{ matrix.gil }}
12 changes: 9 additions & 3 deletions python/pyproject.toml
@@ -25,7 +25,7 @@ classifiers = [
dependencies = [
"aiohttp ~= 3.8",
"cloudevents ~=1.0",
"pydantic >=2.0,<2.10",
"pydantic >=2.0,<2.11,!=2.10.0,!=2.10.1,!=2.10.2,!=2.10.3",
"pydantic-settings ~= 2.0",
"defusedxml ~= 0.7",
# azure identity
@@ -133,11 +133,12 @@ dev-dependencies = [
"pytest ~= 8.2",
"pytest-xdist[psutil] ~= 3.6",
"pytest-cov >= 5.0",
"pytest-asyncio ~= 0.23",
"pytest-asyncio ~= 0.24",
"pytest-timeout>=2.3.1",
"snoop ~= 0.4",
"mypy >= 1.10",
"types-PyYAML ~= 6.0.12.20240311",
"ruff ~= 0.7"
"ruff ~= 0.7",
]
environments = [
"sys_platform == 'darwin'",
@@ -148,10 +149,15 @@ environments = [
[tool.pytest.ini_options]
testpaths = 'tests'
addopts = "-ra -q -r fEX"
asyncio_mode = "auto"
asyncio_default_fixture_loop_scope = "function"
filterwarnings = [
'ignore:.*FunctionChoiceBehavior.*:DeprecationWarning'
]
timeout = 120
markers = [
"ollama: mark a test as requiring the Ollama service (use \"not ollama\" to skip those tests)"
]

[tool.ruff]
line-length = 120
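The widened pydantic constraint now allows 2.10.4 and later while keeping 2.9.x and excluding the incompatible 2.10.0–2.10.3 releases. A quick, standalone way to check which versions the new specifier admits, using the `packaging` library (not part of the PR):

```python
from packaging.specifiers import SpecifierSet

spec = SpecifierSet(">=2.0,<2.11,!=2.10.0,!=2.10.1,!=2.10.2,!=2.10.3")

for version in ("2.9.2", "2.10.0", "2.10.3", "2.10.4", "2.11.0"):
    print(f"pydantic {version}: {'allowed' if version in spec else 'excluded'}")
# pydantic 2.9.2: allowed
# pydantic 2.10.0: excluded
# pydantic 2.10.3: excluded
# pydantic 2.10.4: allowed
# pydantic 2.11.0: excluded
```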
5 changes: 0 additions & 5 deletions python/samples/learn_resources/plugin.py
@@ -3,7 +3,6 @@
import asyncio
from typing import Annotated

from samples.sk_service_configurator import add_service
from semantic_kernel import Kernel
from semantic_kernel.functions import kernel_function

@@ -41,10 +40,6 @@ async def main():
# Initialize the kernel
kernel = Kernel()

# Add the service to the kernel
# use_chat: True to use chat completion, False to use text completion
kernel = add_service(kernel=kernel, use_chat=True)

light_plugin = kernel.add_plugin(
LightPlugin(),
plugin_name="LightPlugin",
4 changes: 2 additions & 2 deletions python/semantic_kernel/functions/kernel_function.py
@@ -3,7 +3,7 @@
import logging
import time
from abc import abstractmethod
from collections.abc import AsyncGenerator, Callable
from collections.abc import AsyncGenerator, Callable, Mapping, Sequence
from copy import copy, deepcopy
from inspect import isasyncgen, isgenerator
from typing import TYPE_CHECKING, Any
@@ -108,7 +108,7 @@ def from_prompt(
prompt_template: "PromptTemplateBase | None " = None,
prompt_template_config: "PromptTemplateConfig | None" = None,
prompt_execution_settings: (
"PromptExecutionSettings | list[PromptExecutionSettings] | dict[str, PromptExecutionSettings] | None"
"PromptExecutionSettings | Sequence[PromptExecutionSettings] | Mapping[str, PromptExecutionSettings] | None"
) = None,
) -> "KernelFunctionFromPrompt":
"""Create a new instance of the KernelFunctionFromPrompt class."""
@@ -2,6 +2,7 @@

import logging
from abc import ABC
from collections.abc import Mapping, Sequence
from functools import singledispatchmethod
from typing import TYPE_CHECKING, Any, Literal

@@ -126,7 +127,7 @@ def add_function(
prompt: str | None = None,
prompt_template_config: PromptTemplateConfig | None = None,
prompt_execution_settings: (
PromptExecutionSettings | list[PromptExecutionSettings] | dict[str, PromptExecutionSettings] | None
PromptExecutionSettings | Sequence[PromptExecutionSettings] | Mapping[str, PromptExecutionSettings] | None
) = None,
template_format: TEMPLATE_FORMAT_TYPES = KERNEL_TEMPLATE_FORMAT_NAME,
prompt_template: PromptTemplateBase | None = None,
@@ -2,7 +2,7 @@

import logging
import os
from collections.abc import AsyncGenerator
from collections.abc import AsyncGenerator, Mapping, Sequence
from html import unescape
from typing import TYPE_CHECKING, Any

@@ -64,8 +64,8 @@ def __init__(
prompt_template: PromptTemplateBase | None = None,
prompt_template_config: PromptTemplateConfig | None = None,
prompt_execution_settings: PromptExecutionSettings
| list[PromptExecutionSettings]
| dict[str, PromptExecutionSettings]
| Sequence[PromptExecutionSettings]
| Mapping[str, PromptExecutionSettings]
| None = None,
) -> None:
"""Initializes a new instance of the KernelFunctionFromPrompt class.
@@ -154,7 +154,7 @@ def rewrite_execution_settings(
data["prompt_execution_settings"] = {
prompt_execution_settings.service_id or DEFAULT_SERVICE_NAME: prompt_execution_settings
}
if isinstance(prompt_execution_settings, list):
if isinstance(prompt_execution_settings, Sequence):
data["prompt_execution_settings"] = {
s.service_id or DEFAULT_SERVICE_NAME: s for s in prompt_execution_settings
}
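The `isinstance` checks (and the matching type hints) now use `collections.abc.Sequence` and `Mapping` instead of concrete `list` and `dict`, so tuples and other mapping types are accepted as well. A standalone illustration of how the abstract base classes classify the inputs (`FakeSettings` is a stand-in, not the real `PromptExecutionSettings`):

```python
from collections.abc import Mapping, Sequence


class FakeSettings:
    """Illustrative stand-in for PromptExecutionSettings."""

    service_id = "default"


for value in (FakeSettings(), [FakeSettings()], (FakeSettings(),), {"default": FakeSettings()}):
    if isinstance(value, FakeSettings):
        kind = "single settings object"
    elif isinstance(value, Mapping):   # dict and other mappings
        kind = "mapping keyed by service_id"
    elif isinstance(value, Sequence):  # list and tuple now both match
        kind = "sequence to be normalized into a dict"
    else:
        kind = "unsupported"
    print(f"{type(value).__name__}: {kind}")
```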
4 changes: 2 additions & 2 deletions python/semantic_kernel/kernel_pydantic.py
@@ -4,10 +4,10 @@
from typing import Annotated, Any, ClassVar, TypeVar

from pydantic import BaseModel, ConfigDict, Field, UrlConstraints
from pydantic.networks import Url
from pydantic.networks import AnyUrl
from pydantic_settings import BaseSettings, SettingsConfigDict

HttpsUrl = Annotated[Url, UrlConstraints(max_length=2083, allowed_schemes=["https"])]
HttpsUrl = Annotated[AnyUrl, UrlConstraints(max_length=2083, allowed_schemes=["https"])]


class KernelBaseModel(BaseModel):
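Swapping `pydantic.networks.Url` for `AnyUrl` keeps the `HttpsUrl` annotation working across the newly allowed pydantic versions. A small sketch of how the resulting type validates on its own via `TypeAdapter` (pydantic v2 assumed; not part of the PR):

```python
from typing import Annotated

from pydantic import TypeAdapter, UrlConstraints, ValidationError
from pydantic.networks import AnyUrl

HttpsUrl = Annotated[AnyUrl, UrlConstraints(max_length=2083, allowed_schemes=["https"])]
adapter = TypeAdapter(HttpsUrl)

print(adapter.validate_python("https://example.com/api"))  # accepted

try:
    adapter.validate_python("http://example.com/api")  # wrong scheme
except ValidationError as exc:
    print("rejected:", exc.errors()[0]["msg"])
```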
@@ -1,5 +1,6 @@
# Copyright (c) Microsoft. All rights reserved.
import logging
from collections.abc import Mapping, Sequence
from typing import TypeVar

from pydantic import Field, field_validator, model_validator
@@ -52,14 +53,17 @@ def check_input_variables(self):
@classmethod
def rewrite_execution_settings(
cls,
settings: PromptExecutionSettings | list[PromptExecutionSettings] | dict[str, PromptExecutionSettings] | None,
) -> dict[str, PromptExecutionSettings]:
settings: PromptExecutionSettings
| Sequence[PromptExecutionSettings]
| Mapping[str, PromptExecutionSettings]
| None,
) -> Mapping[str, PromptExecutionSettings]:
"""Rewrite execution settings to a dictionary."""
if not settings:
return {}
if isinstance(settings, PromptExecutionSettings):
return {settings.service_id or DEFAULT_SERVICE_NAME: settings}
if isinstance(settings, list):
if isinstance(settings, Sequence):
return {s.service_id or DEFAULT_SERVICE_NAME: s for s in settings}
return settings

@@ -70,7 +74,7 @@ def add_execution_settings(self, settings: PromptExecutionSettings, overwrite: b
self.execution_settings[settings.service_id or DEFAULT_SERVICE_NAME] = settings
logger.warning("Execution settings already exist and overwrite is set to False")

def get_kernel_parameter_metadata(self) -> list[KernelParameterMetadata]:
def get_kernel_parameter_metadata(self) -> Sequence[KernelParameterMetadata]:
"""Get the kernel parameter metadata for the input variables."""
return [
KernelParameterMetadata(
@@ -103,8 +107,8 @@ def restore(
description: str,
template: str,
template_format: TEMPLATE_FORMAT_TYPES = KERNEL_TEMPLATE_FORMAT_NAME,
input_variables: list[InputVariable] = [],
execution_settings: dict[str, PromptExecutionSettings] = {},
input_variables: Sequence[InputVariable] = [],
execution_settings: Mapping[str, PromptExecutionSettings] = {},
allow_dangerously_set_content: bool = False,
) -> "PromptTemplateConfig":
"""Restore a PromptTemplateConfig instance from the specified parameters.
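`rewrite_execution_settings` normalizes whatever the caller passes — a single settings object, a sequence of them, or an existing mapping — into a dict keyed by service id. A condensed, standalone sketch of that pattern (the `Settings` class is simplified; the real validator operates on `PromptExecutionSettings`):

```python
from collections.abc import Mapping, Sequence

DEFAULT_SERVICE_NAME = "default"


class Settings:
    """Minimal stand-in for PromptExecutionSettings."""

    def __init__(self, service_id: str | None = None) -> None:
        self.service_id = service_id


def rewrite_execution_settings(
    settings: "Settings | Sequence[Settings] | Mapping[str, Settings] | None",
) -> Mapping[str, Settings]:
    """Normalize settings into a mapping keyed by service id."""
    if not settings:
        return {}
    if isinstance(settings, Settings):
        return {settings.service_id or DEFAULT_SERVICE_NAME: settings}
    if isinstance(settings, Sequence):
        return {s.service_id or DEFAULT_SERVICE_NAME: s for s in settings}
    return settings  # already a mapping keyed by service id


print(rewrite_execution_settings(Settings()))                       # {'default': <...>}
print(rewrite_execution_settings((Settings("azure"), Settings())))  # tuples now work too
```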
@@ -28,11 +28,9 @@
)


@pytest.mark.asyncio(scope="module")
class TestAudioToText(AudioToTextTestBase):
"""Test audio-to-text services."""

@pytest.mark.asyncio
async def test_audio_to_text(
self,
services: dict[str, AudioToTextClientBase],
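The explicit `@pytest.mark.asyncio` markers come off because `asyncio_mode = "auto"` is now set in `pyproject.toml`, so pytest-asyncio collects coroutine tests without per-test or per-class decoration. A minimal sketch of what a test module looks like under the new configuration (the test itself is illustrative):

```python
# Assumes the pyproject.toml settings from this PR:
#   asyncio_mode = "auto"
#   asyncio_default_fixture_loop_scope = "function"
#   timeout = 120  (via pytest-timeout)


async def test_runs_without_an_explicit_asyncio_marker():
    # Collected and awaited automatically by pytest-asyncio in auto mode;
    # pytest-timeout fails the test if it hangs past the configured limit.
    assert 1 + 1 == 2
```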