Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
78 changes: 78 additions & 0 deletions tests/unit/vertexai/genai/replays/test_skills_create.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests the skills.create() method against the Vertex AI endpoint using replays."""

import io
import os
import tempfile
import zipfile

from tests.unit.vertexai.genai.replays import pytest_helper
from vertexai._genai import types

# MANDATORY: Initialize the replay test framework for this module.
# `pytest_helper.setup` registers the record/replay marks and fixtures that
# every test function below relies on (including the `client` fixture).
pytestmark = pytest_helper.setup(
    file=__file__,
    globals_for_file=globals(),
)


def test_create_skill(client):
    """Exercises `client.skills.create()` with a local skill directory."""
    # Point the client at the regional Vertex AI endpoint used when the
    # replay was recorded.
    client._api_client._http_options.base_url = (
        "https://us-central1-aiplatform.googleapis.com"
    )

    with tempfile.TemporaryDirectory() as skill_dir:
        # The skill spec requires a SKILL.md manifest at the directory root.
        manifest_path = os.path.join(skill_dir, "SKILL.md")
        with open(manifest_path, "w") as manifest:
            manifest.write(
                "# My Replay Skill\nThis is a test skill for replay tests."
            )

        created = client.skills.create(
            display_name="My Replay Skill",
            description="My Replay Skill Description",
            config=types.CreateSkillConfig(
                local_path=skill_dir, wait_for_completion=True
            ),
        )

    assert created.name is not None
    assert created.display_name == "My Replay Skill"
    assert created.description == "My Replay Skill Description"


def test_create_skill_with_prezipped_bytes(client):
    """Exercises `client.skills.create()` with caller-supplied zip bytes."""
    # Point the client at the regional Vertex AI endpoint used when the
    # replay was recorded.
    client._api_client._http_options.base_url = (
        "https://us-central1-aiplatform.googleapis.com"
    )

    # Build the archive in memory with a fixed DOS-epoch timestamp so the
    # request payload is byte-stable across replay runs.
    buffer = io.BytesIO()
    entry = zipfile.ZipInfo("SKILL.md", date_time=(1980, 1, 1, 0, 0, 0))
    with zipfile.ZipFile(buffer, "w") as archive:
        archive.writestr(entry, "# My Zipped Replay Skill\nThis is a test.")

    created = client.skills.create(
        display_name="My Zipped Replay Skill",
        description="My Zipped Replay Skill Description",
        config=types.CreateSkillConfig(
            zipped_filesystem=buffer.getvalue(), wait_for_completion=True
        ),
    )

    assert created.name is not None
    assert created.display_name == "My Zipped Replay Skill"
    assert created.description == "My Zipped Replay Skill Description"
113 changes: 113 additions & 0 deletions tests/unit/vertexai/genai/replays/test_skills_update.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests the skills.update() method against the Vertex AI endpoint using replays."""

import io
import os
import random
import tempfile
import zipfile

from tests.unit.vertexai.genai.replays import pytest_helper
from vertexai._genai import types

# MANDATORY: Initialize the replay test framework for this module.
# `pytest_helper.setup` registers the record/replay marks and fixtures that
# every test function below relies on (including the `client` fixture).
pytestmark = pytest_helper.setup(
    file=__file__,
    globals_for_file=globals(),
)

# NOTE(review): neither constant is referenced elsewhere in this module —
# confirm whether the replay framework reads them or they can be removed.
PROJECT_ID = os.environ.get("GOOGLE_CLOUD_PROJECT", "srbai-testing")
REGION = "us-central1"


def test_update_skill(client):
    """Exercises a metadata-only `client.skills.update()` call."""
    # The Skill Registry API is targeted on the autopush sandbox endpoint
    # here (unlike the create tests, which use the production endpoint).
    client._api_client._http_options.base_url = (
        "https://us-central1-autopush-aiplatform.sandbox.googleapis.com"
    )

    # Step 1: create a fresh skill to update.
    # NOTE(review): a random skill_id makes the recorded request
    # non-deterministic — confirm the replay framework tolerates this.
    skill_id = f"update_meta_test_skill_{random.randint(10000, 99999)}"
    with tempfile.TemporaryDirectory() as skill_dir:
        with open(os.path.join(skill_dir, "SKILL.md"), "w") as manifest:
            manifest.write("# Test Skill\nInitial content.")

        created = client.skills.create(
            display_name="Original Skill",
            description="Original Description",
            config=types.CreateSkillConfig(
                local_path=skill_dir, skill_id=skill_id, wait_for_completion=True
            ),
        )

    # Step 2: update only the display name and description.
    updated = client.skills.update(
        name=created.name,
        config=types.UpdateSkillConfig(
            display_name="My Updated Replay Skill",
            description="My Updated Replay Skill Description",
            wait_for_completion=True,
        ),
    )

    assert updated.name == created.name
    assert updated.display_name == "My Updated Replay Skill"
    assert updated.description == "My Updated Replay Skill Description"


def test_update_skill_with_zipped_bytes(client):
    """Exercises `client.skills.update()` with a replacement zipped filesystem."""
    # The Skill Registry API is targeted on the autopush sandbox endpoint
    # here (unlike the create tests, which use the production endpoint).
    client._api_client._http_options.base_url = (
        "https://us-central1-autopush-aiplatform.sandbox.googleapis.com"
    )

    # Step 1: create a fresh skill to update.
    # NOTE(review): a random skill_id makes the recorded request
    # non-deterministic — confirm the replay framework tolerates this.
    skill_id = f"update_zip_test_skill_{random.randint(10000, 99999)}"
    with tempfile.TemporaryDirectory() as skill_dir:
        with open(os.path.join(skill_dir, "SKILL.md"), "w") as manifest:
            manifest.write("# Test Skill\nInitial content.")

        created = client.skills.create(
            display_name="Original Skill",
            description="Original Description",
            config=types.CreateSkillConfig(
                local_path=skill_dir, skill_id=skill_id, wait_for_completion=True
            ),
        )

    # Step 2: build a deterministic in-memory archive (fixed DOS-epoch
    # timestamp) carrying the replacement SKILL.md.
    buffer = io.BytesIO()
    entry = zipfile.ZipInfo("SKILL.md", date_time=(1980, 1, 1, 0, 0, 0))
    with zipfile.ZipFile(buffer, "w") as archive:
        archive.writestr(
            entry, "# My Updated Zipped Replay Skill\nThis is updated."
        )

    # Step 3: push the new filesystem; metadata fields are left untouched.
    updated = client.skills.update(
        name=created.name,
        config=types.UpdateSkillConfig(
            zipped_filesystem=buffer.getvalue(), wait_for_completion=True
        ),
    )

    assert updated.name == created.name
    assert (
        updated.display_name == "Original Skill"
    )  # Display name remains unchanged
116 changes: 116 additions & 0 deletions vertexai/_genai/_skills_utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,116 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Utility functions for Skills."""

import asyncio
import base64
import io
import os
import time
from typing import Any, Awaitable, Callable
import zipfile


def zip_directory(directory_path: str) -> bytes:
    """Zips a directory into memory and returns the bytes.

    The archive is fully deterministic: entries are written in sorted order
    with a constant timestamp (1980-01-01 00:00:00) and constant file
    permissions, so identical directory contents always produce identical
    bytes regardless of the filesystem's walk order.

    Args:
        directory_path (str): Required. The local path to the directory.

    Returns:
        bytes: The zipped directory content.

    Raises:
        ValueError: If ``directory_path`` is not a directory.
    """
    if not os.path.isdir(directory_path):
        raise ValueError(f"Path is not a directory: {directory_path}")

    zip_buffer = io.BytesIO()
    with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zip_file:
        for root, dirs, files in os.walk(directory_path):
            # os.walk yields names in filesystem-dependent order, which would
            # defeat the deterministic ZipInfo below. Sorting `dirs` in place
            # fixes the recursion order; sorting `files` fixes entry order.
            dirs.sort()
            for file_name in sorted(files):
                file_path = os.path.join(root, file_name)
                arcname = os.path.relpath(file_path, directory_path)

                # Read actual file data
                with open(file_path, "rb") as f:
                    file_data = f.read()

                # Use deterministic ZipInfo (mtime: 1980-01-01 00:00:00)
                zinfo = zipfile.ZipInfo(arcname, date_time=(1980, 1, 1, 0, 0, 0))
                zinfo.compress_type = zipfile.ZIP_DEFLATED
                zinfo.external_attr = 0o644 << 16  # Constant file permissions

                zip_file.writestr(zinfo, file_data)
    return zip_buffer.getvalue()


def get_zipped_filesystem_payload(directory_path: str) -> str:
    """Returns a directory's contents as a base64-encoded zip string.

    Args:
        directory_path (str): Required. The local path to the directory.

    Returns:
        str: The base64-encoded zipped directory.
    """
    raw_zip = zip_directory(directory_path)
    encoded = base64.b64encode(raw_zip)
    return encoded.decode("utf-8")


def await_operation(
    *,
    operation_name: str,
    get_operation_fn: Callable[..., Any],
    poll_interval_seconds: float = 10.0,
) -> Any:
    """Blocks until a long running operation reports completion.

    Args:
        operation_name (str): Required. The name of the operation.
        get_operation_fn (Callable): Required. Function to get the operation
            status.
        poll_interval_seconds (float): The interval between polls in seconds.

    Returns:
        Any: The completed operation.
    """
    current = get_operation_fn(operation_name=operation_name)
    while True:
        if current.done:
            return current
        # Sleep before re-polling so a slow server isn't hammered.
        time.sleep(poll_interval_seconds)
        current = get_operation_fn(operation_name=current.name)


async def await_operation_async(
    *,
    operation_name: str,
    get_operation_fn: Callable[..., Awaitable[Any]],
    poll_interval_seconds: float = 10.0,
) -> Any:
    """Waits asynchronously until a long running operation completes.

    Args:
        operation_name (str): Required. The name of the operation.
        get_operation_fn (Callable): Required. Async function to get the
            operation status.
        poll_interval_seconds (float): The interval between polls in seconds.

    Returns:
        Any: The completed operation.
    """
    current = await get_operation_fn(operation_name=operation_name)
    while True:
        if current.done:
            return current
        # Yield to the event loop between polls instead of blocking.
        await asyncio.sleep(poll_interval_seconds)
        current = await get_operation_fn(operation_name=current.name)
Loading
Loading