Ensure that pytest is able to run without optional dependencies (#1927)

* Ensure it is possible to run `pytest --run_slow` with only the dev yaml installed.
* Mark the tests in `tests/utils/test_shared_process_pool.py` as slow tests
* Remove redundant per-module import error messages in favor of the shared `IMPORT_ERROR_MESSAGE` template (see the sketch below).

Closes #1920
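
The diff converges on one pattern for optional dependencies: a shared, parameterized error message in `morpheus_llm/error.py`, a guarded module-level import, and an `ImportError` raised lazily from the constructor, so that importing the module (and collecting its tests) succeeds without the optional package. A minimal sketch of that pattern; the names `some_optional_pkg` and `SomeOptionalClient` are illustrative placeholders, not part of this PR:

```python
import logging

from morpheus_llm.error import IMPORT_ERROR_MESSAGE

logger = logging.getLogger(__name__)

IMPORT_EXCEPTION = None

try:
    import some_optional_pkg  # hypothetical optional dependency
except ImportError as import_exc:
    # Record the failure instead of raising at import time.
    IMPORT_EXCEPTION = import_exc


class SomeOptionalClient:
    """Illustrative client that only works when the optional package is installed."""

    def __init__(self) -> None:
        # Defer the error until the class is actually used.
        if IMPORT_EXCEPTION is not None:
            raise ImportError(IMPORT_ERROR_MESSAGE.format(package='some_optional_pkg')) from IMPORT_EXCEPTION
```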

## By Submitting this PR I confirm:
- I am familiar with the [Contributing Guidelines](https://github.com/nv-morpheus/Morpheus/blob/main/docs/source/developer_guide/contributing.md).
- When the PR is ready for review, new or existing tests cover these changes.
- When the PR is ready for review, the documentation is up to date with these changes.

Authors:
  - David Gardner (https://github.com/dagardner-nv)

Approvers:
  - Anuradha Karuppiah (https://github.com/AnuradhaKaruppiah)

URL: #1927
dagardner-nv authored Oct 4, 2024
1 parent 502cbf3 commit f43be04
Showing 16 changed files with 251 additions and 107 deletions.
18 changes: 18 additions & 0 deletions python/morpheus_llm/morpheus_llm/error.py
@@ -0,0 +1,18 @@
# Copyright (c) 2024, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

IMPORT_ERROR_MESSAGE = (
"{package} not found. Install it and other additional dependencies by running the following command:\n"
"`conda env update --solver=libmamba -n morpheus "
"--file conda/environments/examples_cuda-121_arch-x86_64.yaml`")
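
The `{package}` placeholder is a named field, so call sites fill it in by keyword. A minimal usage sketch, assuming `morpheus_llm` is importable:

```python
from morpheus_llm.error import IMPORT_ERROR_MESSAGE

# The named `{package}` placeholder must be supplied by keyword,
# as the call sites in the files below do.
print(IMPORT_ERROR_MESSAGE.format(package='nemollm'))
```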
13 changes: 11 additions & 2 deletions python/morpheus_llm/morpheus_llm/llm/nodes/langchain_agent_node.py
@@ -16,13 +16,19 @@
import logging
import typing

from langchain_core.exceptions import OutputParserException

from morpheus_llm.error import IMPORT_ERROR_MESSAGE
from morpheus_llm.llm import LLMContext
from morpheus_llm.llm import LLMNodeBase

logger = logging.getLogger(__name__)

IMPORT_EXCEPTION = None

try:
from langchain_core.exceptions import OutputParserException
except ImportError as import_exc:
IMPORT_EXCEPTION = import_exc

if typing.TYPE_CHECKING:
from langchain.agents import AgentExecutor

@@ -47,6 +53,9 @@ def __init__(self,
agent_executor: "AgentExecutor",
replace_exceptions: bool = False,
replace_exceptions_value: typing.Optional[str] = None):
if IMPORT_EXCEPTION is not None:
raise ImportError(IMPORT_ERROR_MESSAGE.format(package='langchain_core')) from IMPORT_EXCEPTION

super().__init__()

self._agent_executor = agent_executor
@@ -18,16 +18,13 @@
import warnings

from morpheus.utils.env_config_value import EnvConfigValue
from morpheus_llm.error import IMPORT_ERROR_MESSAGE
from morpheus_llm.llm.services.llm_service import LLMClient
from morpheus_llm.llm.services.llm_service import LLMService

logger = logging.getLogger(__name__)

IMPORT_EXCEPTION = None
IMPORT_ERROR_MESSAGE = (
"NemoLLM not found. Install it and other additional dependencies by running the following command:\n"
"`conda env update --solver=libmamba -n morpheus "
"--file conda/environments/examples_cuda-121_arch-x86_64.yaml --prune`")

try:
import nemollm
@@ -53,7 +50,7 @@ class NeMoLLMClient(LLMClient):

def __init__(self, parent: "NeMoLLMService", *, model_name: str, **model_kwargs) -> None:
if IMPORT_EXCEPTION is not None:
raise ImportError(IMPORT_ERROR_MESSAGE) from IMPORT_EXCEPTION
raise ImportError(IMPORT_ERROR_MESSAGE.format(package='nemollm')) from IMPORT_EXCEPTION

super().__init__()

@@ -231,7 +228,7 @@ def __init__(self,
"""

if IMPORT_EXCEPTION is not None:
raise ImportError(IMPORT_ERROR_MESSAGE) from IMPORT_EXCEPTION
raise ImportError(IMPORT_ERROR_MESSAGE.format(package='nemollm')) from IMPORT_EXCEPTION

super().__init__()

@@ -16,17 +16,13 @@
import typing

from morpheus.utils.env_config_value import EnvConfigValue
from morpheus_llm.error import IMPORT_ERROR_MESSAGE
from morpheus_llm.llm.services.llm_service import LLMClient
from morpheus_llm.llm.services.llm_service import LLMService

logger = logging.getLogger(__name__)

IMPORT_EXCEPTION = None
IMPORT_ERROR_MESSAGE = (
"The `langchain-nvidia-ai-endpoints` package was not found. Install it and other additional dependencies by "
"running the following command:"
"`conda env update --solver=libmamba -n morpheus "
"--file conda/environments/examples_cuda-121_arch-x86_64.yaml`")

try:
from langchain_core.prompt_values import StringPromptValue
@@ -52,7 +48,8 @@ class NVFoundationLLMClient(LLMClient):

def __init__(self, parent: "NVFoundationLLMService", *, model_name: str, **model_kwargs) -> None:
if IMPORT_EXCEPTION is not None:
raise ImportError(IMPORT_ERROR_MESSAGE) from IMPORT_EXCEPTION
raise ImportError(
IMPORT_ERROR_MESSAGE.format(package='langchain-nvidia-ai-endpoints')) from IMPORT_EXCEPTION

super().__init__()

@@ -218,7 +215,8 @@ class BaseURL(EnvConfigValue):

def __init__(self, *, api_key: APIKey | str = None, base_url: BaseURL | str = None, **model_kwargs) -> None:
if IMPORT_EXCEPTION is not None:
raise ImportError(IMPORT_ERROR_MESSAGE) from IMPORT_EXCEPTION
raise ImportError(
IMPORT_ERROR_MESSAGE.format(package='langchain-nvidia-ai-endpoints')) from IMPORT_EXCEPTION

super().__init__()

@@ -23,16 +23,13 @@
import appdirs

from morpheus.utils.env_config_value import EnvConfigValue
from morpheus_llm.error import IMPORT_ERROR_MESSAGE
from morpheus_llm.llm.services.llm_service import LLMClient
from morpheus_llm.llm.services.llm_service import LLMService

logger = logging.getLogger(__name__)

IMPORT_EXCEPTION = None
IMPORT_ERROR_MESSAGE = ("OpenAIChatService & OpenAIChatClient require the openai package to be installed. "
"Install it by running the following command:\n"
"`conda env update --solver=libmamba -n morpheus "
"--file conda/environments/examples_cuda-121_arch-x86_64.yaml --prune`")

try:
import openai
@@ -107,7 +104,7 @@ def __init__(self,
json=False,
**model_kwargs) -> None:
if IMPORT_EXCEPTION is not None:
raise ImportError(IMPORT_ERROR_MESSAGE) from IMPORT_EXCEPTION
raise ImportError(IMPORT_ERROR_MESSAGE.format(package='openai')) from IMPORT_EXCEPTION

super().__init__()

@@ -400,7 +397,7 @@ def __init__(self,
default_model_kwargs: dict = None) -> None:

if IMPORT_EXCEPTION is not None:
raise ImportError(IMPORT_ERROR_MESSAGE) from IMPORT_EXCEPTION
raise ImportError(IMPORT_ERROR_MESSAGE.format(package='openai')) from IMPORT_EXCEPTION

super().__init__()

@@ -21,13 +21,13 @@

import cudf

from morpheus_llm.error import IMPORT_ERROR_MESSAGE
from morpheus_llm.service.vdb.vector_db_service import VectorDBResourceService
from morpheus_llm.service.vdb.vector_db_service import VectorDBService

logger = logging.getLogger(__name__)

IMPORT_EXCEPTION = None
IMPORT_ERROR_MESSAGE = "FaissDBResourceService requires the FAISS library to be installed."

try:
from langchain.embeddings.base import Embeddings
@@ -50,7 +50,7 @@ class FaissVectorDBResourceService(VectorDBResourceService):

def __init__(self, parent: "FaissVectorDBService", *, name: str) -> None:
if IMPORT_EXCEPTION is not None:
raise ImportError(IMPORT_ERROR_MESSAGE) from IMPORT_EXCEPTION
raise ImportError(IMPORT_ERROR_MESSAGE.format(package='langchain and faiss-gpu')) from IMPORT_EXCEPTION

super().__init__()

@@ -285,7 +285,7 @@ class FaissVectorDBService(VectorDBService):
def __init__(self, local_dir: str, embeddings: "Embeddings"):

if IMPORT_EXCEPTION is not None:
raise ImportError(IMPORT_ERROR_MESSAGE) from IMPORT_EXCEPTION
raise ImportError(IMPORT_ERROR_MESSAGE.format(package='langchain and faiss-gpu')) from IMPORT_EXCEPTION

self._local_dir = local_dir
self._embeddings = embeddings
@@ -25,13 +25,13 @@
from morpheus.io.utils import cudf_string_cols_exceed_max_bytes
from morpheus.io.utils import truncate_string_cols_by_bytes
from morpheus.utils.type_aliases import DataFrameType
from morpheus_llm.error import IMPORT_ERROR_MESSAGE
from morpheus_llm.service.vdb.vector_db_service import VectorDBResourceService
from morpheus_llm.service.vdb.vector_db_service import VectorDBService

logger = logging.getLogger(__name__)

IMPORT_EXCEPTION = None
IMPORT_ERROR_MESSAGE = "MilvusVectorDBResourceService requires the milvus and pymilvus packages to be installed."

# Milvus has a max string length in bytes of 65,535. Multi-byte characters like "ñ" will have a string length of 1, the
# byte length encoded as UTF-8 will be 2
@@ -234,7 +234,7 @@ class MilvusVectorDBResourceService(VectorDBResourceService):

def __init__(self, name: str, client: "MilvusClient", truncate_long_strings: bool = False) -> None:
if IMPORT_EXCEPTION is not None:
raise ImportError(IMPORT_ERROR_MESSAGE) from IMPORT_EXCEPTION
raise ImportError(IMPORT_ERROR_MESSAGE.format(package='pymilvus')) from IMPORT_EXCEPTION

super().__init__()

9 changes: 7 additions & 2 deletions tests/benchmarks/conftest.py
@@ -20,8 +20,13 @@
from unittest import mock

import pytest
from pynvml.smi import NVSMI_QUERY_GPU
from pynvml.smi import nvidia_smi

try:
from pynvml.smi import NVSMI_QUERY_GPU
from pynvml.smi import nvidia_smi
except ImportError:
print("pynvml is not installed")

from test_bench_e2e_pipelines import E2E_TEST_CONFIGS


16 changes: 10 additions & 6 deletions tests/benchmarks/test_bench_agents_simple_pipeline.py
@@ -19,13 +19,17 @@
import typing
from unittest import mock

import langchain
import pytest
from langchain.agents import AgentType
from langchain.agents import initialize_agent
from langchain.agents import load_tools
from langchain.agents.tools import Tool
from langchain.utilities import serpapi

try:
import langchain
from langchain.agents import AgentType
from langchain.agents import initialize_agent
from langchain.agents import load_tools
from langchain.agents.tools import Tool
from langchain.utilities import serpapi
except ImportError:
print("langchain is not installed")

import cudf

62 changes: 43 additions & 19 deletions tests/conftest.py
@@ -47,6 +47,10 @@
from _utils.kafka import kafka_server # noqa: F401 pylint:disable=unused-import
from _utils.kafka import zookeeper_proc # noqa: F401 pylint:disable=unused-import

OPT_DEP_SKIP_REASON = (
"This test requires the {package} package to be installed, to install this run:\n"
"`conda env update --solver=libmamba -n morpheus --file conda/environments/examples_cuda-121_arch-x86_64.yaml`")


def pytest_addoption(parser: pytest.Parser):
"""
@@ -1064,33 +1068,53 @@ def nemollm_fixture(fail_missing: bool):
"""
Fixture to ensure nemollm is installed
"""
skip_reason = ("Tests for the NeMoLLMService require the nemollm package to be installed, to install this run:\n"
"`conda env update --solver=libmamba -n morpheus "
"--file conda/environments/all_cuda-121_arch-x86_64.yaml --prune`")
yield import_or_skip("nemollm", reason=skip_reason, fail_missing=fail_missing)
yield import_or_skip("nemollm", reason=OPT_DEP_SKIP_REASON.format(package="nemollm"), fail_missing=fail_missing)


@pytest.fixture(name="nvfoundationllm", scope='session')
def nvfoundationllm_fixture(fail_missing: bool):
@pytest.fixture(name="openai", scope='session')
def openai_fixture(fail_missing: bool):
"""
Fixture to ensure nvfoundationllm is installed
Fixture to ensure openai is installed
"""
skip_reason = (
"Tests for NVFoundation require the langchain-nvidia-ai-endpoints package to be installed, to install this "
"run:\n `conda env update --solver=libmamba -n morpheus "
"--file conda/environments/all_cuda-121_arch-x86_64.yaml --prune`")
yield import_or_skip("langchain_nvidia_ai_endpoints", reason=skip_reason, fail_missing=fail_missing)
yield import_or_skip("openai", reason=OPT_DEP_SKIP_REASON.format(package="openai"), fail_missing=fail_missing)


@pytest.fixture(name="openai", scope='session')
def openai_fixture(fail_missing: bool):
@pytest.fixture(name="langchain", scope='session')
def langchain_fixture(fail_missing: bool):
"""
Fixture to ensure openai is installed
Fixture to ensure langchain is installed
"""
yield import_or_skip("langchain", reason=OPT_DEP_SKIP_REASON.format(package="langchain"), fail_missing=fail_missing)


@pytest.fixture(name="langchain_core", scope='session')
def langchain_core_fixture(fail_missing: bool):
"""
Fixture to ensure langchain_core is installed
"""
yield import_or_skip("langchain_core",
reason=OPT_DEP_SKIP_REASON.format(package="langchain_core"),
fail_missing=fail_missing)


@pytest.fixture(name="langchain_community", scope='session')
def langchain_community_fixture(fail_missing: bool):
"""
Fixture to ensure langchain_community is installed
"""
yield import_or_skip("langchain_community",
reason=OPT_DEP_SKIP_REASON.format(package="langchain_community"),
fail_missing=fail_missing)


@pytest.fixture(name="langchain_nvidia_ai_endpoints", scope='session')
def langchain_nvidia_ai_endpoints_fixture(fail_missing: bool):
"""
Fixture to ensure langchain_nvidia_ai_endpoints is installed
"""
skip_reason = ("Tests for the OpenAIChatService require the openai package to be installed, to install this run:\n"
"`conda env update --solver=libmamba -n morpheus "
"--file conda/environments/all_cuda-121_arch-x86_64.yaml --prune`")
yield import_or_skip("openai", reason=skip_reason, fail_missing=fail_missing)
yield import_or_skip("langchain_nvidia_ai_endpoints",
reason=OPT_DEP_SKIP_REASON.format(package="langchain_nvidia_ai_endpoints"),
fail_missing=fail_missing)


@pytest.mark.usefixtures("openai")
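
The `usefixtures` decorator above is how existing tests opt in to these session fixtures. Below is a hedged sketch of a test module that depends on an optional package and is also gated behind `pytest --run_slow`; the `slow` marker name and its wiring to `--run_slow` are assumed to live in the repository's pytest configuration, which this diff does not show:

```python
import pytest

# Only run these tests when pytest is invoked with --run_slow (assumes the
# `slow` marker is registered and honoured by the repository's conftest.py).
pytestmark = pytest.mark.slow


@pytest.mark.usefixtures("langchain_core")
def test_needs_langchain_core():
    # The session fixture has already imported langchain_core, or has skipped
    # this test (or failed it, when fail_missing is enabled) if it is absent.
    import langchain_core  # noqa: F401
```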
(Remaining changed files are not shown.)