Skip to content

Api

API interaction for LLM services.

logger module-attribute

logger = getLogger(__name__)

PydanticModelT module-attribute

PydanticModelT = TypeVar('PydanticModelT', bound=BaseModel)

MessageDict

Bases: TypedDict

Typed dictionary for LLM message structure.

Source code in src/codemap/llm/api.py
32
33
34
35
36
# Shape of a single chat message sent to the LLM: the authoring role
# ("user" or "system") plus the message text. Declared with the
# functional TypedDict form; runtime behavior is identical to the
# class-based declaration.
MessageDict = TypedDict(
	"MessageDict",
	{
		"role": Literal["user", "system"],
		"content": str,
	},
)

role instance-attribute

role: Literal['user', 'system']

content instance-attribute

content: str

validate_schema

validate_schema(
	model: type[PydanticModelT], input_data: str | object
) -> PydanticModelT

Validate the schema of the input data.

Source code in src/codemap/llm/api.py
39
40
41
42
43
def validate_schema(model: type[PydanticModelT], input_data: str | object) -> PydanticModelT:
	"""Validate the schema of the input data.

	A ``str`` is treated as a JSON document and parsed via
	``model_validate_json``; any other object is validated directly
	via ``model_validate``.
	"""
	# Pick the appropriate pydantic entry point, then invoke it once.
	validate = model.model_validate_json if isinstance(input_data, str) else model.model_validate
	return validate(input_data)

call_llm_api

call_llm_api(
	messages: list[MessageDict],
	config_loader: ConfigLoader,
	pydantic_model: type[PydanticModelT] | None = None,
) -> str | PydanticModelT

Call an LLM API using pydantic-ai.

Parameters:

Name Type Description Default
messages list[MessageDict]

The list of messages to send to the LLM

required
config_loader ConfigLoader

ConfigLoader instance for additional configuration

required
pydantic_model type[PydanticModelT] | None

Optional Pydantic model class to structure the output. If provided, the function will return an instance of this model. Otherwise, it returns a string.

None

Returns:

Type Description
str | PydanticModelT

The generated response, either as a string or an instance of the pydantic_model.

Raises:

Type Description
LLMError

If pydantic-ai is not installed or the API call fails.

Source code in src/codemap/llm/api.py
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
def call_llm_api(
	messages: list[MessageDict],
	config_loader: ConfigLoader,
	pydantic_model: type[PydanticModelT] | None = None,
) -> str | PydanticModelT:
	"""
	Call an LLM API using pydantic-ai.

	Args:
	    messages: The list of messages to send to the LLM. The last entry
	        must be a "user" message; the first "system" entry, if any,
	        overrides the default system prompt.
	    config_loader: ConfigLoader instance for additional configuration
	        (model name, base URL, temperature, max output tokens).
	    pydantic_model: Optional Pydantic model class to structure the output.
	                  If provided, the function will return an instance of this model.
	                  Otherwise, it returns a string.

	Returns:
	    The generated response, either as a string or an instance of the pydantic_model.

	Raises:
	    LLMError: If pydantic-ai is not installed or the API call fails.
	"""
	if Agent is None or End is None or FinalResult is None:  # Check all imports
		error_msg = "Pydantic-AI library or its required types (AgentNode, End, FinalResult) not installed/found."
		# logger.error, not logger.exception: we are not inside an except
		# handler, so there is no active traceback to attach.
		logger.error(error_msg)
		raise LLMError(error_msg) from None

	# Default system prompt, overridden by the first "system" message if present.
	system_prompt_str = (
		"You are an AI programming assistant. Follow the user's requirements carefully and to the letter."
	)
	for message in messages:
		if message["role"] == "system":
			system_prompt_str = message["content"]
			break

	# If an output_model is specified, pydantic-ai handles instructing the LLM
	# for structured output, so no schema text is appended to the prompt here.
	agent_output_type: type = pydantic_model if pydantic_model else str

	try:
		model_name = config_loader.get.llm.model

		if is_ollama_model(model_name):
			# Ollama is reached through its OpenAI-compatible endpoint.
			from pydantic_ai.models.openai import OpenAIModel
			from pydantic_ai.providers.openai import OpenAIProvider

			ollama_name = model_name.split(":", 1)[1]  # strip the provider prefix

			base_url = config_loader.get.llm.base_url
			if base_url is None:
				base_url = "http://localhost:11434/v1"  # Ollama's default local endpoint

			agent = Agent(
				OpenAIModel(model_name=ollama_name, provider=OpenAIProvider(base_url=base_url)),
				system_prompt=system_prompt_str,
				output_type=agent_output_type,
			)
		else:
			agent = Agent(
				model=model_name,
				system_prompt=system_prompt_str,
				output_type=agent_output_type,
			)

		run_settings = {
			"temperature": config_loader.get.llm.temperature,
			"max_tokens": config_loader.get.llm.max_output_tokens,
		}

		logger.debug(
			"Calling Pydantic-AI Agent with model: %s, system_prompt: '%s...', params: %s",
			model_name,
			system_prompt_str[:100],
			run_settings,
		)

		if not any(message.get("role") == "user" for message in messages):
			error_msg = "No user content found in messages for Pydantic-AI agent."
			logger.error(error_msg)
			raise LLMError(error_msg)

		if not messages or messages[-1].get("role") != "user":
			error_msg = "Last message is not a user prompt"
			logger.error(error_msg)
			raise LLMError(error_msg)

		user_prompt = messages[-1]["content"]

		if ModelSettings is None:
			error_msg = "ModelSettings not found in pydantic-ai. Install the correct version."
			logger.error(error_msg)
			raise LLMError(error_msg)

		# Run the agent synchronously and validate the output.
		run = agent.run_sync(user_prompt=user_prompt, model_settings=ModelSettings(**run_settings))

		if run.output is not None:
			if pydantic_model:
				try:
					return validate_schema(pydantic_model, run.output)
				except ValidationError as e:
					raise LLMError from e
			elif isinstance(run.output, (str, BaseModel)):
				return run.output  # type: ignore[return-value]

		error_msg = "Pydantic-AI call succeeded but returned no structured data or text."
		logger.error(error_msg)
		raise LLMError(error_msg)

	except LLMError:
		# Propagate our own errors unchanged instead of letting the blanket
		# Exception handler below re-wrap them and obscure the message.
		raise
	except ImportError:
		error_msg = "Pydantic-AI library not installed. Install it with 'uv add pydantic-ai'."
		logger.exception(error_msg)
		raise LLMError(error_msg) from None
	except Exception as e:
		logger.exception("Pydantic-AI LLM API call failed")
		error_msg = f"Pydantic-AI LLM API call failed: {e}"
		raise LLMError(error_msg) from e