|
| 1 | +{ |
| 2 | + "description": "The `pipelex` cognitive task (cogt) subsystem orchestrates AI-driven content generation and inference. The `Inference Manager` serves as the central hub, abstracting various AI tasks and delegating them to specialized job factories like the `LLM Job Factory/Worker`. Model selection is handled by the `Model Routing` component, which consults the `Model Manager` for available model configurations. The `Model Manager`, in turn, relies on the `Plugin Manager` to discover and integrate `AI Provider Adapters`, which are concrete implementations for interacting with external AI services. Complex content creation workflows are managed by the `Content Generator`, which leverages the `Inference Manager` and the `Templating Engine` for dynamic prompt generation and output formatting. This architecture ensures a modular and extensible system for managing diverse AI capabilities.", |
| 3 | + "components": [ |
| 4 | + { |
| 5 | + "name": "Inference Manager", |
| 6 | + "description": "Provides a high-level, unified interface for initiating and managing various types of AI inference tasks (LLM, image generation, extraction), abstracting the underlying job factories and workers. It acts as the primary orchestrator for AI operations within the subsystem.", |
| 7 | + "referenced_source_code": [ |
| 8 | + { |
| 9 | + "qualified_name": "pipelex.cogt.inference.inference_manager.InferenceManager", |
| 10 | + "reference_file": "pipelex/cogt/inference/inference_manager.py", |
| 11 | + "reference_start_line": 15, |
| 12 | + "reference_end_line": 129 |
| 13 | + } |
| 14 | + ], |
| 15 | + "assigned_files": [ |
| 16 | + "pipelex/cogt/inference/__init__.py", |
| 17 | + "pipelex/cogt/inference/inference_job_abstract.py", |
| 18 | + "pipelex/cogt/inference/inference_manager_protocol.py", |
| 19 | + "pipelex/cogt/inference/inference_manager.py", |
| 20 | + "pipelex/cogt/inference/inference_worker_abstract.py" |
| 21 | + ], |
| 22 | + "can_expand": true |
| 23 | + }, |
| 24 | + { |
| 25 | + "name": "Content Generator", |
| 26 | + "description": "Orchestrates complex content generation tasks, potentially involving multiple steps such as templating, LLM calls, and image generation, enabling higher-level cognitive task generation.", |
| 27 | + "referenced_source_code": [ |
| 28 | + { |
| 29 | + "qualified_name": "pipelex.cogt.content_generation.content_generator.ContentGenerator", |
| 30 | + "reference_file": "pipelex/cogt/content_generation/content_generator.py", |
| 31 | + "reference_start_line": 37, |
| 32 | + "reference_end_line": 276 |
| 33 | + } |
| 34 | + ], |
| 35 | + "assigned_files": [ |
| 36 | + "pipelex/cogt/content_generation/__init__.py", |
| 37 | + "pipelex/cogt/content_generation/content_generator_dry.py", |
| 38 | + "pipelex/cogt/content_generation/assignment_models.py", |
| 39 | + "pipelex/cogt/content_generation/llm_generate.py", |
| 40 | + "pipelex/cogt/content_generation/templating_generate.py", |
| 41 | + "pipelex/cogt/content_generation/extract_generate.py", |
| 42 | + "pipelex/cogt/content_generation/content_generator_protocol.py", |
| 43 | + "pipelex/cogt/content_generation/content_generator.py", |
| 44 | + "pipelex/cogt/content_generation/img_gen_generate.py" |
| 45 | + ], |
| 46 | + "can_expand": true |
| 47 | + }, |
| 48 | + { |
| 49 | + "name": "Model Manager", |
| 50 | + "description": "Maintains a comprehensive catalog of available AI models, their specifications, configurations, and capabilities, providing essential metadata for model selection and routing.", |
| 51 | + "referenced_source_code": [ |
| 52 | + { |
| 53 | + "qualified_name": "pipelex.cogt.models.model_manager.ModelManager", |
| 54 | + "reference_file": "pipelex/cogt/models/model_manager.py", |
| 55 | + "reference_start_line": 19, |
| 56 | + "reference_end_line": 165 |
| 57 | + } |
| 58 | + ], |
| 59 | + "assigned_files": [ |
| 60 | + "pipelex/cogt/models/__init__.py", |
| 61 | + "pipelex/cogt/models/model_manager_abstract.py", |
| 62 | + "pipelex/cogt/models/model_deck_loader.py", |
| 63 | + "pipelex/cogt/models/model_manager.py", |
| 64 | + "pipelex/cogt/models/model_deck_check.py", |
| 65 | + "pipelex/cogt/models/model_deck.py", |
| 66 | + "pipelex/cogt/models/model_deck_config.py", |
| 67 | + "pipelex/cogt/model_backends/backend.py", |
| 68 | + "pipelex/cogt/model_backends/backend_factory.py", |
| 69 | + "pipelex/cogt/model_backends/__init__.py", |
| 70 | + "pipelex/cogt/model_backends/model_constraints.py", |
| 71 | + "pipelex/cogt/model_backends/model_spec_factory.py", |
| 72 | + "pipelex/cogt/model_backends/prompting_target.py", |
| 73 | + "pipelex/cogt/model_backends/model_lists.py", |
| 74 | + "pipelex/cogt/model_backends/backend_library.py", |
| 75 | + "pipelex/cogt/model_backends/model_spec.py", |
| 76 | + "pipelex/cogt/model_backends/model_type.py" |
| 77 | + ], |
| 78 | + "can_expand": true |
| 79 | + }, |
| 80 | + { |
| 81 | + "name": "Model Routing", |
| 82 | +    "description": "Selects the most appropriate AI model for a given task based on criteria such as cost, performance, specific capabilities, or user-defined routing profiles.", |
| 83 | + "referenced_source_code": [ |
| 84 | + { |
| 85 | + "qualified_name": "pipelex.cogt.model_routing.routing_profile.RoutingProfile", |
| 86 | + "reference_file": "pipelex/cogt/model_routing/routing_profile.py", |
| 87 | + "reference_start_line": 8, |
| 88 | + "reference_end_line": 80 |
| 89 | + } |
| 90 | + ], |
| 91 | + "assigned_files": [ |
| 92 | + "pipelex/cogt/model_routing/routing_profile_loader.py", |
| 93 | + "pipelex/cogt/model_routing/routing_models.py", |
| 94 | + "pipelex/cogt/model_routing/__init__.py", |
| 95 | + "pipelex/cogt/model_routing/routing_profile_factory.py", |
| 96 | + "pipelex/cogt/model_routing/routing_profile.py" |
| 97 | + ], |
| 98 | + "can_expand": false |
| 99 | + }, |
| 100 | + { |
| 101 | + "name": "Plugin Manager", |
| 102 | +    "description": "Discovers, loads, and manages the lifecycle of various AI provider plugins, making them available to the system and providing a standardized abstraction layer for AI integration.", |
| 103 | + "referenced_source_code": [ |
| 104 | + { |
| 105 | + "qualified_name": "pipelex.plugins.plugin_manager.PluginManager", |
| 106 | + "reference_file": "pipelex/plugins/plugin_manager.py", |
| 107 | + "reference_start_line": 4, |
| 108 | + "reference_end_line": 12 |
| 109 | + } |
| 110 | + ], |
| 111 | + "assigned_files": [ |
| 112 | + "pipelex/plugins/plugin_sdk_registry.py", |
| 113 | + "pipelex/plugins/plugin_manager.py" |
| 114 | + ], |
| 115 | + "can_expand": false |
| 116 | + }, |
| 117 | + { |
| 118 | + "name": "AI Provider Adapters", |
| 119 | + "description": "Provide concrete, standardized interfaces for interacting with specific external AI/ML service APIs (e.g., LLM inference, image generation, extraction), handling API calls and provider-specific details.", |
| 120 | + "referenced_source_code": [ |
| 121 | + { |
| 122 | + "qualified_name": "pipelex.plugins.plugin.Plugin", |
| 123 | + "reference_file": "pipelex/plugins/plugin.py", |
| 124 | + "reference_start_line": 6, |
| 125 | + "reference_end_line": 19 |
| 126 | + } |
| 127 | + ], |
| 128 | + "assigned_files": [ |
| 129 | + "pipelex/plugins/plugin.py", |
| 130 | + "pipelex/plugins/openai/__init__.py", |
| 131 | + "pipelex/plugins/openai/openai_llm_worker.py", |
| 132 | + "pipelex/plugins/openai/openai_llms.py", |
| 133 | + "pipelex/plugins/openai/openai_factory.py", |
| 134 | + "pipelex/plugins/openai/openai_func.py", |
| 135 | + "pipelex/plugins/openai/vertexai_factory.py", |
| 136 | + "pipelex/plugins/openai/openai_img_gen_worker.py", |
| 137 | + "pipelex/plugins/openai/openai_img_gen_factory.py", |
| 138 | + "pipelex/plugins/openai/openai_img_gen_alt_worker.py", |
| 139 | + "pipelex/plugins/bedrock/bedrock_llms.py", |
| 140 | + "pipelex/plugins/bedrock/__init__.py", |
| 141 | + "pipelex/plugins/bedrock/bedrock_client_protocol.py", |
| 142 | + "pipelex/plugins/bedrock/bedrock_factory.py", |
| 143 | + "pipelex/plugins/bedrock/bedrock_message.py", |
| 144 | + "pipelex/plugins/bedrock/bedrock_llm_worker.py", |
| 145 | + "pipelex/plugins/bedrock/bedrock_client_aioboto3.py", |
| 146 | + "pipelex/plugins/bedrock/bedrock_client_boto3.py", |
| 147 | + "pipelex/plugins/anthropic/__init__.py", |
| 148 | + "pipelex/plugins/anthropic/anthropic_exceptions.py", |
| 149 | + "pipelex/plugins/anthropic/anthropic_llm_worker.py", |
| 150 | + "pipelex/plugins/anthropic/anthropic_factory.py", |
| 151 | + "pipelex/plugins/anthropic/anthropic_llms.py", |
| 152 | + "pipelex/plugins/azure_rest/__init__.py", |
| 153 | + "pipelex/plugins/azure_rest/azure_img_gen_factory.py", |
| 154 | + "pipelex/plugins/azure_rest/azure_img_gen_worker.py", |
| 155 | + "pipelex/plugins/fal/__init__.py", |
| 156 | + "pipelex/plugins/fal/fal_config.py", |
| 157 | + "pipelex/plugins/fal/fal_factory.py", |
| 158 | + "pipelex/plugins/fal/fal_img_gen_worker.py", |
| 159 | + "pipelex/plugins/pypdfium2/__init__.py", |
| 160 | + "pipelex/plugins/pypdfium2/pypdfium2_worker.py", |
| 161 | + "pipelex/plugins/google/__init__.py", |
| 162 | + "pipelex/plugins/google/google_factory.py", |
| 163 | + "pipelex/plugins/google/google_llm_worker.py", |
| 164 | + "pipelex/plugins/mistral/__init__.py", |
| 165 | + "pipelex/plugins/mistral/mistral_factory.py", |
| 166 | + "pipelex/plugins/mistral/mistral_exceptions.py", |
| 167 | + "pipelex/plugins/mistral/mistral_llms.py", |
| 168 | + "pipelex/plugins/mistral/mistral_llm_worker.py", |
| 169 | + "pipelex/plugins/mistral/mistral_extract_worker.py" |
| 170 | + ], |
| 171 | + "can_expand": false |
| 172 | + }, |
| 173 | + { |
| 174 | + "name": "Templating Engine", |
| 175 | + "description": "Processes and renders templates for dynamic prompt generation, output formatting, or other text manipulation tasks, supporting advanced templating for AI interactions.", |
| 176 | + "referenced_source_code": [ |
| 177 | + { |
| 178 | + "qualified_name": "pipelex.cogt.templating.template_rendering.render_template", |
| 179 | + "reference_file": "pipelex/cogt/templating/template_rendering.py", |
| 180 | + "reference_start_line": 9, |
| 181 | + "reference_end_line": 22 |
| 182 | + } |
| 183 | + ], |
| 184 | + "assigned_files": [ |
| 185 | + "pipelex/cogt/templating/__init__.py", |
| 186 | + "pipelex/cogt/templating/template_rendering.py", |
| 187 | + "pipelex/cogt/templating/template_preprocessor.py", |
| 188 | + "pipelex/cogt/templating/template_blueprint.py", |
| 189 | + "pipelex/cogt/templating/template_category.py", |
| 190 | + "pipelex/cogt/templating/templating_style.py" |
| 191 | + ], |
| 192 | + "can_expand": true |
| 193 | + }, |
| 194 | + { |
| 195 | + "name": "LLM Job Factory/Worker", |
| 196 | + "description": "Creates and manages jobs for Large Language Model (LLM) inference, abstracting the execution details of calling specific LLM providers and handling LLM-specific task orchestration.", |
| 197 | + "referenced_source_code": [ |
| 198 | + { |
| 199 | + "qualified_name": "pipelex.cogt.llm.llm_job_factory.LLMJobFactory", |
| 200 | + "reference_file": "pipelex/cogt/llm/llm_job_factory.py", |
| 201 | + "reference_start_line": 8, |
| 202 | + "reference_end_line": 37 |
| 203 | + } |
| 204 | + ], |
| 205 | + "assigned_files": [ |
| 206 | + "pipelex/cogt/llm/llm_job.py", |
| 207 | + "pipelex/cogt/llm/llm_job_components.py", |
| 208 | + "pipelex/cogt/llm/llm_prompt_template.py", |
| 209 | + "pipelex/cogt/llm/__init__.py", |
| 210 | + "pipelex/cogt/llm/llm_utils.py", |
| 211 | + "pipelex/cogt/llm/llm_worker_abstract.py", |
| 212 | + "pipelex/cogt/llm/llm_setting.py", |
| 213 | + "pipelex/cogt/llm/llm_worker_factory.py", |
| 214 | + "pipelex/cogt/llm/llm_worker_internal_abstract.py", |
| 215 | + "pipelex/cogt/llm/llm_job_factory.py", |
| 216 | + "pipelex/cogt/llm/llm_prompt_factory_abstract.py", |
| 217 | + "pipelex/cogt/llm/llm_report.py", |
| 218 | + "pipelex/cogt/llm/llm_prompt.py", |
| 219 | + "pipelex/cogt/llm/structured_output.py", |
| 220 | + "pipelex/cogt/llm/llm_prompt_template_inputs.py" |
| 221 | + ], |
| 222 | + "can_expand": false |
| 223 | + }, |
| 224 | + { |
| 225 | + "name": "Unclassified", |
| 226 | +    "description": "Catch-all component for files not assigned to any other component: utility functions, shared helpers, and external-library/dependency glue code.", |
| 227 | + "referenced_source_code": [], |
| 228 | + "assigned_files": [ |
| 229 | + "pipelex/cogt/__init__.py", |
| 230 | + "pipelex/cogt/config_cogt.py", |
| 231 | + "pipelex/cogt/exceptions.py", |
| 232 | + "pipelex/cogt/extract/extract_job_components.py", |
| 233 | + "pipelex/cogt/extract/__init__.py", |
| 234 | + "pipelex/cogt/extract/extract_setting.py", |
| 235 | + "pipelex/cogt/extract/extract_job.py", |
| 236 | + "pipelex/cogt/extract/extract_job_factory.py", |
| 237 | + "pipelex/cogt/extract/extract_input.py", |
| 238 | + "pipelex/cogt/extract/extract_worker_abstract.py", |
| 239 | + "pipelex/cogt/extract/extract_output.py", |
| 240 | + "pipelex/cogt/extract/extract_worker_factory.py", |
| 241 | + "pipelex/cogt/image/prompt_image.py", |
| 242 | + "pipelex/cogt/image/__init__.py", |
| 243 | + "pipelex/cogt/image/prompt_image_factory.py", |
| 244 | + "pipelex/cogt/image/generated_image.py", |
| 245 | + "pipelex/cogt/usage/__init__.py", |
| 246 | + "pipelex/cogt/usage/costs_per_token.py", |
| 247 | + "pipelex/cogt/usage/cost_registry.py", |
| 248 | + "pipelex/cogt/usage/token_category.py", |
| 249 | + "pipelex/cogt/usage/cost_category.py", |
| 250 | + "pipelex/cogt/img_gen/img_gen_job_components.py", |
| 251 | + "pipelex/cogt/img_gen/__init__.py", |
| 252 | + "pipelex/cogt/img_gen/img_gen_worker_abstract.py", |
| 253 | + "pipelex/cogt/img_gen/img_gen_job.py", |
| 254 | + "pipelex/cogt/img_gen/img_gen_worker_factory.py", |
| 255 | + "pipelex/cogt/img_gen/img_gen_setting.py", |
| 256 | + "pipelex/cogt/img_gen/img_gen_job_factory.py", |
| 257 | + "pipelex/cogt/img_gen/img_gen_prompt.py" |
| 258 | + ], |
| 259 | + "can_expand": false |
| 260 | + } |
| 261 | + ], |
| 262 | + "components_relations": [ |
| 263 | + { |
| 264 | + "relation": "delegates AI tasks to", |
| 265 | + "src_name": "Inference Manager", |
| 266 | + "dst_name": "LLM Job Factory/Worker" |
| 267 | + }, |
| 268 | + { |
| 269 | + "relation": "retrieves model configurations from", |
| 270 | + "src_name": "Inference Manager", |
| 271 | + "dst_name": "Model Manager" |
| 272 | + }, |
| 273 | + { |
| 274 | + "relation": "leverages", |
| 275 | + "src_name": "Content Generator", |
| 276 | + "dst_name": "Inference Manager" |
| 277 | + }, |
| 278 | + { |
| 279 | + "relation": "utilizes", |
| 280 | + "src_name": "Content Generator", |
| 281 | + "dst_name": "Templating Engine" |
| 282 | + }, |
| 283 | + { |
| 284 | + "relation": "provides model catalog data to", |
| 285 | + "src_name": "Model Manager", |
| 286 | + "dst_name": "Model Routing" |
| 287 | + }, |
| 288 | + { |
| 289 | + "relation": "receives information about available models from", |
| 290 | + "src_name": "Model Manager", |
| 291 | + "dst_name": "Plugin Manager" |
| 292 | + }, |
| 293 | + { |
| 294 | + "relation": "consults", |
| 295 | + "src_name": "Model Routing", |
| 296 | + "dst_name": "Model Manager" |
| 297 | + }, |
| 298 | + { |
| 299 | + "relation": "is used by", |
| 300 | + "src_name": "Model Routing", |
| 301 | + "dst_name": "LLM Job Factory/Worker" |
| 302 | + }, |
| 303 | + { |
| 304 | + "relation": "manages the lifecycle of", |
| 305 | + "src_name": "Plugin Manager", |
| 306 | + "dst_name": "AI Provider Adapters" |
| 307 | + }, |
| 308 | + { |
| 309 | + "relation": "provides discovered plugins to", |
| 310 | + "src_name": "Plugin Manager", |
| 311 | + "dst_name": "Model Manager" |
| 312 | + }, |
| 313 | + { |
| 314 | + "relation": "are used by", |
| 315 | + "src_name": "AI Provider Adapters", |
| 316 | + "dst_name": "LLM Job Factory/Worker" |
| 317 | + }, |
| 318 | + { |
| 319 | + "relation": "are managed by", |
| 320 | + "src_name": "AI Provider Adapters", |
| 321 | + "dst_name": "Plugin Manager" |
| 322 | + }, |
| 323 | + { |
| 324 | + "relation": "is used by", |
| 325 | + "src_name": "Templating Engine", |
| 326 | + "dst_name": "Content Generator" |
| 327 | + }, |
| 328 | + { |
| 329 | + "relation": "is used by", |
| 330 | + "src_name": "Templating Engine", |
| 331 | + "dst_name": "LLM Job Factory/Worker" |
| 332 | + }, |
| 333 | + { |
| 334 | + "relation": "uses", |
| 335 | + "src_name": "LLM Job Factory/Worker", |
| 336 | + "dst_name": "AI Provider Adapters" |
| 337 | + }, |
| 338 | + { |
| 339 | + "relation": "consults", |
| 340 | + "src_name": "LLM Job Factory/Worker", |
| 341 | + "dst_name": "Model Routing" |
| 342 | + } |
| 343 | + ] |
| 344 | +} |
0 commit comments