artifact_path: file:///home/vino/ML_Projects/End-to-end-llm-pipeline-huggingface/mlruns/710899643861413467/models/m-ab39989523454f2786759297aba06b14/artifacts
flavors:
  python_function:
    env:
      conda: conda.yaml
      virtualenv: python_env.yaml
    loader_module: mlflow.transformers
    python_version: 3.11.13
  transformers:
    code: null
    components:
    - tokenizer
    framework: pt
    instance_type: TextGenerationPipeline
    model_binary: model
    pipeline_model_type: Qwen3ForCausalLM
    source_model_name: ./models/lora/Qwen3_06B_lora_fp16_r64_e10_msl2048
    task: text-generation
    tokenizer_type: Qwen2TokenizerFast
    torch_dtype: torch.bfloat16
    transformers_version: 4.54.0
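# A minimal logging sketch that would produce a transformers flavor like the one above,
# assuming the checkpoint path is the source_model_name recorded in this file; exact
# keyword names (e.g. `name` vs. the older `artifact_path`) vary across MLflow releases:
#
#   import mlflow
#   import torch
#   from transformers import pipeline
#
#   # Build the text-generation pipeline from the fine-tuned checkpoint.
#   text_gen = pipeline(
#       "text-generation",
#       model="./models/lora/Qwen3_06B_lora_fp16_r64_e10_msl2048",
#       torch_dtype=torch.bfloat16,
#   )
#   with mlflow.start_run():
#       mlflow.transformers.log_model(
#           transformers_model=text_gen,
#           name="model",
#           task="text-generation",
#       )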
is_signature_from_type_hint: false
mlflow_version: 3.1.4
model_id: m-ab39989523454f2786759297aba06b14
model_size_bytes: 554888572
model_uuid: m-ab39989523454f2786759297aba06b14
prompts: null
run_id: c61be72e02f74b1c93cb4b20c16164a1
signature:
  inputs: '[{"type": "string", "required": true}]'
  outputs: '[{"type": "string", "required": true}]'
  params: null
type_hint_from_example: false
utc_time_created: '2025-07-26 11:04:34.618339'
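# The signature above declares a single required string input and a string output, so the
# logged model can be served through the generic pyfunc interface. A minimal loading sketch,
# assuming the MLflow 3 `models:/<model_id>` URI scheme (the local artifact_path at the top
# of this file would work as well); the prompt text is purely illustrative:
#
#   import mlflow
#
#   model = mlflow.pyfunc.load_model("models:/m-ab39989523454f2786759297aba06b14")
#   # predict() takes the prompt string and returns the generated text.
#   print(model.predict("Explain what a LoRA adapter is."))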