From 834e1262c8d644322a46c4a088b781f23c62bf91 Mon Sep 17 00:00:00 2001
From: Aditi Singh <47007177+Addi-11@users.noreply.github.com>
Date: Thu, 7 Nov 2024 01:56:37 +0530
Subject: [PATCH 1/3] added MLModel files (#3564)

---
 .../models/system/ALLaM-2-7b-instruct/MLmodel | 24 +++++++++++++
 .../MLmodel | 30 ++++++++++++++++
 .../models/system/Deci-DeciCoder-1b/MLmodel | 30 ++++++++++++++++
 .../system/Deci-DeciLM-7B-instruct/MLmodel | 30 ++++++++++++++++
 assets/models/system/Deci-DeciLM-7B/MLmodel | 30 ++++++++++++++++
 assets/models/system/Llama-2-13b-chat/MLmodel | 28 +++++++++++++++
 assets/models/system/Llama-2-13b/MLmodel | 30 ++++++++++++++++
 assets/models/system/Llama-2-70b-chat/MLmodel | 30 ++++++++++++++++
 assets/models/system/Llama-2-70b/MLmodel | 28 +++++++++++++++
 assets/models/system/Llama-2-7b-chat/MLmodel | 28 +++++++++++++++
 assets/models/system/Llama-2-7b/MLmodel | 31 ++++++++++++++++
 .../system/Mistral-7B-Instruct-v0-1/MLmodel | 25 +++++++++++++
 .../system/Mistral-7B-Instruct-v0-2/MLmodel | 32 +++++++++++++++++
 .../system/Mistral-7B-Instruct-v0-3/MLmodel | 31 ++++++++++++++++
 assets/models/system/Mistral-7B-v0-1/MLmodel | 26 ++++++++++++++
 .../models/system/Mixtral-8x7B-v0-1/MLmodel | 26 ++++++++++++++
 assets/models/system/Prism/MLmodel | 28 +++++++++++++++
 assets/models/system/Virchow/MLmodel | 32 +++++++++++++++++
 assets/models/system/Virchow2/MLmodel | 32 +++++++++++++++++
 .../automl-image-classification/MLmodel | 29 +++++++++++++++
 assets/models/system/automl-ner/MLmodel | 23 ++++++++++++
 .../system/automl-text-classification/MLmodel | 23 ++++++++++++
 .../automl_instance_segmentation/MLmodel | 28 +++++++++++++++
 .../system/automl_object_detection/MLmodel | 28 +++++++++++++++
 assets/models/system/bert-base-cased/MLmodel | 32 +++++++++++++++++
 .../models/system/bert-base-uncased/MLmodel | 32 +++++++++++++++++
 assets/models/system/bert-large-cased/MLmodel | 32 +++++++++++++++++
 .../models/system/bert-large-uncased/MLmodel | 32 +++++++++++++++++
 .../MLmodel | 33 +++++++++++++++++
 assets/models/system/camembert-base/MLmodel | 32 +++++++++++++++++
 .../compvis-stable-diffusion-v1-4/MLmodel | 31 ++++++++++++++++
 assets/models/system/cxrreportgen/MLmodel | 28 +++++++++++++++
 .../system/databricks-dolly-v2-12b/MLmodel | 32 +++++++++++++++++
 .../system/deci-decidiffusion-v1-0/MLmodel | 31 ++++++++++++++++
 .../deepset-minilm-uncased-squad2/MLmodel | 32 +++++++++++++++++
 .../deepset-roberta-base-squad2/MLmodel | 32 +++++++++++++++++
 .../MLmodel | 31 ++++++++++++++++
 .../MLmodel | 32 +++++++++++++++++
 .../system/distilbert-base-cased/MLmodel | 32 +++++++++++++++++
 .../MLmodel | 32 +++++++++++++++++
 .../MLmodel | 32 +++++++++++++++++
 .../system/distilbert-base-uncased/MLmodel | 32 +++++++++++++++++
 assets/models/system/distilgpt2/MLmodel | 32 +++++++++++++++++
 .../models/system/distilroberta-base/MLmodel | 32 +++++++++++++++++
 .../system/facebook-bart-large-cnn/MLmodel | 32 +++++++++++++++++
 .../facebook-deit-base-patch16-224/MLmodel | 35 +++++++++++++++++++
 .../MLmodel | 32 +++++++++++++++++
 .../MLmodel | 27 ++++++++++++++
 .../MLmodel | 27 ++++++++++++++
 .../system/facebook-sam-vit-base/MLmodel | 30 ++++++++++++++++
 .../system/facebook-sam-vit-huge/MLmodel | 30 ++++++++++++++++
 .../system/facebook-sam-vit-large/MLmodel | 30 ++++++++++++++++
 .../MLmodel | 32 +++++++++++++++++
 .../google-vit-base-patch16-224/MLmodel | 35 +++++++++++++++++++
 assets/models/system/gpt2-large/MLmodel | 32 +++++++++++++++++
 assets/models/system/gpt2-medium/MLmodel | 32 +++++++++++++++++
 assets/models/system/gpt2/MLmodel | 32 +++++++++++++++++
 .../jean-baptiste-camembert-ner/MLmodel | 19 ++++++++++
 .../MLmodel | 31 ++++++++++++++++
 assets/models/system/medimageinsight/MLmodel | 29 +++++++++++++++
 assets/models/system/medimageparse/MLmodel | 25 +++++++++++++
 .../MLmodel | 35 +++++++++++++++++++
 .../microsoft-deberta-base-mnli/MLmodel | 32 +++++++++++++++++
 .../system/microsoft-deberta-base/MLmodel | 32 +++++++++++++++++
 .../microsoft-deberta-large-mnli/MLmodel | 32 +++++++++++++++++
 .../system/microsoft-deberta-large/MLmodel | 32 +++++++++++++++++
 .../system/microsoft-deberta-xlarge/MLmodel | 32 +++++++++++++++++
 .../MLmodel | 30 ++++++++++++++++
 .../system/microsoft-orca-2-13b/MLmodel | 32 +++++++++++++++++
 .../models/system/microsoft-orca-2-7b/MLmodel | 32 +++++++++++++++++
 .../models/system/microsoft-phi-1-5/MLmodel | 25 +++++++++++++
 assets/models/system/microsoft-phi-2/MLmodel | 31 ++++++++++++++++
 .../MLmodel | 35 +++++++++++++++++++
 .../MLmodel | 32 +++++++++++++++++
 .../MLmodel | 32 +++++++++++++++++
 .../mistralai-Mixtral-8x22B-v0-1/MLmodel | 32 +++++++++++++++++
 .../MLmodel | 26 ++++++++++++++
 .../MLmodel | 34 ++++++++++++++++++
 .../MLmodel | 34 ++++++++++++++++++
 .../MLmodel | 29 +++++++++++++++
 .../MLmodel | 34 ++++++++++++++++++
 .../MLmodel | 34 ++++++++++++++++++
 .../MLmodel | 34 ++++++++++++++++++
 .../MLmodel | 34 ++++++++++++++++++
 .../mmd-3x-yolof_r50_c5_8x8_1x_coco/MLmodel | 34 ++++++++++++++++++
 assets/models/system/mmeft/MLmodel | 16 +++++++++
 .../MLmodel | 33 +++++++++++++++++
 .../MLmodel | 29 +++++++++++++++
 .../MLmodel | 29 +++++++++++++++
 .../openai-clip-vit-base-patch32/MLmodel | 29 +++++++++++++++
 .../openai-clip-vit-large-patch14/MLmodel | 29 +++++++++++++++
 .../system/openai-whisper-large-v3/MLmodel | 21 +++++++++++
 .../system/openai-whisper-large/MLmodel | 25 +++++++++++++
 .../system/phi-3-medium-128k-instruct/MLmodel | 31 ++++++++++++++++
 .../system/phi-3-medium-4k-instruct/MLmodel | 31 ++++++++++++++++
 .../system/phi-3-mini-128k-instruct/MLmodel | 30 ++++++++++++++++
 .../system/phi-3-mini-4k-instruct/MLmodel | 31 ++++++++++++++++
 .../system/phi-3-small-128k-instruct/MLmodel | 31 ++++++++++++++++
 .../system/phi-3-small-8k-instruct/MLmodel | 31 ++++++++++++++++
 .../system/phi-3-vision-128k-instruct/MLmodel | 24 +++++++++++++
 .../system/phi-3.5-mini-128k-instruct/MLmodel | 31 ++++++++++++++++
 .../system/phi-3.5-moe-128k-instruct/MLmodel | 31 ++++++++++++++++
 .../phi-3.5-vision-128k-instruct/MLmodel | 31 ++++++++++++++++
 .../MLmodel | 25 +++++++++++++
 .../system/projecte-aina-FLOR-1-3B/MLmodel | 25 +++++++++++++
 .../MLmodel | 25 +++++++++++++
 .../system/projecte-aina-FLOR-6-3B/MLmodel | 25 +++++++++++++
 .../system/projecte-aina-aguila-7b/MLmodel | 31 ++++++++++++++++
 .../roberta-base-openai-detector/MLmodel | 32 +++++++++++++++++
 assets/models/system/roberta-base/MLmodel | 33 +++++++++++++++++
 .../models/system/roberta-large-mnli/MLmodel | 32 +++++++++++++++++
 .../roberta-large-openai-detector/MLmodel | 32 +++++++++++++++++
 assets/models/system/roberta-large/MLmodel | 32 +++++++++++++++++
 .../runwayml-stable-diffusion-v1-5/MLmodel | 31 ++++++++++++++++
 .../MLmodel | 32 +++++++++++++++++
 .../MLmodel | 27 ++++++++++++++
 .../system/salesforce-blip-vqa-base/MLmodel | 28 +++++++++++++++
 .../MLmodel | 27 ++++++++++++++
 .../salesforce-blip2-opt-2-7b-vqa/MLmodel | 28 +++++++++++++++
 .../system/snowflake-arctic-base/MLmodel | 22 ++++++++++++
 .../system/snowflake-artic-instruct/MLmodel | 22 ++++++++++++
 .../MLmodel | 31 ++++++++++++++++
 .../MLmodel | 31 ++++++++++++++++
 .../sshleifer-distilbart-cnn-12-6/MLmodel | 32 +++++++++++++++++
 .../stabilityai-stable-diffusion-2-1/MLmodel | 31 ++++++++++++++++
 .../MLmodel | 32 +++++++++++++++++
 .../MLmodel | 30 ++++++++++++++++
 .../MLmodel | 29 +++++++++++++++
 assets/models/system/t5-base/MLmodel | 32 +++++++++++++++++
 assets/models/system/t5-large/MLmodel | 32 +++++++++++++++++
 assets/models/system/t5-small/MLmodel | 32 +++++++++++++++++
 .../system/tiiuae-falcon-40b-instruct/MLmodel | 31 ++++++++++++++++
 .../models/system/tiiuae-falcon-40b/MLmodel | 31 ++++++++++++++++
 .../system/tiiuae-falcon-7b-instruct/MLmodel | 31 ++++++++++++++++
 assets/models/system/tiiuae-falcon-7b/MLmodel | 31 ++++++++++++++++
 .../MLmodel | 31 ++++++++++++++++
 .../MLmodel | 31 ++++++++++++++++
 .../system/yolof_r50_c5_8x8_1x_coco/MLmodel | 31 ++++++++++++++++
 138 files changed, 4146 insertions(+)
 create mode 100644 assets/models/system/ALLaM-2-7b-instruct/MLmodel
 create mode 100644 assets/models/system/BiomedCLIP-PubMedBERT_256-vit_base_patch16_224/MLmodel
 create mode 100644 assets/models/system/Deci-DeciCoder-1b/MLmodel
 create mode 100644 assets/models/system/Deci-DeciLM-7B-instruct/MLmodel
 create mode 100644 assets/models/system/Deci-DeciLM-7B/MLmodel
 create mode 100644 assets/models/system/Llama-2-13b-chat/MLmodel
 create mode 100644 assets/models/system/Llama-2-13b/MLmodel
 create mode 100644 assets/models/system/Llama-2-70b-chat/MLmodel
 create mode 100644 assets/models/system/Llama-2-70b/MLmodel
 create mode 100644 assets/models/system/Llama-2-7b-chat/MLmodel
 create mode 100644 assets/models/system/Llama-2-7b/MLmodel
 create mode 100644 assets/models/system/Mistral-7B-Instruct-v0-1/MLmodel
 create mode 100644 assets/models/system/Mistral-7B-Instruct-v0-2/MLmodel
 create mode 100644 assets/models/system/Mistral-7B-Instruct-v0-3/MLmodel
 create mode 100644 assets/models/system/Mistral-7B-v0-1/MLmodel
 create mode 100644 assets/models/system/Mixtral-8x7B-v0-1/MLmodel
 create mode 100644 assets/models/system/Prism/MLmodel
 create mode 100644 assets/models/system/Virchow/MLmodel
 create mode 100644 assets/models/system/Virchow2/MLmodel
 create mode 100644 assets/models/system/automl-image-classification/MLmodel
 create mode 100644 assets/models/system/automl-ner/MLmodel
 create mode 100644 assets/models/system/automl-text-classification/MLmodel
 create mode 100644 assets/models/system/automl_instance_segmentation/MLmodel
 create mode 100644 assets/models/system/automl_object_detection/MLmodel
 create mode 100644 assets/models/system/bert-base-cased/MLmodel
 create mode 100644 assets/models/system/bert-base-uncased/MLmodel
 create mode 100644 assets/models/system/bert-large-cased/MLmodel
 create mode 100644 assets/models/system/bert-large-uncased/MLmodel
 create mode 100644 assets/models/system/bytetrack_yolox_x_crowdhuman_mot17-private-half/MLmodel
 create mode 100644 assets/models/system/camembert-base/MLmodel
 create mode 100644 assets/models/system/compvis-stable-diffusion-v1-4/MLmodel
 create mode 100644 assets/models/system/cxrreportgen/MLmodel
 create mode 100644 assets/models/system/databricks-dolly-v2-12b/MLmodel
 create mode 100644 assets/models/system/deci-decidiffusion-v1-0/MLmodel
 create mode 100644 assets/models/system/deepset-minilm-uncased-squad2/MLmodel
 create mode 100644 assets/models/system/deepset-roberta-base-squad2/MLmodel
 create mode 100644 assets/models/system/deformable_detr_twostage_refine_r50_16x2_50e_coco/MLmodel
 create mode 100644 assets/models/system/distilbert-base-cased-distilled-squad/MLmodel
 create mode 100644 assets/models/system/distilbert-base-cased/MLmodel
 create mode 100644 assets/models/system/distilbert-base-uncased-distilled-squad/MLmodel
 create mode 100644 assets/models/system/distilbert-base-uncased-finetuned-sst-2-english/MLmodel
 create mode 100644 assets/models/system/distilbert-base-uncased/MLmodel
 create mode 100644 assets/models/system/distilgpt2/MLmodel
 create mode 100644 assets/models/system/distilroberta-base/MLmodel
 create mode 100644 assets/models/system/facebook-bart-large-cnn/MLmodel
 create mode 100644 assets/models/system/facebook-deit-base-patch16-224/MLmodel
 create mode 100644 assets/models/system/facebook-dinov2-base-imagenet1k-1-layer/MLmodel
 create mode 100644 assets/models/system/facebook-dinov2-image-embeddings-base/MLmodel
 create mode 100644 assets/models/system/facebook-dinov2-image-embeddings-giant/MLmodel
 create mode 100644 assets/models/system/facebook-sam-vit-base/MLmodel
 create mode 100644 assets/models/system/facebook-sam-vit-huge/MLmodel
 create mode 100644 assets/models/system/facebook-sam-vit-large/MLmodel
 create mode 100644 assets/models/system/finiteautomata-bertweet-base-sentiment-analysis/MLmodel
 create mode 100644 assets/models/system/google-vit-base-patch16-224/MLmodel
 create mode 100644 assets/models/system/gpt2-large/MLmodel
 create mode 100644 assets/models/system/gpt2-medium/MLmodel
 create mode 100644 assets/models/system/gpt2/MLmodel
 create mode 100644 assets/models/system/jean-baptiste-camembert-ner/MLmodel
 create mode 100644 assets/models/system/mask_rcnn_swin-t-p4-w7_fpn_1x_coco/MLmodel
 create mode 100644 assets/models/system/medimageinsight/MLmodel
 create mode 100644 assets/models/system/medimageparse/MLmodel
 create mode 100644 assets/models/system/microsoft-beit-base-patch16-224-pt22k-ft22k/MLmodel
 create mode 100644 assets/models/system/microsoft-deberta-base-mnli/MLmodel
 create mode 100644 assets/models/system/microsoft-deberta-base/MLmodel
 create mode 100644 assets/models/system/microsoft-deberta-large-mnli/MLmodel
 create mode 100644 assets/models/system/microsoft-deberta-large/MLmodel
 create mode 100644 assets/models/system/microsoft-deberta-xlarge/MLmodel
 create mode 100644 assets/models/system/microsoft-llava-med-v1.5-mistral-7b/MLmodel
 create mode 100644 assets/models/system/microsoft-orca-2-13b/MLmodel
 create mode 100644 assets/models/system/microsoft-orca-2-7b/MLmodel
 create mode 100644 assets/models/system/microsoft-phi-1-5/MLmodel
 create mode 100644 assets/models/system/microsoft-phi-2/MLmodel
 create mode 100644 assets/models/system/microsoft-swinv2-base-patch4-window12-192-22k/MLmodel
 create mode 100644 assets/models/system/mistral-community-Mixtral-8x22B-v01/MLmodel
 create mode 100644 assets/models/system/mistralai-Mixtral-8x22B-Instruct-v0-1/MLmodel
 create mode 100644 assets/models/system/mistralai-Mixtral-8x22B-v0-1/MLmodel
 create mode 100644 assets/models/system/mistralai-Mixtral-8x7B-Instruct-v01/MLmodel
 create mode 100644 assets/models/system/mmd-3x-deformable-detr_refine_twostage_r50_16xb2-50e_coco/MLmodel
 create mode 100644 assets/models/system/mmd-3x-mask-rcnn_swin-t-p4-w7_fpn_1x_coco/MLmodel
 create mode 100644 assets/models/system/mmd-3x-rtmdet-ins_x_8xb16-300e_coco/MLmodel
 create mode 100644 assets/models/system/mmd-3x-sparse-rcnn_r101_fpn_300-proposals_crop-ms-480-800-3x_coco/MLmodel
 create mode 100644 assets/models/system/mmd-3x-sparse-rcnn_r50_fpn_300-proposals_crop-ms-480-800-3x_coco/MLmodel
 create mode 100644 assets/models/system/mmd-3x-vfnet_r50-mdconv-c3-c5_fpn_ms-2x_coco/MLmodel
 create mode 100644 assets/models/system/mmd-3x-vfnet_x101-64x4d-mdconv-c3-c5_fpn_ms-2x_coco/MLmodel
 create mode 100644 assets/models/system/mmd-3x-yolof_r50_c5_8x8_1x_coco/MLmodel
 create mode 100644 assets/models/system/mmeft/MLmodel
 create mode 100644 assets/models/system/ocsort_yolox_x_crowdhuman_mot17-private-half/MLmodel
 create mode 100644 assets/models/system/openai-clip-image-text-embeddings-vit-base-patch32/MLmodel
 create mode 100644 assets/models/system/openai-clip-image-text-embeddings-vit-large-patch14-336/MLmodel
 create mode 100644 assets/models/system/openai-clip-vit-base-patch32/MLmodel
 create mode 100644 assets/models/system/openai-clip-vit-large-patch14/MLmodel
 create mode 100644 assets/models/system/openai-whisper-large-v3/MLmodel
 create mode 100644 assets/models/system/openai-whisper-large/MLmodel
 create mode 100644 assets/models/system/phi-3-medium-128k-instruct/MLmodel
 create mode 100644 assets/models/system/phi-3-medium-4k-instruct/MLmodel
 create mode 100644 assets/models/system/phi-3-mini-128k-instruct/MLmodel
 create mode 100644 assets/models/system/phi-3-mini-4k-instruct/MLmodel
 create mode 100644 assets/models/system/phi-3-small-128k-instruct/MLmodel
 create mode 100644 assets/models/system/phi-3-small-8k-instruct/MLmodel
 create mode 100644 assets/models/system/phi-3-vision-128k-instruct/MLmodel
 create mode 100644 assets/models/system/phi-3.5-mini-128k-instruct/MLmodel
 create mode 100644 assets/models/system/phi-3.5-moe-128k-instruct/MLmodel
 create mode 100644 assets/models/system/phi-3.5-vision-128k-instruct/MLmodel
 create mode 100644 assets/models/system/projecte-aina-FLOR-1-3B-Instructed/MLmodel
 create mode 100644 assets/models/system/projecte-aina-FLOR-1-3B/MLmodel
 create mode 100644 assets/models/system/projecte-aina-FLOR-6-3B-Instructed/MLmodel
 create mode 100644 assets/models/system/projecte-aina-FLOR-6-3B/MLmodel
 create mode 100644 assets/models/system/projecte-aina-aguila-7b/MLmodel
 create mode 100644 assets/models/system/roberta-base-openai-detector/MLmodel
 create mode 100644 assets/models/system/roberta-base/MLmodel
 create mode 100644 assets/models/system/roberta-large-mnli/MLmodel
 create mode 100644 assets/models/system/roberta-large-openai-detector/MLmodel
 create mode 100644 assets/models/system/roberta-large/MLmodel
 create mode 100644 assets/models/system/runwayml-stable-diffusion-v1-5/MLmodel
 create mode 100644 assets/models/system/runwayml_stable_diffusion_inpainting/MLmodel
 create mode 100644 assets/models/system/salesforce-blip-image-captioning-base/MLmodel
 create mode 100644 assets/models/system/salesforce-blip-vqa-base/MLmodel
 create mode 100644 assets/models/system/salesforce-blip2-opt-2-7b-image-to-text/MLmodel
 create mode 100644 assets/models/system/salesforce-blip2-opt-2-7b-vqa/MLmodel
 create mode 100644 assets/models/system/snowflake-arctic-base/MLmodel
 create mode 100644 assets/models/system/snowflake-artic-instruct/MLmodel
 create mode 100644 assets/models/system/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco/MLmodel
 create mode 100644 assets/models/system/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco/MLmodel
 create mode 100644 assets/models/system/sshleifer-distilbart-cnn-12-6/MLmodel
 create mode 100644 assets/models/system/stabilityai-stable-diffusion-2-1/MLmodel
 create mode 100644 assets/models/system/stabilityai-stable-diffusion-2-inpainting/MLmodel
 create mode 100644 assets/models/system/stabilityai-stable-diffusion-xl-base-1-0/MLmodel
 create mode 100644 assets/models/system/stabilityai-stable-diffusion-xl-refiner-1-0/MLmodel
 create mode 100644 assets/models/system/t5-base/MLmodel
 create mode 100644 assets/models/system/t5-large/MLmodel
 create mode 100644 assets/models/system/t5-small/MLmodel
 create mode 100644 assets/models/system/tiiuae-falcon-40b-instruct/MLmodel
 create mode 100644 assets/models/system/tiiuae-falcon-40b/MLmodel
 create mode 100644 assets/models/system/tiiuae-falcon-7b-instruct/MLmodel
 create mode 100644 assets/models/system/tiiuae-falcon-7b/MLmodel
 create mode 100644 assets/models/system/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco/MLmodel
 create mode 100644 assets/models/system/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco/MLmodel
 create mode 100644 assets/models/system/yolof_r50_c5_8x8_1x_coco/MLmodel

diff --git a/assets/models/system/ALLaM-2-7b-instruct/MLmodel b/assets/models/system/ALLaM-2-7b-instruct/MLmodel
new file mode 100644
index 0000000000..17fac66dc0
--- /dev/null
+++ b/assets/models/system/ALLaM-2-7b-instruct/MLmodel
@@ -0,0 +1,24 @@
+flavors:
+  hftransformersv2:
+    code: null
+    hf_config_class: AutoConfig
+    hf_pretrained_class: AutoModelForCausalLM
+    hf_tokenizer_class: AutoTokenizer
+    model_data: data
+    pytorch_version: 1.13.1
+    task_type: chat-completion
+    transformers_version: 4.42.4
+  python_function:
+    data: data
+    env: conda.yaml
+    loader_module: azureml.evaluate.mlflow.hftransformers
+    python_version: 3.9.19
+metadata:
+  azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:49
+  base_model_name: ALLaM-2-7b-instruct
+  base_model_task: chat-completion
+  is_acft_model: true
+  is_finetuned_model: false
+mlflow_version: 2.12.2
+model_uuid: 3573e6b5dc7649c79c0be21b48cbe095
+utc_time_created: '2024-08-31 13:39:08.777155'
diff --git a/assets/models/system/BiomedCLIP-PubMedBERT_256-vit_base_patch16_224/MLmodel b/assets/models/system/BiomedCLIP-PubMedBERT_256-vit_base_patch16_224/MLmodel
new file mode 100644
index 0000000000..01c18ce877
--- /dev/null
+++ b/assets/models/system/BiomedCLIP-PubMedBERT_256-vit_base_patch16_224/MLmodel
@@ -0,0 +1,30 @@
+flavors:
+  python_function:
+    artifacts:
+      model_dir:
+        path: artifacts/BiomedCLIP-PubMedBERT_256-vit_base_patch16_224
+        uri: ../../BiomedCLIP-PubMedBERT_256-vit_base_patch16_224/BiomedCLIP-PubMedBERT_256-vit_base_patch16_224
+    cloudpickle_version: 2.2.1
+    code: code
+    env:
+      conda: conda.yaml
+      virtualenv: python_env.yaml
+    loader_module: mlflow.pyfunc.model
+    python_model: python_model.pkl
+    python_version: 3.9.19
+    streamable: false
+metadata:
+  base_model_name: null
+  base_model_task: zero-shot-image-classification
+  is_acft_model: true
+  is_finetuned_model: false
+mlflow_version: 2.15.1
+model_size_bytes: 789393877
+model_uuid: 87e0b5b21dc34fad8381248be8107f2b
+signature:
+  inputs: '[{"type": "binary", "name": "image", "required": true}, {"type": "string",
+    "name": "text", "required": true}]'
+  outputs: '[{"type": "string", "name": "probs", "required": true}, {"type": "string",
+    "name": "labels", "required": true}]'
+  params: null
+utc_time_created: '2024-09-19 19:59:15.785066'
diff --git a/assets/models/system/Deci-DeciCoder-1b/MLmodel b/assets/models/system/Deci-DeciCoder-1b/MLmodel
new file mode 100644
index 0000000000..c306118f3c
--- /dev/null
+++ b/assets/models/system/Deci-DeciCoder-1b/MLmodel
@@ -0,0 +1,30 @@
+flavors:
+  hftransformersv2:
+    code: null
+    config_hf_load_kwargs:
+      trust_remote_code: true
+    hf_config_class: AutoConfig
+    hf_pretrained_class: AutoModelForCausalLM
+    hf_tokenizer_class: AutoTokenizer
+
huggingface_id: Deci/DeciCoder-1b + model_data: data + model_hf_load_args: + trust_remote_code: true + pytorch_version: 1.13.1 + task_type: text-generation + tokenizer_hf_load_kwargs: + trust_remote_code: true + transformers_version: 4.34.0 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.18 +metadata: + base_model_name: Deci/DeciCoder-1b + base_model_task: text-generation + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.6.0 +model_uuid: 045f50822e264144b88dc8fc9884de99 +utc_time_created: '2023-12-12 10:24:21.482548' diff --git a/assets/models/system/Deci-DeciLM-7B-instruct/MLmodel b/assets/models/system/Deci-DeciLM-7B-instruct/MLmodel new file mode 100644 index 0000000000..d1fcc01415 --- /dev/null +++ b/assets/models/system/Deci-DeciLM-7B-instruct/MLmodel @@ -0,0 +1,30 @@ +flavors: + hftransformersv2: + code: null + config_hf_load_kwargs: + trust_remote_code: true + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + huggingface_id: Deci/DeciLM-7B-instruct + model_data: data + model_hf_load_args: + trust_remote_code: true + pytorch_version: 2.0.0+cu117 + task_type: text-generation + tokenizer_hf_load_kwargs: + trust_remote_code: true + transformers_version: 4.36.0 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.18 +metadata: + base_model_name: Deci/DeciLM-7B-instruct + base_model_task: text-generation + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.6.0 +model_uuid: cacc364686f743da842ec07d0af5ff54 +utc_time_created: '2023-12-13 11:49:35.201762' diff --git a/assets/models/system/Deci-DeciLM-7B/MLmodel b/assets/models/system/Deci-DeciLM-7B/MLmodel new file mode 100644 index 0000000000..f2cc2b42eb --- /dev/null +++ b/assets/models/system/Deci-DeciLM-7B/MLmodel @@ -0,0 +1,30 @@ +flavors: + hftransformersv2: + code: null + config_hf_load_kwargs: + trust_remote_code: true + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + huggingface_id: Deci/DeciLM-7B + model_data: data + model_hf_load_args: + trust_remote_code: true + pytorch_version: 2.0.0+cu117 + task_type: text-generation + tokenizer_hf_load_kwargs: + trust_remote_code: true + transformers_version: 4.36.0 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.18 +metadata: + base_model_name: Deci/DeciLM-7B + base_model_task: text-generation + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.6.0 +model_uuid: 209d8ac9ffdb4c5881a34143dac3217b +utc_time_created: '2023-12-13 11:38:47.400961' diff --git a/assets/models/system/Llama-2-13b-chat/MLmodel b/assets/models/system/Llama-2-13b-chat/MLmodel new file mode 100644 index 0000000000..6cdb8b9db4 --- /dev/null +++ b/assets/models/system/Llama-2-13b-chat/MLmodel @@ -0,0 +1,28 @@ +flavors: + hftransformersv2: + code: code + hf_config_class: LlamaConfig + hf_predict_module: llama_predict_meta + hf_pretrained_class: LlamaForCausalLM + hf_tokenizer_class: LlamaTokenizer + model_data: data + pytorch_version: 2.0.1+cu117 + task_type: chat-completion + transformers_version: 4.31.0 + model_hf_load_kwargs: + torch_dtype: torch.float16 + device_map: auto + python_function: + code: code + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.16 
+metadata: + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:45 + base_model_name: Llama-2-13b-chat + base_model_task: chat-completion + model_provider_name: meta +mlflow_version: 2.3.1 +model_uuid: 192e572d7e464297a5b7b3916aaafab5 +utc_time_created: '2023-07-24 06:22:15.253950' diff --git a/assets/models/system/Llama-2-13b/MLmodel b/assets/models/system/Llama-2-13b/MLmodel new file mode 100644 index 0000000000..f4d759aeae --- /dev/null +++ b/assets/models/system/Llama-2-13b/MLmodel @@ -0,0 +1,30 @@ +flavors: + hftransformersv2: + code: null + generator_config: + do_sample: true + max_new_tokens: 256 + temperature: 0.8 + top_p: 0.7 + hf_config_class: LlamaConfig + hf_pretrained_class: LlamaForCausalLM + hf_tokenizer_class: LlamaTokenizer + pytorch_version: 2.0.1+cu117 + task_type: text-generation + transformers_version: 4.31.0 + model_hf_load_kwargs: + torch_dtype: torch.float16 + device_map: auto + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.16 +metadata: + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:45 + base_model_name: Llama-2-13b + base_model_task: text-generation + model_provider_name: meta +mlflow_version: 2.3.1 +model_uuid: 12fd644f9e9349509a2472763b78491f +utc_time_created: '2023-07-24 06:22:15.253950' diff --git a/assets/models/system/Llama-2-70b-chat/MLmodel b/assets/models/system/Llama-2-70b-chat/MLmodel new file mode 100644 index 0000000000..ba1c2469ec --- /dev/null +++ b/assets/models/system/Llama-2-70b-chat/MLmodel @@ -0,0 +1,30 @@ +flavors: + hftransformersv2: + code: code + hf_config_class: LlamaConfig + hf_pretrained_class: LlamaForCausalLM + hf_tokenizer_class: LlamaTokenizer + hf_predict_module: llama_predict_meta + model_data: data + model_hf_load_kwargs: + torch_dtype: torch.bfloat16 + device_map: auto + pytorch_version: 2.0.1+cu117 + task_type: chat-completion + transformers_version: 4.31.0 + trust_remote_code: true + python_function: + code: code + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.5 +metadata: + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:45 + base_model_name: Llama-2-70b-chat + base_model_task: chat-completion + model_provider_name: meta +mlflow_version: 2.3.1 +model_uuid: 8d0710795b734825b37cc614ace80f9d +utc_time_created: '2023-07-12 03:26:51.391279' + diff --git a/assets/models/system/Llama-2-70b/MLmodel b/assets/models/system/Llama-2-70b/MLmodel new file mode 100644 index 0000000000..1584a83b95 --- /dev/null +++ b/assets/models/system/Llama-2-70b/MLmodel @@ -0,0 +1,28 @@ +flavors: + hftransformersv2: + code: code + hf_config_class: LlamaConfig + hf_pretrained_class: LlamaForCausalLM + hf_tokenizer_class: LlamaTokenizer + model_data: data + model_hf_load_kwargs: + torch_dtype: torch.bfloat16 + device_map: auto + pytorch_version: 2.0.1+cu117 + task_type: text-generation + transformers_version: 4.31.0 + trust_remote_code: true + python_function: + code: code + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.16 +metadata: + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:45 + base_model_name: Llama-2-70b + base_model_task: text-generation + model_provider_name: meta +mlflow_version: 2.3.1 +model_uuid: 8d0710795b734825b37cc614ace80f9d +utc_time_created: '2023-07-12 03:26:51.391279' diff --git 
a/assets/models/system/Llama-2-7b-chat/MLmodel b/assets/models/system/Llama-2-7b-chat/MLmodel new file mode 100644 index 0000000000..7d3ba315d2 --- /dev/null +++ b/assets/models/system/Llama-2-7b-chat/MLmodel @@ -0,0 +1,28 @@ +flavors: + hftransformersv2: + code: code + hf_config_class: LlamaConfig + hf_predict_module: llama_predict_meta + hf_pretrained_class: LlamaForCausalLM + hf_tokenizer_class: LlamaTokenizer + model_data: data + pytorch_version: 2.0.1+cu117 + task_type: chat-completion + transformers_version: 4.31.0 + model_hf_load_kwargs: + torch_dtype: torch.float16 + device_map: auto + python_function: + code: code + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.16 +metadata: + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:45 + base_model_name: Llama-2-7b-chat + base_model_task: chat-completion + model_provider_name: meta +mlflow_version: 2.3.1 +model_uuid: 2ecad009eb3d468eb33aadccb6846ce7 +utc_time_created: '2023-07-24 06:22:15.253950' diff --git a/assets/models/system/Llama-2-7b/MLmodel b/assets/models/system/Llama-2-7b/MLmodel new file mode 100644 index 0000000000..5c2496ef01 --- /dev/null +++ b/assets/models/system/Llama-2-7b/MLmodel @@ -0,0 +1,31 @@ +flavors: + hftransformersv2: + code: null + generator_config: + do_sample: true + max_new_tokens: 256 + temperature: 0.8 + top_p: 0.7 + hf_config_class: LlamaConfig + hf_pretrained_class: LlamaForCausalLM + hf_tokenizer_class: LlamaTokenizer + model_data: data + pytorch_version: 2.0.1+cu117 + task_type: text-generation + transformers_version: 4.31.0 + model_hf_load_kwargs: + torch_dtype: torch.float16 + device_map: auto + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.16 +metadata: + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:45 + model_provider_name: meta + base_model_name: Llama-2-7b + base_model_task: text-generation +mlflow_version: 2.3.1 +model_uuid: ad2c45e23ad340378a1791bebb5ee906 +utc_time_created: '2023-07-24 06:22:15.253950' diff --git a/assets/models/system/Mistral-7B-Instruct-v0-1/MLmodel b/assets/models/system/Mistral-7B-Instruct-v0-1/MLmodel new file mode 100644 index 0000000000..58207e9d75 --- /dev/null +++ b/assets/models/system/Mistral-7B-Instruct-v0-1/MLmodel @@ -0,0 +1,25 @@ +flavors: + hftransformersv2: + code: null + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + huggingface_id: mistralai/Mistral-7B-Instruct-v0.2 + model_data: data + pytorch_version: 1.13.1 + task_type: chat-completion + transformers_version: 4.37.2 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.18 +metadata: + base_model_name: mistralai/Mistral-7B-Instruct-v0.2 + base_model_task: chat-completion + is_acft_model: true + is_finetuned_model: false + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:32 +mlflow_version: 2.9.2 +model_uuid: 77a1902e3a414556823bac84b91c1462 +utc_time_created: '2024-02-26 20:56:26.654737' diff --git a/assets/models/system/Mistral-7B-Instruct-v0-2/MLmodel b/assets/models/system/Mistral-7B-Instruct-v0-2/MLmodel new file mode 100644 index 0000000000..8d492070b8 --- /dev/null +++ b/assets/models/system/Mistral-7B-Instruct-v0-2/MLmodel @@ -0,0 +1,32 @@ +flavors: + hftransformersv2: + code: null + config_hf_load_kwargs: + trust_remote_code: 
true + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + huggingface_id: mistralai/Mistral-7B-Instruct-v0.2 + model_data: data + model_hf_load_args: + trust_remote_code: true + pytorch_version: 1.13.1 + task_type: chat-completion + tokenizer_hf_load_kwargs: + trust_remote_code: true + transformers_version: 4.38.2 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.18 +metadata: + base_model_name: mistralai/Mistral-7B-Instruct-v0.2 + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:45 + base_model_task: chat-completion + model_provider_name: mistral + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_uuid: a2e7f441fd904bfe93604d6219d99899 +utc_time_created: '2024-04-02 10:02:58.717060' diff --git a/assets/models/system/Mistral-7B-Instruct-v0-3/MLmodel b/assets/models/system/Mistral-7B-Instruct-v0-3/MLmodel new file mode 100644 index 0000000000..2e7accc49c --- /dev/null +++ b/assets/models/system/Mistral-7B-Instruct-v0-3/MLmodel @@ -0,0 +1,31 @@ +flavors: + hftransformersv2: + code: null + config_hf_load_kwargs: + trust_remote_code: true + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + huggingface_id: mistralai/Mistral-7B-Instruct-v0.3 + model_data: data + model_hf_load_args: + trust_remote_code: true + pytorch_version: 1.13.1 + task_type: chat-completion + tokenizer_hf_load_kwargs: + trust_remote_code: true + transformers_version: 4.40.1 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.19 +metadata: + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:45 + base_model_name: mistralai/Mistral-7B-Instruct-v0.3 + base_model_task: chat-completion + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_uuid: 2cddbcb8f35d41159270b212ffd95030 +utc_time_created: '2024-06-17 11:59:49.418562' diff --git a/assets/models/system/Mistral-7B-v0-1/MLmodel b/assets/models/system/Mistral-7B-v0-1/MLmodel new file mode 100644 index 0000000000..720e469fec --- /dev/null +++ b/assets/models/system/Mistral-7B-v0-1/MLmodel @@ -0,0 +1,26 @@ +flavors: + hftransformersv2: + code: null + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + huggingface_id: mistralai/Mistral-7B-v0.1 + model_data: data + pytorch_version: 1.13.1 + task_type: text-generation + transformers_version: 4.37.2 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.18 +metadata: + base_model_name: mistralai/Mistral-7B-v0.1 + base_model_task: text-generation + model_provider_name: mistral + is_acft_model: true + is_finetuned_model: false + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:45 +mlflow_version: 2.9.2 +model_uuid: a37dc0c0333b432ca65dae2fbcae07d1 +utc_time_created: '2024-02-22 08:48:44.168221' diff --git a/assets/models/system/Mixtral-8x7B-v0-1/MLmodel b/assets/models/system/Mixtral-8x7B-v0-1/MLmodel new file mode 100644 index 0000000000..87c8afab96 --- /dev/null +++ b/assets/models/system/Mixtral-8x7B-v0-1/MLmodel @@ -0,0 +1,26 @@ +flavors: + hftransformersv2: + code: null + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + 
huggingface_id: mistralai/Mixtral-8x7B-v0.1 + model_data: data + pytorch_version: 1.13.1 + task_type: text-generation + transformers_version: 4.37.2 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.18 +metadata: + base_model_name: mistralai/Mixtral-8x7B-v0.1 + base_model_task: text-generation + model_provider_name: mistral + is_acft_model: true + is_finetuned_model: false + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:45 +mlflow_version: 2.9.2 +model_uuid: 9a5de5dd9af842a6b7ce5da4661e64b4 +utc_time_created: '2024-02-22 20:00:34.426697' diff --git a/assets/models/system/Prism/MLmodel b/assets/models/system/Prism/MLmodel new file mode 100644 index 0000000000..2da0d9a985 --- /dev/null +++ b/assets/models/system/Prism/MLmodel @@ -0,0 +1,28 @@ +flavors: + python_function: + artifacts: + checkpoint_path: + path: artifacts/Prism + uri: Prism + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.10.14 + streamable: false +metadata: + base_model_name: Prism + base_model_task: zero-shot-image-classification + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.13.2 +model_size_bytes: 2355510581 +model_uuid: b14330b0a47745c9bfdc83c32299a6eb +signature: + inputs: '[{"type": "string", "name": "embeddings", "required": true}, {"type": "string", "name": "neg_prompts", "required": false}, {"type": "string", "name": "pos_prompts", "required": false}]' + outputs: '[{"type": "string", "name": "output", "required": true}]' + params: null +utc_time_created: '2024-09-09 03:48:15.134832' diff --git a/assets/models/system/Virchow/MLmodel b/assets/models/system/Virchow/MLmodel new file mode 100644 index 0000000000..50d694ce18 --- /dev/null +++ b/assets/models/system/Virchow/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + artifacts: + checkpoint_path: + path: artifacts/Virchow/pytorch_model.bin + uri: pytorch_model.bin + config_path: + path: artifacts/Virchow/config.json + uri: config.json + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.10.14 + streamable: false +metadata: + base_model_name: Virchow + base_model_task: embeddings + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.13.2 +model_size_bytes: 2525083692 +model_uuid: 60f9e8d90ec344589112026f81464b44 +signature: + inputs: '[{"type": "string", "name": "image", "required": true}]' + outputs: '[{"type": "string", "name": "output", "required": true}]' + params: '[{"name": "device_type", "type": "string", "default": "cuda", "shape": null}, + {"name": "to_half_precision", "type": "boolean", "default": false, "shape": null}]' +utc_time_created: '2024-09-01 22:06:50.910220' diff --git a/assets/models/system/Virchow2/MLmodel b/assets/models/system/Virchow2/MLmodel new file mode 100644 index 0000000000..76f0645fb5 --- /dev/null +++ b/assets/models/system/Virchow2/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + artifacts: + checkpoint_path: + path: artifacts/Virchow2/pytorch_model.bin + uri: pytorch_model.bin + config_path: + path: artifacts/Virchow2/config.json + uri: config.json + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: 
python_model.pkl + python_version: 3.10.14 + streamable: false +metadata: + base_model_name: Virchow2 + base_model_task: embeddings + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.13.2 +model_size_bytes: 2525083692 +model_uuid: 60f9e8d90ec344589112026f81464b44 +signature: + inputs: '[{"type": "string", "name": "image", "required": true}]' + outputs: '[{"type": "string", "name": "output", "required": true}]' + params: '[{"name": "device_type", "type": "string", "default": "cuda", "shape": null}, + {"name": "to_half_precision", "type": "boolean", "default": false, "shape": null}]' +utc_time_created: '2024-09-01 22:06:50.910220' diff --git a/assets/models/system/automl-image-classification/MLmodel b/assets/models/system/automl-image-classification/MLmodel new file mode 100644 index 0000000000..005892b5f9 --- /dev/null +++ b/assets/models/system/automl-image-classification/MLmodel @@ -0,0 +1,29 @@ +flavors: + python_function: + artifacts: + model: + path: artifacts/vitb16r224-3c68ea1f.pth + uri: /tmp/vitb16r224-3c68ea1f.pth + settings: + path: artifacts/settings.json + uri: /mnt/azureml/cr/j/a723e21c98ba4abfb95424b4e73fb58e/exe/wd/azureml/model/mgmt/processors/pyfunc/automl/settings.json + cloudpickle_version: 2.2.1 + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: null + base_model_task: image-classification + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.6.0 +model_uuid: 836be882c838417a8d3fd5d267f7084f +signature: + inputs: '[{"type": "binary", "name": "image"}]' + outputs: '[{"type": "string", "name": "probs"}, {"type": "string", "name": "labels"}, + {"type": "string", "name": "visualizations"}, {"type": "string", "name": "attributions"}]' + params: null +utc_time_created: '2023-11-20 19:17:17.406942' diff --git a/assets/models/system/automl-ner/MLmodel b/assets/models/system/automl-ner/MLmodel new file mode 100644 index 0000000000..4c630ddc66 --- /dev/null +++ b/assets/models/system/automl-ner/MLmodel @@ -0,0 +1,23 @@ +artifact_path: outputs/mlflow-model +flavors: + python_function: + data: data/_model_impl_ce60ki2j.pkl + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: azureml.automl.dnn.nlp + python_version: 3.8.16 +metadata: + azureml.base_image: mcr.microsoft.com/azureml/curated/azureml-automl-dnn-text-gpu:74 + azureml.engine: automl +mlflow_version: 2.4.2 +model_uuid: ac4f60aca36d4491b82f96fccfc36074 +run_id: sleepy_tiger_fx9qgzr9n2_HD_0 +saved_input_example_info: + artifact_path: input_example.json + format: tf-serving + type: ndarray +signature: + inputs: '[{"type": "tensor", "tensor-spec": {"dtype": "object", "shape": [-1]}}]' + outputs: '[{"type": "tensor", "tensor-spec": {"dtype": "object", "shape": [-1]}}]' +utc_time_created: '2023-12-01 01:25:39.502410' diff --git a/assets/models/system/automl-text-classification/MLmodel b/assets/models/system/automl-text-classification/MLmodel new file mode 100644 index 0000000000..96cc8bf52a --- /dev/null +++ b/assets/models/system/automl-text-classification/MLmodel @@ -0,0 +1,23 @@ +artifact_path: outputs/mlflow-model +flavors: + python_function: + data: data/_model_impl_4e9ot4tr.pkl + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: azureml.automl.dnn.nlp + python_version: 3.8.16 +metadata: + azureml.base_image: mcr.microsoft.com/azureml/curated/azureml-automl-dnn-text-gpu:74 + azureml.engine: automl +mlflow_version: 2.4.2 
+model_uuid: f85b9e51088142bcb5440a94da8c8abf +run_id: great_seed_l2nz1803cn_HD_0 +saved_input_example_info: + artifact_path: input_example.json + pandas_orient: split + type: dataframe +signature: + inputs: '[{"type": "string", "name": "ReviewText"}]' + outputs: '[{"type": "tensor", "tensor-spec": {"dtype": "str", "shape": [-1]}}]' +utc_time_created: '2023-12-01 01:20:01.420541' diff --git a/assets/models/system/automl_instance_segmentation/MLmodel b/assets/models/system/automl_instance_segmentation/MLmodel new file mode 100644 index 0000000000..007b3e1729 --- /dev/null +++ b/assets/models/system/automl_instance_segmentation/MLmodel @@ -0,0 +1,28 @@ +flavors: + python_function: + artifacts: + model: + path: artifacts/maskrcnn.pth + uri: /tmp/maskrcnn.pth + settings: + path: artifacts/settings.json + uri: /mnt/azureml/cr/j/49771e2dfde342ef81cd7dc6049c9b32/exe/wd/azureml/model/mgmt/processors/pyfunc/automl/settings.json + cloudpickle_version: 2.2.1 + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: null + base_model_task: image-instance-segmentation + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.6.0 +model_uuid: 6194687d01b148659a091b4e8109b0af +signature: + inputs: '[{"type": "binary", "name": "image"}]' + outputs: '[{"type": "string", "name": "boxes"}]' + params: null +utc_time_created: '2023-11-22 00:53:09.104625' diff --git a/assets/models/system/automl_object_detection/MLmodel b/assets/models/system/automl_object_detection/MLmodel new file mode 100644 index 0000000000..04a8d372c3 --- /dev/null +++ b/assets/models/system/automl_object_detection/MLmodel @@ -0,0 +1,28 @@ +flavors: + python_function: + artifacts: + model: + path: artifacts/yolov5.pth + uri: /tmp/yolov5.pth + settings: + path: artifacts/settings.json + uri: /mnt/azureml/cr/j/bee857b55e454cd9a8ea41e44bc485ed/exe/wd/azureml/model/mgmt/processors/pyfunc/automl/settings.json + cloudpickle_version: 2.2.1 + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: null + base_model_task: image-object-detection + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.6.0 +model_uuid: a9cc0135960c4b2787c8ac973870746a +signature: + inputs: '[{"type": "binary", "name": "image"}]' + outputs: '[{"type": "string", "name": "boxes"}]' + params: null +utc_time_created: '2023-11-22 00:48:53.693341' diff --git a/assets/models/system/bert-base-cased/MLmodel b/assets/models/system/bert-base-cased/MLmodel new file mode 100644 index 0000000000..6a31ad530e --- /dev/null +++ b/assets/models/system/bert-base-cased/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: FillMaskPipeline + model_binary: model + pipeline_model_type: BertForMaskedLM + task: fill-mask + tokenizer_type: BertTokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: bert-base-cased + base_model_task: fill-mask + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 434271456 +model_uuid: 73f55963511d40a3bce8d227ace9c8bd +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: null 
+utc_time_created: '2023-12-06 13:19:09.230875' diff --git a/assets/models/system/bert-base-uncased/MLmodel b/assets/models/system/bert-base-uncased/MLmodel new file mode 100644 index 0000000000..3f5f80dd09 --- /dev/null +++ b/assets/models/system/bert-base-uncased/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: FillMaskPipeline + model_binary: model + pipeline_model_type: BertForMaskedLM + task: fill-mask + tokenizer_type: BertTokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: bert-base-uncased + base_model_task: fill-mask + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 439025970 +model_uuid: 83fe5eef75014072b71df726e5d2026a +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2023-12-08 12:29:22.941318' diff --git a/assets/models/system/bert-large-cased/MLmodel b/assets/models/system/bert-large-cased/MLmodel new file mode 100644 index 0000000000..02be085460 --- /dev/null +++ b/assets/models/system/bert-large-cased/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: FillMaskPipeline + model_binary: model + pipeline_model_type: BertForMaskedLM + task: fill-mask + tokenizer_type: BertTokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: bert-large-cased + base_model_task: fill-mask + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 1335408493 +model_uuid: d7b9bc14e71c4731bb7db78cc4042cf3 +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2023-12-08 12:33:13.972896' diff --git a/assets/models/system/bert-large-uncased/MLmodel b/assets/models/system/bert-large-uncased/MLmodel new file mode 100644 index 0000000000..c21a92cb07 --- /dev/null +++ b/assets/models/system/bert-large-uncased/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: FillMaskPipeline + model_binary: model + pipeline_model_type: BertForMaskedLM + task: fill-mask + tokenizer_type: BertTokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: bert-large-uncased + base_model_task: fill-mask + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 1341725432 +model_uuid: 58ef4bbdc28946c0912edb27c7bf8e4a +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2023-12-06 10:03:02.292933' diff --git a/assets/models/system/bytetrack_yolox_x_crowdhuman_mot17-private-half/MLmodel b/assets/models/system/bytetrack_yolox_x_crowdhuman_mot17-private-half/MLmodel new file mode 100644 index 0000000000..bfe9ff3446 --- /dev/null +++ b/assets/models/system/bytetrack_yolox_x_crowdhuman_mot17-private-half/MLmodel @@ -0,0 +1,33 @@ +flavors: + python_function: + artifacts: + config_path: + path: 
artifacts/bytetrack_yolox_x_crowdhuman_mot17-private-half.py + uri: /mnt/azureml/cr/j/8f2a300123244415847ffe8244e9ac12/cap/data-capability/wd/INPUT_model_path/model/bytetrack_yolox_x_crowdhuman_mot17-private-half.py + model_metadata: + path: artifacts/model_metadata.json + uri: /mnt/azureml/cr/j/8f2a300123244415847ffe8244e9ac12/cap/data-capability/wd/INPUT_model_path/model/model_metadata.json + weights_path: + path: artifacts/bytetrack_yolox_x_crowdhuman_mot17-private-half_weights.pth + uri: /mnt/azureml/cr/j/8f2a300123244415847ffe8244e9ac12/cap/data-capability/wd/INPUT_model_path/model/bytetrack_yolox_x_crowdhuman_mot17-private-half_weights.pth + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: null + base_model_task: video-multi-object-tracking + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 793188435 +model_uuid: 439c035f257845a98ebce768eec1bf0c +signature: + inputs: '[{"type": "string", "name": "video", "required": true}]' + outputs: '[{"type": "string", "name": "boxes", "required": true}]' + params: null +utc_time_created: '2024-04-30 11:06:52.859453' diff --git a/assets/models/system/camembert-base/MLmodel b/assets/models/system/camembert-base/MLmodel new file mode 100644 index 0000000000..a8557b39b0 --- /dev/null +++ b/assets/models/system/camembert-base/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: FillMaskPipeline + model_binary: model + pipeline_model_type: CamembertForMaskedLM + task: fill-mask + tokenizer_type: CamembertTokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: camembert-base + base_model_task: fill-mask + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 445879153 +model_uuid: 92d6130c5e75425589f4b6ba329f4e4a +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2023-12-08 12:39:46.603888' diff --git a/assets/models/system/compvis-stable-diffusion-v1-4/MLmodel b/assets/models/system/compvis-stable-diffusion-v1-4/MLmodel new file mode 100644 index 0000000000..41e3d25f10 --- /dev/null +++ b/assets/models/system/compvis-stable-diffusion-v1-4/MLmodel @@ -0,0 +1,31 @@ +flavors: + python_function: + artifacts: + model_dir: + path: artifacts/INPUT_model_path + uri: /mnt/azureml/cr/j/3f2bf44d58304950bd2273f3b0069074/cap/data-capability/wd/INPUT_model_path + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:40 + base_model_name: CompVis/stable-diffusion-v1-4 + base_model_task: text-to-image + is_acft_model: true + is_finetuned_model: false + model_type: stable-diffusion +mlflow_version: 2.10.2 +model_size_bytes: 23322366947 +model_uuid: ec83963c34bf4144b87473b6c87e018e +signature: + inputs: '[{"type": "string", "name": "prompt", "required": true}]' + outputs: '[{"type": "string", "name": "prompt", "required": true}, {"type": "binary", + "name": "generated_image", "required": 
true}, {"type": "boolean", "name": "nsfw_content_detected", + "required": true}]' + params: null +utc_time_created: '2024-04-26 12:32:28.417409' diff --git a/assets/models/system/cxrreportgen/MLmodel b/assets/models/system/cxrreportgen/MLmodel new file mode 100644 index 0000000000..916db973ec --- /dev/null +++ b/assets/models/system/cxrreportgen/MLmodel @@ -0,0 +1,28 @@ +flavors: + python_function: + artifacts: + model_dir: + path: artifacts/v1.0.0 + uri: /home/mablonde/medical/HLSRadDeployment/checkpoints/v1.0.0 + cloudpickle_version: 3.0.0 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.10.14 + streamable: false +mlflow_version: 2.16.2 +model_size_bytes: 15761478249 +model_uuid: e4adcb9729184787b9cc663abc6cd92f +signature: + inputs: '[{"type": "binary", "name": "frontal_image", "required": true}, {"type": + "binary", "name": "lateral_image", "required": false}, {"type": "binary", "name": + "prior_image", "required": false}, {"type": "string", "name": "indication", "required": + false}, {"type": "string", "name": "technique", "required": false}, {"type": "string", + "name": "prior_report", "required": false}, {"type": "string", "name": "comparison", + "required": false}]' + outputs: '[{"type": "string", "name": "output", "required": true}]' + params: null +utc_time_created: '2024-10-04 19:15:32.643224' diff --git a/assets/models/system/databricks-dolly-v2-12b/MLmodel b/assets/models/system/databricks-dolly-v2-12b/MLmodel new file mode 100644 index 0000000000..7f275dc930 --- /dev/null +++ b/assets/models/system/databricks-dolly-v2-12b/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: InstructionTextGenerationPipeline + model_binary: model + pipeline_model_type: GPTNeoXForCausalLM + task: text-generation + tokenizer_type: GPTNeoXTokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: databricks/dolly-v2-12b + base_model_task: text-generation + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 47369790595 +model_uuid: f7383acbad6f48069c76424fe00f52fd +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: '[{"name": "top_p", "type": "float", "default": 0.9, "shape": null}, {"name": "temperature", "type": "float", "default": 0.2, "shape": null}, {"name": "max_new_tokens", "type": "integer", "default": 50, "shape": null}, {"name": "do_sample", "type": "boolean", "default": true, "shape": null}, {"name": "return_full_text", "type": "boolean", "default": true, "shape": null}]' +utc_time_created: '2023-12-07 04:44:00.270496' diff --git a/assets/models/system/deci-decidiffusion-v1-0/MLmodel b/assets/models/system/deci-decidiffusion-v1-0/MLmodel new file mode 100644 index 0000000000..07427a0efe --- /dev/null +++ b/assets/models/system/deci-decidiffusion-v1-0/MLmodel @@ -0,0 +1,31 @@ +flavors: + python_function: + artifacts: + model_dir: + path: artifacts/INPUT_model_path + uri: /mnt/azureml/cr/j/130d2372e4dd404eb4eae71fa00fffa1/cap/data-capability/wd/INPUT_model_path + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: 
+ azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:40 + base_model_name: Deci/DeciDiffusion-v1-0 + base_model_task: text-to-image + is_acft_model: true + is_finetuned_model: false + model_type: stable-diffusion +mlflow_version: 2.10.2 +model_size_bytes: 4371854212 +model_uuid: 59ac1531a7aa44dea8671bab03ee0c62 +signature: + inputs: '[{"type": "string", "name": "prompt", "required": true}]' + outputs: '[{"type": "string", "name": "prompt", "required": true}, {"type": "binary", + "name": "generated_image", "required": true}, {"type": "boolean", "name": "nsfw_content_detected", + "required": true}]' + params: null +utc_time_created: '2024-04-29 09:42:57.030237' diff --git a/assets/models/system/deepset-minilm-uncased-squad2/MLmodel b/assets/models/system/deepset-minilm-uncased-squad2/MLmodel new file mode 100644 index 0000000000..c8b6042130 --- /dev/null +++ b/assets/models/system/deepset-minilm-uncased-squad2/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: QuestionAnsweringPipeline + model_binary: model + pipeline_model_type: BertForQuestionAnswering + task: question-answering + tokenizer_type: BertTokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: deepset/minilm-uncased-squad2 + base_model_task: question-answering + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 133819900 +model_uuid: cd7f3c2d0cb844d6ad675581428a2e82 +signature: + inputs: '[{"type": "string", "name": "question"}, {"type": "string", "name": "context"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2023-12-06 10:06:23.282894' diff --git a/assets/models/system/deepset-roberta-base-squad2/MLmodel b/assets/models/system/deepset-roberta-base-squad2/MLmodel new file mode 100644 index 0000000000..543dc5518d --- /dev/null +++ b/assets/models/system/deepset-roberta-base-squad2/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: QuestionAnsweringPipeline + model_binary: model + pipeline_model_type: RobertaForQuestionAnswering + task: question-answering + tokenizer_type: RobertaTokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: deepset/roberta-base-squad2 + base_model_task: question-answering + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 499616549 +model_uuid: 6bd3bc76265b495f8c7465aec96c5e2b +signature: + inputs: '[{"type": "string", "name": "question"}, {"type": "string", "name": "context"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2023-12-06 09:28:05.414899' diff --git a/assets/models/system/deformable_detr_twostage_refine_r50_16x2_50e_coco/MLmodel b/assets/models/system/deformable_detr_twostage_refine_r50_16x2_50e_coco/MLmodel new file mode 100644 index 0000000000..92ec1a230d --- /dev/null +++ b/assets/models/system/deformable_detr_twostage_refine_r50_16x2_50e_coco/MLmodel @@ -0,0 +1,31 @@ +flavors: + python_function: + artifacts: + config_path: + path: artifacts/deformable_detr_twostage_refine_r50_16x2_50e_coco.py + uri: 
/mnt/azureml/cr/j/34a96aa62ea14d3da5bdd7c2473a3f25/cap/data-capability/wd/INPUT_model_path/model/deformable_detr_twostage_refine_r50_16x2_50e_coco.py + model_metadata: + path: artifacts/model_metadata.json + uri: /mnt/azureml/cr/j/34a96aa62ea14d3da5bdd7c2473a3f25/cap/data-capability/wd/INPUT_model_path/model/model_metadata.json + weights_path: + path: artifacts/deformable_detr_twostage_refine_r50_16x2_50e_coco_weights.pth + uri: /mnt/azureml/cr/j/34a96aa62ea14d3da5bdd7c2473a3f25/cap/data-capability/wd/INPUT_model_path/model/deformable_detr_twostage_refine_r50_16x2_50e_coco_weights.pth + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.16 +metadata: + base_model_name: deformable_detr_twostage_refine_r50_16x2_50e_coco + is_acft_model: true + is_finetuned_model: false + base_model_task: object-detection +mlflow_version: 2.3.1 +model_uuid: 347d7583985d4739a01cb24a5870d3e5 +signature: + inputs: '[{"name": "image", "type": "binary"}]' + outputs: '[{"name": "boxes", "type": "string"}]' +utc_time_created: '2023-07-27 15:40:33.852681' diff --git a/assets/models/system/distilbert-base-cased-distilled-squad/MLmodel b/assets/models/system/distilbert-base-cased-distilled-squad/MLmodel new file mode 100644 index 0000000000..4ee0b44939 --- /dev/null +++ b/assets/models/system/distilbert-base-cased-distilled-squad/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: QuestionAnsweringPipeline + model_binary: model + pipeline_model_type: DistilBertForQuestionAnswering + task: question-answering + tokenizer_type: DistilBertTokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: distilbert-base-cased-distilled-squad + base_model_task: question-answering + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 261666525 +model_uuid: d370c74a0e7c4f11b10cc9323b8e3265 +signature: + inputs: '[{"type": "string", "name": "question"}, {"type": "string", "name": "context"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2023-12-06 11:33:24.037183' diff --git a/assets/models/system/distilbert-base-cased/MLmodel b/assets/models/system/distilbert-base-cased/MLmodel new file mode 100644 index 0000000000..fbff8e3098 --- /dev/null +++ b/assets/models/system/distilbert-base-cased/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: FillMaskPipeline + model_binary: model + pipeline_model_type: DistilBertForMaskedLM + task: fill-mask + tokenizer_type: DistilBertTokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: distilbert-base-cased + base_model_task: fill-mask + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 264145149 +model_uuid: 5d0f6bd326224257bd382f08f9220783 +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2023-12-15 13:54:20.400469' diff --git a/assets/models/system/distilbert-base-uncased-distilled-squad/MLmodel 
b/assets/models/system/distilbert-base-uncased-distilled-squad/MLmodel new file mode 100644 index 0000000000..791e2f7056 --- /dev/null +++ b/assets/models/system/distilbert-base-uncased-distilled-squad/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: QuestionAnsweringPipeline + model_binary: model + pipeline_model_type: DistilBertForQuestionAnswering + task: question-answering + tokenizer_type: DistilBertTokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: distilbert-base-uncased-distilled-squad + base_model_task: question-answering + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 266414913 +model_uuid: fa50e21262fc4112922c42005fc5367c +signature: + inputs: '[{"type": "string", "name": "question"}, {"type": "string", "name": "context"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2023-12-06 10:46:21.551654' diff --git a/assets/models/system/distilbert-base-uncased-finetuned-sst-2-english/MLmodel b/assets/models/system/distilbert-base-uncased-finetuned-sst-2-english/MLmodel new file mode 100644 index 0000000000..35db8d220c --- /dev/null +++ b/assets/models/system/distilbert-base-uncased-finetuned-sst-2-english/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: TextClassificationPipeline + model_binary: model + pipeline_model_type: DistilBertForSequenceClassification + task: text-classification + tokenizer_type: DistilBertTokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: distilbert-base-uncased-finetuned-sst-2-english + base_model_task: text-classification + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 268777643 +model_uuid: ad169c62ab254efea8651b51c0d695ac +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2023-12-06 10:49:21.397227' diff --git a/assets/models/system/distilbert-base-uncased/MLmodel b/assets/models/system/distilbert-base-uncased/MLmodel new file mode 100644 index 0000000000..2d97b2681a --- /dev/null +++ b/assets/models/system/distilbert-base-uncased/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: FillMaskPipeline + model_binary: model + pipeline_model_type: DistilBertForMaskedLM + task: fill-mask + tokenizer_type: DistilBertTokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: distilbert-base-uncased + base_model_task: fill-mask + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 268899640 +model_uuid: e6bed35db9004fdcb81726fe1eff1c71 +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2023-12-08 12:13:22.231468' diff --git a/assets/models/system/distilgpt2/MLmodel b/assets/models/system/distilgpt2/MLmodel new file mode 100644 index 
0000000000..079a2d0654 --- /dev/null +++ b/assets/models/system/distilgpt2/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: TextGenerationPipeline + model_binary: model + pipeline_model_type: GPT2LMHeadModel + task: text-generation + tokenizer_type: GPT2TokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: distilgpt2 + base_model_task: text-generation + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 331021915 +model_uuid: f6eca4d47bba4bfc9fbd007991be02b0 +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: '[{"name": "top_p", "type": "float", "default": 1.0, "shape": null}, {"name": "temperature", "type": "float", "default": 0.8, "shape": null}, {"name": "max_new_tokens", "type": "integer", "default": 50, "shape": null}, {"name": "do_sample", "type": "boolean", "default": true, "shape": null}, {"name": "return_full_text", "type": "boolean", "default": true, "shape": null}]' +utc_time_created: '2023-12-08 20:08:26.590574' diff --git a/assets/models/system/distilroberta-base/MLmodel b/assets/models/system/distilroberta-base/MLmodel new file mode 100644 index 0000000000..3078493344 --- /dev/null +++ b/assets/models/system/distilroberta-base/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: FillMaskPipeline + model_binary: model + pipeline_model_type: RobertaForMaskedLM + task: fill-mask + tokenizer_type: RobertaTokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: distilroberta-base + base_model_task: fill-mask + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 332058902 +model_uuid: 8e853e27e4614124a5209232ffbc1bad +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2023-12-08 12:40:26.214858' diff --git a/assets/models/system/facebook-bart-large-cnn/MLmodel b/assets/models/system/facebook-bart-large-cnn/MLmodel new file mode 100644 index 0000000000..9af3c0398d --- /dev/null +++ b/assets/models/system/facebook-bart-large-cnn/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: SummarizationPipeline + model_binary: model + pipeline_model_type: BartForConditionalGeneration + task: summarization + tokenizer_type: BartTokenizerFast + transformers_version: 4.37.2 +metadata: + base_model_name: facebook/bart-large-cnn + base_model_task: summarization + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.9.2 +model_size_bytes: 1628834502 +model_uuid: 32118d22d5934d959a6093e055794794 +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2024-02-16 11:25:39.043716' diff --git a/assets/models/system/facebook-deit-base-patch16-224/MLmodel b/assets/models/system/facebook-deit-base-patch16-224/MLmodel new file mode 100644 
index 0000000000..4bdef7dff2 --- /dev/null +++ b/assets/models/system/facebook-deit-base-patch16-224/MLmodel @@ -0,0 +1,35 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.19 + transformers: + code: null + components: + - image_processor + framework: pt + image_processor_type: ViTImageProcessor + instance_type: ImageClassificationPipeline + model_binary: model + pipeline_model_type: ViTForImageClassification + source_model_name: /mnt/azureml/cr/j/6dc4585417fc46c5a6e3b889478e447f/cap/data-capability/wd/INPUT_model_path + task: image-classification + torch_dtype: torch.float32 + transformers_version: 4.40.1 +metadata: + base_model_name: facebook/deit-base-patch16-224 + base_model_task: image-classification + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 346364593 +model_uuid: 0ce4e5f0104e42debf0b31e2347aa3a1 +signature: + inputs: '[{"type": "string", "required": true}]' + outputs: '[{"type": "string", "name": "label", "required": true}, {"type": "double", + "name": "score", "required": true}]' + params: null +utc_time_created: '2024-05-20 05:21:05.857905' diff --git a/assets/models/system/facebook-dinov2-base-imagenet1k-1-layer/MLmodel b/assets/models/system/facebook-dinov2-base-imagenet1k-1-layer/MLmodel new file mode 100644 index 0000000000..fe33c8dbd3 --- /dev/null +++ b/assets/models/system/facebook-dinov2-base-imagenet1k-1-layer/MLmodel @@ -0,0 +1,32 @@ +flavors: + hftransformersv2: + code: code + hf_config_class: AutoConfig + hf_predict_module: predict + hf_pretrained_class: AutoModelForImageClassification + hf_tokenizer_class: AutoImageProcessor + huggingface_id: facebook/dinov2-base-imagenet1k-1-layer + model_data: data + pytorch_version: 1.13.1 + task_type: image-classification + train_label_list: + path_list: train_label_list.npy + transformers_version: 4.37.2 + python_function: + code: code + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.18 +metadata: + base_model_name: facebook/dinov2-base-imagenet1k-1-layer + base_model_task: image-classification + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.9.2 +model_uuid: ddab417ee8cb4bfdbb7d38e40675785f +signature: + inputs: '[{"type": "binary", "name": "image"}]' + outputs: '[{"type": "string", "name": "probs"}, {"type": "string", "name": "labels"}]' + params: null +utc_time_created: '2024-03-06 11:32:18.360703' diff --git a/assets/models/system/facebook-dinov2-image-embeddings-base/MLmodel b/assets/models/system/facebook-dinov2-image-embeddings-base/MLmodel new file mode 100644 index 0000000000..5c17791038 --- /dev/null +++ b/assets/models/system/facebook-dinov2-image-embeddings-base/MLmodel @@ -0,0 +1,27 @@ +flavors: + python_function: + artifacts: + model_dir: + path: artifacts/INPUT_model_path + uri: /mnt/azureml/cr/j/45bc6e2676da449f8e5dee97f3775d88/cap/data-capability/wd/INPUT_model_path + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: facebook/dinov2-base + base_model_task: embeddings + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 692754241 +model_uuid: 4931568b67c24238932eb040fc0fe74c +signature: + inputs: '[{"type": "string", "name": "image", "required": true}]' + 
outputs: '[{"type": "string", "name": "image_features", "required": true}]' + params: null +utc_time_created: '2024-04-30 06:18:59.760632' diff --git a/assets/models/system/facebook-dinov2-image-embeddings-giant/MLmodel b/assets/models/system/facebook-dinov2-image-embeddings-giant/MLmodel new file mode 100644 index 0000000000..be837e965b --- /dev/null +++ b/assets/models/system/facebook-dinov2-image-embeddings-giant/MLmodel @@ -0,0 +1,27 @@ +flavors: + python_function: + artifacts: + model_dir: + path: artifacts/INPUT_model_path + uri: /mnt/azureml/cr/j/aa4532dc398b4a1cad7afd77195f49f1/cap/data-capability/wd/INPUT_model_path + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: facebook/dinov2-giant + base_model_task: embeddings + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 9092178071 +model_uuid: d45c05b94afe4b7b97604d9ec6ed323c +signature: + inputs: '[{"type": "string", "name": "image", "required": true}]' + outputs: '[{"type": "string", "name": "image_features", "required": true}]' + params: null +utc_time_created: '2024-04-30 05:26:44.101077' diff --git a/assets/models/system/facebook-sam-vit-base/MLmodel b/assets/models/system/facebook-sam-vit-base/MLmodel new file mode 100644 index 0000000000..17d23d17db --- /dev/null +++ b/assets/models/system/facebook-sam-vit-base/MLmodel @@ -0,0 +1,30 @@ +flavors: + python_function: + artifacts: + model_dir: + path: artifacts/INPUT_model_path + uri: /mnt/azureml/cr/j/51c59d8a077d461dabee2b7367b04f34/cap/data-capability/wd/INPUT_model_path + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: facebook/sam-vit-base + base_model_task: mask-generation + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 1125354982 +model_uuid: 8d620480eeea4f3d95c758a94d644520 +signature: + inputs: '[{"type": "string", "name": "image", "required": true}, {"type": "string", + "name": "input_points", "required": true}, {"type": "string", "name": "input_boxes", + "required": true}, {"type": "string", "name": "input_labels", "required": true}, + {"type": "boolean", "name": "multimask_output", "required": true}]' + outputs: '[{"type": "string", "name": "response", "required": true}]' + params: null +utc_time_created: '2024-04-30 06:50:01.009274' diff --git a/assets/models/system/facebook-sam-vit-huge/MLmodel b/assets/models/system/facebook-sam-vit-huge/MLmodel new file mode 100644 index 0000000000..8ea475e377 --- /dev/null +++ b/assets/models/system/facebook-sam-vit-huge/MLmodel @@ -0,0 +1,30 @@ +flavors: + python_function: + artifacts: + model_dir: + path: artifacts/INPUT_model_path + uri: /mnt/azureml/cr/j/690cac0d7529498b955f4886a77feaee/cap/data-capability/wd/INPUT_model_path + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: facebook/sam-vit-huge + base_model_task: mask-generation + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 7694061784 +model_uuid: 05d406d4850d4d439a6d0c3a3f27d42d +signature: + inputs: '[{"type": "string", "name": 
"image", "required": true}, {"type": "string", + "name": "input_points", "required": true}, {"type": "string", "name": "input_boxes", + "required": true}, {"type": "string", "name": "input_labels", "required": true}, + {"type": "boolean", "name": "multimask_output", "required": true}]' + outputs: '[{"type": "string", "name": "response", "required": true}]' + params: null +utc_time_created: '2024-04-30 07:06:52.596118' diff --git a/assets/models/system/facebook-sam-vit-large/MLmodel b/assets/models/system/facebook-sam-vit-large/MLmodel new file mode 100644 index 0000000000..4952ade462 --- /dev/null +++ b/assets/models/system/facebook-sam-vit-large/MLmodel @@ -0,0 +1,30 @@ +flavors: + python_function: + artifacts: + model_dir: + path: artifacts/INPUT_model_path + uri: /mnt/azureml/cr/j/05e10f0d49ae42cf8ae3d60b33a76f7b/cap/data-capability/wd/INPUT_model_path + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: facebook/sam-vit-large + base_model_task: mask-generation + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 3748896354 +model_uuid: c02ca1a067d146c3a12b45f8ccb31861 +signature: + inputs: '[{"type": "string", "name": "image", "required": true}, {"type": "string", + "name": "input_points", "required": true}, {"type": "string", "name": "input_boxes", + "required": true}, {"type": "string", "name": "input_labels", "required": true}, + {"type": "boolean", "name": "multimask_output", "required": true}]' + outputs: '[{"type": "string", "name": "response", "required": true}]' + params: null +utc_time_created: '2024-04-30 07:03:28.797225' diff --git a/assets/models/system/finiteautomata-bertweet-base-sentiment-analysis/MLmodel b/assets/models/system/finiteautomata-bertweet-base-sentiment-analysis/MLmodel new file mode 100644 index 0000000000..c2f581d93a --- /dev/null +++ b/assets/models/system/finiteautomata-bertweet-base-sentiment-analysis/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: TextClassificationPipeline + model_binary: model + pipeline_model_type: RobertaForSequenceClassification + task: text-classification + tokenizer_type: BertweetTokenizer + transformers_version: 4.35.2 +metadata: + base_model_name: finiteautomata/bertweet-base-sentiment-analysis + base_model_task: text-classification + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 541576646 +model_uuid: d0d50245993b426dba7f289ef938323d +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2023-12-06 11:43:07.633973' diff --git a/assets/models/system/google-vit-base-patch16-224/MLmodel b/assets/models/system/google-vit-base-patch16-224/MLmodel new file mode 100644 index 0000000000..565136bbfd --- /dev/null +++ b/assets/models/system/google-vit-base-patch16-224/MLmodel @@ -0,0 +1,35 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.19 + transformers: + code: null + components: + - image_processor + framework: pt + image_processor_type: ViTImageProcessor + instance_type: 
ImageClassificationPipeline + model_binary: model + pipeline_model_type: ViTForImageClassification + source_model_name: /mnt/azureml/cr/j/51e7a92f1d4e4e3a9860a0502feb9eac/cap/data-capability/wd/INPUT_model_path + task: image-classification + torch_dtype: torch.float32 + transformers_version: 4.40.1 +metadata: + base_model_name: google/vit-base-patch16-224 + base_model_task: image-classification + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 346364593 +model_uuid: 28ba5aa869ec4407b4cfdcff59268add +signature: + inputs: '[{"type": "string", "required": true}]' + outputs: '[{"type": "string", "name": "label", "required": true}, {"type": "double", + "name": "score", "required": true}]' + params: null +utc_time_created: '2024-05-03 04:19:08.175561' diff --git a/assets/models/system/gpt2-large/MLmodel b/assets/models/system/gpt2-large/MLmodel new file mode 100644 index 0000000000..45f33178c1 --- /dev/null +++ b/assets/models/system/gpt2-large/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: TextGenerationPipeline + model_binary: model + pipeline_model_type: GPT2LMHeadModel + task: text-generation + tokenizer_type: GPT2TokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: gpt2-large + base_model_task: text-generation + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 3099561926 +model_uuid: 916fcb500dd44c7c84eb43ec02107e05 +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: '[{"name": "top_p", "type": "float", "default": 1.0, "shape": null}, {"name": "temperature", "type": "float", "default": 0.8, "shape": null}, {"name": "max_new_tokens", "type": "integer", "default": 50, "shape": null}, {"name": "do_sample", "type": "boolean", "default": true, "shape": null}, {"name": "return_full_text", "type": "boolean", "default": true, "shape": null}]' +utc_time_created: '2023-12-06 09:18:29.934345' diff --git a/assets/models/system/gpt2-medium/MLmodel b/assets/models/system/gpt2-medium/MLmodel new file mode 100644 index 0000000000..46210315b1 --- /dev/null +++ b/assets/models/system/gpt2-medium/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: TextGenerationPipeline + model_binary: model + pipeline_model_type: GPT2LMHeadModel + task: text-generation + tokenizer_type: GPT2TokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: gpt2-medium + base_model_task: text-generation + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 1422708538 +model_uuid: 798d0a704f874098b395bbe1fdcaa86c +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: '[{"name": "top_p", "type": "float", "default": 1.0, "shape": null}, {"name": "temperature", "type": "float", "default": 0.8, "shape": null}, {"name": "max_new_tokens", "type": "integer", "default": 50, "shape": null}, {"name": "do_sample", "type": "boolean", "default": true, "shape": null}, {"name": "return_full_text", "type": "boolean", "default": true, "shape": null}]' 
+utc_time_created: '2023-12-06 09:45:45.494930' diff --git a/assets/models/system/gpt2/MLmodel b/assets/models/system/gpt2/MLmodel new file mode 100644 index 0000000000..035b7b9228 --- /dev/null +++ b/assets/models/system/gpt2/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: TextGenerationPipeline + model_binary: model + pipeline_model_type: GPT2LMHeadModel + task: text-generation + tokenizer_type: GPT2TokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: gpt2 + base_model_task: text-generation + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 501138122 +model_uuid: c49bee2e635f40a6a62b38ede9b0e6c8 +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: '[{"name": "top_p", "type": "float", "default": 1.0, "shape": null}, {"name": "temperature", "type": "float", "default": 0.8, "shape": null}, {"name": "max_new_tokens", "type": "integer", "default": 50, "shape": null}, {"name": "do_sample", "type": "boolean", "default": true, "shape": null}, {"name": "return_full_text", "type": "boolean", "default": true, "shape": null}]' +utc_time_created: '2023-12-06 11:43:15.922970' diff --git a/assets/models/system/jean-baptiste-camembert-ner/MLmodel b/assets/models/system/jean-baptiste-camembert-ner/MLmodel new file mode 100644 index 0000000000..d88b8f63ea --- /dev/null +++ b/assets/models/system/jean-baptiste-camembert-ner/MLmodel @@ -0,0 +1,19 @@ +flavors: + hftransformersv2: + code: null + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForTokenClassification + hf_tokenizer_class: AutoTokenizer + huggingface_id: Jean-Baptiste/camembert-ner + model_data: data + pytorch_version: 1.13.1 + task_type: token-classification + transformers_version: 4.33.1 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.17 +mlflow_version: 2.6.0 +model_uuid: faf2e837e62a4bf9a1699517f14e265c +utc_time_created: '2023-10-31 12:09:27.287525' diff --git a/assets/models/system/mask_rcnn_swin-t-p4-w7_fpn_1x_coco/MLmodel b/assets/models/system/mask_rcnn_swin-t-p4-w7_fpn_1x_coco/MLmodel new file mode 100644 index 0000000000..2bdc09a2eb --- /dev/null +++ b/assets/models/system/mask_rcnn_swin-t-p4-w7_fpn_1x_coco/MLmodel @@ -0,0 +1,31 @@ +flavors: + python_function: + artifacts: + config_path: + path: artifacts/mask_rcnn_swin-t-p4-w7_fpn_1x_coco.py + uri: /mnt/azureml/cr/j/6cca95a9833e469dbcb228c4f4727d85/cap/data-capability/wd/INPUT_model_path/model/mask_rcnn_swin-t-p4-w7_fpn_1x_coco.py + model_metadata: + path: artifacts/model_metadata.json + uri: /mnt/azureml/cr/j/6cca95a9833e469dbcb228c4f4727d85/cap/data-capability/wd/INPUT_model_path/model/model_metadata.json + weights_path: + path: artifacts/mask_rcnn_swin-t-p4-w7_fpn_1x_coco_weights.pth + uri: /mnt/azureml/cr/j/6cca95a9833e469dbcb228c4f4727d85/cap/data-capability/wd/INPUT_model_path/model/mask_rcnn_swin-t-p4-w7_fpn_1x_coco_weights.pth + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.16 +metadata: + base_model_name: mask_rcnn_swin-t-p4-w7_fpn_1x_coco + is_acft_model: true + is_finetuned_model: false + task: image-segmentation 
+mlflow_version: 2.3.1 +model_uuid: d089e3aaea97458b98282ae2a16e010a +signature: + inputs: '[{"name": "image", "type": "binary"}]' + outputs: '[{"name": "boxes", "type": "string"}]' +utc_time_created: '2023-07-27 15:46:05.573921' diff --git a/assets/models/system/medimageinsight/MLmodel b/assets/models/system/medimageinsight/MLmodel new file mode 100644 index 0000000000..6c15998a89 --- /dev/null +++ b/assets/models/system/medimageinsight/MLmodel @@ -0,0 +1,29 @@ +flavors: + python_function: + artifacts: + model_dir: + path: artifacts/2024.09.27.v.1.0.0 + uri: /mnt/batch/tasks/shared/LS_root/mounts/clusters/albertogpu4/code/Users/albertosa/mii_ado/FlorenceMDdeployment/checkpoints/2024.09.27.v.1.0.0 + cloudpickle_version: 3.0.0 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.19 + streamable: false +mlflow_version: 2.14.3 +model_size_bytes: 2465553032 +model_uuid: 303978b7303b453f99790fd3dae58493 +signature: + inputs: '[{"type": "binary", "name": "image", "required": false}, {"type": "string", + "name": "text", "required": false}]' + outputs: '[{"type": "string", "name": "image_features", "required": false}, {"type": + "string", "name": "text_features", "required": false}, {"type": "string", "name": + "scaling_factor", "required": false}]' + params: '[{"name": "image_standardization_jpeg_compression_ratio", "type": "integer", + "default": 75, "shape": null}, {"name": "image_standardization_image_size", "type": + "integer", "default": 512, "shape": null}, {"name": "get_scaling_factor", "type": + "boolean", "default": true, "shape": null}]' +utc_time_created: '2024-10-09 20:19:13.335408' diff --git a/assets/models/system/medimageparse/MLmodel b/assets/models/system/medimageparse/MLmodel new file mode 100644 index 0000000000..e048a0e74f --- /dev/null +++ b/assets/models/system/medimageparse/MLmodel @@ -0,0 +1,25 @@ +flavors: + python_function: + artifacts: + model_dir: + path: artifacts/checkpoints + uri: /mnt/batch/tasks/shared/LS_root/mounts/clusters/albertogpu2/code/BiomedParseDeployment/checkpoints + cloudpickle_version: 3.0.0 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.9.19 + streamable: false +mlflow_version: 2.16.0 +model_size_bytes: 1802811987 +model_uuid: b06e9c81acff49e0990cd94799897046 +signature: + inputs: '[{"type": "binary", "name": "image", "required": true}, {"type": "string", + "name": "text", "required": true}]' + outputs: '[{"type": "binary", "name": "pred_seg_mask", "required": true}, {"type": + "string", "name": "pred_text", "required": false}]' + params: null +utc_time_created: '2024-09-27 00:51:53.191138' diff --git a/assets/models/system/microsoft-beit-base-patch16-224-pt22k-ft22k/MLmodel b/assets/models/system/microsoft-beit-base-patch16-224-pt22k-ft22k/MLmodel new file mode 100644 index 0000000000..b7e1dd0f3e --- /dev/null +++ b/assets/models/system/microsoft-beit-base-patch16-224-pt22k-ft22k/MLmodel @@ -0,0 +1,35 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.19 + transformers: + code: null + components: + - image_processor + framework: pt + image_processor_type: BeitImageProcessor + instance_type: ImageClassificationPipeline + model_binary: model + pipeline_model_type: BeitForImageClassification + source_model_name: 
/mnt/azureml/cr/j/f17d2d7fdde9443aa64c856d91b9e977/cap/data-capability/wd/INPUT_model_path + task: image-classification + torch_dtype: torch.float32 + transformers_version: 4.40.1 +metadata: + base_model_name: microsoft/beit-base-patch16-224-pt22k-ft22k + base_model_task: image-classification + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 411925513 +model_uuid: 901d257ea3af4e5b80ebb5c6fc0496f2 +signature: + inputs: '[{"type": "string", "required": true}]' + outputs: '[{"type": "string", "name": "label", "required": true}, {"type": "double", + "name": "score", "required": true}]' + params: null +utc_time_created: '2024-05-03 09:38:56.180594' diff --git a/assets/models/system/microsoft-deberta-base-mnli/MLmodel b/assets/models/system/microsoft-deberta-base-mnli/MLmodel new file mode 100644 index 0000000000..f1b1d03891 --- /dev/null +++ b/assets/models/system/microsoft-deberta-base-mnli/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: TextClassificationPipeline + model_binary: model + pipeline_model_type: DebertaForSequenceClassification + task: text-classification + tokenizer_type: DebertaTokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: microsoft/deberta-base-mnli + base_model_task: text-classification + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 560188804 +model_uuid: dd48481151434aa39f36a23de28fd7e5 +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2023-12-06 11:34:10.001232' diff --git a/assets/models/system/microsoft-deberta-base/MLmodel b/assets/models/system/microsoft-deberta-base/MLmodel new file mode 100644 index 0000000000..d00a0cee05 --- /dev/null +++ b/assets/models/system/microsoft-deberta-base/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: FillMaskPipeline + model_binary: model + pipeline_model_type: DebertaForMaskedLM + task: fill-mask + tokenizer_type: DebertaTokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: microsoft/deberta-base + base_model_task: fill-mask + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 560386938 +model_uuid: 75c8faf8f61c4b92a126586ad3d9b192 +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2023-12-08 12:41:34.638597' diff --git a/assets/models/system/microsoft-deberta-large-mnli/MLmodel b/assets/models/system/microsoft-deberta-large-mnli/MLmodel new file mode 100644 index 0000000000..4446bbc4e1 --- /dev/null +++ b/assets/models/system/microsoft-deberta-large-mnli/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: TextClassificationPipeline + model_binary: model + pipeline_model_type: DebertaForSequenceClassification + task: text-classification + 
tokenizer_type: DebertaTokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: microsoft/deberta-large-mnli + base_model_task: text-classification + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 1628315091 +model_uuid: 7a11d947046041a3a727bb054c304a6b +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2023-12-08 12:58:18.261716' diff --git a/assets/models/system/microsoft-deberta-large/MLmodel b/assets/models/system/microsoft-deberta-large/MLmodel new file mode 100644 index 0000000000..1136b036b5 --- /dev/null +++ b/assets/models/system/microsoft-deberta-large/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: FillMaskPipeline + model_binary: model + pipeline_model_type: DebertaForMaskedLM + task: fill-mask + tokenizer_type: DebertaTokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: microsoft/deberta-large + base_model_task: fill-mask + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 1628512201 +model_uuid: 9344b10080ae451cb4df0943ddb5298c +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2023-12-06 10:10:35.400000' diff --git a/assets/models/system/microsoft-deberta-xlarge/MLmodel b/assets/models/system/microsoft-deberta-xlarge/MLmodel new file mode 100644 index 0000000000..8560bf0b5e --- /dev/null +++ b/assets/models/system/microsoft-deberta-xlarge/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: FillMaskPipeline + model_binary: model + pipeline_model_type: DebertaForMaskedLM + task: fill-mask + tokenizer_type: DebertaTokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: microsoft/deberta-xlarge + base_model_task: fill-mask + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 3039159993 +model_uuid: b4f24e3654354151beec1fdd567593c9 +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2023-12-06 10:14:07.624757' diff --git a/assets/models/system/microsoft-llava-med-v1.5-mistral-7b/MLmodel b/assets/models/system/microsoft-llava-med-v1.5-mistral-7b/MLmodel new file mode 100644 index 0000000000..d6a870e787 --- /dev/null +++ b/assets/models/system/microsoft-llava-med-v1.5-mistral-7b/MLmodel @@ -0,0 +1,30 @@ +flavors: + python_function: + artifacts: + model_dir: + path: artifacts/llava-med-v1.5-mistral-7b + uri: ../../llava-med-v1.5-mistral-7b/llava-med-v1.5-mistral-7b/../../llava-med-v1.5-mistral-7b/llava-med-v1.5-mistral-7b + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.9.19 + streamable: false +metadata: + base_model_name: null + base_model_task: image-text-to-text + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.16.2 +model_size_bytes: 15133110292 +model_uuid: 7404c3512a654116a76b9c1855266d5f 
+signature: + inputs: '[{"type": "string", "name": "image", "required": true}, {"type": "string", + "name": "prompt", "required": true}, {"type": "string", "name": "direct_question", + "required": true}]' + outputs: '[{"type": "string", "name": "response", "required": true}]' + params: null +utc_time_created: '2024-10-07 17:42:14.494661' diff --git a/assets/models/system/microsoft-orca-2-13b/MLmodel b/assets/models/system/microsoft-orca-2-13b/MLmodel new file mode 100644 index 0000000000..76055b99fe --- /dev/null +++ b/assets/models/system/microsoft-orca-2-13b/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: TextGenerationPipeline + model_binary: model + pipeline_model_type: LlamaForCausalLM + task: text-generation + tokenizer_type: LlamaTokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: microsoft/Orca-2-13b + base_model_task: text-generation + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 52066001151 +model_uuid: 78a3f212f77849c2aeb3f5f4754659c6 +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: '[{"name": "top_p", "type": "float", "default": 0.9, "shape": null}, {"name": "temperature", "type": "float", "default": 0.2, "shape": null}, {"name": "max_new_tokens", "type": "integer", "default": 50, "shape": null}, {"name": "do_sample", "type": "boolean", "default": true, "shape": null}, {"name": "return_full_text", "type": "boolean", "default": true, "shape": null}]' +utc_time_created: '2023-12-08 11:46:43.706730' diff --git a/assets/models/system/microsoft-orca-2-7b/MLmodel b/assets/models/system/microsoft-orca-2-7b/MLmodel new file mode 100644 index 0000000000..667a098c48 --- /dev/null +++ b/assets/models/system/microsoft-orca-2-7b/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: TextGenerationPipeline + model_binary: model + pipeline_model_type: LlamaForCausalLM + task: text-generation + tokenizer_type: LlamaTokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: microsoft/Orca-2-7b + base_model_task: text-generation + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 26956165610 +model_uuid: 8911b994c877417e84842221b8acd04c +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: '[{"name": "top_p", "type": "float", "default": 0.9, "shape": null}, {"name": "temperature", "type": "float", "default": 0.2, "shape": null}, {"name": "max_new_tokens", "type": "integer", "default": 50, "shape": null}, {"name": "do_sample", "type": "boolean", "default": true, "shape": null}, {"name": "return_full_text", "type": "boolean", "default": true, "shape": null}]' +utc_time_created: '2023-12-08 11:36:30.800706' diff --git a/assets/models/system/microsoft-phi-1-5/MLmodel b/assets/models/system/microsoft-phi-1-5/MLmodel new file mode 100644 index 0000000000..f622b97c84 --- /dev/null +++ b/assets/models/system/microsoft-phi-1-5/MLmodel @@ -0,0 +1,25 @@ +flavors: + hftransformersv2: + code: null + config_hf_load_kwargs: + trust_remote_code: true + 
hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + huggingface_id: microsoft/phi-1_5 + model_data: data + model_hf_load_args: + trust_remote_code: true + pytorch_version: 1.13.1 + task_type: text-generation + tokenizer_hf_load_kwargs: + trust_remote_code: true + transformers_version: 4.33.1 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.17 +mlflow_version: 2.6.0 +model_uuid: c1fab90dbbd74498abfe8894d35b62d6 +utc_time_created: '2023-11-02 06:27:25.427477' diff --git a/assets/models/system/microsoft-phi-2/MLmodel b/assets/models/system/microsoft-phi-2/MLmodel new file mode 100644 index 0000000000..c6756b6bbf --- /dev/null +++ b/assets/models/system/microsoft-phi-2/MLmodel @@ -0,0 +1,31 @@ +flavors: + hftransformersv2: + code: null + config_hf_load_kwargs: + trust_remote_code: true + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + huggingface_id: microsoft/phi-2 + model_data: data + model_hf_load_args: + trust_remote_code: true + pytorch_version: 1.13.1 + task_type: text-generation + tokenizer_hf_load_kwargs: + trust_remote_code: true + transformers_version: 4.37.2 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.18 +metadata: + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:35 + base_model_name: microsoft/phi-2 + base_model_task: text-generation + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.9.2 +model_uuid: 1f710d3000b045718a1dda0dae6ecba9 +utc_time_created: '2024-02-27 18:02:16.596861' diff --git a/assets/models/system/microsoft-swinv2-base-patch4-window12-192-22k/MLmodel b/assets/models/system/microsoft-swinv2-base-patch4-window12-192-22k/MLmodel new file mode 100644 index 0000000000..f88b8356d9 --- /dev/null +++ b/assets/models/system/microsoft-swinv2-base-patch4-window12-192-22k/MLmodel @@ -0,0 +1,35 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.19 + transformers: + code: null + components: + - image_processor + framework: pt + image_processor_type: ViTImageProcessor + instance_type: ImageClassificationPipeline + model_binary: model + pipeline_model_type: Swinv2ForImageClassification + source_model_name: /mnt/azureml/cr/j/f2eeb4a9d43a4689a23c183f741be95b/cap/data-capability/wd/INPUT_model_path + task: image-classification + torch_dtype: torch.float32 + transformers_version: 4.40.1 +metadata: + base_model_name: microsoft/swinv2-base-patch4-window12-192-22k + base_model_task: image-classification + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 438322938 +model_uuid: 021401000cd743bda83ae5cb65d0f5da +signature: + inputs: '[{"type": "string", "required": true}]' + outputs: '[{"type": "string", "name": "label", "required": true}, {"type": "double", + "name": "score", "required": true}]' + params: null +utc_time_created: '2024-05-03 05:05:17.229727' diff --git a/assets/models/system/mistral-community-Mixtral-8x22B-v01/MLmodel b/assets/models/system/mistral-community-Mixtral-8x22B-v01/MLmodel new file mode 100644 index 0000000000..4b959d08a5 --- /dev/null +++ b/assets/models/system/mistral-community-Mixtral-8x22B-v01/MLmodel @@ -0,0 +1,32 @@ +flavors: + hftransformersv2: + code: null + 
config_hf_load_kwargs: + trust_remote_code: true + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + huggingface_id: mistral-community/Mixtral-8x22B-v0.1 + model_data: data + model_hf_load_args: + trust_remote_code: true + pytorch_version: 1.13.1 + task_type: text-generation + tokenizer_hf_load_kwargs: + trust_remote_code: true + transformers_version: 4.38.2 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.18 +metadata: + base_model_name: mistral-community/Mixtral-8x22B-v0.1 + base_model_task: text-generation + model_provider_name: mistral + is_acft_model: true + is_finetuned_model: false + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:45 +mlflow_version: 2.10.2 +model_uuid: 34f520fef38a454295423f8260e5e306 +utc_time_created: '2024-04-12 09:50:13.545434' diff --git a/assets/models/system/mistralai-Mixtral-8x22B-Instruct-v0-1/MLmodel b/assets/models/system/mistralai-Mixtral-8x22B-Instruct-v0-1/MLmodel new file mode 100644 index 0000000000..552f418828 --- /dev/null +++ b/assets/models/system/mistralai-Mixtral-8x22B-Instruct-v0-1/MLmodel @@ -0,0 +1,32 @@ +flavors: + hftransformersv2: + code: null + config_hf_load_kwargs: + trust_remote_code: true + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + huggingface_id: mistralai/Mixtral-8x22B-Instruct-v0.1 + model_data: data + model_hf_load_args: + trust_remote_code: true + pytorch_version: 1.13.1 + task_type: chat-completion + tokenizer_hf_load_kwargs: + trust_remote_code: true + transformers_version: 4.38.2 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.18 +metadata: + base_model_name: mistralai/Mixtral-8x22B-Instruct-v0.1 + base_model_task: chat-completion + model_provider_name: mistral + is_acft_model: true + is_finetuned_model: false + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:45 +mlflow_version: 2.10.2 +model_uuid: 82fb63e5bfa848e09b973a5f1487d323 +utc_time_created: '2024-04-18 09:27:36.255096' diff --git a/assets/models/system/mistralai-Mixtral-8x22B-v0-1/MLmodel b/assets/models/system/mistralai-Mixtral-8x22B-v0-1/MLmodel new file mode 100644 index 0000000000..eb085dff52 --- /dev/null +++ b/assets/models/system/mistralai-Mixtral-8x22B-v0-1/MLmodel @@ -0,0 +1,32 @@ +flavors: + hftransformersv2: + code: null + config_hf_load_kwargs: + trust_remote_code: true + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + huggingface_id: mistralai/Mixtral-8x22B-v0.1 + model_data: data + model_hf_load_args: + trust_remote_code: true + pytorch_version: 1.13.1 + task_type: text-generation + tokenizer_hf_load_kwargs: + trust_remote_code: true + transformers_version: 4.38.2 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.18 +metadata: + base_model_name: mistralai/Mixtral-8x22B-v0.1 + base_model_task: text-generation + model_provider_name: mistral + is_acft_model: true + is_finetuned_model: false + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:45 +mlflow_version: 2.10.2 +model_uuid: ba7600fcf6f143d587b6a050dc62c051 +utc_time_created: '2024-04-18 08:34:20.309804' diff --git a/assets/models/system/mistralai-Mixtral-8x7B-Instruct-v01/MLmodel 
b/assets/models/system/mistralai-Mixtral-8x7B-Instruct-v01/MLmodel new file mode 100644 index 0000000000..c2df32cca5 --- /dev/null +++ b/assets/models/system/mistralai-Mixtral-8x7B-Instruct-v01/MLmodel @@ -0,0 +1,26 @@ +flavors: + hftransformersv2: + code: null + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + huggingface_id: mistralai/Mixtral-8x7B-Instruct-v0.1 + model_data: data + pytorch_version: 1.13.1 + task_type: chat-completion + transformers_version: 4.37.2 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.18 +metadata: + base_model_name: mistralai/Mixtral-8x7B-Instruct-v0.1 + base_model_task: chat-completion + model_provider_name: mistral + is_acft_model: true + is_finetuned_model: false + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:45 +mlflow_version: 2.9.2 +model_uuid: 34f291bb24c14940a710a707d48aeb7d +utc_time_created: '2024-02-26 23:18:23.547777' diff --git a/assets/models/system/mmd-3x-deformable-detr_refine_twostage_r50_16xb2-50e_coco/MLmodel b/assets/models/system/mmd-3x-deformable-detr_refine_twostage_r50_16xb2-50e_coco/MLmodel new file mode 100644 index 0000000000..787a036cc4 --- /dev/null +++ b/assets/models/system/mmd-3x-deformable-detr_refine_twostage_r50_16xb2-50e_coco/MLmodel @@ -0,0 +1,34 @@ +flavors: + python_function: + artifacts: + config_path: + path: artifacts/deformable-detr_refine_twostage_r50_16xb2-50e_coco.py + uri: /mnt/azureml/cr/j/450110263b3d433d9f5b56703495b429/cap/data-capability/wd/INPUT_model_path/model/deformable-detr_refine_twostage_r50_16xb2-50e_coco.py + model_metadata: + path: artifacts/model_metadata.json + uri: /mnt/azureml/cr/j/450110263b3d433d9f5b56703495b429/cap/data-capability/wd/INPUT_model_path/model/model_metadata.json + weights_path: + path: artifacts/deformable-detr_refine_twostage_r50_16xb2-50e_coco_weights.pth + uri: /mnt/azureml/cr/j/450110263b3d433d9f5b56703495b429/cap/data-capability/wd/INPUT_model_path/model/deformable-detr_refine_twostage_r50_16xb2-50e_coco_weights.pth + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: deformable-detr_refine_twostage_r50_16xb2-50e_coco + base_model_task: image-object-detection + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 254719196 +model_uuid: 7cc9397f61d54db0982e812eed04e6e8 +signature: + inputs: '[{"type": "binary", "name": "image", "required": true}]' + outputs: '[{"type": "string", "name": "boxes", "required": true}]' + params: '[{"name": "text_prompt", "type": "string", "default": null, "shape": null}, + {"name": "custom_entities", "type": "boolean", "default": true, "shape": null}]' +utc_time_created: '2024-04-29 16:43:08.671620' diff --git a/assets/models/system/mmd-3x-mask-rcnn_swin-t-p4-w7_fpn_1x_coco/MLmodel b/assets/models/system/mmd-3x-mask-rcnn_swin-t-p4-w7_fpn_1x_coco/MLmodel new file mode 100644 index 0000000000..0d4bc6dd5d --- /dev/null +++ b/assets/models/system/mmd-3x-mask-rcnn_swin-t-p4-w7_fpn_1x_coco/MLmodel @@ -0,0 +1,34 @@ +flavors: + python_function: + artifacts: + config_path: + path: artifacts/mask-rcnn_swin-t-p4-w7_fpn_1x_coco.py + uri: /mnt/azureml/cr/j/2cb04185befc4aada74a65e7e39010dc/cap/data-capability/wd/INPUT_model_path/model/mask-rcnn_swin-t-p4-w7_fpn_1x_coco.py + 
model_metadata: + path: artifacts/model_metadata.json + uri: /mnt/azureml/cr/j/2cb04185befc4aada74a65e7e39010dc/cap/data-capability/wd/INPUT_model_path/model/model_metadata.json + weights_path: + path: artifacts/mask-rcnn_swin-t-p4-w7_fpn_1x_coco_weights.pth + uri: /mnt/azureml/cr/j/2cb04185befc4aada74a65e7e39010dc/cap/data-capability/wd/INPUT_model_path/model/mask-rcnn_swin-t-p4-w7_fpn_1x_coco_weights.pth + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.19 +metadata: + base_model_name: mask-rcnn_swin-t-p4-w7_fpn_1x_coco + base_model_task: image-instance-segmentation + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 191490861 +model_uuid: a86c066aad6a4cae9457f4ade7db156e +signature: + inputs: '[{"type": "binary", "name": "image", "required": true}]' + outputs: '[{"type": "string", "name": "boxes", "required": true}]' + params: '[{"name": "text_prompt", "type": "string", "default": null, "shape": null}, + {"name": "custom_entities", "type": "boolean", "default": true, "shape": null}]' +utc_time_created: '2024-06-12 05:08:49.343455' diff --git a/assets/models/system/mmd-3x-rtmdet-ins_x_8xb16-300e_coco/MLmodel b/assets/models/system/mmd-3x-rtmdet-ins_x_8xb16-300e_coco/MLmodel new file mode 100644 index 0000000000..dd5bbb447b --- /dev/null +++ b/assets/models/system/mmd-3x-rtmdet-ins_x_8xb16-300e_coco/MLmodel @@ -0,0 +1,29 @@ +flavors: + python_function: + artifacts: + config_path: + path: artifacts/rtmdet-ins_x_8xb16-300e_coco.py + uri: /mnt/azureml/cr/j/189f6a6fe964498cbe59ee92e6e3c2ca/cap/data-capability/wd/INPUT_model_path/model/rtmdet-ins_x_8xb16-300e_coco.py + weights_path: + path: artifacts/rtmdet-ins_x_8xb16-300e_coco_weights.pth + uri: /mnt/azureml/cr/j/189f6a6fe964498cbe59ee92e6e3c2ca/cap/data-capability/wd/INPUT_model_path/model/rtmdet-ins_x_8xb16-300e_coco_weights.pth + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: rtmdet-ins_x_8xb16-300e_coco + base_model_task: image-instance-segmentation + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.6.0 +model_uuid: 8c3fc1b92aca47a991e389eb33b74911 +signature: + inputs: '[{"type": "binary", "name": "image"}]' + outputs: '[{"type": "string", "name": "boxes"}]' + params: null +utc_time_created: '2023-12-13 10:51:52.592728' diff --git a/assets/models/system/mmd-3x-sparse-rcnn_r101_fpn_300-proposals_crop-ms-480-800-3x_coco/MLmodel b/assets/models/system/mmd-3x-sparse-rcnn_r101_fpn_300-proposals_crop-ms-480-800-3x_coco/MLmodel new file mode 100644 index 0000000000..65758619ca --- /dev/null +++ b/assets/models/system/mmd-3x-sparse-rcnn_r101_fpn_300-proposals_crop-ms-480-800-3x_coco/MLmodel @@ -0,0 +1,34 @@ +flavors: + python_function: + artifacts: + config_path: + path: artifacts/sparse-rcnn_r101_fpn_300-proposals_crop-ms-480-800-3x_coco.py + uri: /mnt/azureml/cr/j/78fe29e9328a4e2787f714ab97cb9d52/cap/data-capability/wd/INPUT_model_path/model/sparse-rcnn_r101_fpn_300-proposals_crop-ms-480-800-3x_coco.py + model_metadata: + path: artifacts/model_metadata.json + uri: /mnt/azureml/cr/j/78fe29e9328a4e2787f714ab97cb9d52/cap/data-capability/wd/INPUT_model_path/model/model_metadata.json + weights_path: + path: 
artifacts/sparse-rcnn_r101_fpn_300-proposals_crop-ms-480-800-3x_coco_weights.pth + uri: /mnt/azureml/cr/j/78fe29e9328a4e2787f714ab97cb9d52/cap/data-capability/wd/INPUT_model_path/model/sparse-rcnn_r101_fpn_300-proposals_crop-ms-480-800-3x_coco_weights.pth + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: sparse-rcnn_r101_fpn_300-proposals_crop-ms-480-800-3x_coco + base_model_task: image-object-detection + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 501976232 +model_uuid: 9425478e46964dd08856de811fe5c05b +signature: + inputs: '[{"type": "binary", "name": "image", "required": true}]' + outputs: '[{"type": "string", "name": "boxes", "required": true}]' + params: '[{"name": "text_prompt", "type": "string", "default": null, "shape": null}, + {"name": "custom_entities", "type": "boolean", "default": true, "shape": null}]' +utc_time_created: '2024-04-29 11:42:11.452662' diff --git a/assets/models/system/mmd-3x-sparse-rcnn_r50_fpn_300-proposals_crop-ms-480-800-3x_coco/MLmodel b/assets/models/system/mmd-3x-sparse-rcnn_r50_fpn_300-proposals_crop-ms-480-800-3x_coco/MLmodel new file mode 100644 index 0000000000..c116607dc8 --- /dev/null +++ b/assets/models/system/mmd-3x-sparse-rcnn_r50_fpn_300-proposals_crop-ms-480-800-3x_coco/MLmodel @@ -0,0 +1,34 @@ +flavors: + python_function: + artifacts: + config_path: + path: artifacts/sparse-rcnn_r50_fpn_300-proposals_crop-ms-480-800-3x_coco.py + uri: /mnt/azureml/cr/j/e4c5bdf39d33465597d09f8d16ba24f7/cap/data-capability/wd/INPUT_model_path/model/sparse-rcnn_r50_fpn_300-proposals_crop-ms-480-800-3x_coco.py + model_metadata: + path: artifacts/model_metadata.json + uri: /mnt/azureml/cr/j/e4c5bdf39d33465597d09f8d16ba24f7/cap/data-capability/wd/INPUT_model_path/model/model_metadata.json + weights_path: + path: artifacts/sparse-rcnn_r50_fpn_300-proposals_crop-ms-480-800-3x_coco_weights.pth + uri: /mnt/azureml/cr/j/e4c5bdf39d33465597d09f8d16ba24f7/cap/data-capability/wd/INPUT_model_path/model/sparse-rcnn_r50_fpn_300-proposals_crop-ms-480-800-3x_coco_weights.pth + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: sparse-rcnn_r50_fpn_300-proposals_crop-ms-480-800-3x_coco + base_model_task: image-object-detection + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 425748500 +model_uuid: e8e4369d6bf644809df75ca42b4063f8 +signature: + inputs: '[{"type": "binary", "name": "image", "required": true}]' + outputs: '[{"type": "string", "name": "boxes", "required": true}]' + params: '[{"name": "text_prompt", "type": "string", "default": null, "shape": null}, + {"name": "custom_entities", "type": "boolean", "default": true, "shape": null}]' +utc_time_created: '2024-04-29 09:48:40.160024' diff --git a/assets/models/system/mmd-3x-vfnet_r50-mdconv-c3-c5_fpn_ms-2x_coco/MLmodel b/assets/models/system/mmd-3x-vfnet_r50-mdconv-c3-c5_fpn_ms-2x_coco/MLmodel new file mode 100644 index 0000000000..43201290d5 --- /dev/null +++ b/assets/models/system/mmd-3x-vfnet_r50-mdconv-c3-c5_fpn_ms-2x_coco/MLmodel @@ -0,0 +1,34 @@ +flavors: + python_function: + artifacts: + config_path: + path: artifacts/vfnet_r50-mdconv-c3-c5_fpn_ms-2x_coco.py + uri: 
/mnt/azureml/cr/j/ab758ddb3c5c47bb8d62496c28393a47/cap/data-capability/wd/INPUT_model_path/model/vfnet_r50-mdconv-c3-c5_fpn_ms-2x_coco.py + model_metadata: + path: artifacts/model_metadata.json + uri: /mnt/azureml/cr/j/ab758ddb3c5c47bb8d62496c28393a47/cap/data-capability/wd/INPUT_model_path/model/model_metadata.json + weights_path: + path: artifacts/vfnet_r50-mdconv-c3-c5_fpn_ms-2x_coco_weights.pth + uri: /mnt/azureml/cr/j/ab758ddb3c5c47bb8d62496c28393a47/cap/data-capability/wd/INPUT_model_path/model/vfnet_r50-mdconv-c3-c5_fpn_ms-2x_coco_weights.pth + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: vfnet_r50-mdconv-c3-c5_fpn_ms-2x_coco + base_model_task: image-object-detection + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 135920797 +model_uuid: 86802d54584f429da5ef89c4128a274a +signature: + inputs: '[{"type": "binary", "name": "image", "required": true}]' + outputs: '[{"type": "string", "name": "boxes", "required": true}]' + params: '[{"name": "text_prompt", "type": "string", "default": null, "shape": null}, + {"name": "custom_entities", "type": "boolean", "default": true, "shape": null}]' +utc_time_created: '2024-04-29 13:03:27.379241' diff --git a/assets/models/system/mmd-3x-vfnet_x101-64x4d-mdconv-c3-c5_fpn_ms-2x_coco/MLmodel b/assets/models/system/mmd-3x-vfnet_x101-64x4d-mdconv-c3-c5_fpn_ms-2x_coco/MLmodel new file mode 100644 index 0000000000..e1536c59a0 --- /dev/null +++ b/assets/models/system/mmd-3x-vfnet_x101-64x4d-mdconv-c3-c5_fpn_ms-2x_coco/MLmodel @@ -0,0 +1,34 @@ +flavors: + python_function: + artifacts: + config_path: + path: artifacts/vfnet_x101-64x4d-mdconv-c3-c5_fpn_ms-2x_coco.py + uri: /mnt/azureml/cr/j/02b83876dafe4e5ca87af8f3cae8cb97/cap/data-capability/wd/INPUT_model_path/model/vfnet_x101-64x4d-mdconv-c3-c5_fpn_ms-2x_coco.py + model_metadata: + path: artifacts/model_metadata.json + uri: /mnt/azureml/cr/j/02b83876dafe4e5ca87af8f3cae8cb97/cap/data-capability/wd/INPUT_model_path/model/model_metadata.json + weights_path: + path: artifacts/vfnet_x101-64x4d-mdconv-c3-c5_fpn_ms-2x_coco_weights.pth + uri: /mnt/azureml/cr/j/02b83876dafe4e5ca87af8f3cae8cb97/cap/data-capability/wd/INPUT_model_path/model/vfnet_x101-64x4d-mdconv-c3-c5_fpn_ms-2x_coco_weights.pth + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: vfnet_x101-64x4d-mdconv-c3-c5_fpn_ms-2x_coco + base_model_task: image-object-detection + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 395598039 +model_uuid: 5d5ad3dae64e4868b9eaafd9b78a5e51 +signature: + inputs: '[{"type": "binary", "name": "image", "required": true}]' + outputs: '[{"type": "string", "name": "boxes", "required": true}]' + params: '[{"name": "text_prompt", "type": "string", "default": null, "shape": null}, + {"name": "custom_entities", "type": "boolean", "default": true, "shape": null}]' +utc_time_created: '2024-04-29 14:32:10.759864' diff --git a/assets/models/system/mmd-3x-yolof_r50_c5_8x8_1x_coco/MLmodel b/assets/models/system/mmd-3x-yolof_r50_c5_8x8_1x_coco/MLmodel new file mode 100644 index 0000000000..5e308cef4e --- /dev/null +++ b/assets/models/system/mmd-3x-yolof_r50_c5_8x8_1x_coco/MLmodel @@ -0,0 +1,34 @@ +flavors: + 
python_function: + artifacts: + config_path: + path: artifacts/yolof_r50_c5_8x8_1x_coco.py + uri: /mnt/azureml/cr/j/da24db315f314653a4d64a0dcfb05562/cap/data-capability/wd/INPUT_model_path/model/yolof_r50_c5_8x8_1x_coco.py + model_metadata: + path: artifacts/model_metadata.json + uri: /mnt/azureml/cr/j/da24db315f314653a4d64a0dcfb05562/cap/data-capability/wd/INPUT_model_path/model/model_metadata.json + weights_path: + path: artifacts/yolof_r50_c5_8x8_1x_coco_weights.pth + uri: /mnt/azureml/cr/j/da24db315f314653a4d64a0dcfb05562/cap/data-capability/wd/INPUT_model_path/model/yolof_r50_c5_8x8_1x_coco_weights.pth + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: yolof_r50_c5_8x8_1x_coco + base_model_task: image-object-detection + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 177017811 +model_uuid: 063ebfbfcdb14dda9660fa7101808596 +signature: + inputs: '[{"type": "binary", "name": "image", "required": true}]' + outputs: '[{"type": "string", "name": "boxes", "required": true}]' + params: '[{"name": "text_prompt", "type": "string", "default": null, "shape": null}, + {"name": "custom_entities", "type": "boolean", "default": true, "shape": null}]' +utc_time_created: '2024-04-29 15:29:37.707125' diff --git a/assets/models/system/mmeft/MLmodel b/assets/models/system/mmeft/MLmodel new file mode 100644 index 0000000000..56104c3214 --- /dev/null +++ b/assets/models/system/mmeft/MLmodel @@ -0,0 +1,16 @@ +flavors: + python_function: + artifacts: + checkpoint_folder: + path: artifacts/pytorch_output + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.17 +mlflow_version: 2.5.0 +model_uuid: 3059912bc0324b0d882812ba9fb2ef6e +utc_time_created: '2023-10-11 08:09:14.250969' diff --git a/assets/models/system/ocsort_yolox_x_crowdhuman_mot17-private-half/MLmodel b/assets/models/system/ocsort_yolox_x_crowdhuman_mot17-private-half/MLmodel new file mode 100644 index 0000000000..07a285005e --- /dev/null +++ b/assets/models/system/ocsort_yolox_x_crowdhuman_mot17-private-half/MLmodel @@ -0,0 +1,33 @@ +flavors: + python_function: + artifacts: + config_path: + path: artifacts/ocsort_yolox_x_crowdhuman_mot17-private-half.py + uri: /mnt/azureml/cr/j/28f9a2e41f604dd084eccb0e0ae0ccac/cap/data-capability/wd/INPUT_model_path/model/ocsort_yolox_x_crowdhuman_mot17-private-half.py + model_metadata: + path: artifacts/model_metadata.json + uri: /mnt/azureml/cr/j/28f9a2e41f604dd084eccb0e0ae0ccac/cap/data-capability/wd/INPUT_model_path/model/model_metadata.json + weights_path: + path: artifacts/ocsort_yolox_x_crowdhuman_mot17-private-half_weights.pth + uri: /mnt/azureml/cr/j/28f9a2e41f604dd084eccb0e0ae0ccac/cap/data-capability/wd/INPUT_model_path/model/ocsort_yolox_x_crowdhuman_mot17-private-half_weights.pth + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: null + base_model_task: video-multi-object-tracking + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 793188485 +model_uuid: bfd98256af3b406189875d5da8193a7e +signature: + inputs: '[{"type": "string", "name": "video", 
"required": true}]' + outputs: '[{"type": "string", "name": "boxes", "required": true}]' + params: null +utc_time_created: '2024-04-30 13:37:10.816882' diff --git a/assets/models/system/openai-clip-image-text-embeddings-vit-base-patch32/MLmodel b/assets/models/system/openai-clip-image-text-embeddings-vit-base-patch32/MLmodel new file mode 100644 index 0000000000..5bae514d8a --- /dev/null +++ b/assets/models/system/openai-clip-image-text-embeddings-vit-base-patch32/MLmodel @@ -0,0 +1,29 @@ +flavors: + python_function: + artifacts: + model_dir: + path: artifacts/INPUT_model_path + uri: /mnt/azureml/cr/j/d6f9b5dc5ddd4bdf818494c53d06ae3d/cap/data-capability/wd/INPUT_model_path + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: openai/clip-vit-base-patch32 + base_model_task: embeddings + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 1819563896 +model_uuid: 403d01e48f2a4d61bbbaa5fcc2c53280 +signature: + inputs: '[{"type": "binary", "name": "image", "required": true}, {"type": "string", + "name": "text", "required": true}]' + outputs: '[{"type": "string", "name": "image_features", "required": true}, {"type": + "string", "name": "text_features", "required": true}]' + params: null +utc_time_created: '2024-04-25 07:03:48.934077' diff --git a/assets/models/system/openai-clip-image-text-embeddings-vit-large-patch14-336/MLmodel b/assets/models/system/openai-clip-image-text-embeddings-vit-large-patch14-336/MLmodel new file mode 100644 index 0000000000..dea0dd60f2 --- /dev/null +++ b/assets/models/system/openai-clip-image-text-embeddings-vit-large-patch14-336/MLmodel @@ -0,0 +1,29 @@ +flavors: + python_function: + artifacts: + model_dir: + path: artifacts/INPUT_model_path + uri: /mnt/azureml/cr/j/48f276d69c9846b9b3217ffa1270c918/cap/data-capability/wd/INPUT_model_path + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: openai/clip-vit-large-patch14-336 + base_model_task: embeddings + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 3428065021 +model_uuid: 49a27f96ba36438ca6641cc3666c36a4 +signature: + inputs: '[{"type": "binary", "name": "image", "required": true}, {"type": "string", + "name": "text", "required": true}]' + outputs: '[{"type": "string", "name": "image_features", "required": true}, {"type": + "string", "name": "text_features", "required": true}]' + params: null +utc_time_created: '2024-04-25 08:49:03.198351' diff --git a/assets/models/system/openai-clip-vit-base-patch32/MLmodel b/assets/models/system/openai-clip-vit-base-patch32/MLmodel new file mode 100644 index 0000000000..8db861638b --- /dev/null +++ b/assets/models/system/openai-clip-vit-base-patch32/MLmodel @@ -0,0 +1,29 @@ +flavors: + python_function: + artifacts: + model_dir: + path: artifacts/INPUT_model_path + uri: /mnt/azureml/cr/j/eef2729fab174fe694dd2815a39cae40/cap/data-capability/wd/INPUT_model_path + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: openai/clip-vit-base-patch32 + base_model_task: zero-shot-image-classification + 
is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 1819558189 +model_uuid: 8bba28fc3aa2481b87b373aebf1019d1 +signature: + inputs: '[{"type": "binary", "name": "image", "required": true}, {"type": "string", + "name": "text", "required": true}]' + outputs: '[{"type": "string", "name": "probs", "required": true}, {"type": "string", + "name": "labels", "required": true}]' + params: null +utc_time_created: '2024-04-22 15:31:39.817903' diff --git a/assets/models/system/openai-clip-vit-large-patch14/MLmodel b/assets/models/system/openai-clip-vit-large-patch14/MLmodel new file mode 100644 index 0000000000..6c11634a91 --- /dev/null +++ b/assets/models/system/openai-clip-vit-large-patch14/MLmodel @@ -0,0 +1,29 @@ +flavors: + python_function: + artifacts: + model_dir: + path: artifacts/INPUT_model_path + uri: /mnt/azureml/cr/j/dc792b43295d4cebbcb95a186f7e3905/cap/data-capability/wd/INPUT_model_path + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: openai/clip-vit-large-patch14 + base_model_task: zero-shot-image-classification + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 6846549179 +model_uuid: 8410eda7b0d54953852d76753ca9e7ce +signature: + inputs: '[{"type": "binary", "name": "image", "required": true}, {"type": "string", + "name": "text", "required": true}]' + outputs: '[{"type": "string", "name": "probs", "required": true}, {"type": "string", + "name": "labels", "required": true}]' + params: null +utc_time_created: '2024-04-22 07:38:01.529355' diff --git a/assets/models/system/openai-whisper-large-v3/MLmodel b/assets/models/system/openai-whisper-large-v3/MLmodel new file mode 100644 index 0000000000..63519d38e5 --- /dev/null +++ b/assets/models/system/openai-whisper-large-v3/MLmodel @@ -0,0 +1,21 @@ +flavors: + hftransformersv2: + code: code + hf_config_class: WhisperConfig + hf_predict_module: predict + hf_pretrained_class: WhisperForConditionalGeneration + hf_tokenizer_class: WhisperProcessor + huggingface_id: openai/whisper-large-v3 + model_data: data + pytorch_version: 2.1.0+cu118 + task_type: automatic-speech-recognition + transformers_version: 4.34.0 + python_function: + code: code + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.10.11 +mlflow_version: 2.6.0 +model_uuid: 7876f44f26274843af12ad59cde6f0e2 +utc_time_created: '2023-11-08 10:17:29.004434' diff --git a/assets/models/system/openai-whisper-large/MLmodel b/assets/models/system/openai-whisper-large/MLmodel new file mode 100644 index 0000000000..d6db2d06de --- /dev/null +++ b/assets/models/system/openai-whisper-large/MLmodel @@ -0,0 +1,25 @@ +flavors: + hftransformersv2: + code: code + hf_config_class: WhisperConfig + hf_predict_module: predict + hf_pretrained_class: WhisperForConditionalGeneration + hf_tokenizer_class: WhisperProcessor + huggingface_id: openai/whisper-large + model_data: data + task_type: automatic-speech-recognition + transformers_version: 4.36.2 + python_function: + code: code + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.18 +metadata: + base_model_name: openai/whisper-large + base_model_task: automatic-speech-recognition + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_uuid: 9438889fbd7844e5bfde5fdc5c282cfd 
+utc_time_created: '2023-12-20 15:22:21.785005' diff --git a/assets/models/system/phi-3-medium-128k-instruct/MLmodel b/assets/models/system/phi-3-medium-128k-instruct/MLmodel new file mode 100644 index 0000000000..9ad7fac086 --- /dev/null +++ b/assets/models/system/phi-3-medium-128k-instruct/MLmodel @@ -0,0 +1,31 @@ +flavors: + hftransformersv2: + code: code + config_hf_load_kwargs: + trust_remote_code: true + attn_implementation: eager + model_hf_load_kwargs: + trust_remote_code: true + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + hf_predict_module: predict_phi + task_type: chat-completion + model_data: data + pytorch_version: 1.13.1 + transformers_version: 4.38.2 + model_id: microsoft/phi-3-medium-128k-instruct + python_function: + code: code + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.18 +metadata: + base_model_name: Phi-3-medium-128k-instruct + base_model_task: chat-completion + model_provider_name: microsoft + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:57 +mlflow_version: 2.10.2 +model_uuid: 53cc26bc8d2843c48369a20e205983f9 +utc_time_created: '2024-05-01 17:06:09.382806' diff --git a/assets/models/system/phi-3-medium-4k-instruct/MLmodel b/assets/models/system/phi-3-medium-4k-instruct/MLmodel new file mode 100644 index 0000000000..d137fc061f --- /dev/null +++ b/assets/models/system/phi-3-medium-4k-instruct/MLmodel @@ -0,0 +1,31 @@ +flavors: + hftransformersv2: + code: code + config_hf_load_kwargs: + trust_remote_code: true + model_hf_load_kwargs: + trust_remote_code: true + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + hf_predict_module: predict_phi + task_type: chat-completion + model_data: data + pytorch_version: 1.13.1 + tokenizer_hf_load_kwargs: + trust_remote_code: true + transformers_version: 4.38.2 + python_function: + code: code + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.18 +metadata: + base_model_name: Phi-3-medium-4k-instruct + base_model_task: chat-completion + model_provider_name: microsoft + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:57 +mlflow_version: 2.10.2 +model_uuid: 10946e531dd94e1cae9f1d17bd165539 +utc_time_created: '2024-05-01 17:02:34.974343' diff --git a/assets/models/system/phi-3-mini-128k-instruct/MLmodel b/assets/models/system/phi-3-mini-128k-instruct/MLmodel new file mode 100644 index 0000000000..fe591d69e1 --- /dev/null +++ b/assets/models/system/phi-3-mini-128k-instruct/MLmodel @@ -0,0 +1,30 @@ +flavors: + hftransformersv2: + code: code + config_hf_load_kwargs: + trust_remote_code: true + attn_implementation: eager + model_hf_load_kwargs: + trust_remote_code: true + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + hf_predict_module: predict_phi + task_type: chat-completion + model_data: data + pytorch_version: 1.13.1 + transformers_version: 4.38.2 + python_function: + code: code + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.18 +metadata: + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:57 + base_model_name: Phi-3-mini-128k-instruct + base_model_task: chat-completion + model_provider_name: microsoft +mlflow_version: 2.10.2 +model_uuid: 53cc26bc8d2843c48369a20e205983f9 
+utc_time_created: '2024-05-01 17:06:09.382806' diff --git a/assets/models/system/phi-3-mini-4k-instruct/MLmodel b/assets/models/system/phi-3-mini-4k-instruct/MLmodel new file mode 100644 index 0000000000..986c1bb2fe --- /dev/null +++ b/assets/models/system/phi-3-mini-4k-instruct/MLmodel @@ -0,0 +1,31 @@ +flavors: + hftransformersv2: + code: code + config_hf_load_kwargs: + trust_remote_code: true + model_hf_load_kwargs: + trust_remote_code: true + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + hf_predict_module: predict_phi + task_type: chat-completion + model_data: data + pytorch_version: 1.13.1 + tokenizer_hf_load_kwargs: + trust_remote_code: true + transformers_version: 4.38.2 + python_function: + code: code + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.18 +metadata: + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:57 + base_model_name: Phi-3-mini-4k-instruct + base_model_task: chat-completion + model_provider_name: microsoft +mlflow_version: 2.10.2 +model_uuid: 10946e531dd94e1cae9f1d17bd165539 +utc_time_created: '2024-05-01 17:02:34.974343' diff --git a/assets/models/system/phi-3-small-128k-instruct/MLmodel b/assets/models/system/phi-3-small-128k-instruct/MLmodel new file mode 100644 index 0000000000..c38ac0ca76 --- /dev/null +++ b/assets/models/system/phi-3-small-128k-instruct/MLmodel @@ -0,0 +1,31 @@ +flavors: + hftransformersv2: + code: code + config_hf_load_kwargs: + trust_remote_code: true + model_hf_load_kwargs: + trust_remote_code: true + tokenizer_hf_load_kwargs: + trust_remote_code: true + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + hf_predict_module: predict_phi + task_type: chat-completion + model_data: data + pytorch_version: 1.13.1 + transformers_version: 4.38.2 + python_function: + code: code + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.18 +metadata: + base_model_name: Phi-3-small-128k-instruct + base_model_task: chat-completion + model_provider_name: microsoft + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:49 +mlflow_version: 2.10.2 +model_uuid: 53cc26bc8d2843c48369a20e205983f9 +utc_time_created: '2024-05-01 17:06:09.382806' diff --git a/assets/models/system/phi-3-small-8k-instruct/MLmodel b/assets/models/system/phi-3-small-8k-instruct/MLmodel new file mode 100644 index 0000000000..1e1884a902 --- /dev/null +++ b/assets/models/system/phi-3-small-8k-instruct/MLmodel @@ -0,0 +1,31 @@ +flavors: + hftransformersv2: + code: code + config_hf_load_kwargs: + trust_remote_code: true + model_hf_load_kwargs: + trust_remote_code: true + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + hf_predict_module: predict_phi + task_type: chat-completion + model_data: data + pytorch_version: 1.13.1 + tokenizer_hf_load_kwargs: + trust_remote_code: true + transformers_version: 4.38.2 + python_function: + code: code + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.18 +metadata: + base_model_name: Phi-3-small-8k-instruct + base_model_task: chat-completion + model_provider_name: microsoft + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:57 +mlflow_version: 2.10.2 +model_uuid: 10946e531dd94e1cae9f1d17bd165539 +utc_time_created: 
'2024-05-01 17:02:34.974343' diff --git a/assets/models/system/phi-3-vision-128k-instruct/MLmodel b/assets/models/system/phi-3-vision-128k-instruct/MLmodel new file mode 100644 index 0000000000..b91c023038 --- /dev/null +++ b/assets/models/system/phi-3-vision-128k-instruct/MLmodel @@ -0,0 +1,24 @@ +flavors: + python_function: + artifacts: + model_dir: + path: model + uri: /code/mlflow_conversion/../phi3-mini-v-rc3-13-long-20240510 + cloudpickle_version: 3.0.0 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.10.14 +metadata: + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:47 + base_model_name: phi3-mini-vision + base_model_task: chat-completion + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.12.1 +model_size_bytes: 8295925056 +model_uuid: ab940bbf7a4f4959aa4346b5a8d9b301 +utc_time_created: '2024-05-09 08:30:34.663253' diff --git a/assets/models/system/phi-3.5-mini-128k-instruct/MLmodel b/assets/models/system/phi-3.5-mini-128k-instruct/MLmodel new file mode 100644 index 0000000000..ab89bf2665 --- /dev/null +++ b/assets/models/system/phi-3.5-mini-128k-instruct/MLmodel @@ -0,0 +1,31 @@ +flavors: + hftransformersv2: + code: null + config_hf_load_kwargs: + trust_remote_code: true + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + model_data: data + model_hf_load_args: + trust_remote_code: true + pytorch_version: 1.13.1 + task_type: chat-completion + tokenizer_hf_load_kwargs: + trust_remote_code: true + transformers_version: 4.42.4 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.9.19 +metadata: + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:48 + base_model_name: Phi-3.5-mini-128k-instruct + base_model_task: chat-completion + model_provider_name: microsoft + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.12.2 +model_uuid: 5c0caeea31274b9b9862e4999abcb1f7 +utc_time_created: '2024-08-07 18:49:36.978050' diff --git a/assets/models/system/phi-3.5-moe-128k-instruct/MLmodel b/assets/models/system/phi-3.5-moe-128k-instruct/MLmodel new file mode 100644 index 0000000000..21e0187016 --- /dev/null +++ b/assets/models/system/phi-3.5-moe-128k-instruct/MLmodel @@ -0,0 +1,31 @@ +flavors: + hftransformersv2: + code: null + config_hf_load_kwargs: + trust_remote_code: true + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + model_data: data + model_hf_load_args: + trust_remote_code: true + pytorch_version: 1.13.1 + task_type: chat-completion + tokenizer_hf_load_kwargs: + trust_remote_code: true + transformers_version: 4.42.4 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.9.19 +metadata: + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:49 + base_model_name: Phi-3.5-MoE-128k-Instruct + base_model_task: chat-completion + model_provider_name: microsoft + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.12.2 +model_uuid: 446884a71aaf424fb68027e62f674ec9 +utc_time_created: '2024-08-08 04:05:00.904328' diff --git a/assets/models/system/phi-3.5-vision-128k-instruct/MLmodel b/assets/models/system/phi-3.5-vision-128k-instruct/MLmodel new file mode 100644 index 0000000000..0e4d7f5b52 
--- /dev/null +++ b/assets/models/system/phi-3.5-vision-128k-instruct/MLmodel @@ -0,0 +1,31 @@ +flavors: + hftransformersv2: + code: null + config_hf_load_kwargs: + trust_remote_code: true + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + model_data: data + model_hf_load_args: + trust_remote_code: true + pytorch_version: 1.13.1 + task_type: chat-completion + tokenizer_hf_load_kwargs: + trust_remote_code: true + transformers_version: 4.42.4 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.9.19 +metadata: + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:50 + base_model_name: Phi-3.5-vision-128k-instruct + base_model_task: chat-completion + model_provider_name: microsoft + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.12.2 +model_uuid: 5cdd71025fe4432a958ccb6816f5b592 +utc_time_created: '2024-08-14 01:01:45.504805' diff --git a/assets/models/system/projecte-aina-FLOR-1-3B-Instructed/MLmodel b/assets/models/system/projecte-aina-FLOR-1-3B-Instructed/MLmodel new file mode 100644 index 0000000000..d2c3200e1a --- /dev/null +++ b/assets/models/system/projecte-aina-FLOR-1-3B-Instructed/MLmodel @@ -0,0 +1,25 @@ +flavors: + hftransformersv2: + code: null + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + huggingface_id: projecte-aina/FLOR-1.3B-Instructed + model_data: data + pytorch_version: 1.13.1 + task_type: text-generation + transformers_version: 4.38.2 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.18 +metadata: + base_model_name: projecte-aina/FLOR-1.3B-Instructed + base_model_task: text-generation + is_acft_model: true + is_finetuned_model: false + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:34 +mlflow_version: 2.10.2 +model_uuid: 3c8ce19fc4be4069b350e232e1e63581 +utc_time_created: '2024-04-09 10:55:54.141808' diff --git a/assets/models/system/projecte-aina-FLOR-1-3B/MLmodel b/assets/models/system/projecte-aina-FLOR-1-3B/MLmodel new file mode 100644 index 0000000000..a075b4fb57 --- /dev/null +++ b/assets/models/system/projecte-aina-FLOR-1-3B/MLmodel @@ -0,0 +1,25 @@ +flavors: + hftransformersv2: + code: null + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + huggingface_id: projecte-aina/FLOR-1.3B + model_data: data + pytorch_version: 1.13.1 + task_type: text-generation + transformers_version: 4.38.2 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.18 +metadata: + base_model_name: projecte-aina/FLOR-1.3B + base_model_task: text-generation + is_acft_model: true + is_finetuned_model: false + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:34 +mlflow_version: 2.10.2 +model_uuid: 65dc053ef77246d28480b68d607f422d +utc_time_created: '2024-04-09 10:34:41.935027' diff --git a/assets/models/system/projecte-aina-FLOR-6-3B-Instructed/MLmodel b/assets/models/system/projecte-aina-FLOR-6-3B-Instructed/MLmodel new file mode 100644 index 0000000000..33de88ca4f --- /dev/null +++ b/assets/models/system/projecte-aina-FLOR-6-3B-Instructed/MLmodel @@ -0,0 +1,25 @@ +flavors: + hftransformersv2: + code: null + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM 
+ hf_tokenizer_class: AutoTokenizer + huggingface_id: projecte-aina/FLOR-6.3B-Instructed + model_data: data + pytorch_version: 1.13.1 + task_type: text-generation + transformers_version: 4.38.2 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.18 +metadata: + base_model_name: projecte-aina/FLOR-6.3B-Instructed + base_model_task: text-generation + is_acft_model: true + is_finetuned_model: false + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:34 +mlflow_version: 2.10.2 +model_uuid: c90304129c96472e8996753c7074fd6c +utc_time_created: '2024-04-09 12:00:44.374988' diff --git a/assets/models/system/projecte-aina-FLOR-6-3B/MLmodel b/assets/models/system/projecte-aina-FLOR-6-3B/MLmodel new file mode 100644 index 0000000000..92f355208b --- /dev/null +++ b/assets/models/system/projecte-aina-FLOR-6-3B/MLmodel @@ -0,0 +1,25 @@ +flavors: + hftransformersv2: + code: null + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + huggingface_id: projecte-aina/FLOR-6.3B + model_data: data + pytorch_version: 1.13.1 + task_type: text-generation + transformers_version: 4.38.2 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.18 +metadata: + base_model_name: projecte-aina/FLOR-6.3B + base_model_task: text-generation + is_acft_model: true + is_finetuned_model: false + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:34 +mlflow_version: 2.10.2 +model_uuid: 049f792555b74c068625cc5a26cd1f4b +utc_time_created: '2024-04-09 09:04:55.053956' diff --git a/assets/models/system/projecte-aina-aguila-7b/MLmodel b/assets/models/system/projecte-aina-aguila-7b/MLmodel new file mode 100644 index 0000000000..3e5daa4a20 --- /dev/null +++ b/assets/models/system/projecte-aina-aguila-7b/MLmodel @@ -0,0 +1,31 @@ +flavors: + hftransformersv2: + code: null + config_hf_load_kwargs: + trust_remote_code: true + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + huggingface_id: projecte-aina/aguila-7b + model_data: data + model_hf_load_args: + trust_remote_code: true + pytorch_version: 1.13.1 + task_type: text-generation + tokenizer_hf_load_kwargs: + trust_remote_code: true + transformers_version: 4.38.2 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.18 +metadata: + base_model_name: projecte-aina/aguila-7b + base_model_task: text-generation + is_acft_model: true + is_finetuned_model: false + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:34 +mlflow_version: 2.10.2 +model_uuid: 7ec8ff40799941fea68163499aa8f499 +utc_time_created: '2024-04-09 11:46:41.435612' diff --git a/assets/models/system/roberta-base-openai-detector/MLmodel b/assets/models/system/roberta-base-openai-detector/MLmodel new file mode 100644 index 0000000000..688b877613 --- /dev/null +++ b/assets/models/system/roberta-base-openai-detector/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: TextClassificationPipeline + model_binary: model + pipeline_model_type: RobertaForSequenceClassification + task: 
text-classification + tokenizer_type: RobertaTokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: roberta-base-openai-detector + base_model_task: text-classification + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 501978488 +model_uuid: 08b042413efb4d819f87c7a1c71e288d +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2023-12-06 10:07:48.753482' diff --git a/assets/models/system/roberta-base/MLmodel b/assets/models/system/roberta-base/MLmodel new file mode 100644 index 0000000000..21e5979f36 --- /dev/null +++ b/assets/models/system/roberta-base/MLmodel @@ -0,0 +1,33 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: FillMaskPipeline + model_binary: model + pipeline_model_type: RobertaForMaskedLM + source_model_name: /mnt/azureml/cr/j/8b39f9623f4c49af9b2d3d116db6abcb/cap/data-capability/wd/INPUT_model_path + task: fill-mask + tokenizer_type: RobertaTokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: roberta-base + base_model_task: fill-mask + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 502179447 +model_uuid: a997439da0414442bce9249baa598bb0 +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2023-12-08 12:32:47.488723' diff --git a/assets/models/system/roberta-large-mnli/MLmodel b/assets/models/system/roberta-large-mnli/MLmodel new file mode 100644 index 0000000000..ecedef2263 --- /dev/null +++ b/assets/models/system/roberta-large-mnli/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: TextClassificationPipeline + model_binary: model + pipeline_model_type: RobertaForSequenceClassification + task: text-classification + tokenizer_type: RobertaTokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: roberta-large-mnli + base_model_task: text-classification + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 1424901580 +model_uuid: 9b911c8e67784a918c7deec55366f3ae +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2023-12-06 09:28:00.442644' diff --git a/assets/models/system/roberta-large-openai-detector/MLmodel b/assets/models/system/roberta-large-openai-detector/MLmodel new file mode 100644 index 0000000000..03b9a23c0d --- /dev/null +++ b/assets/models/system/roberta-large-openai-detector/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: TextClassificationPipeline + model_binary: model + pipeline_model_type: RobertaForSequenceClassification + task: text-classification + tokenizer_type: RobertaTokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: roberta-large-openai-detector + base_model_task: text-classification + is_acft_model: 
true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 1424897287 +model_uuid: 99d22215ddb743e6b5e328cd5eec4b37 +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2023-12-08 12:58:45.198757' diff --git a/assets/models/system/roberta-large/MLmodel b/assets/models/system/roberta-large/MLmodel new file mode 100644 index 0000000000..2ac0a392b1 --- /dev/null +++ b/assets/models/system/roberta-large/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: FillMaskPipeline + model_binary: model + pipeline_model_type: RobertaForMaskedLM + task: fill-mask + tokenizer_type: RobertaTokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: roberta-large + base_model_task: fill-mask + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 1425098430 +model_uuid: 1350c70e836740e98f021b3b03234a11 +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2023-12-08 12:43:51.156589' diff --git a/assets/models/system/runwayml-stable-diffusion-v1-5/MLmodel b/assets/models/system/runwayml-stable-diffusion-v1-5/MLmodel new file mode 100644 index 0000000000..0bc8e85190 --- /dev/null +++ b/assets/models/system/runwayml-stable-diffusion-v1-5/MLmodel @@ -0,0 +1,31 @@ +flavors: + python_function: + artifacts: + model_dir: + path: artifacts/INPUT_model_path + uri: /mnt/azureml/cr/j/a6b6795e6acf4fc18a1a4fe1c03fdb31/cap/data-capability/wd/INPUT_model_path + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:40 + base_model_name: runwayml/stable-diffusion-v1-5 + base_model_task: text-to-image + is_acft_model: true + is_finetuned_model: false + model_type: stable-diffusion +mlflow_version: 2.10.2 +model_size_bytes: 47259953389 +model_uuid: 27c47a516a2444bb94036b5ead86fde1 +signature: + inputs: '[{"type": "string", "name": "prompt", "required": true}]' + outputs: '[{"type": "string", "name": "prompt", "required": true}, {"type": "binary", + "name": "generated_image", "required": true}, {"type": "boolean", "name": "nsfw_content_detected", + "required": true}]' + params: null +utc_time_created: '2024-04-26 15:33:01.269024' diff --git a/assets/models/system/runwayml_stable_diffusion_inpainting/MLmodel b/assets/models/system/runwayml_stable_diffusion_inpainting/MLmodel new file mode 100644 index 0000000000..ea1e3b4ae1 --- /dev/null +++ b/assets/models/system/runwayml_stable_diffusion_inpainting/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + artifacts: + model_dir: + path: artifacts/INPUT_model_path + uri: /mnt/azureml/cr/j/cdb039c2c6534253a6e691ea7a5c99cd/cap/data-capability/wd/INPUT_model_path + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:40 + base_model_name: runwayml/stable-diffusion-inpainting + base_model_task: 
text-to-image-inpainting + is_acft_model: true + is_finetuned_model: false + model_type: stable-diffusion +mlflow_version: 2.10.2 +model_size_bytes: 15230254004 +model_uuid: 881df27d68914fa2884545bacaa43cfb +signature: + inputs: '[{"type": "string", "name": "prompt", "required": true}, {"type": "binary", + "name": "image", "required": true}, {"type": "binary", "name": "mask_image", "required": + true}]' + outputs: '[{"type": "binary", "name": "generated_image", "required": true}, {"type": + "boolean", "name": "nsfw_content_detected", "required": true}]' + params: null +utc_time_created: '2024-04-30 01:15:02.397790' diff --git a/assets/models/system/salesforce-blip-image-captioning-base/MLmodel b/assets/models/system/salesforce-blip-image-captioning-base/MLmodel new file mode 100644 index 0000000000..1c417590ca --- /dev/null +++ b/assets/models/system/salesforce-blip-image-captioning-base/MLmodel @@ -0,0 +1,27 @@ +flavors: + python_function: + artifacts: + model_dir: + path: artifacts/INPUT_model_path + uri: /mnt/azureml/cr/j/6cecbaba40744c30bf36f7031b6ae706/cap/data-capability/wd/INPUT_model_path + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: Salesforce/blip-image-captioning-base + base_model_task: image-to-text + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 1981062986 +model_uuid: 8ef09f676a06427d87a30ddfeff09025 +signature: + inputs: '[{"type": "binary", "name": "image", "required": true}]' + outputs: '[{"type": "string", "name": "text", "required": true}]' + params: null +utc_time_created: '2024-04-22 11:59:24.072388' diff --git a/assets/models/system/salesforce-blip-vqa-base/MLmodel b/assets/models/system/salesforce-blip-vqa-base/MLmodel new file mode 100644 index 0000000000..e69d08925d --- /dev/null +++ b/assets/models/system/salesforce-blip-vqa-base/MLmodel @@ -0,0 +1,28 @@ +flavors: + python_function: + artifacts: + model_dir: + path: artifacts/INPUT_model_path + uri: /mnt/azureml/cr/j/f0ea4f7ffaa44c7d88b9304ecc4764c7/cap/data-capability/wd/INPUT_model_path + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: Salesforce/blip-vqa-base + base_model_task: visual-question-answering + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 4618441266 +model_uuid: 987d81257ab54ebeb2029c4d3e4be447 +signature: + inputs: '[{"type": "binary", "name": "image", "required": true}, {"type": "string", + "name": "text", "required": true}]' + outputs: '[{"type": "string", "name": "text", "required": true}]' + params: null +utc_time_created: '2024-04-22 10:43:57.261704' diff --git a/assets/models/system/salesforce-blip2-opt-2-7b-image-to-text/MLmodel b/assets/models/system/salesforce-blip2-opt-2-7b-image-to-text/MLmodel new file mode 100644 index 0000000000..29d1da6ed3 --- /dev/null +++ b/assets/models/system/salesforce-blip2-opt-2-7b-image-to-text/MLmodel @@ -0,0 +1,27 @@ +flavors: + python_function: + artifacts: + model_dir: + path: artifacts/INPUT_model_path + uri: /mnt/azureml/cr/j/ef5977c70376491d8cdd55783a260430/cap/data-capability/wd/INPUT_model_path + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: 
mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: Salesforce/blip2-opt-2.7b + base_model_task: image-to-text + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 30991270404 +model_uuid: a293637c150348f9a2734ed4a034a989 +signature: + inputs: '[{"type": "binary", "name": "image", "required": true}]' + outputs: '[{"type": "string", "name": "text", "required": true}]' + params: null +utc_time_created: '2024-04-25 12:19:28.066757' diff --git a/assets/models/system/salesforce-blip2-opt-2-7b-vqa/MLmodel b/assets/models/system/salesforce-blip2-opt-2-7b-vqa/MLmodel new file mode 100644 index 0000000000..ea718030ba --- /dev/null +++ b/assets/models/system/salesforce-blip2-opt-2-7b-vqa/MLmodel @@ -0,0 +1,28 @@ +flavors: + python_function: + artifacts: + model_dir: + path: artifacts/INPUT_model_path + uri: /mnt/azureml/cr/j/5988b47da7f64b9ca8524253ad7b55a7/cap/data-capability/wd/INPUT_model_path + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: Salesforce/blip2-opt-2.7b + base_model_task: visual-question-answering + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 30991270416 +model_uuid: 6f2b106a5f5e4643a1ac890a6d80d976 +signature: + inputs: '[{"type": "binary", "name": "image", "required": true}, {"type": "string", + "name": "text", "required": true}]' + outputs: '[{"type": "string", "name": "text", "required": true}]' + params: null +utc_time_created: '2024-04-25 12:28:17.836508' diff --git a/assets/models/system/snowflake-arctic-base/MLmodel b/assets/models/system/snowflake-arctic-base/MLmodel new file mode 100644 index 0000000000..76fe5e3d18 --- /dev/null +++ b/assets/models/system/snowflake-arctic-base/MLmodel @@ -0,0 +1,22 @@ +flavors: + hftransformersv2: + code: null + hf_config_class: AutoConfig + hf_pretrained_class: YakForCausalLM + hf_tokenizer_class: YakTokenizer + model_data: data + pytorch_version: 2.0.1+cu117 + task_type: text-generation + transformers_version: 4.40.0 + model_hf_load_kwargs: + torch_dtype: torch.bfloat16 + device_map: auto + python_function: + code: code + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.10.13 +metadata: + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:34 +mlflow_version: 2.3.1 diff --git a/assets/models/system/snowflake-artic-instruct/MLmodel b/assets/models/system/snowflake-artic-instruct/MLmodel new file mode 100644 index 0000000000..b73880f284 --- /dev/null +++ b/assets/models/system/snowflake-artic-instruct/MLmodel @@ -0,0 +1,22 @@ +flavors: + hftransformersv2: + code: null + hf_config_class: AutoConfig + hf_pretrained_class: YakForCausalLM + hf_tokenizer_class: YakTokenizer + model_data: data + pytorch_version: 2.0.1+cu117 + task_type: chat-completion + transformers_version: 4.40.0 + model_hf_load_kwargs: + torch_dtype: torch.bfloat16 + device_map: auto + python_function: + code: code + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.10.13 +metadata: + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:34 +mlflow_version: 2.3.1 \ No newline at end of file diff --git a/assets/models/system/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco/MLmodel 
b/assets/models/system/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco/MLmodel new file mode 100644 index 0000000000..f65dbaf7dd --- /dev/null +++ b/assets/models/system/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco/MLmodel @@ -0,0 +1,31 @@ +flavors: + python_function: + artifacts: + config_path: + path: artifacts/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py + uri: /mnt/azureml/cr/j/b83d034676074474a3791f082ae3084f/cap/data-capability/wd/INPUT_model_path/model/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py + model_metadata: + path: artifacts/model_metadata.json + uri: /mnt/azureml/cr/j/b83d034676074474a3791f082ae3084f/cap/data-capability/wd/INPUT_model_path/model/model_metadata.json + weights_path: + path: artifacts/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco_weights.pth + uri: /mnt/azureml/cr/j/b83d034676074474a3791f082ae3084f/cap/data-capability/wd/INPUT_model_path/model/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco_weights.pth + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.16 +metadata: + base_model_name: sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco + is_acft_model: true + is_finetuned_model: false + base_model_task: object-detection +mlflow_version: 2.3.1 +model_uuid: ec3ce935fc584d208279b913f86fc5a6 +signature: + inputs: '[{"name": "image", "type": "binary"}]' + outputs: '[{"name": "boxes", "type": "string"}]' +utc_time_created: '2023-07-27 15:41:56.993551' diff --git a/assets/models/system/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco/MLmodel b/assets/models/system/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco/MLmodel new file mode 100644 index 0000000000..ef63ad25d4 --- /dev/null +++ b/assets/models/system/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco/MLmodel @@ -0,0 +1,31 @@ +flavors: + python_function: + artifacts: + config_path: + path: artifacts/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py + uri: /mnt/azureml/cr/j/159e7ed584ba451cb145090daf6e8431/cap/data-capability/wd/INPUT_model_path/model/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py + model_metadata: + path: artifacts/model_metadata.json + uri: /mnt/azureml/cr/j/159e7ed584ba451cb145090daf6e8431/cap/data-capability/wd/INPUT_model_path/model/model_metadata.json + weights_path: + path: artifacts/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco_weights.pth + uri: /mnt/azureml/cr/j/159e7ed584ba451cb145090daf6e8431/cap/data-capability/wd/INPUT_model_path/model/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco_weights.pth + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.16 +metadata: + base_model_name: sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco + is_acft_model: true + is_finetuned_model: false + base_model_task: object-detection +mlflow_version: 2.3.1 +model_uuid: 4f13f4d0db6e4b36b3a827675ca9a1df +signature: + inputs: '[{"name": "image", "type": "binary"}]' + outputs: '[{"name": "boxes", "type": "string"}]' +utc_time_created: '2023-07-27 15:43:44.989876' diff --git a/assets/models/system/sshleifer-distilbart-cnn-12-6/MLmodel b/assets/models/system/sshleifer-distilbart-cnn-12-6/MLmodel 
new file mode 100644 index 0000000000..7eae50165a --- /dev/null +++ b/assets/models/system/sshleifer-distilbart-cnn-12-6/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: SummarizationPipeline + model_binary: model + pipeline_model_type: BartForConditionalGeneration + task: summarization + tokenizer_type: BartTokenizerFast + transformers_version: 4.37.2 +metadata: + base_model_name: sshleifer/distilbart-cnn-12-6 + base_model_task: summarization + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.9.2 +model_size_bytes: 1225682894 +model_uuid: fc1837a4e5d24a2da2fdd5f08ff88ee2 +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2024-02-16 11:25:51.868062' diff --git a/assets/models/system/stabilityai-stable-diffusion-2-1/MLmodel b/assets/models/system/stabilityai-stable-diffusion-2-1/MLmodel new file mode 100644 index 0000000000..eaf82c6be7 --- /dev/null +++ b/assets/models/system/stabilityai-stable-diffusion-2-1/MLmodel @@ -0,0 +1,31 @@ +flavors: + python_function: + artifacts: + model_dir: + path: artifacts/INPUT_model_path + uri: /mnt/azureml/cr/j/cba71d8fb308487295f17a3f5b118800/cap/data-capability/wd/INPUT_model_path + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:40 + base_model_name: stabilityai/stable-diffusion-2-1 + base_model_task: text-to-image + is_acft_model: true + is_finetuned_model: false + model_type: stable-diffusion +mlflow_version: 2.10.2 +model_size_bytes: 36341317201 +model_uuid: 17444b9830c4427c8592bd117d7f4e20 +signature: + inputs: '[{"type": "string", "name": "prompt", "required": true}]' + outputs: '[{"type": "string", "name": "prompt", "required": true}, {"type": "binary", + "name": "generated_image", "required": true}, {"type": "boolean", "name": "nsfw_content_detected", + "required": true}]' + params: null +utc_time_created: '2024-04-26 18:07:46.559945' diff --git a/assets/models/system/stabilityai-stable-diffusion-2-inpainting/MLmodel b/assets/models/system/stabilityai-stable-diffusion-2-inpainting/MLmodel new file mode 100644 index 0000000000..87a34db8ac --- /dev/null +++ b/assets/models/system/stabilityai-stable-diffusion-2-inpainting/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + artifacts: + model_dir: + path: artifacts/INPUT_model_path + uri: /mnt/azureml/cr/j/3899aa02ded1467784a0cad2edb0acb4/cap/data-capability/wd/INPUT_model_path + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:40 + base_model_name: stabilityai/stable-diffusion-2-inpainting + base_model_task: text-to-image-inpainting + is_acft_model: true + is_finetuned_model: false + model_type: stable-diffusion +mlflow_version: 2.10.2 +model_size_bytes: 25917103274 +model_uuid: 26d1e57570c9408faef345b385ea4ff6 +signature: + inputs: '[{"type": "string", "name": "prompt", "required": true}, {"type": "binary", + "name": 
"image", "required": true}, {"type": "binary", "name": "mask_image", "required": + true}]' + outputs: '[{"type": "binary", "name": "generated_image", "required": true}, {"type": + "boolean", "name": "nsfw_content_detected", "required": true}]' + params: null +utc_time_created: '2024-04-30 01:22:07.772432' diff --git a/assets/models/system/stabilityai-stable-diffusion-xl-base-1-0/MLmodel b/assets/models/system/stabilityai-stable-diffusion-xl-base-1-0/MLmodel new file mode 100644 index 0000000000..f9eb6f3244 --- /dev/null +++ b/assets/models/system/stabilityai-stable-diffusion-xl-base-1-0/MLmodel @@ -0,0 +1,30 @@ +flavors: + python_function: + artifacts: + model_dir: + path: artifacts/INPUT_model_path + uri: /mnt/azureml/cr/j/489ecda0636748b782264c9edfd6a0f9/cap/data-capability/wd/INPUT_model_path + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: stabilityai/stable-diffusion-xl-base-1.0 + base_model_task: text-to-image + is_acft_model: true + is_finetuned_model: false + model_type: stable-diffusion +mlflow_version: 2.10.2 +model_size_bytes: 76912779018 +model_uuid: c55b4e71831747caa9a7846239e1afab +signature: + inputs: '[{"type": "string", "name": "prompt", "required": true}]' + outputs: '[{"type": "string", "name": "prompt", "required": true}, {"type": "binary", + "name": "generated_image", "required": true}, {"type": "boolean", "name": "nsfw_content_detected", + "required": true}]' + params: null +utc_time_created: '2024-04-29 11:17:25.233762' diff --git a/assets/models/system/stabilityai-stable-diffusion-xl-refiner-1-0/MLmodel b/assets/models/system/stabilityai-stable-diffusion-xl-refiner-1-0/MLmodel new file mode 100644 index 0000000000..e15c728568 --- /dev/null +++ b/assets/models/system/stabilityai-stable-diffusion-xl-refiner-1-0/MLmodel @@ -0,0 +1,29 @@ +flavors: + python_function: + artifacts: + model_dir: + path: artifacts/INPUT_model_path + uri: /mnt/azureml/cr/j/1072f8cb6ce441698bec335701c81429/cap/data-capability/wd/INPUT_model_path + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.18 +metadata: + base_model_name: stabilityai/stable-diffusion-xl-refiner-1.0 + base_model_task: image-to-image + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.10.2 +model_size_bytes: 30888665872 +model_uuid: 0b6ce11600f5498cb97ec3e0efeb5d60 +signature: + inputs: '[{"type": "string", "name": "prompt", "required": true}, {"type": "binary", + "name": "image", "required": true}]' + outputs: '[{"type": "binary", "name": "generated_image", "required": true}, {"type": + "boolean", "name": "nsfw_content_detected", "required": true}]' + params: null +utc_time_created: '2024-04-29 17:41:13.002668' diff --git a/assets/models/system/t5-base/MLmodel b/assets/models/system/t5-base/MLmodel new file mode 100644 index 0000000000..2796aed3b2 --- /dev/null +++ b/assets/models/system/t5-base/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: TranslationPipeline + model_binary: model + pipeline_model_type: T5ForConditionalGeneration + task: translation_en_to_de + 
tokenizer_type: T5TokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: t5-base + base_model_task: translation + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 894906549 +model_uuid: 92cf466cc762499a93ae48ec61a675b2 +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2023-12-06 09:32:16.075732' diff --git a/assets/models/system/t5-large/MLmodel b/assets/models/system/t5-large/MLmodel new file mode 100644 index 0000000000..0d26b144d3 --- /dev/null +++ b/assets/models/system/t5-large/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: TranslationPipeline + model_binary: model + pipeline_model_type: T5ForConditionalGeneration + task: translation_en_to_de + tokenizer_type: T5TokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: t5-large + base_model_task: translation + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 2954018583 +model_uuid: 0f047bf8e8934806aeab9556f1480676 +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2023-12-06 10:51:04.807632' diff --git a/assets/models/system/t5-small/MLmodel b/assets/models/system/t5-small/MLmodel new file mode 100644 index 0000000000..58f3597182 --- /dev/null +++ b/assets/models/system/t5-small/MLmodel @@ -0,0 +1,32 @@ +flavors: + python_function: + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.transformers + model_binary: model + python_version: 3.8.18 + transformers: + code: null + components: + - tokenizer + framework: pt + instance_type: TranslationPipeline + model_binary: model + pipeline_model_type: T5ForConditionalGeneration + task: translation_en_to_de + tokenizer_type: T5TokenizerFast + transformers_version: 4.35.2 +metadata: + base_model_name: t5-small + base_model_task: translation + is_acft_model: true + is_finetuned_model: false +mlflow_version: 2.8.1 +model_size_bytes: 245280615 +model_uuid: 690b7253fc2f4fe4a17e2541d342f814 +signature: + inputs: '[{"type": "string"}]' + outputs: '[{"type": "string"}]' + params: null +utc_time_created: '2023-12-06 12:32:36.738961' diff --git a/assets/models/system/tiiuae-falcon-40b-instruct/MLmodel b/assets/models/system/tiiuae-falcon-40b-instruct/MLmodel new file mode 100644 index 0000000000..a0d51ecc68 --- /dev/null +++ b/assets/models/system/tiiuae-falcon-40b-instruct/MLmodel @@ -0,0 +1,31 @@ +flavors: + hftransformersv2: + code: null + config_hf_load_kwargs: + trust_remote_code: true + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + huggingface_id: tiiuae/falcon-40b-instruct + model_data: data + model_hf_load_args: + trust_remote_code: true + pytorch_version: 2.0.0+cu117 + task_type: text-generation + tokenizer_hf_load_kwargs: + trust_remote_code: true + transformers_version: 4.31.0 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.17 +metadata: + base_model_name: tiiuae/falcon-40b-instruct + base_model_task: text-generation + is_acft_model: true + is_finetuned_model: false + azureml.base_image: 
mcr.microsoft.com/azureml/curated/foundation-model-inference:31 +mlflow_version: 2.3.1 +model_uuid: 5c3884d746a240f8976a0d20416477e5 +utc_time_created: '2023-09-22 11:06:32.341334' diff --git a/assets/models/system/tiiuae-falcon-40b/MLmodel b/assets/models/system/tiiuae-falcon-40b/MLmodel new file mode 100644 index 0000000000..f93f5178b2 --- /dev/null +++ b/assets/models/system/tiiuae-falcon-40b/MLmodel @@ -0,0 +1,31 @@ +flavors: + hftransformersv2: + code: null + config_hf_load_kwargs: + trust_remote_code: true + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + huggingface_id: tiiuae/falcon-40b + model_data: data + model_hf_load_args: + trust_remote_code: true + pytorch_version: 2.0.0+cu117 + task_type: text-generation + tokenizer_hf_load_kwargs: + trust_remote_code: true + transformers_version: 4.31.0 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.17 +metadata: + base_model_name: tiiuae/falcon-40b + base_model_task: text-generation + is_acft_model: true + is_finetuned_model: false + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:45 +mlflow_version: 2.3.1 +model_uuid: 9eec31aa11e447e2a3b9ff4a1ca17b33 +utc_time_created: '2023-09-22 12:14:01.214117' diff --git a/assets/models/system/tiiuae-falcon-7b-instruct/MLmodel b/assets/models/system/tiiuae-falcon-7b-instruct/MLmodel new file mode 100644 index 0000000000..bbdeb31827 --- /dev/null +++ b/assets/models/system/tiiuae-falcon-7b-instruct/MLmodel @@ -0,0 +1,31 @@ +flavors: + hftransformersv2: + code: null + config_hf_load_kwargs: + trust_remote_code: true + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + huggingface_id: tiiuae/falcon-7b-instruct + model_data: data + model_hf_load_args: + trust_remote_code: true + pytorch_version: 1.13.1 + task_type: text-generation + tokenizer_hf_load_kwargs: + trust_remote_code: true + transformers_version: 4.33.1 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.17 +metadata: + base_model_name: tiiuae/falcon-7b-instruct + base_model_task: text-generation + is_acft_model: true + is_finetuned_model: false + azureml.base_image: mcr.microsoft.com/azureml/curated/foundation-model-inference:31 +mlflow_version: 2.6.0 +model_uuid: 3a326f91867b431893d4afb0681bd7cf +utc_time_created: '2023-10-10 17:16:21.244188' diff --git a/assets/models/system/tiiuae-falcon-7b/MLmodel b/assets/models/system/tiiuae-falcon-7b/MLmodel new file mode 100644 index 0000000000..f05263c23c --- /dev/null +++ b/assets/models/system/tiiuae-falcon-7b/MLmodel @@ -0,0 +1,31 @@ +flavors: + hftransformersv2: + code: null + config_hf_load_kwargs: + trust_remote_code: true + hf_config_class: AutoConfig + hf_pretrained_class: AutoModelForCausalLM + hf_tokenizer_class: AutoTokenizer + huggingface_id: tiiuae/falcon-7b + model_data: data + model_hf_load_args: + trust_remote_code: true + pytorch_version: 2.0.0+cu117 + task_type: text-generation + tokenizer_hf_load_kwargs: + trust_remote_code: true + transformers_version: 4.31.0 + python_function: + data: data + env: conda.yaml + loader_module: azureml.evaluate.mlflow.hftransformers + python_version: 3.8.17 +metadata: + base_model_name: tiiuae/falcon-7b + base_model_task: text-generation + is_acft_model: true + is_finetuned_model: false + azureml.base_image: 
mcr.microsoft.com/azureml/curated/foundation-model-inference:45 +mlflow_version: 2.3.1 +model_uuid: bf84c6fac15b483fa54bfc1edbe5c17e +utc_time_created: '2023-09-25 11:46:10.329850' diff --git a/assets/models/system/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco/MLmodel b/assets/models/system/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco/MLmodel new file mode 100644 index 0000000000..e45e70ea7b --- /dev/null +++ b/assets/models/system/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco/MLmodel @@ -0,0 +1,31 @@ +flavors: + python_function: + artifacts: + config_path: + path: artifacts/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py + uri: /mnt/azureml/cr/j/8e5f3e1ad3a24d1fa4be7a1bec20c46b/cap/data-capability/wd/INPUT_model_path/model/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py + model_metadata: + path: artifacts/model_metadata.json + uri: /mnt/azureml/cr/j/8e5f3e1ad3a24d1fa4be7a1bec20c46b/cap/data-capability/wd/INPUT_model_path/model/model_metadata.json + weights_path: + path: artifacts/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco_weights.pth + uri: /mnt/azureml/cr/j/8e5f3e1ad3a24d1fa4be7a1bec20c46b/cap/data-capability/wd/INPUT_model_path/model/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco_weights.pth + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.16 +metadata: + base_model_name: vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco + is_acft_model: true + is_finetuned_model: false + base_model_task: object-detection +mlflow_version: 2.3.1 +model_uuid: f731813815584467a3e16bb6af954b12 +signature: + inputs: '[{"name": "image", "type": "binary"}]' + outputs: '[{"name": "boxes", "type": "string"}]' +utc_time_created: '2023-07-27 15:47:08.899391' diff --git a/assets/models/system/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco/MLmodel b/assets/models/system/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco/MLmodel new file mode 100644 index 0000000000..22f22a1965 --- /dev/null +++ b/assets/models/system/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco/MLmodel @@ -0,0 +1,31 @@ +flavors: + python_function: + artifacts: + config_path: + path: artifacts/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py + uri: /mnt/azureml/cr/j/f699e7f8c22a41ca89777490fafdfebd/cap/data-capability/wd/INPUT_model_path/model/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py + model_metadata: + path: artifacts/model_metadata.json + uri: /mnt/azureml/cr/j/f699e7f8c22a41ca89777490fafdfebd/cap/data-capability/wd/INPUT_model_path/model/model_metadata.json + weights_path: + path: artifacts/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco_weights.pth + uri: /mnt/azureml/cr/j/f699e7f8c22a41ca89777490fafdfebd/cap/data-capability/wd/INPUT_model_path/model/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco_weights.pth + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.16 +metadata: + base_model_name: vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco + is_acft_model: true + is_finetuned_model: false + base_model_task: object-detection +mlflow_version: 2.3.1 +model_uuid: 626f9d83701b41a2a7fdc6d73996b694 +signature: + inputs: '[{"name": "image", "type": "binary"}]' + outputs: '[{"name": "boxes", "type": "string"}]' +utc_time_created: '2023-07-27 15:51:55.558576' diff --git a/assets/models/system/yolof_r50_c5_8x8_1x_coco/MLmodel 
b/assets/models/system/yolof_r50_c5_8x8_1x_coco/MLmodel new file mode 100644 index 0000000000..372b9ccc9c --- /dev/null +++ b/assets/models/system/yolof_r50_c5_8x8_1x_coco/MLmodel @@ -0,0 +1,31 @@ +flavors: + python_function: + artifacts: + config_path: + path: artifacts/yolof_r50_c5_8x8_1x_coco.py + uri: /mnt/azureml/cr/j/55490231b74a4eb8b636a0b891f81e2a/cap/data-capability/wd/INPUT_model_path/model/yolof_r50_c5_8x8_1x_coco.py + model_metadata: + path: artifacts/model_metadata.json + uri: /mnt/azureml/cr/j/55490231b74a4eb8b636a0b891f81e2a/cap/data-capability/wd/INPUT_model_path/model/model_metadata.json + weights_path: + path: artifacts/yolof_r50_c5_8x8_1x_coco_weights.pth + uri: /mnt/azureml/cr/j/55490231b74a4eb8b636a0b891f81e2a/cap/data-capability/wd/INPUT_model_path/model/yolof_r50_c5_8x8_1x_coco_weights.pth + cloudpickle_version: 2.2.1 + code: code + env: + conda: conda.yaml + virtualenv: python_env.yaml + loader_module: mlflow.pyfunc.model + python_model: python_model.pkl + python_version: 3.8.16 +metadata: + base_model_name: yolof_r50_c5_8x8_1x_coco + is_acft_model: true + is_finetuned_model: false + base_model_task: object-detection +mlflow_version: 2.3.1 +model_uuid: 16639924bbc64882955b58d3211eb052 +signature: + inputs: '[{"name": "image", "type": "binary"}]' + outputs: '[{"name": "boxes", "type": "string"}]' +utc_time_created: '2023-07-27 17:17:34.930160' From 049a0afdc812846dfa3dab427cb6e9278fd271ef Mon Sep 17 00:00:00 2001 From: Jeff Omhover Date: Wed, 6 Nov 2024 16:51:58 -0800 Subject: [PATCH 2/3] Add multimodal tag to phi-3.5-vision (#3543) * Add multimodal tag to phi-3.5-vision * Update spec.yaml --------- Co-authored-by: Ali Soylemezoglu Co-authored-by: Kelly <40868256+lykelly19@users.noreply.github.com> --- assets/models/system/phi-3.5-vision-128k-instruct/spec.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/assets/models/system/phi-3.5-vision-128k-instruct/spec.yaml b/assets/models/system/phi-3.5-vision-128k-instruct/spec.yaml index 755edccca6..3486081f87 100644 --- a/assets/models/system/phi-3.5-vision-128k-instruct/spec.yaml +++ b/assets/models/system/phi-3.5-vision-128k-instruct/spec.yaml @@ -17,7 +17,7 @@ tags: languages: "en" inputModalities: "text,image" trainingDataDate: "Aug 2024" - keywords: "Reasoning,Understanding,Low latency" + keywords: "Multimodal,Reasoning,Low latency" licenseDescription: "Microsoft.\nCopyright (c) Microsoft Corporation.\n\nMIT License\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE." 
playgroundRateLimitTier: "low" Featured: "" From 82ec219d64a6bb2cb75ed1215bacf93def5db0e7 Mon Sep 17 00:00:00 2001 From: XiangRao <131976564+ricardrao@users.noreply.github.com> Date: Thu, 7 Nov 2024 10:52:14 +0800 Subject: [PATCH 3/3] Upgrate DBCopilot component version (#3572) --- .../data_ingestion_db_to_acs/spec.yaml | 12 ++++++------ .../data_ingestion_db_to_faiss/spec.yaml | 12 ++++++------ .../data_ingestion_dbcopilot_acs_e2e/spec.yaml | 10 +++++----- .../data_ingestion_dbcopilot_faiss_e2e/spec.yaml | 10 +++++----- 4 files changed, 22 insertions(+), 22 deletions(-) diff --git a/assets/large_language_models/components_pipelines/data_ingestion_db_to_acs/spec.yaml b/assets/large_language_models/components_pipelines/data_ingestion_db_to_acs/spec.yaml index 1f7c3fa128..67a15e43aa 100644 --- a/assets/large_language_models/components_pipelines/data_ingestion_db_to_acs/spec.yaml +++ b/assets/large_language_models/components_pipelines/data_ingestion_db_to_acs/spec.yaml @@ -4,7 +4,7 @@ tags: Preview: "" name: llm_ingest_db_to_acs display_name: LLM - SQL Datastore to ACS Pipeline -version: 0.0.96 +version: 0.0.97 description: Single job pipeline to chunk data from AzureML sql data store, and create ACS embeddings index settings: default_compute: serverless @@ -164,7 +164,7 @@ jobs: properties: compute_specification: automatic: true - component: "azureml:llm_rag_generate_embeddings:0.0.64" + component: "azureml:llm_rag_generate_embeddings:0.0.66" inputs: chunks_source: type: uri_folder @@ -216,7 +216,7 @@ jobs: path: ${{parent.inputs.acs_config}} outputs: index: ${{parent.outputs.grounding_index}} - component: "azureml:llm_rag_update_acs_index:0.0.68" + component: "azureml:llm_rag_update_acs_index:0.0.70" type: command ######################################### db_sample_loading_generator: @@ -241,7 +241,7 @@ jobs: ######################################### generate_sample_embeddings: type: command - component: "azureml:llm_rag_generate_embeddings:0.0.64" + component: "azureml:llm_rag_generate_embeddings:0.0.66" resources: instance_count: ${{parent.inputs.serverless_instance_count}} instance_type: ${{parent.inputs.serverless_instance_type}} @@ -262,7 +262,7 @@ jobs: ######################################### create_sample_acs_index_job: type: command - component: "azureml:llm_rag_update_acs_index:0.0.68" + component: "azureml:llm_rag_update_acs_index:0.0.70" resources: instance_count: ${{parent.inputs.serverless_instance_count}} instance_type: ${{parent.inputs.serverless_instance_type}} @@ -296,7 +296,7 @@ jobs: outputs: asset_id: type: uri_file - component: "azureml:llm_rag_register_mlindex_asset:0.0.68" + component: "azureml:llm_rag_register_mlindex_asset:0.0.70" type: command create_prompt_flow: environment_variables: diff --git a/assets/large_language_models/components_pipelines/data_ingestion_db_to_faiss/spec.yaml b/assets/large_language_models/components_pipelines/data_ingestion_db_to_faiss/spec.yaml index 04072a4e40..eb3c723bf3 100644 --- a/assets/large_language_models/components_pipelines/data_ingestion_db_to_faiss/spec.yaml +++ b/assets/large_language_models/components_pipelines/data_ingestion_db_to_faiss/spec.yaml @@ -4,7 +4,7 @@ tags: Preview: "" name: llm_ingest_db_to_faiss display_name: LLM - SQL Datastore to FAISS Pipeline -version: 0.0.96 +version: 0.0.97 description: Single job pipeline to chunk data from AzureML sql data store, and create FAISS embeddings index settings: default_compute: serverless @@ -154,7 +154,7 @@ jobs: properties: compute_specification: automatic: true - 
component: "azureml:llm_rag_generate_embeddings:0.0.64" + component: "azureml:llm_rag_generate_embeddings:0.0.66" inputs: chunks_source: type: uri_folder @@ -203,7 +203,7 @@ jobs: path: ${{parent.jobs.generate_meta_embeddings.outputs.embeddings}} outputs: index: ${{parent.outputs.grounding_index}} - component: "azureml:llm_rag_create_faiss_index:0.0.69" + component: "azureml:llm_rag_create_faiss_index:0.0.71" type: command ######################################### @@ -229,7 +229,7 @@ jobs: ######################################### generate_sample_embeddings: type: command - component: "azureml:llm_rag_generate_embeddings:0.0.64" + component: "azureml:llm_rag_generate_embeddings:0.0.66" resources: instance_count: ${{parent.inputs.serverless_instance_count}} instance_type: ${{parent.inputs.serverless_instance_type}} @@ -250,7 +250,7 @@ jobs: ######################################### create_sample_faiss_index_job: type: command - component: "azureml:llm_rag_create_faiss_index:0.0.69" + component: "azureml:llm_rag_create_faiss_index:0.0.71" resources: instance_count: ${{parent.inputs.serverless_instance_count}} instance_type: ${{parent.inputs.serverless_instance_type}} @@ -280,7 +280,7 @@ jobs: outputs: asset_id: type: uri_file - component: "azureml:llm_rag_register_mlindex_asset:0.0.68" + component: "azureml:llm_rag_register_mlindex_asset:0.0.70" type: command create_prompt_flow: environment_variables: diff --git a/assets/large_language_models/components_pipelines/data_ingestion_dbcopilot_acs_e2e/spec.yaml b/assets/large_language_models/components_pipelines/data_ingestion_dbcopilot_acs_e2e/spec.yaml index 507c1eea39..16bea458ab 100644 --- a/assets/large_language_models/components_pipelines/data_ingestion_dbcopilot_acs_e2e/spec.yaml +++ b/assets/large_language_models/components_pipelines/data_ingestion_dbcopilot_acs_e2e/spec.yaml @@ -2,7 +2,7 @@ $schema: https://azuremlschemas.azureedge.net/latest/pipelineComponent.schema.js type: pipeline name: llm_ingest_dbcopilot_acs_e2e -version: 0.0.65 +version: 0.0.66 display_name: Data Ingestion for DB Data Output to ACS E2E Deployment description: Single job pipeline to chunk data from AzureML DB Datastore and create acs embeddings index @@ -170,7 +170,7 @@ jobs: ######################################### generate_meta_embeddings: type: command - component: "azureml:llm_rag_generate_embeddings:0.0.64" + component: "azureml:llm_rag_generate_embeddings:0.0.66" resources: instance_count: ${{parent.inputs.serverless_instance_count}} instance_type: ${{parent.inputs.serverless_instance_type}} @@ -191,7 +191,7 @@ jobs: ######################################### create_meta_acs_index_job: type: command - component: "azureml:llm_rag_update_acs_index:0.0.68" + component: "azureml:llm_rag_update_acs_index:0.0.70" resources: instance_count: ${{parent.inputs.serverless_instance_count}} instance_type: ${{parent.inputs.serverless_instance_type}} @@ -233,7 +233,7 @@ jobs: ######################################### generate_sample_embeddings: type: command - component: "azureml:llm_rag_generate_embeddings:0.0.64" + component: "azureml:llm_rag_generate_embeddings:0.0.66" resources: instance_count: ${{parent.inputs.serverless_instance_count}} instance_type: ${{parent.inputs.serverless_instance_type}} @@ -254,7 +254,7 @@ jobs: ######################################### create_sample_acs_index_job: type: command - component: "azureml:llm_rag_update_acs_index:0.0.68" + component: "azureml:llm_rag_update_acs_index:0.0.70" resources: instance_count: 
${{parent.inputs.serverless_instance_count}} instance_type: ${{parent.inputs.serverless_instance_type}} diff --git a/assets/large_language_models/components_pipelines/data_ingestion_dbcopilot_faiss_e2e/spec.yaml b/assets/large_language_models/components_pipelines/data_ingestion_dbcopilot_faiss_e2e/spec.yaml index 0f8a991933..b68803265d 100644 --- a/assets/large_language_models/components_pipelines/data_ingestion_dbcopilot_faiss_e2e/spec.yaml +++ b/assets/large_language_models/components_pipelines/data_ingestion_dbcopilot_faiss_e2e/spec.yaml @@ -2,7 +2,7 @@ $schema: https://azuremlschemas.azureedge.net/latest/pipelineComponent.schema.js type: pipeline name: llm_ingest_dbcopilot_faiss_e2e -version: 0.0.65 +version: 0.0.66 display_name: Data Ingestion for DB Data Output to FAISS E2E Deployment description: Single job pipeline to chunk data from AzureML DB Datastore and create faiss embeddings index @@ -160,7 +160,7 @@ jobs: ######################################### generate_meta_embeddings: type: command - component: "azureml:llm_rag_generate_embeddings:0.0.64" + component: "azureml:llm_rag_generate_embeddings:0.0.66" resources: instance_count: ${{parent.inputs.serverless_instance_count}} instance_type: ${{parent.inputs.serverless_instance_type}} @@ -181,7 +181,7 @@ jobs: ######################################### create_meta_faiss_index_job: type: command - component: "azureml:llm_rag_create_faiss_index:0.0.69" + component: "azureml:llm_rag_create_faiss_index:0.0.71" resources: instance_count: ${{parent.inputs.serverless_instance_count}} instance_type: ${{parent.inputs.serverless_instance_type}} @@ -221,7 +221,7 @@ jobs: ######################################### generate_sample_embeddings: type: command - component: "azureml:llm_rag_generate_embeddings:0.0.64" + component: "azureml:llm_rag_generate_embeddings:0.0.66" resources: instance_count: ${{parent.inputs.serverless_instance_count}} instance_type: ${{parent.inputs.serverless_instance_type}} @@ -242,7 +242,7 @@ jobs: ######################################### create_sample_faiss_index_job: type: command - component: "azureml:llm_rag_create_faiss_index:0.0.69" + component: "azureml:llm_rag_create_faiss_index:0.0.71" resources: instance_count: ${{parent.inputs.serverless_instance_count}} instance_type: ${{parent.inputs.serverless_instance_type}}
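
For reference, a minimal sketch of how one of the MLflow models packaged by the MLmodel files in PATCH 1/3 might be consumed after download; this is illustrative and not part of the patch. It assumes the t5-small asset has been copied to a local directory (the path and sample sentence are made up). The t5-* MLmodel signatures declare a single unnamed string column for both inputs and outputs, so a one-column DataFrame is a valid payload for the python_function flavor:

    # Illustrative only: consuming a pyfunc model whose signature matches the
    # t5-* MLmodel files above (inputs/outputs: one unnamed string column).
    import mlflow.pyfunc
    import pandas as pd

    # Assumed local copy of the t5-small MLflow model directory (hypothetical path).
    model = mlflow.pyfunc.load_model("./t5-small")

    # A single-column DataFrame of source sentences satisfies the declared signature;
    # the transformers flavor wraps a translation_en_to_de pipeline, so predict()
    # should return the German translations.
    source_texts = pd.DataFrame(["The weather is nice today."])
    print(model.predict(source_texts))

The same pattern applies to the other python_function models registered in this patch, with the payload shaped to each model's declared signature (for example, base64-encoded binary columns for the image models).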